flash-attention/training/configs/datamodule/thepile.yaml

_target_: src.datamodules.language_modeling_hf.LMDataModule
dataset_name: the_pile
dataset_config_name: null
tokenizer_name: gpt2
cache_dir: ${oc.env:DATA_DIR,${data_dir}}/the_pile/cache # $DATA_DIR env var if set, else ${data_dir}
max_length: 2048
add_eos: True # append an EOS token to each document
batch_size: 4 # per GPU
batch_size_eval: ${eval:${.batch_size} * 2} # custom 'eval' resolver: 2x the training batch size
num_workers: 64 # For preprocessing only
use_shmem: False # don't stage the tokenized cache in shared memory (/dev/shm)
shuffle: True
pin_memory: True
__train_len: ${div_up:374337375694, ${.max_length}} # custom ceiling-division resolver; the constant is presumably the tokenized train-set token count
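
The ${eval:...} and ${div_up:...} interpolations above are not built-in OmegaConf resolvers; the training entrypoint registers them before Hydra composes the config (__train_len is presumably precomputed so that, e.g., an LR scheduler can know the step count without scanning the dataset). A minimal sketch of loading this file standalone — the resolver definitions and the stub data_dir are assumptions, not the repo's exact code:

# sketch.py -- load thepile.yaml outside the full Hydra training app.
# Assumed resolver semantics: 'eval' evaluates a Python expression,
# 'div_up' is ceiling division; the repo's actual registrations live
# in its training entrypoint.
from omegaconf import OmegaConf

OmegaConf.register_new_resolver("eval", eval)
OmegaConf.register_new_resolver("div_up", lambda x, y: (x + y - 1) // y)

cfg = OmegaConf.load("training/configs/datamodule/thepile.yaml")
cfg.data_dir = "/tmp/data"  # normally supplied by the top-level config

print(cfg.cache_dir)        # /tmp/data/the_pile/cache (unless $DATA_DIR is set)
print(cfg.batch_size_eval)  # 8 = 2 * batch_size
print(cfg["__train_len"])   # 182781922 = ceil(374337375694 / 2048)

# With the repo's src/ package on PYTHONPATH, Hydra can build the datamodule.
# '__train_len' looks like a bookkeeping field referenced elsewhere in the
# config tree, so drop it before instantiating LMDataModule:
# import hydra
# cfg.pop("__train_len")
# datamodule = hydra.utils.instantiate(cfg)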