flash-attention/training/configs/experiment/pile/gpt3m-flash-8k.yaml
2022-11-28 17:34:40 -08:00

11 lines
224 B
YAML

# @package _global_
# 8k-sequence-length variant: inherits the GPT3m FlashAttention Pile config
# and overrides the sequence length and batch sizes.
defaults:
  - /experiment/pile/gpt3m-flash.yaml

datamodule:
  # Sequence length raised to 8192 tokens for this variant.
  max_length: 8192
  # Per-device batch size scaled by available GPU memory (GB):
  # <24 GB -> 2, <40 GB -> 4, otherwise 8.
  batch_size: ${eval:"2 if ${train.gpu_mem} < 24 else (4 if ${train.gpu_mem} < 40 else 8)"}

train:
  # Effective batch size across all devices (gradient accumulation fills the gap).
  global_batch_size: 64