flash-attention/training/configs/experiment/pile/gpt3s-flash.yaml

# @package _global_
defaults:
  - /experiment/pile/base.yaml
  - override /model: gpt2
  - override /model/gpt2model: gpt2-small
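# gpt2-small (12 layers, d_model 768, ~125M params) matches the GPT-3 Small
# architecture, which is presumably what the "gpt3s" file name refers to.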
model:
  config:
    # n_positions is already set to ${datamodule.max_length}
    residual_in_fp32: True
    use_flash_attn: True
    fused_dropout_add_ln: True
    fused_mlp: True
    fused_bias_fc: True
    pad_vocab_size_multiple: 8
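    # Note: the fused_* options assume flash-attn's optional CUDA extensions
    # (csrc/fused_dense_lib and csrc/layer_norm) have been built and installed.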
datamodule:
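  # per-GPU batch size picked from available GPU memory (train.gpu_mem, assumed GB):
  # 8 below 24GB, 16 below 40GB, otherwise 32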
  batch_size: ${eval:"8 if ${train.gpu_mem} < 24 else (16 if ${train.gpu_mem} < 40 else 32)"}