flash-attention/training/configs/experiment/pile/gpt3s-flash.yaml

# @package _global_
defaults:
  - /experiment/pile/base.yaml
  - override /model: gpt2
  - override /model/gpt2model: gpt2-small
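
# Model overrides: enable FlashAttention and the fused kernels on top of gpt2-small.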
model:
  config:
    # n_positions is already set to ${datamodule.max_length}
    residual_in_fp32: True
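    # The fused flags below need the optional CUDA extensions that ship with
    # flash-attn (assumption: built from the csrc/ subdirectories of the repo).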
    use_flash_attn: True
    fused_dropout_add_ln: True
    fused_mlp: True
    fused_bias_fc: True
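    # Pad the vocab up to a multiple of 8 so the embedding and output GEMMs hit
    # tensor-core-friendly shapes (GPT-2's 50257 tokens round up to 50264).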
    pad_vocab_size_multiple: 8

datamodule:
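  # Per-GPU batch size keyed off available GPU memory (train.gpu_mem, in GB):
  # 8 below 24GB, 16 below 40GB, otherwise 32.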
  batch_size: ${eval:"8 if ${train.gpu_mem} < 24 else (16 if ${train.gpu_mem} < 40 else 32)"}
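
# Usage sketch (assumption: the Hydra entry point in training/run.py):
#   python run.py experiment=pile/gpt3s-flash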