# Source: flash-attention/training/configs/experiment/pile/gpt3l-hf.yaml
# (scraped metadata: 17 lines, 196 B, YAML, retrieved 2022-11-29 20:13:51 +08:00)
# @package _global_
# GPT-3 Large (HF) on the Pile: inherits the GPT-3 Small experiment config
# and overrides only the model size, per-device batch size, and learning rate.
defaults:
  - /experiment/pile/gpt3s-hf.yaml

model:
  config:
    # GPT-3 Large dimensions: 1536-dim embeddings, 16 heads, 24 layers (~760M params)
    n_embd: 1536
    n_head: 16
    n_layer: 24

datamodule:
  # smaller per-device batch than gpt3s to fit the larger model in memory
  batch_size: 2

train:
  optimizer:
    # GPT-3 paper LR for the 760M model
    lr: 2.5e-4