flash-attention/training/configs/experiment/pile/gpt3s-flash-rotary-8k.yaml
2022-11-28 17:34:40 -08:00

9 lines
188 B
YAML

# @package _global_

# GPT-3 Small on the Pile with 8k context, rotary position embeddings.
# Inherits everything from the non-rotary 8k config and swaps the
# position-embedding scheme.
defaults:
  - /experiment/pile/gpt3s-flash-8k.yaml

model:
  config:
    max_position_embeddings: 0  # Disable absolute position embedding
    # Fraction of each head's dimension that rotary embeddings are applied to.
    rotary_emb_fraction: 0.5