[Minor] Add comment on skipping rope caches (#2004)
commit 24cde76a15
parent 1aa1361510
```diff
@@ -322,9 +322,10 @@ class LlamaForCausalLM(nn.Module):
                 model_name_or_path, cache_dir, load_format, revision):
             if "rotary_emb.inv_freq" in name:
                 continue
-            if "rotary_emb.cos_cached" in name:
-                continue
-            if "rotary_emb.sin_cached" in name:
+            if ("rotary_emb.cos_cached" in name
+                    or "rotary_emb.sin_cached" in name):
+                # Models trained using ColossalAI may include these tensors in
+                # the checkpoint. Skip them.
                 continue
             for (param_name, weight_name, shard_id) in stacked_params_mapping:
                 if weight_name not in name:
```
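For context, here is a minimal sketch of the kind of weight-loading loop this hunk touches. It is a hypothetical standalone reproduction, not vLLM's actual `load_weights` method: the `checkpoint` iterator stands in for vLLM's `hf_model_weights_iterator`, and the trivial `copy_` at the end replaces the real sharded-parameter handling.

```python
# Minimal sketch of skipping precomputed rotary-embedding (RoPE) buffers
# while loading checkpoint weights. `checkpoint` is a hypothetical
# stand-in for an iterator over (name, tensor) pairs.
from typing import Dict, Iterable, Tuple

import torch


def load_weights(params_dict: Dict[str, torch.Tensor],
                 checkpoint: Iterable[Tuple[str, torch.Tensor]]) -> None:
    for name, loaded_weight in checkpoint:
        # inv_freq is recomputed when the model is initialized, so the
        # serialized copy is redundant.
        if "rotary_emb.inv_freq" in name:
            continue
        # Some training frameworks (e.g. ColossalAI) also serialize the
        # cos/sin caches derived from inv_freq; they can be skipped for
        # the same reason.
        if ("rotary_emb.cos_cached" in name
                or "rotary_emb.sin_cached" in name):
            continue
        params_dict[name].copy_(loaded_weight)
```

Merging the two cache checks into one condition lets a single comment cover both tensors, which is the entire substance of this minor change.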