Fix vocab size padding in Llama3 config (#1334)
awaelchli authored Apr 22, 2024
1 parent 63a0a10 commit 54628ec
Showing 1 changed file with 4 additions and 4 deletions.
litgpt/config.py
@@ -847,8 +847,8 @@ def norm_class(self) -> Type:
         name="Llama-3-8B{}",
         hf_config=dict(org="meta-llama", name="Meta-Llama-3-8B{}"),
         block_size=8192,
-        vocab_size=128256,
-        padding_multiple=64,
+        vocab_size=128000,
+        padded_vocab_size=128256,
         n_layer=32,
         n_head=32,
         n_query_groups=8,
@@ -865,8 +865,8 @@ def norm_class(self) -> Type:
         name="Llama-3-70B{}",
         hf_config=dict(org="meta-llama", name="Meta-Llama-3-70B{}"),
         block_size=8192,
-        vocab_size=128256,
-        padding_multiple=64,
+        vocab_size=128000,
+        padded_vocab_size=128256,
         n_layer=80,
         n_head=64,
         n_embd=8192,
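Why this matters: in LitGPT, vocab_size is the tokenizer's vocabulary size, while padded_vocab_size is the number of rows in the embedding and output matrices; when padded_vocab_size is omitted, it is derived by rounding vocab_size up to the next multiple of padding_multiple. Llama 3's tokenizer has 128,000 BPE tokens, and the released checkpoints reserve a further 256 entries for special tokens, giving an embedding size of 128,256. Since 128,000 is already a multiple of 64, rounding alone can never produce 128,256, so the padded size has to be pinned explicitly. A minimal sketch of the rounding logic (the helper mirrors litgpt's find_multiple utility; treat the exact name and signature as an assumption, not the verbatim source):

def find_multiple(n: int, k: int) -> int:
    """Round n up to the nearest multiple of k."""
    if n % k == 0:
        return n
    return n + k - (n % k)

# Before this commit: vocab_size held the already-padded value, so the
# derived padding was a no-op but the tokenizer size was overstated.
assert find_multiple(128256, 64) == 128256

# After this commit: vocab_size is the true tokenizer vocabulary.
# Rounding 128000 up to a multiple of 64 yields 128000, not 128256,
# which is why padded_vocab_size=128256 must be set explicitly.
assert find_multiple(128000, 64) == 128000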
