Skip to content

Commit

Permalink
Reduce microbatch size (#1212)
Browse files Browse the repository at this point in the history
  • Loading branch information
rasbt authored Apr 1, 2024
1 parent 1336976 commit 449eb29
Show file tree
Hide file tree
Showing 4 changed files with 7 additions and 7 deletions.
4 changes: 2 additions & 2 deletions litgpt/finetune/adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,8 @@ def setup(
train: TrainArgs = TrainArgs(
save_interval=1000,
log_interval=1,
-        global_batch_size=128,
-        micro_batch_size=4,
+        global_batch_size=16,
+        micro_batch_size=1,
lr_warmup_steps=100,
epochs=5,
learning_rate=1e-3,
Expand Down
4 changes: 2 additions & 2 deletions litgpt/finetune/adapter_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,8 @@ def setup(
train: TrainArgs = TrainArgs(
save_interval=1000,
log_interval=1,
-        global_batch_size=128,
-        micro_batch_size=4,
+        global_batch_size=16,
+        micro_batch_size=1,
lr_warmup_steps=100,
epochs=5,
learning_rate=1e-3,
Expand Down
2 changes: 1 addition & 1 deletion litgpt/finetune/full.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def setup(
train: TrainArgs = TrainArgs(
save_interval=1000,
log_interval=1,
-        global_batch_size=64,
+        global_batch_size=16,
micro_batch_size=1,
lr_warmup_steps=100,
epochs=5,
Expand Down
4 changes: 2 additions & 2 deletions litgpt/finetune/lora.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,8 +56,8 @@ def setup(
train: TrainArgs = TrainArgs(
save_interval=1000,
log_interval=1,
-        global_batch_size=128,
-        micro_batch_size=4,
+        global_batch_size=16,
+        micro_batch_size=1,
lr_warmup_steps=100,
epochs=5,
learning_rate=3e-4,
Expand Down

0 comments on commit 449eb29

Please sign in to comment.