Skip to content

Commit

Permalink
fix chat with lora
Browse files · Browse the repository at this point in the history
  • Loading branch information
metame-none committed Apr 7, 2024
1 parent d78730a commit b6be749
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions litgpt/chat/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,9 +137,9 @@ def main(
checkpoint_path = checkpoint_dir / "lit_model.pth"

# Merge if this is a raw LoRA checkpoint
if (checkpoint_path / "lit_model.pth.lora").is_file() and not checkpoint_path.is_file():
if (checkpoint_dir / "lit_model.pth.lora").is_file() and not checkpoint_path.is_file():
print("Merging LoRA weights with the base model. This won't take long and is a one-time-only thing.")
merge_lora(checkpoint_path)
merge_lora(checkpoint_dir)

with fabric.init_module(empty_init=True):
model = GPT(config)
Expand Down

0 comments on commit b6be749

Please sign in to comment.