Commit 66c8510: update

awaelchli committed Apr 4, 2024
1 parent 64bd9eb, commit 66c8510
Showing 1 changed file with 2 additions and 2 deletions.

litgpt/scripts/merge_lora.py (2 additions, 2 deletions)
@@ -43,7 +43,7 @@ def merge_lora(
     fabric = L.Fabric(devices=1, precision=precision, accelerator="cpu")
     config = Config.from_file(checkpoint_dir / "model_config.yaml", **lora_params)
 
-    with fabric.init_module(), torch.device("meta"):
+    with fabric.init_module():
         model = GPT(config)
 
     lora_path = checkpoint_dir / "lit_model.pth.lora"
@@ -52,7 +52,7 @@ def merge_lora(
 
     # Merge LoRA weights into the base model
     pretrained_checkpoint.update(lora_checkpoint.get("model", lora_checkpoint))
-    model.load_state_dict(pretrained_checkpoint, assign=True)
+    model.load_state_dict(pretrained_checkpoint)
     merge_lora_weights(model)
 
     # Remove LoRA parameters and the LoRA linear substring
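Note on the change: the two removed pieces go together. Initializing the model under torch.device("meta") creates parameters without real storage, and load_state_dict(..., assign=True) is then required so the checkpoint tensors replace the meta tensors instead of being copied into them. With the meta context removed, a plain copying load suffices. A minimal sketch of the two patterns, using nn.Linear as a stand-in for GPT(config) (not litgpt code):

    import torch
    import torch.nn as nn

    # Pretend this state dict came from a checkpoint on disk.
    checkpoint = nn.Linear(4, 4).state_dict()

    # Pattern before this commit: meta-device init allocates no storage,
    # so assign=True is needed to swap the checkpoint tensors in; a plain
    # copying load would fail on meta parameters.
    with torch.device("meta"):
        meta_model = nn.Linear(4, 4)
    meta_model.load_state_dict(checkpoint, assign=True)

    # Pattern after this commit: ordinary CPU init, plain copying load.
    cpu_model = nn.Linear(4, 4)
    cpu_model.load_state_dict(checkpoint)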
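For context, merge_lora_weights folds each low-rank update into its base weight so the adapter can be dropped at inference time. The generic arithmetic is W' = W + (alpha / r) * (B @ A); the sketch below uses hypothetical shapes and is not litgpt's implementation:

    import torch

    # Generic LoRA merge: W' = W + (alpha / r) * (B @ A).
    # Shapes are illustrative: A is (r, in_features), B is (out_features, r).
    out_features, in_features, r, alpha = 8, 8, 2, 16
    W = torch.randn(out_features, in_features)   # frozen base weight
    A = torch.randn(r, in_features)              # LoRA down-projection
    B = torch.zeros(out_features, r)             # LoRA up-projection (zero-init)
    W_merged = W + (alpha / r) * (B @ A)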
