diff --git a/litgpt/finetune/lora.py b/litgpt/finetune/lora.py
index 00fe7b84c8..d142385011 100644
--- a/litgpt/finetune/lora.py
+++ b/litgpt/finetune/lora.py
@@ -264,7 +264,7 @@ def fit(
             fabric.barrier()
 
         if train.save_interval is not None and not is_accumulating and step_count % train.save_interval == 0:
-            checkpoint_file = out_dir / f"step-{step_count:06d}" / "lit_model.pth"
+            checkpoint_file = out_dir / f"step-{step_count:06d}" / "lit_model.pth.lora"
             checkpoint_file.parent.mkdir(parents=True, exist_ok=True)
             save_lora_checkpoint(fabric, model, checkpoint_file)
             if fabric.global_rank == 0: