diff --git a/deepmd/main.py b/deepmd/main.py
index e6f4de6359..915e7ca3bc 100644
--- a/deepmd/main.py
+++ b/deepmd/main.py
@@ -771,7 +771,6 @@ def main_parser() -> argparse.ArgumentParser:
         ),
     )
     parser_list_model_branch.add_argument("INPUT", help="The input multi-task pre-trained model file")
-
     return parser
diff --git a/deepmd/pt/entrypoints/main.py b/deepmd/pt/entrypoints/main.py
index b47f07f847..c765bdfcce 100644
--- a/deepmd/pt/entrypoints/main.py
+++ b/deepmd/pt/entrypoints/main.py
@@ -44,9 +44,6 @@
 from deepmd.pt.train import (
     training,
 )
-from deepmd.pt.utils import (
-    env,
-)
 from deepmd.pt.utils.dataloader import (
     DpLoaderSet,
 )
@@ -313,21 +310,7 @@ def main(args: Optional[Union[List[str], argparse.Namespace]] = None):
     log.info("DeepMD version: %s", __version__)

     if FLAGS.command == "train":
-        if FLAGS.list_model_branch:
-            assert (
-                FLAGS.finetune is not None
-            ), "Error: The '--list-model-branch' option requires the '--finetune' argument to specify the model."
-            state_dict = torch.load(FLAGS.finetune, map_location=env.DEVICE)
-            if "model" in state_dict:
-                state_dict = state_dict["model"]
-            model_params = state_dict["_extra_state"]["model_params"]
-            finetune_from_multi_task = "model_dict" in model_params
-            # Pretrained model must be multitask mode
-            assert finetune_from_multi_task, "Error: The '--list-model-branch' option requires a multitask pretrained model. The provided model does not meet this criterion."
-            model_branch = list(model_params["model_dict"].keys())
-            log.info(f"Available model branches are {model_branch}")
-        else:
-            train(FLAGS)
+        train(FLAGS)
     elif FLAGS.command == "freeze":
         if Path(FLAGS.checkpoint_folder).is_dir():
             checkpoint_path = Path(FLAGS.checkpoint_folder)
diff --git a/deepmd/pt/train/training.py b/deepmd/pt/train/training.py
index 2ba7789821..4056b30d87 100644
--- a/deepmd/pt/train/training.py
+++ b/deepmd/pt/train/training.py
@@ -83,7 +83,6 @@
 )

 log = logging.getLogger(__name__)
-from IPython import embed


 class Trainer:
@@ -519,7 +518,6 @@ def update_single_finetune_params(
            for i in _random_state_dict.keys()
            if i != "_extra_state" and f".{_model_key}." in i
        ]
-       embed()
        for item_key in target_keys:
            if _new_fitting and ".fitting_net." in item_key:
                # print(f'Keep {item_key} in old model!')
diff --git a/doc/train/finetuning.md b/doc/train/finetuning.md
index 7c2a6b2394..7e25aa260c 100644
--- a/doc/train/finetuning.md
+++ b/doc/train/finetuning.md
@@ -102,7 +102,7 @@ $ dp --pt train input.json --finetune multitask_pretrained.pt --model-branch CHO
 One can check the available model branches in multi-task pre-trained model by refering to the documentation of the pre-trained model or by using the following command:

 ```bash
-$ dp --pt train input.json --finetune multitask_pretrained.pt --list-model-branch
+$ dp --pt list-model-branch multitask_pretrained.pt
 ```

 :::
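
Note: this revision removes the inline `--list-model-branch` handling from `main()` (along with leftover IPython `embed()` debug calls) in favor of the standalone `dp --pt list-model-branch` subcommand shown in the documentation change. The diff does not show where the branch-listing logic now lives; the sketch below only illustrates what a dedicated handler could look like if it reused the removed checkpoint-inspection code. The function name `list_model_branch` and its wiring are assumptions, not part of this patch.

```python
# Hypothetical sketch -- not the code added by this patch.
import logging

import torch

from deepmd.pt.utils import env

log = logging.getLogger(__name__)


def list_model_branch(model_file: str) -> list:
    """Log and return the branch names stored in a multi-task pre-trained model."""
    state_dict = torch.load(model_file, map_location=env.DEVICE)
    if "model" in state_dict:
        state_dict = state_dict["model"]
    model_params = state_dict["_extra_state"]["model_params"]
    # Only multi-task checkpoints carry a "model_dict" mapping branch names to configs.
    if "model_dict" not in model_params:
        raise RuntimeError(
            "Error: 'list-model-branch' requires a multi-task pre-trained model."
        )
    model_branch = list(model_params["model_dict"].keys())
    log.info(f"Available model branches are {model_branch}")
    return model_branch
```

In `deepmd/pt/entrypoints/main.py`, such a helper would presumably be dispatched from an `elif FLAGS.command == "list-model-branch":` branch alongside the existing `train`/`freeze` handling, with the positional `INPUT` argument supplying the checkpoint path.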