diff --git a/chgnet/model/layers.py b/chgnet/model/layers.py
index 94e8248b..f087ecc4 100644
--- a/chgnet/model/layers.py
+++ b/chgnet/model/layers.py
@@ -250,6 +250,7 @@ def forward(
         new_bond_feas = aggregate(
             bond_update, bond_graph[:, 1], average=False, num_owner=len(bond_feas)
         )
+        # New bond features
         if self.use_mlp_out:
             new_bond_feas = self.mlp_out(new_bond_feas)

diff --git a/chgnet/trainer/trainer.py b/chgnet/trainer/trainer.py
index 2b46b6af..68cab025 100644
--- a/chgnet/trainer/trainer.py
+++ b/chgnet/trainer/trainer.py
@@ -143,16 +143,23 @@ def __init__(
             self.scheduler = ExponentialLR(self.optimizer, **scheduler_params)
             self.scheduler_type = "exp"
         elif scheduler in ["CosineAnnealingLR", "CosLR", "Cos", "cos"]:
+            scheduler_params = kwargs.pop("scheduler_params", {"decay_fraction": 1e-2})
+            decay_fraction = scheduler_params.pop("decay_fraction")
             self.scheduler = CosineAnnealingLR(
                 self.optimizer,
                 T_max=10 * epochs,  # Maximum number of iterations.
-                eta_min=1e-2 * learning_rate,
+                eta_min=decay_fraction * learning_rate,
             )
             self.scheduler_type = "cos"
         elif scheduler in ["CosRestartLR"]:
-            scheduler_params = kwargs.pop("scheduler_params", {"T_0": 10, "T_mult": 2})
+            scheduler_params = kwargs.pop(
+                "scheduler_params", {"decay_fraction": 1e-2, "T_0": 10, "T_mult": 2}
+            )
+            decay_fraction = scheduler_params.pop("decay_fraction")
             self.scheduler = CosineAnnealingWarmRestarts(
-                self.optimizer, eta_min=1e-2 * learning_rate, **scheduler_params
+                self.optimizer,
+                eta_min=decay_fraction * learning_rate,
+                **scheduler_params,
             )
             self.scheduler_type = "cosrestart"
         else: