From ce2c80a3054fe67341a5cd89a503809a1226c550 Mon Sep 17 00:00:00 2001
From: Mike Walmsley
Date: Sat, 2 Mar 2024 11:24:42 -0500
Subject: [PATCH] try cosine uncommented but False

---
 zoobot/pytorch/training/finetune.py | 62 ++++++++++++++---------------
 1 file changed, 31 insertions(+), 31 deletions(-)

diff --git a/zoobot/pytorch/training/finetune.py b/zoobot/pytorch/training/finetune.py
index 5608fa43..7434ddaa 100644
--- a/zoobot/pytorch/training/finetune.py
+++ b/zoobot/pytorch/training/finetune.py
@@ -253,37 +253,37 @@ def configure_optimizers(self):
         opt = torch.optim.AdamW(params, weight_decay=self.weight_decay)  # lr included in params dict
         logging.info('Optimizer ready, configuring scheduler')
 
-        # if self.cosine_schedule:
-        #     # logging.info('Using cosine schedule, warmup for {} epochs, max for {} epochs'.format(self.warmup_epochs, self.max_cosine_epochs))
-        #     # from lightly.utils.scheduler import CosineWarmupScheduler  # new dependency for zoobot, TBD - maybe just copy
-        #     # # https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
-        #     # # Dictionary, with an "optimizer" key, and (optionally) a "lr_scheduler" key whose value is a single LR scheduler or lr_scheduler_config.
-        #     # lr_scheduler = CosineWarmupScheduler(
-        #     #     optimizer=opt,
-        #     #     warmup_epochs=self.warmup_epochs,
-        #     #     max_epochs=self.max_cosine_epochs,
-        #     #     start_value=self.learning_rate,
-        #     #     end_value=self.learning_rate * self.max_learning_rate_reduction_factor,
-        #     # )
-
-        #     logging.info('Using cosine schedule, warmup not supported, max for {} epochs'.format(self.max_cosine_epochs))
-        #     lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
-        #         optimizer=opt,
-        #         T_max=self.max_cosine_epochs,
-        #         eta_min=self.learning_rate * self.max_learning_rate_reduction_factor
-        #     )
-
-        #     # lr_scheduler_config default is frequency=1, interval=epoch
-        #     return {
-        #         "optimizer": opt,
-        #         "lr_scheduler": {
-        #             'scheduler': lr_scheduler,
-        #             'interval': 'epoch',
-        #             'frequency': 1
-        #         }
-        #     }
-        # else:
-        #     logging.info('Learning rate scheduler not used')
+        if self.cosine_schedule:
+            logging.info('Using cosine schedule, warmup for {} epochs, max for {} epochs'.format(self.warmup_epochs, self.max_cosine_epochs))
+            from lightly.utils.scheduler import CosineWarmupScheduler  # new dependency for zoobot, TBD - maybe just copy
+            # https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+            # Dictionary, with an "optimizer" key, and (optionally) a "lr_scheduler" key whose value is a single LR scheduler or lr_scheduler_config.
+            lr_scheduler = CosineWarmupScheduler(
+                optimizer=opt,
+                warmup_epochs=self.warmup_epochs,
+                max_epochs=self.max_cosine_epochs,
+                start_value=self.learning_rate,
+                end_value=self.learning_rate * self.max_learning_rate_reduction_factor,
+            )
+
+            # logging.info('Using cosine schedule, warmup not supported, max for {} epochs'.format(self.max_cosine_epochs))
+            # lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
+            #     optimizer=opt,
+            #     T_max=self.max_cosine_epochs,
+            #     eta_min=self.learning_rate * self.max_learning_rate_reduction_factor
+            # )
+
+            # lr_scheduler_config default is frequency=1, interval=epoch
+            return {
+                "optimizer": opt,
+                "lr_scheduler": {
+                    'scheduler': lr_scheduler,
+                    'interval': 'epoch',
+                    'frequency': 1
+                }
+            }
+        else:
+            logging.info('Learning rate scheduler not used')
 
         return opt
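
Note (not part of the patch): the uncommented branch builds the standard Lightning configure_optimizers return value, an "optimizer" key plus an "lr_scheduler" config dict wrapping lightly's CosineWarmupScheduler. Below is a minimal sketch of the same construction outside the LightningModule, using the names the diff already uses; the toy model and the hyperparameter values (learning_rate, warmup_epochs, max_cosine_epochs, max_learning_rate_reduction_factor, weight_decay) are illustrative assumptions, not values taken from zoobot.

    import torch
    from lightly.utils.scheduler import CosineWarmupScheduler

    # Illustrative hyperparameters (assumed, not from the patch)
    learning_rate = 1e-4
    max_learning_rate_reduction_factor = 0.01
    warmup_epochs = 5
    max_cosine_epochs = 100

    # Stand-in for the finetuned encoder + head
    model = torch.nn.Linear(8, 2)
    opt = torch.optim.AdamW(model.parameters(), lr=learning_rate, weight_decay=0.05)

    # Same construction as the uncommented branch in the patch
    lr_scheduler = CosineWarmupScheduler(
        optimizer=opt,
        warmup_epochs=warmup_epochs,
        max_epochs=max_cosine_epochs,
        start_value=learning_rate,
        end_value=learning_rate * max_learning_rate_reduction_factor,
    )

    # What configure_optimizers() would hand back to Lightning
    optimizer_config = {
        "optimizer": opt,
        "lr_scheduler": {"scheduler": lr_scheduler, "interval": "epoch", "frequency": 1},
    }

    # Stepping manually just to inspect the per-epoch learning rate
    for epoch in range(3):
        opt.step()
        lr_scheduler.step()
        print(epoch, opt.param_groups[0]["lr"])

With Lightning, this stepping is handled automatically: because the config uses interval 'epoch' and frequency 1, the trainer advances the scheduler once per training epoch, warming the learning rate up over warmup_epochs and then decaying it on a cosine curve towards the reduced end value.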