Commit

torch cosine
mwalmsley committed Mar 2, 2024
1 parent eaa98ce commit 6b754b9
Showing 1 changed file with 16 additions and 17 deletions.
33 changes: 16 additions & 17 deletions zoobot/pytorch/training/finetune.py
@@ -256,26 +256,25 @@ def configure_optimizers(self):
         logging.info('Optimizer ready, configuring scheduler')
 
         if self.cosine_schedule:
-            logging.info('Using cosine schedule, warmup for {} epochs, max for {} epochs'.format(self.warmup_epochs, self.max_cosine_epochs))
-            from lightly.utils.scheduler import CosineWarmupScheduler # new dependency for zoobot, TBD - maybe just copy
-            # https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
-            # Dictionary, with an "optimizer" key, and (optionally) a "lr_scheduler" key whose value is a single LR scheduler or lr_scheduler_config.
-            lr_scheduler = CosineWarmupScheduler(
-                optimizer=opt,
-                warmup_epochs=self.warmup_epochs,
-                max_epochs=self.max_cosine_epochs,
-                start_value=self.learning_rate,
-                end_value=self.learning_rate * self.max_learning_rate_reduction_factor,
-            )
-
-            # logging.info('Using cosine schedule, warmup not supported, max for {} epochs'.format(self.max_cosine_epochs))
-            # lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
-            #     optimizer=opt,
-            #     T_max=self.max_cosine_epochs,
-            #     eta_min=self.learning_rate * self.max_learning_rate_reduction_factor
-            # )
+            # logging.info('Using lightly cosine schedule, warmup for {} epochs, max for {} epochs'.format(self.warmup_epochs, self.max_cosine_epochs))
+            # from lightly.utils.scheduler import CosineWarmupScheduler # new dependency for zoobot, TBD - maybe just copy
+            # # https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+            # # Dictionary, with an "optimizer" key, and (optionally) a "lr_scheduler" key whose value is a single LR scheduler or lr_scheduler_config.
+            # lr_scheduler = CosineWarmupScheduler(
+            #     optimizer=opt,
+            #     warmup_epochs=self.warmup_epochs,
+            #     max_epochs=self.max_cosine_epochs,
+            #     start_value=self.learning_rate,
+            #     end_value=self.learning_rate * self.max_learning_rate_reduction_factor,
+            # )
+
+            # lr_scheduler_config default is frequency=1, interval=epoch
+            logging.info('Using CosineAnnealingLR schedule, warmup not supported, max for {} epochs'.format(self.max_cosine_epochs))
+            lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
+                optimizer=opt,
+                T_max=self.max_cosine_epochs,
+                eta_min=self.learning_rate * self.max_learning_rate_reduction_factor
+            )
 
         return {
             "optimizer": opt,
             "lr_scheduler": {
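Note: the hunk is cut off just after the returned dictionary begins, so the full lr_scheduler config is not visible here. As a rough, self-contained sketch of the pattern this commit moves to (not zoobot's exact code; the class, optimizer choice, and default values below are illustrative assumptions, while the scheduler call and the interval/frequency defaults come from the diff and its comments), a Lightning module using torch's CosineAnnealingLR might look like:

import torch
import pytorch_lightning as pl


class CosineFinetuneSketch(pl.LightningModule):
    # Hypothetical module for illustration only; attribute names mirror the diff above.
    def __init__(self, learning_rate=1e-4, max_cosine_epochs=100, max_learning_rate_reduction_factor=0.01):
        super().__init__()
        self.learning_rate = learning_rate
        self.max_cosine_epochs = max_cosine_epochs
        self.max_learning_rate_reduction_factor = max_learning_rate_reduction_factor
        self.layer = torch.nn.Linear(8, 1)  # stand-in for the real encoder/head

    def forward(self, x):
        return self.layer(x)

    def configure_optimizers(self):
        # Optimizer choice is an assumption; the diff only shows the scheduler change.
        opt = torch.optim.AdamW(self.parameters(), lr=self.learning_rate)
        # Cosine decay from learning_rate down to learning_rate * reduction factor.
        # torch's CosineAnnealingLR has no warmup, matching the new log message.
        lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
            optimizer=opt,
            T_max=self.max_cosine_epochs,
            eta_min=self.learning_rate * self.max_learning_rate_reduction_factor
        )
        # Lightning accepts a dict with an "optimizer" key and an optional "lr_scheduler"
        # config; the defaults noted in the diff comment are interval="epoch", frequency=1.
        return {
            "optimizer": opt,
            "lr_scheduler": {
                "scheduler": lr_scheduler,
                "interval": "epoch",
                "frequency": 1,
            },
        }

The practical difference from the removed lightly CosineWarmupScheduler is that warmup_epochs is no longer used: the learning rate starts at learning_rate and anneals to learning_rate * max_learning_rate_reduction_factor over max_cosine_epochs, rather than warming up first.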

0 comments on commit 6b754b9
