Commit

pure pytorch?
mwalmsley committed Mar 2, 2024
1 parent 9d8b791 · commit 09c70ba
Showing 1 changed file with 22 additions and 10 deletions.
32 changes: 22 additions & 10 deletions zoobot/pytorch/training/finetune.py
@@ -253,21 +253,33 @@ def configure_optimizers(self):
         opt = torch.optim.AdamW(params, weight_decay=self.weight_decay) # lr included in params dict
 
         if self.cosine_schedule:
-            logging.info('Using cosine schedule, warmup for {} epochs, max for {} epochs'.format(self.warmup_epochs, self.max_cosine_epochs))
-            from lightly.utils.scheduler import CosineWarmupScheduler # new dependency for zoobot, TBD - maybe just copy
-            # https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
-            # Dictionary, with an "optimizer" key, and (optionally) a "lr_scheduler" key whose value is a single LR scheduler or lr_scheduler_config.
-            lr_scheduler = CosineWarmupScheduler(
+            # logging.info('Using cosine schedule, warmup for {} epochs, max for {} epochs'.format(self.warmup_epochs, self.max_cosine_epochs))
+            # from lightly.utils.scheduler import CosineWarmupScheduler # new dependency for zoobot, TBD - maybe just copy
+            # # https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+            # # Dictionary, with an "optimizer" key, and (optionally) a "lr_scheduler" key whose value is a single LR scheduler or lr_scheduler_config.
+            # lr_scheduler = CosineWarmupScheduler(
+            #     optimizer=opt,
+            #     warmup_epochs=self.warmup_epochs,
+            #     max_epochs=self.max_cosine_epochs,
+            #     start_value=self.learning_rate,
+            #     end_value=self.learning_rate * self.max_learning_rate_reduction_factor,
+            # )
+
+            logging.info('Using cosine schedule, warmup not supported, max for {} epochs'.format(self.max_cosine_epochs))
+            lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
                 optimizer=opt,
-                warmup_epochs=self.warmup_epochs,
-                max_epochs=self.max_cosine_epochs,
-                start_value=self.learning_rate,
-                end_value=self.learning_rate * self.max_learning_rate_reduction_factor,
+                T_max=self.max_cosine_epochs,
+                eta_min=self.learning_rate * self.max_learning_rate_reduction_factor
             )
 
+            # lr_scheduler_config default is frequency=1, interval=epoch
             return {
                 "optimizer": opt,
-                "lr_scheduler": lr_scheduler
+                "lr_scheduler": {
+                    'scheduler': lr_scheduler,
+                    'interval': 'epoch',
+                    'frequency': 1
+                }
             }
         else:
             return opt
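For context, below is a minimal standalone sketch of the pattern this commit switches to: plain torch.optim.lr_scheduler.CosineAnnealingLR (no warmup) in place of lightly's CosineWarmupScheduler, returned in a Lightning-style lr_scheduler config. The toy model, learning rate, epoch count, and reduction factor are illustrative placeholders, not values from zoobot.

import torch

# Illustrative placeholder values (assumptions, not zoobot defaults)
model = torch.nn.Linear(10, 2)  # stand-in for the encoder + head being finetuned
learning_rate = 1e-4
max_cosine_epochs = 100
max_learning_rate_reduction_factor = 0.01

opt = torch.optim.AdamW(model.parameters(), lr=learning_rate, weight_decay=0.05)

# Pure-pytorch cosine decay: lr anneals from learning_rate to eta_min over T_max epochs, with no warmup phase
lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
    optimizer=opt,
    T_max=max_cosine_epochs,
    eta_min=learning_rate * max_learning_rate_reduction_factor
)

# The shape of dict configure_optimizers() returns; Lightning steps the scheduler once per epoch
config = {
    "optimizer": opt,
    "lr_scheduler": {"scheduler": lr_scheduler, "interval": "epoch", "frequency": 1}
}

# Stepping manually here just shows the per-epoch decay Lightning would apply
for epoch in range(3):
    opt.step()           # would normally follow loss.backward()
    lr_scheduler.step()
    print(epoch, lr_scheduler.get_last_lr())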
