Skip to content

Commit

Permalink
fix bug (#8329)
Browse files Browse the repository at this point in the history
  • Loading branch information
zhangbo9674 committed Apr 28, 2024
1 parent 71cc404 commit db52319
Showing 1 changed file with 5 additions and 1 deletion.
6 changes: 5 additions & 1 deletion llm/llama/auto_parallel/run_pretrain_auto.py
Original file line number Diff line number Diff line change
Expand Up @@ -574,7 +574,11 @@ def fn(layer):
# Create the learning_rate scheduler and optimizer
if training_args.decay_steps is None:
training_args.decay_steps = training_args.max_steps
warmup_steps = training_args.warmup_ratio * training_args.max_steps

if training_args.warmup_steps > 0:
warmup_steps = training_args.warmup_steps
else:
warmup_steps = training_args.warmup_ratio * training_args.max_steps

lr_scheduler = None
if training_args.lr_scheduler_type.value == "cosine":
Expand Down

0 comments on commit db52319

Please sign in to comment.