warmup_lr.py

  1. """Warm up learning rate scheduler module."""
  2. from typing import Union
  3. import torch
  4. from torch.optim.lr_scheduler import _LRScheduler
  5. from typeguard import check_argument_types
  6. from funasr.schedulers.abs_scheduler import AbsBatchStepScheduler
  7. class WarmupLR(_LRScheduler, AbsBatchStepScheduler):
  8. """The WarmupLR scheduler
  9. This scheduler is almost same as NoamLR Scheduler except for following difference:
  10. NoamLR:
  11. lr = optimizer.lr * model_size ** -0.5
  12. * min(step ** -0.5, step * warmup_step ** -1.5)
  13. WarmupLR:
  14. lr = optimizer.lr * warmup_step ** 0.5
  15. * min(step ** -0.5, step * warmup_step ** -1.5)
  16. Note that the maximum lr equals to optimizer.lr in this scheduler.
  17. """
    def __init__(
        self,
        optimizer: torch.optim.Optimizer,
        warmup_steps: Union[int, float] = 25000,
        last_epoch: int = -1,
    ):
        assert check_argument_types()
        self.warmup_steps = warmup_steps

        # warmup_steps must be set before super().__init__() is invoked,
        # because __init__() also invokes step(), which calls get_lr().
        super().__init__(optimizer, last_epoch)

    def __repr__(self):
        return f"{self.__class__.__name__}(warmup_steps={self.warmup_steps})"

    def get_lr(self):
        # _LRScheduler increments last_epoch on every step() call, so for
        # this batch-level scheduler it counts optimizer steps, not epochs.
        step_num = self.last_epoch + 1
        return [
            lr
            * self.warmup_steps**0.5
            * min(step_num**-0.5, step_num * self.warmup_steps**-1.5)
            for lr in self.base_lrs
        ]
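
A minimal usage sketch follows. The import path and the toy model, optimizer, and loop values are assumptions for illustration, not taken from the file above; since this is an AbsBatchStepScheduler, step() is called once per batch rather than once per epoch.

# Minimal usage sketch. The import path and the toy model/optimizer values
# are assumptions for illustration, not taken from the file above.
import torch

from funasr.schedulers.warmup_lr import WarmupLR  # assumed module path

model = torch.nn.Linear(16, 16)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
scheduler = WarmupLR(optimizer, warmup_steps=25000)

for batch_idx in range(100):
    loss = model(torch.randn(8, 16)).pow(2).mean()  # dummy loss
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    scheduler.step()  # batch-level scheduler: advance once per batch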