# funasr/schedulers/__init__.py
  1. import torch
  2. import torch.multiprocessing
  3. import torch.nn
  4. import torch.optim
  5. from funasr.schedulers.noam_lr import NoamLR
  6. from funasr.schedulers.tri_stage_scheduler import TriStageLR
  7. from funasr.schedulers.warmup_lr import WarmupLR
  8. scheduler_classes = dict(
  9. ReduceLROnPlateau=torch.optim.lr_scheduler.ReduceLROnPlateau,
  10. lambdalr=torch.optim.lr_scheduler.LambdaLR,
  11. steplr=torch.optim.lr_scheduler.StepLR,
  12. multisteplr=torch.optim.lr_scheduler.MultiStepLR,
  13. exponentiallr=torch.optim.lr_scheduler.ExponentialLR,
  14. CosineAnnealingLR=torch.optim.lr_scheduler.CosineAnnealingLR,
  15. noamlr=NoamLR,
  16. warmuplr=WarmupLR,
  17. tri_stage=TriStageLR,
  18. cycliclr=torch.optim.lr_scheduler.CyclicLR,
  19. onecyclelr=torch.optim.lr_scheduler.OneCycleLR,
  20. CosineAnnealingWarmRestarts=torch.optim.lr_scheduler.CosineAnnealingWarmRestarts,
  21. )