# runtime_sdk_download_tool.py
  1. from pathlib import Path
  2. import os
  3. import argparse
  4. from funasr.utils.types import str2bool
  5. parser = argparse.ArgumentParser()
  6. parser.add_argument('--model-name', type=str, required=True)
  7. parser.add_argument('--export-dir', type=str, required=True)
  8. parser.add_argument('--type', type=str, default='onnx', help='["onnx", "torch"]')
  9. parser.add_argument('--device', type=str, default='cpu', help='["cpu", "cuda"]')
  10. parser.add_argument('--quantize', type=str2bool, default=False, help='export quantized model')
  11. parser.add_argument('--fallback-num', type=int, default=0, help='amp fallback number')
  12. parser.add_argument('--audio_in', type=str, default=None, help='["wav", "wav.scp"]')
  13. parser.add_argument('--calib_num', type=int, default=200, help='calib max num')
  14. args = parser.parse_args()
  15. model_dir = args.model_name
  16. if not Path(args.model_name).exists():
  17. from modelscope.hub.snapshot_download import snapshot_download
  18. try:
  19. model_dir = snapshot_download(args.model_name, cache_dir=args.export_dir)
  20. except:
  21. raise "model_dir must be model_name in modelscope or local path downloaded from modelscope, but is {}".format \
  22. (model_dir)
  23. model_file = os.path.join(model_dir, 'model.onnx')
  24. if args.quantize:
  25. model_file = os.path.join(model_dir, 'model_quant.onnx')
  26. if not os.path.exists(model_file):
  27. print(".onnx is not exist, begin to export onnx")
  28. from funasr.export.export_model import ModelExport
  29. export_model = ModelExport(
  30. cache_dir=args.export_dir,
  31. onnx=True,
  32. device="cpu",
  33. quant=args.quantize,
  34. )
  35. export_model.export(model_dir)