# tp_inference_launch.py
  1. #!/usr/bin/env python3
  2. # Copyright ESPnet (https://github.com/espnet/espnet). All Rights Reserved.
  3. # Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
  4. import torch
  5. torch.set_num_threads(1)
  6. import argparse
  7. import logging
  8. import os
  9. import sys
  10. from typing import Union, Dict, Any
  11. from funasr.utils import config_argparse
  12. from funasr.utils.cli_utils import get_commandline_args
  13. from funasr.utils.types import str2bool
  14. from funasr.utils.types import str2triple_str
  15. from funasr.utils.types import str_or_none
  16. def get_parser():
  17. parser = config_argparse.ArgumentParser(
  18. description="Timestamp Prediction Inference",
  19. formatter_class=argparse.ArgumentDefaultsHelpFormatter,
  20. )
  21. # Note(kamo): Use '_' instead of '-' as separator.
  22. # '-' is confusing if written in yaml.
  23. parser.add_argument(
  24. "--log_level",
  25. type=lambda x: x.upper(),
  26. default="INFO",
  27. choices=("CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"),
  28. help="The verbose level of logging",
  29. )
  30. parser.add_argument("--output_dir", type=str, required=False)
  31. parser.add_argument(
  32. "--ngpu",
  33. type=int,
  34. default=0,
  35. help="The number of gpus. 0 indicates CPU mode",
  36. )
  37. parser.add_argument(
  38. "--njob",
  39. type=int,
  40. default=1,
  41. help="The number of jobs for each gpu",
  42. )
  43. parser.add_argument(
  44. "--gpuid_list",
  45. type=str,
  46. default="",
  47. help="The visible gpus",
  48. )
  49. parser.add_argument("--seed", type=int, default=0, help="Random seed")
  50. parser.add_argument(
  51. "--dtype",
  52. default="float32",
  53. choices=["float16", "float32", "float64"],
  54. help="Data type",
  55. )
  56. parser.add_argument(
  57. "--num_workers",
  58. type=int,
  59. default=1,
  60. help="The number of workers used for DataLoader",
  61. )
  62. group = parser.add_argument_group("Input data related")
  63. group.add_argument(
  64. "--data_path_and_name_and_type",
  65. type=str2triple_str,
  66. required=True,
  67. action="append",
  68. )
  69. group.add_argument("--key_file", type=str_or_none)
  70. group.add_argument("--allow_variable_data_keys", type=str2bool, default=False)
  71. group = parser.add_argument_group("The model configuration related")
  72. group.add_argument(
  73. "--timestamp_infer_config",
  74. type=str,
  75. help="VAD infer configuration",
  76. )
  77. group.add_argument(
  78. "--timestamp_model_file",
  79. type=str,
  80. help="VAD model parameter file",
  81. )
  82. group.add_argument(
  83. "--timestamp_cmvn_file",
  84. type=str,
  85. help="Global CMVN file",
  86. )
  87. group = parser.add_argument_group("The inference configuration related")
  88. group.add_argument(
  89. "--batch_size",
  90. type=int,
  91. default=1,
  92. help="The batch size for inference",
  93. )
  94. return parser
  95. def inference_launch(mode, **kwargs):
  96. if mode == "tp_norm":
  97. from funasr.bin.tp_inference import inference_modelscope
  98. return inference_modelscope(**kwargs)
  99. else:
  100. logging.info("Unknown decoding mode: {}".format(mode))
  101. return None
  102. def main(cmd=None):
  103. print(get_commandline_args(), file=sys.stderr)
  104. parser = get_parser()
  105. parser.add_argument(
  106. "--mode",
  107. type=str,
  108. default="tp_norm",
  109. help="The decoding mode",
  110. )
  111. args = parser.parse_args(cmd)
  112. kwargs = vars(args)
  113. kwargs.pop("config", None)
  114. # set logging messages
  115. logging.basicConfig(
  116. level=args.log_level,
  117. format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s",
  118. )
  119. logging.info("Decoding args: {}".format(kwargs))
  120. # gpu setting
  121. if args.ngpu > 0:
  122. jobid = int(args.output_dir.split(".")[-1])
  123. gpuid = args.gpuid_list.split(",")[(jobid - 1) // args.njob]
  124. os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
  125. os.environ["CUDA_VISIBLE_DEVICES"] = gpuid
  126. inference_launch(**kwargs)
# Allow running this file directly as a script.
if __name__ == "__main__":
    main()