游雁 3 года назад
Родитель
Commit
bda3527dbb
26 измененных файлов с 24 добавлено и 27 удалено
  1. 1 1
      funasr/runtime/python/grpc/grpc_server.py
  2. 3 2
      funasr/runtime/python/libtorch/README.md
  3. 1 1
      funasr/runtime/python/libtorch/demo.py
  4. 0 0
      funasr/runtime/python/libtorch/funasr_torch/__init__.py
  5. 0 0
      funasr/runtime/python/libtorch/funasr_torch/paraformer_bin.py
  6. 0 0
      funasr/runtime/python/libtorch/funasr_torch/utils/__init__.py
  7. 0 0
      funasr/runtime/python/libtorch/funasr_torch/utils/compute_wer.py
  8. 0 0
      funasr/runtime/python/libtorch/funasr_torch/utils/frontend.py
  9. 0 0
      funasr/runtime/python/libtorch/funasr_torch/utils/postprocess_utils.py
  10. 0 0
      funasr/runtime/python/libtorch/funasr_torch/utils/timestamp_utils.py
  11. 0 0
      funasr/runtime/python/libtorch/funasr_torch/utils/utils.py
  12. 3 3
      funasr/runtime/python/libtorch/setup.py
  13. 4 4
      funasr/runtime/python/onnxruntime/README.md
  14. BIN
      funasr/runtime/python/onnxruntime/debug.png
  15. 1 1
      funasr/runtime/python/onnxruntime/demo.py
  16. 2 0
      funasr/runtime/python/onnxruntime/funasr_onnx/__init__.py
  17. 1 3
      funasr/runtime/python/onnxruntime/funasr_onnx/paraformer_bin.py
  18. 0 0
      funasr/runtime/python/onnxruntime/funasr_onnx/utils/__init__.py
  19. 0 0
      funasr/runtime/python/onnxruntime/funasr_onnx/utils/frontend.py
  20. 0 0
      funasr/runtime/python/onnxruntime/funasr_onnx/utils/postprocess_utils.py
  21. 0 0
      funasr/runtime/python/onnxruntime/funasr_onnx/utils/timestamp_utils.py
  22. 0 0
      funasr/runtime/python/onnxruntime/funasr_onnx/utils/utils.py
  23. 0 4
      funasr/runtime/python/onnxruntime/rapid_paraformer/__init__.py
  24. 4 4
      funasr/runtime/python/onnxruntime/setup.py
  25. 2 2
      funasr/runtime/python/utils/infer.py
  26. 2 2
      funasr/runtime/python/utils/test_rtf.py

+ 1 - 1
funasr/runtime/python/grpc/grpc_server.py

@@ -24,7 +24,7 @@ class ASRServicer(paraformer_pb2_grpc.ASRServicer):
             self.inference_16k_pipeline = pipeline(task=Tasks.auto_speech_recognition, model=model, vad_model=vad_model, punc_model=punc_model)
         elif self.backend == "onnxruntime":
             try:
-                from rapid_paraformer.paraformer_onnx import Paraformer
+                from funasr_onnx import Paraformer
             except ImportError:
                 raise ImportError(f"Please install onnxruntime environment")
             self.inference_16k_pipeline = Paraformer(model_dir=onnx_dir)

+ 3 - 2
funasr/runtime/python/libtorch/README.md

@@ -27,10 +27,11 @@
          ```
 
 
-2. Install the `torch_paraformer`.
+2. Install the `funasr_torch`.
     ```shell
     git clone https://github.com/alibaba/FunASR.git && cd FunASR
     cd funasr/runtime/python/libtorch
+    python setup.py build
     python setup.py install
     ```
 
@@ -41,7 +42,7 @@
    - Output: `List[str]`: recognition result.
    - Example:
         ```python
-        from torch_paraformer import Paraformer
+        from funasr_torch import Paraformer
 
         model_dir = "/nfs/zhifu.gzf/export/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
         model = Paraformer(model_dir, batch_size=1)

+ 1 - 1
funasr/runtime/python/libtorch/demo.py

@@ -1,5 +1,5 @@
 
-from torch_paraformer import Paraformer
+from funasr_torch import Paraformer
 
 model_dir = "/nfs/zhifu.gzf/export/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
 model = Paraformer(model_dir, batch_size=1)

+ 0 - 0
funasr/runtime/python/libtorch/torch_paraformer/__init__.py → funasr/runtime/python/libtorch/funasr_torch/__init__.py


+ 0 - 0
funasr/runtime/python/libtorch/torch_paraformer/paraformer_bin.py → funasr/runtime/python/libtorch/funasr_torch/paraformer_bin.py


+ 0 - 0
funasr/runtime/python/libtorch/torch_paraformer/utils/__init__.py → funasr/runtime/python/libtorch/funasr_torch/utils/__init__.py


+ 0 - 0
funasr/runtime/python/libtorch/torch_paraformer/utils/compute_wer.py → funasr/runtime/python/libtorch/funasr_torch/utils/compute_wer.py


+ 0 - 0
funasr/runtime/python/libtorch/torch_paraformer/utils/frontend.py → funasr/runtime/python/libtorch/funasr_torch/utils/frontend.py


+ 0 - 0
funasr/runtime/python/libtorch/torch_paraformer/utils/postprocess_utils.py → funasr/runtime/python/libtorch/funasr_torch/utils/postprocess_utils.py


+ 0 - 0
funasr/runtime/python/libtorch/torch_paraformer/utils/timestamp_utils.py → funasr/runtime/python/libtorch/funasr_torch/utils/timestamp_utils.py


+ 0 - 0
funasr/runtime/python/libtorch/torch_paraformer/utils/utils.py → funasr/runtime/python/libtorch/funasr_torch/utils/utils.py


+ 3 - 3
funasr/runtime/python/libtorch/setup.py

@@ -14,8 +14,8 @@ def get_readme():
 
 
 setuptools.setup(
-    name='torch_paraformer',
-    version='0.0.1',
+    name='funasr_torch',
+    version='0.0.2',
     platforms="Any",
     url="https://github.com/alibaba-damo-academy/FunASR.git",
     author="Speech Lab, Alibaba Group, China",
@@ -31,7 +31,7 @@ setuptools.setup(
                       "PyYAML>=5.1.2", "torch-quant >= 0.4.0"],
     packages=find_packages(include=["torch_paraformer*"]),
     keywords=[
-        'funasr,paraformer'
+        'funasr,paraformer, funasr_torch'
     ],
     classifiers=[
         'Programming Language :: Python :: 3.6',

+ 4 - 4
funasr/runtime/python/onnxruntime/README.md

@@ -32,8 +32,8 @@
          ```
 
 
-2. Install the `rapid_paraformer`.
-   - Build the rapid_paraformer `whl`
+2. Install the `funasr_onnx`.
+   - Build the funasr_onnx `whl`
      ```shell
      git clone https://github.com/alibaba/FunASR.git && cd FunASR
      cd funasr/runtime/python/onnxruntime
@@ -41,7 +41,7 @@
      ```
    - Install the build `whl`
      ```bash
-     pip install dist/rapid_paraformer-0.0.1-py3-none-any.whl
+     pip install dist/funasr_onnx-0.0.2-py3-none-any.whl
      ```
 
 3. Run the demo.
@@ -50,7 +50,7 @@
    - Output: `List[str]`: recognition result.
    - Example:
         ```python
-        from rapid_paraformer import Paraformer
+        from funasr_onnx import Paraformer
 
         model_dir = "/nfs/zhifu.gzf/export/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
         model = Paraformer(model_dir, batch_size=1)

BIN
funasr/runtime/python/onnxruntime/debug.png


+ 1 - 1
funasr/runtime/python/onnxruntime/demo.py

@@ -1,5 +1,5 @@
 
-from rapid_paraformer import Paraformer
+from funasr_onnx import Paraformer
 
 #model_dir = "/Users/shixian/code/funasr/export/damo/speech_paraformer-large-vad-punc_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
 #model_dir = "/Users/shixian/code/funasr/export/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch"

+ 2 - 0
funasr/runtime/python/onnxruntime/funasr_onnx/__init__.py

@@ -0,0 +1,2 @@
+# -*- encoding: utf-8 -*-
+from .paraformer_bin import Paraformer

+ 1 - 3
funasr/runtime/python/onnxruntime/rapid_paraformer/paraformer_onnx.py → funasr/runtime/python/onnxruntime/funasr_onnx/paraformer_bin.py

@@ -1,7 +1,5 @@
 # -*- encoding: utf-8 -*-
-# @Author: SWHL
-# @Contact: liekkaskono@163.com
-from cgitb import text
+
 import os.path
 from pathlib import Path
 from typing import List, Union, Tuple

+ 0 - 0
funasr/runtime/python/onnxruntime/rapid_paraformer/utils/__init__.py → funasr/runtime/python/onnxruntime/funasr_onnx/utils/__init__.py


+ 0 - 0
funasr/runtime/python/onnxruntime/rapid_paraformer/utils/frontend.py → funasr/runtime/python/onnxruntime/funasr_onnx/utils/frontend.py


+ 0 - 0
funasr/runtime/python/onnxruntime/rapid_paraformer/utils/postprocess_utils.py → funasr/runtime/python/onnxruntime/funasr_onnx/utils/postprocess_utils.py


+ 0 - 0
funasr/runtime/python/onnxruntime/rapid_paraformer/utils/timestamp_utils.py → funasr/runtime/python/onnxruntime/funasr_onnx/utils/timestamp_utils.py


+ 0 - 0
funasr/runtime/python/onnxruntime/rapid_paraformer/utils/utils.py → funasr/runtime/python/onnxruntime/funasr_onnx/utils/utils.py


+ 0 - 4
funasr/runtime/python/onnxruntime/rapid_paraformer/__init__.py

@@ -1,4 +0,0 @@
-# -*- encoding: utf-8 -*-
-# @Author: SWHL
-# @Contact: liekkaskono@163.com
-from .paraformer_onnx import Paraformer

+ 4 - 4
funasr/runtime/python/onnxruntime/setup.py

@@ -12,17 +12,17 @@ def get_readme():
     return readme
 
 
-MODULE_NAME = 'rapid_paraformer'
+MODULE_NAME = 'funasr_onnx'
 VERSION_NUM = '0.0.1'
 
 setuptools.setup(
     name=MODULE_NAME,
     version=VERSION_NUM,
     platforms="Any",
-    description="Using paraformer with ONNXRuntime",
-    author="FunASR",
+    url="https://github.com/alibaba-damo-academy/FunASR.git",
+    author="Speech Lab, Alibaba Group, China",
     author_email="funasr@list.alibaba-inc.com",
-    url="https://github.com/alibaba-damo-academy/FunASR",
+    description="FunASR: A Fundamental End-to-End Speech Recognition Toolkit",
     license='MIT',
     long_description=get_readme(),
     long_description_content_type='text/markdown',

+ 2 - 2
funasr/runtime/python/utils/infer.py

@@ -15,9 +15,9 @@ parser.add_argument('--output_dir', type=str, default=None, help='amp fallback n
 args = parser.parse_args()
 
 
-from funasr.runtime.python.libtorch.torch_paraformer import Paraformer
+from funasr.runtime.python.libtorch.funasr_torch import Paraformer
 if args.backend == "onnx":
-	from funasr.runtime.python.onnxruntime.rapid_paraformer import Paraformer
+	from funasr.runtime.python.onnxruntime.funasr_onnx import Paraformer
 	
 model = Paraformer(args.model_dir, batch_size=1, quantize=args.quantize, intra_op_num_threads=args.intra_op_num_threads)
 

+ 2 - 2
funasr/runtime/python/utils/test_rtf.py

@@ -14,9 +14,9 @@ parser.add_argument('--intra_op_num_threads', type=int, default=1, help='intra_o
 args = parser.parse_args()
 
 
-from funasr.runtime.python.libtorch.torch_paraformer import Paraformer
+from funasr.runtime.python.libtorch.funasr_torch import Paraformer
 if args.backend == "onnx":
-	from funasr.runtime.python.onnxruntime.rapid_paraformer import Paraformer
+	from funasr.runtime.python.onnxruntime.funasr_onnx import Paraformer
 	
 model = Paraformer(args.model_dir, batch_size=1, quantize=args.quantize, intra_op_num_threads=args.intra_op_num_threads)