Browse Source

update wss server&client

雾聪 2 years ago
parent
commit
704bd3692b

+ 20 - 6
funasr/runtime/onnxruntime/src/offline-stream.cpp

@@ -1,11 +1,11 @@
 #include "precomp.h"
+#include <unistd.h>
 
 namespace funasr {
 OfflineStream::OfflineStream(std::map<std::string, std::string>& model_path, int thread_num)
 {
     // VAD model
     if(model_path.find(VAD_DIR) != model_path.end()){
-        use_vad = true;
         string vad_model_path;
         string vad_cmvn_path;
         string vad_config_path;
@@ -16,8 +16,16 @@ OfflineStream::OfflineStream(std::map<std::string, std::string>& model_path, int
         }
         vad_cmvn_path = PathAppend(model_path.at(VAD_DIR), VAD_CMVN_NAME);
         vad_config_path = PathAppend(model_path.at(VAD_DIR), VAD_CONFIG_NAME);
-        vad_handle = make_unique<FsmnVad>();
-        vad_handle->InitVad(vad_model_path, vad_cmvn_path, vad_config_path, thread_num);
+        if (access(vad_model_path.c_str(), F_OK) != 0 ||
+            access(vad_cmvn_path.c_str(), F_OK) != 0 ||
+            access(vad_config_path.c_str(), F_OK) != 0 )
+        {
            LOG(INFO) << "VAD model file does not exist, skipping VAD model load.";
+        }else{
+            vad_handle = make_unique<FsmnVad>();
+            vad_handle->InitVad(vad_model_path, vad_cmvn_path, vad_config_path, thread_num);
+            use_vad = true;
+        }
     }
 
     // AM model
@@ -39,7 +47,6 @@ OfflineStream::OfflineStream(std::map<std::string, std::string>& model_path, int
 
     // PUNC model
     if(model_path.find(PUNC_DIR) != model_path.end()){
-        use_punc = true;
         string punc_model_path;
         string punc_config_path;
     
@@ -49,8 +56,15 @@ OfflineStream::OfflineStream(std::map<std::string, std::string>& model_path, int
         }
         punc_config_path = PathAppend(model_path.at(PUNC_DIR), PUNC_CONFIG_NAME);
 
-        punc_handle = make_unique<CTTransformer>();
-        punc_handle->InitPunc(punc_model_path, punc_config_path, thread_num);
+        if (access(punc_model_path.c_str(), F_OK) != 0 ||
+            access(punc_config_path.c_str(), F_OK) != 0 )
+        {
            LOG(INFO) << "PUNC model file does not exist, skipping PUNC model load.";
+        }else{
+            punc_handle = make_unique<CTTransformer>();
+            punc_handle->InitPunc(punc_model_path, punc_config_path, thread_num);
+            use_punc = true;
+        }
     }
 }
 

+ 4 - 4
funasr/runtime/websocket/CMakeLists.txt

@@ -56,8 +56,8 @@ add_subdirectory(${PROJECT_SOURCE_DIR}/../onnxruntime/third_party/glog glog)
 # install openssl first apt-get install libssl-dev
 find_package(OpenSSL REQUIRED)
 
-add_executable(funasr-ws-server "funasr-ws-server.cpp" "websocket-server.cpp")
-add_executable(funasr-ws-client "funasr-ws-client.cpp")
+add_executable(funasr-wss-server "funasr-wss-server.cpp" "websocket-server.cpp")
+add_executable(funasr-wss-client "funasr-wss-client.cpp")
 
-target_link_libraries(funasr-ws-client PUBLIC funasr ssl crypto)
-target_link_libraries(funasr-ws-server PUBLIC funasr ssl crypto)
+target_link_libraries(funasr-wss-client PUBLIC funasr ssl crypto)
+target_link_libraries(funasr-wss-server PUBLIC funasr ssl crypto)

+ 0 - 0
funasr/runtime/websocket/funasr-ws-client.cpp → funasr/runtime/websocket/funasr-wss-client.cpp


+ 14 - 15
funasr/runtime/websocket/funasr-ws-server.cpp → funasr/runtime/websocket/funasr-wss-server.cpp

@@ -15,10 +15,8 @@
 using namespace std;
 void GetValue(TCLAP::ValueArg<std::string>& value_arg, string key,
               std::map<std::string, std::string>& model_path) {
-  if (value_arg.isSet()) {
     model_path.insert({key, value_arg.getValue()});
     LOG(INFO) << key << " : " << value_arg.getValue();
-  }
 }
 int main(int argc, char* argv[]) {
   try {
@@ -28,31 +26,32 @@ int main(int argc, char* argv[]) {
     TCLAP::CmdLine cmd("funasr-ws-server", ' ', "1.0");
     TCLAP::ValueArg<std::string> model_dir(
         "", MODEL_DIR,
-        "the asr model path, which contains model.onnx, config.yaml, am.mvn",
-        true, "", "string");
+        "default: /workspace/models/asr, the asr model path, which contains model.onnx, config.yaml, am.mvn",
+        false, "/workspace/models/asr", "string");
     TCLAP::ValueArg<std::string> quantize(
         "", QUANTIZE,
-        "false (Default), load the model of model.onnx in model_dir. If set "
+        "true (Default), load the model of model.onnx in model_dir. If set "
         "true, load the model of model_quant.onnx in model_dir",
-        false, "false", "string");
+        false, "true", "string");
     TCLAP::ValueArg<std::string> vad_dir(
         "", VAD_DIR,
-        "the vad model path, which contains model.onnx, vad.yaml, vad.mvn",
-        false, "", "string");
+        "default: /workspace/models/vad, the vad model path, which contains model.onnx, vad.yaml, vad.mvn",
+        false, "/workspace/models/vad", "string");
     TCLAP::ValueArg<std::string> vad_quant(
         "", VAD_QUANT,
-        "false (Default), load the model of model.onnx in vad_dir. If set "
+        "true (Default), load the model of model.onnx in vad_dir. If set "
         "true, load the model of model_quant.onnx in vad_dir",
-        false, "false", "string");
+        false, "true", "string");
     TCLAP::ValueArg<std::string> punc_dir(
         "", PUNC_DIR,
-        "the punc model path, which contains model.onnx, punc.yaml", false, "",
+        "default: /workspace/models/punc, the punc model path, which contains model.onnx, punc.yaml", 
+        false, "/workspace/models/punc",
         "string");
     TCLAP::ValueArg<std::string> punc_quant(
         "", PUNC_QUANT,
-        "false (Default), load the model of model.onnx in punc_dir. If set "
+        "true (Default), load the model of model.onnx in punc_dir. If set "
         "true, load the model of model_quant.onnx in punc_dir",
-        false, "false", "string");
+        false, "true", "string");
 
     TCLAP::ValueArg<std::string> listen_ip("", "listen_ip", "listen_ip", false,
                                            "0.0.0.0", "string");
@@ -64,9 +63,9 @@ int main(int argc, char* argv[]) {
     TCLAP::ValueArg<int> model_thread_num("", "model_thread_num",
                                           "model_thread_num", false, 1, "int");
 
-    TCLAP::ValueArg<std::string> certfile("", "certfile", "certfile", false, "",
+    TCLAP::ValueArg<std::string> certfile("", "certfile", "certfile", false, "../../../ssl_key/server.crt",
                                           "string");
-    TCLAP::ValueArg<std::string> keyfile("", "keyfile", "keyfile", false, "",
+    TCLAP::ValueArg<std::string> keyfile("", "keyfile", "keyfile", false, "../../../ssl_key/server.key",
                                          "string");
 
     cmd.add(certfile);

+ 10 - 13
funasr/runtime/websocket/readme.md

@@ -51,7 +51,7 @@ make
 
 ```shell
 cd bin
-   ./funasr-ws-server  [--model_thread_num <int>] [--decoder_thread_num <int>]
+   ./funasr-wss-server  [--model_thread_num <int>] [--decoder_thread_num <int>]
                     [--io_thread_num <int>] [--port <int>] [--listen_ip
                     <string>] [--punc-quant <string>] [--punc-dir <string>]
                     [--vad-quant <string>] [--vad-dir <string>] [--quantize
@@ -59,19 +59,19 @@ cd bin
                     [--certfile <string>] [--] [--version] [-h]
 Where:
    --model-dir <string>
-     (required)  the asr model path, which contains model.onnx, config.yaml, am.mvn
+     default: /workspace/models/asr, the asr model path, which contains model.onnx, config.yaml, am.mvn
    --quantize <string>
-     false (Default), load the model of model.onnx in model_dir. If set true, load the model of model_quant.onnx in model_dir
+     true (Default), load the model of model.onnx in model_dir. If set true, load the model of model_quant.onnx in model_dir
 
    --vad-dir <string>
-     the vad model path, which contains model.onnx, vad.yaml, vad.mvn
+     default: /workspace/models/vad, the vad model path, which contains model.onnx, vad.yaml, vad.mvn
    --vad-quant <string>
-     false (Default), load the model of model.onnx in vad_dir. If set true, load the model of model_quant.onnx in vad_dir
+     true (Default), load the model of model.onnx in vad_dir. If set true, load the model of model_quant.onnx in vad_dir
 
    --punc-dir <string>
-     the punc model path, which contains model.onnx, punc.yaml
+     default: /workspace/models/punc, the punc model path, which contains model.onnx, punc.yaml
    --punc-quant <string>
-     false (Default), load the model of model.onnx in punc_dir. If set true, load the model of model_quant.onnx in punc_dir
+     true (Default), load the model of model.onnx in punc_dir. If set true, load the model of model_quant.onnx in punc_dir
 
    --decoder_thread_num <int>
      number of threads for decoder, default:8
@@ -84,17 +84,14 @@ Where:
    --keyfile <string>
      path of keyfile for WSS connection
   
-   Required:  --model-dir <string>
-   If use vad, please add: --vad-dir <string>
-   If use punc, please add: --punc-dir <string>
 example:
-   funasr-ws-server --model-dir /FunASR/funasr/runtime/onnxruntime/export/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch
+   funasr-wss-server --model-dir /FunASR/funasr/runtime/onnxruntime/export/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch
 ```
 
 ## Run websocket client test
 
 ```shell
-./funasr-ws-client  --server-ip <string>
+./funasr-wss-client  --server-ip <string>
                     --port <string>
                     --wav-path <string>
                     [--thread-num <int>] 
@@ -119,7 +116,7 @@ Where:
     --is-ssl set to 1 means use a WSS (secure) connection; set to 0 to use a plain WS connection
 
 example:
-./funasr-ws-client --server-ip 127.0.0.1 --port 8889 --wav-path test.wav --thread-num 1 --is-ssl 0
+./funasr-wss-client --server-ip 127.0.0.1 --port 8889 --wav-path test.wav --thread-num 1 --is-ssl 1
 
 result json, example like:
 {"mode":"offline","text":"欢迎大家来体验达摩院推出的语音识别模型","wav_name":"wav2"}