shixian.shi hai 2 anos
pai
achega
4bbc661aa5

+ 2 - 1
funasr/datasets/large_datasets/dataset.py

@@ -181,7 +181,8 @@ def Dataset(data_list_file,
     hw_config = {"sample_rate": conf.get("sample_rate", 0.6),
                  "double_rate": conf.get("double_rate", 0.1),
                  "hotword_min_length": conf.get("hotword_min_length", 2),
-                 "hotword_max_length": conf.get("hotword_max_length", 8)}
+                 "hotword_max_length": conf.get("hotword_max_length", 8),
+                 "pre_prob": conf.get("pre_prob", 0.0)}
 
     if pre_hwfile is not None:
         pre_hwlist = []

+ 1 - 0
funasr/datasets/large_datasets/utils/tokenize.py

@@ -58,6 +58,7 @@ def tokenize(data,
     if 'hw_tag' in data:
         hotword_indxs = sample_hotword(length, **hw_config)
         data[hotword_indxs] = hotword_indxs
+        del data['hw_tag']
     for i in range(length):
         x = text[i]
         if i == length-1 and "punc" in data and x.startswith("vad:"):