
fix transformerLM inference recipe

lzr265946 committed 2 years ago
Commit 9b4e0969f2
2 changed files with 4 additions and 1 deletion
  1. egs/aishell2/transformerLM/run.sh (+3 -1)
  2. funasr/bin/lm_inference_launch.py (+1 -0)

egs/aishell2/transformerLM/run.sh (+3 -1)

@@ -214,13 +214,15 @@ if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then
     echo "Stage 3: Calc perplexity: ${lm_test_text}"
     
     python ../../../funasr/bin/lm_inference_launch.py \
-        --output_dir "${lm_exp}/perplexity_test" \
+        --output_dir "${lm_exp}/perplexity_test/output.1" \
         --ngpu "${gpu_num}" \
         --batch_size 1 \
         --train_config "${lm_exp}"/config.yaml \
         --model_file "${lm_exp}/${inference_lm}" \
         --data_path_and_name_and_type "${lm_test_text},text,text" \
         --num_workers 1 \
+        --gpuid_list 0 \
+        --mode "transformer" \
         --split_with_space false 
 fi
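
The two new flags tell the launcher which device to use (--gpuid_list 0) and which LM inference path to take (--mode "transformer"), and the output directory now points at a per-job subdirectory (output.1). Below is a minimal sketch of how a mode flag like this can be dispatched inside a launcher; the handler names inference_transformer_lm and inference_rnn_lm are hypothetical and not FunASR's actual entry points.

    import argparse

    def inference_transformer_lm(**kwargs):
        # Hypothetical handler: compute perplexity with a Transformer LM.
        print("transformer LM inference on GPU(s)", kwargs.get("gpuid_list"))

    def inference_rnn_lm(**kwargs):
        # Hypothetical handler: compute perplexity with an RNN LM.
        print("RNN LM inference on GPU(s)", kwargs.get("gpuid_list"))

    def inference_launch(mode: str, **kwargs):
        # Route on the --mode value added to run.sh; names are illustrative only.
        handlers = {"transformer": inference_transformer_lm, "rnn": inference_rnn_lm}
        if mode not in handlers:
            raise ValueError(f"unknown LM inference mode: {mode}")
        return handlers[mode](**kwargs)

    if __name__ == "__main__":
        parser = argparse.ArgumentParser()
        parser.add_argument("--mode", type=str, default="transformer")
        parser.add_argument("--gpuid_list", type=str, default="0")
        parser.add_argument("--output_dir", type=str, required=True)
        args = parser.parse_args()
        inference_launch(**vars(args))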
 

funasr/bin/lm_inference_launch.py (+1 -0)

@@ -183,6 +183,7 @@ def inference_lm(
             dtype=dtype,
             batch_size=batch_size,
             key_file=key_file,
+            preprocess_fn=preprocessor,
             num_workers=num_workers,
         )
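
The one-line launcher change forwards preprocess_fn=preprocessor to the data-loader builder, so each text line is tokenized and mapped to token IDs before batching, presumably with the same preprocessing used at training time, instead of reaching the model as a raw string. A minimal stand-in sketch follows, assuming a simple character-level token-to-ID table; build_preprocessor and its lookup are hypothetical and not FunASR's actual preprocessor.

    from typing import Dict
    import numpy as np

    def build_preprocessor(token2id: Dict[str, int], unk: str = "<unk>"):
        # Hypothetical stand-in for the preprocessor the launcher builds
        # from the training config.
        def preprocess_fn(uid: str, data: Dict[str, str]):
            out = dict(data)
            text = out.get("text")
            if isinstance(text, str):
                # Character-level split, mirroring --split_with_space false.
                tokens = list(text.replace(" ", ""))
                ids = [token2id.get(t, token2id[unk]) for t in tokens]
                out["text"] = np.array(ids, dtype=np.int64)
            return out
        return preprocess_fn

    # Usage: the loader calls preprocess_fn(key, sample) for every utterance.
    preprocessor = build_preprocessor({"<unk>": 0, "你": 1, "好": 2})
    print(preprocessor("utt1", {"text": "你 好"}))  # {'text': array([1, 2])}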