@@ -217,7 +217,7 @@ if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
         if [ -n "${inference_config}" ]; then
             _opts+="--config ${inference_config} "
         fi
-        ${infer_cmd} --gpu "${_ngpu}" --max-jobs-run "${_nj}" JOB=1: "${_nj}" "${_logdir}"/asr_inference.JOB.log \
+        ${infer_cmd} --gpu "${_ngpu}" --max-jobs-run "${_nj}" JOB=1:"${_nj}" "${_logdir}"/asr_inference.JOB.log \
            python -m funasr.bin.asr_inference_launch \
                --batch_size 1 \
                --ngpu "${_ngpu}" \