#!/bin/bash
# coding=utf-8
# Copyright (C) ATHENA AUTHORS
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e
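# exit immediately if any command fails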
# train options
stage=0
stop_stage=100
gpu_num=8
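# each stage below runs only when stage <= N <= stop_stage for its step number N
# (e.g. set stage=3 and stop_stage=3 to run only the decoding step)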
train_conf=examples/asr/misp/conf/mtl_conformer_sp_fbank80_batch_bins.json
decode_conf=examples/asr/misp/conf/mtl_conformer_sp_fbank80_batch_bins_decode.json
# selection options
rnnlm=true # train an RNN language model; set to false to train an n-gram language model instead
# source some path
. ./tools/env.sh
if [ "athena" != $(basename "$PWD") ]; then
echo "You should run this script in athena directory!!"
exit 1
fi
# ASR training stage
if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
    # training command
    if [ $gpu_num -gt 1 ]; then
        echo "Multi-gpu training"
        echo "horovodrun -np $gpu_num -H localhost:$gpu_num python athena/horovod_main.py $train_conf"
        horovodrun -np $gpu_num -H localhost:$gpu_num python athena/horovod_main.py $train_conf
    else
        echo "Single-gpu training"
        echo "python athena/main.py $train_conf"
        python athena/main.py $train_conf
    fi
fi
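# note: horovodrun launches $gpu_num data-parallel workers on this machine; for
# multi-node runs the -H option takes a comma-separated host:slots list (see the Horovod docs)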
# language model training: train either an RNN LM or an n-gram LM, selected by $rnnlm
if $rnnlm; then
    # training rnnlm
    if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
        echo "training rnnlm"
        bash examples/asr/misp/local/misp_train_rnnlm.sh --lm_conf lm_conf
    fi
else
    # training ngram lm
    if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
        echo "training ngram lm"
        bash examples/asr/misp/local/misp_train_lm.sh --lm_conf lm_conf
    fi
fi
# decode
if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then
    echo "Decoding"
    python athena/inference.py \
        $decode_conf || exit 1
fi
# score-computing stage
if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
    echo "computing score with sclite ..."
    bash examples/asr/hkust/local/run_score.sh inference.log score_hkust examples/asr/hkust/data/vocab
fi
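# scoring reuses the sclite-based run_score.sh helper and vocab from the hkust example;
# these paths may need to be adapted to the corresponding MISP files if they differ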
echo "$0: Finished MISP training examples"