#!/bin/bash

###########################################################################################
# This script was copied from egs/fisher_swbd/s5/local/chain/run_blstm_6h.sh.
# The source commit was e69198c3dc5633f98eb88e1cdf20b2521a598f21.
# Changes made:
# - TODO
###########################################################################################

# based on run_tdnn_6h.sh

set -e

# configs for 'chain'
stage=12
train_stage=-10
get_egs_stage=-10
dir=exp/chain/blstm_6h
decode_iter=
decode_dir_affix=

# training options
num_epochs=4
remove_egs=false
common_egs_dir=
affix=
chunk_width=150
chunk_left_context=40
chunk_right_context=40

# End configuration section.
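# Any of the variables above can be overridden from the command line via
# utils/parse_options.sh, e.g. (a hypothetical invocation):
#   local/chain/run_blstm_6h.sh --stage 12 --affix myrun --num-epochs 4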
echo "$0 $@"  # Print the command line for logging

. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh

if ! cuda-compiled; then
  cat <<EOF && exit 1
This script is intended to be used with GPUs, but you have not compiled Kaldi with CUDA.
If you want to use GPUs (and have them), go to src/, then configure and make on a machine
where "nvcc" is installed.
EOF
fi

dir=$dir${affix:+_$affix}
train_set=train_nodup_sp
ali_dir=exp/tri5a_ali_nodup
treedir=exp/chain/tri6_tree_11000
lang=data/lang_chain

# The iVector-extraction and feature-dumping parts are the same as the standard
# nnet3 setup, and you can skip them by setting "--stage 8" if you have already
# run those things.
local/nnet3/run_ivector_common.sh --stage $stage \
  --speed-perturb true \
  --generate-alignments false || exit 1;
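# run_ivector_common.sh leaves the high-resolution features in
# data/${train_set}_hires and the iVectors in exp/nnet3/ivectors_${train_set},
# both of which are consumed by the config-generation and training stages below.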

if [ $stage -le 9 ]; then
  # Get the alignments as lattices (gives the chain training more freedom).
  # Use the same num-jobs as the alignments.
  nj=$(cat $ali_dir/num_jobs) || exit 1;
  steps/align_fmllr_lats.sh --nj $nj --cmd "$train_cmd" data/$train_set \
    data/lang exp/tri5a exp/tri5a_lats_nodup_sp
  rm exp/tri5a_lats_nodup_sp/fsts.*.gz # save space
fi
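
# These lattices provide the numerator supervision for the 'chain' training
# below (passed to train.py via --lat-dir).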

if [ $stage -le 10 ]; then
  # Create a version of the lang/ directory that has one state per phone in the
  # topo file. [Note: it really has two states; the first one is repeated only
  # once, the second has zero or more repeats.]
  rm -rf $lang
  cp -r data/lang $lang
  silphonelist=$(cat $lang/phones/silence.csl) || exit 1;
  nonsilphonelist=$(cat $lang/phones/nonsilence.csl) || exit 1;
  # Use our special topology... note that we may later have to tune this
  # topology.
  steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >$lang/topo
fi

if [ $stage -le 11 ]; then
  # Build a tree using our new topology.
  steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \
    --cmd "$train_cmd" 11000 data/$train_set $lang $ali_dir $treedir
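  # --frame-subsampling-factor 3 means the tree and alignments are at one third
  # of the input frame rate, as is standard for 'chain' models.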
fi

if [ $stage -le 12 ]; then
  echo "$0: creating neural net configs";
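
  # The config below describes a 3-layer bidirectional LSTM: each "[-3,3]"
  # entry in --lstm-delay gives one forward recurrence (delay -3) and one
  # backward recurrence (delay 3), and --splice-indexes splices 5 consecutive
  # input frames at the first layer.  --include-log-softmax false leaves the
  # raw network output for the 'chain' objective.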
  steps/nnet3/lstm/make_configs.py \
    --feat-dir data/${train_set}_hires \
    --ivector-dir exp/nnet3/ivectors_${train_set} \
    --tree-dir $treedir \
    --splice-indexes="-2,-1,0,1,2 0 0" \
    --lstm-delay=" [-3,3] [-3,3] [-3,3] " \
    --xent-regularize 0.1 \
    --include-log-softmax false \
    --num-lstm-layers 3 \
    --cell-dim 1024 \
    --hidden-dim 1024 \
    --recurrent-projection-dim 256 \
    --non-recurrent-projection-dim 256 \
    --label-delay 0 \
    --self-repair-scale 0.00001 \
    $dir/configs || exit 1;

fi

if [ $stage -le 13 ]; then
  if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
    utils/create_split_dir.pl \
      /export/b0{5,6,7,8}/$USER/kaldi-data/egs/fisher_swbd-$(date +'%m_%d_%H_%M')/s5c/$dir/egs/storage $dir/egs/storage
  fi

  mkdir -p $dir/egs  # make sure the directory exists before touching the marker file
  touch $dir/egs/.nodelete # keep egs around when that run dies.
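
  # Main 'chain' training run.  Note that the effective learning rate decays
  # 10x over training (0.001 -> 0.0001), the number of parallel jobs grows
  # from 3 to 16, and --chain.xent-regularize adds a cross-entropy smoothing
  # term to the objective.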
  steps/nnet3/chain/train.py --stage $train_stage \
    --cmd "$decode_cmd" \
    --feat.online-ivector-dir exp/nnet3/ivectors_${train_set} \
    --feat.cmvn-opts "--norm-means=false --norm-vars=false" \
    --chain.xent-regularize 0.1 \
    --chain.leaky-hmm-coefficient 0.1 \
    --chain.l2-regularize 0.00005 \
    --chain.apply-deriv-weights false \
    --chain.lm-opts="--num-extra-lm-states=2000" \
    --trainer.num-chunk-per-minibatch 64 \
    --trainer.frames-per-iter 1200000 \
    --trainer.max-param-change 1.414 \
    --trainer.num-epochs $num_epochs \
    --trainer.optimization.shrink-value 0.99 \
    --trainer.optimization.num-jobs-initial 3 \
    --trainer.optimization.num-jobs-final 16 \
    --trainer.optimization.initial-effective-lrate 0.001 \
    --trainer.optimization.final-effective-lrate 0.0001 \
    --trainer.optimization.momentum 0.0 \
    --trainer.deriv-truncate-margin 8 \
    --egs.stage $get_egs_stage \
    --egs.opts "--frames-overlap-per-eg 0" \
    --egs.chunk-width $chunk_width \
    --egs.chunk-left-context $chunk_left_context \
    --egs.chunk-right-context $chunk_right_context \
    --egs.dir "$common_egs_dir" \
    --cleanup.remove-egs $remove_egs \
    --feat-dir data/${train_set}_hires \
    --tree-dir $treedir \
    --lat-dir exp/tri5a_lats_nodup_sp \
    --dir $dir || exit 1;
fi

if [ $stage -le 14 ]; then
  # Note: it might appear that this $lang directory is mismatched, and it is as
  # far as the 'topo' is concerned, but this script doesn't read the 'topo' from
  # the lang directory.
  utils/mkgraph.sh --self-loop-scale 1.0 data/lang_fsh_sw1_tg $dir $dir/graph_fsh_sw1_tg
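  # --self-loop-scale 1.0 is the standard setting for 'chain' graphs; it
  # matches the --acwt 1.0 / --post-decode-acwt 10.0 options used in the
  # decoding stage below.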
fi

decode_suff=fsh_sw1_tg
graph_dir=$dir/graph_fsh_sw1_tg
if [ $stage -le 15 ]; then
  iter_opts=
  if [ ! -z "$decode_iter" ]; then
    iter_opts=" --iter $decode_iter "
  fi

  # decoding options
  extra_left_context=$((chunk_left_context + 10))
  extra_right_context=$((chunk_right_context + 10))
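  # i.e. allow the BLSTM 10 frames more recurrent context on each side at
  # decode time than the training chunks provided.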

  for decode_set in eval2000 rt03; do
    (
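    # Use one decoding job per speaker in the test set.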
    num_jobs=$(cut -d' ' -f2 data/${decode_set}_hires/utt2spk | sort -u | wc -l)
    steps/nnet3/decode.sh --acwt 1.0 --post-decode-acwt 10.0 \
      --nj $num_jobs --cmd "$decode_cmd" $iter_opts \
      --extra-left-context $extra_left_context \
      --extra-right-context $extra_right_context \
      --frames-per-chunk $chunk_width \
      --online-ivector-dir exp/nnet3/ivectors_${decode_set} \
      $graph_dir data/${decode_set}_hires $dir/decode_${decode_set}${decode_dir_affix:+_$decode_dir_affix}_${decode_suff} || exit 1;
    steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" \
      data/lang_fsh_sw1_{tg,fg} data/${decode_set}_hires \
      $dir/decode_${decode_set}${decode_dir_affix:+_$decode_dir_affix}_fsh_sw1_{tg,fg} || exit 1;
    ) &
  done
fi
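
# Wait for the background decoding jobs launched above to finish.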
wait;
exit 0;