egs/gale_arabic/s5/local/nnet/run_dnn.sh
#!/bin/bash

# Copyright 2014 QCRI (author: Ahmed Ali)
# Apache 2.0

. ./path.sh
. ./cmd.sh ## You'll want to change cmd.sh to something that will work on your
## system. This relates to the queue. (A hedged sample configuration is
## sketched in the comment block at the end of this file.)

nJobs=120        # parallel jobs for alignment and denominator-lattice generation
nDecodeJobs=40   # parallel jobs for decoding

#train DNN
mfcc_fmllr_dir=mfcc_fmllr
baseDir=exp/tri3b
alignDir=exp/tri3b_ali
dnnDir=exp/tri3b_dnn_2048x5
align_dnnDir=exp/tri3b_dnn_2048x5_ali
dnnLatDir=exp/tri3b_dnn_2048x5_denlats
dnnMPEDir=exp/tri3b_dnn_2048x5_smb

trainTr90=data/train_tr90
trainCV=data/train_cv10

# dump fMLLR features: test uses transforms from the tri3b decode,
# train uses transforms from the tri3b alignment
steps/nnet/make_fmllr_feats.sh --nj 10 --cmd "$cuda_cmd" \
  --transform-dir $baseDir/decode data/test_fmllr data/test \
  $baseDir $mfcc_fmllr_dir/log_test $mfcc_fmllr_dir || exit 1;

steps/nnet/make_fmllr_feats.sh --nj 10 --cmd "$cuda_cmd" \
  --transform-dir $alignDir data/train_fmllr data/train \
  $baseDir $mfcc_fmllr_dir/log_train $mfcc_fmllr_dir || exit 1;

# 90% training / 10% cross-validation split for nnet training
utils/subset_data_dir_tr_cv.sh data/train_fmllr $trainTr90 $trainCV || exit 1;

# frame cross-entropy training of the 2048x5 DNN (the log is tailed while training runs)
(tail --pid=$$ -F $dnnDir/train_nnet.log 2>/dev/null)&
$cuda_cmd $dnnDir/train_nnet.log \
  steps/train_nnet.sh --hid-dim 2048 --hid-layers 5 --learn-rate 0.008 \
  $trainTr90 $trainCV data/lang $alignDir $alignDir $dnnDir || exit 1;

# decode with the cross-entropy DNN
steps/decode_nnet.sh --nj $nDecodeJobs --cmd "$decode_cmd" \
  --config conf/decode_dnn.config --nnet $dnnDir/final.nnet \
  --acwt 0.08 $baseDir/graph data/test_fmllr $dnnDir/decode

# sequence-discriminative (sMBR) training: align, make denominator lattices, retrain
steps/nnet/align.sh --nj $nDecodeJobs --cmd "$train_cmd" data/train_fmllr data/lang \
  $dnnDir $align_dnnDir || exit 1;

steps/nnet/make_denlats.sh --nj $nJobs --cmd "$train_cmd" --config conf/decode_dnn.config --acwt 0.1 \
  data/train_fmllr data/lang $dnnDir $dnnLatDir || exit 1;

steps/nnet/train_mpe.sh --cmd "$cuda_cmd" --num-iters 6 --acwt 0.1 --do-smbr true \
  data/train_fmllr data/lang $dnnDir $align_dnnDir $dnnLatDir $dnnMPEDir || exit 1;

#decode each sMBR iteration
for n in 1 2 3 4 5 6; do
  steps/decode_nnet.sh --nj $nDecodeJobs --cmd "$train_cmd" --config conf/decode_dnn.config \
    --nnet $dnnMPEDir/$n.nnet --acwt 0.08 \
    $baseDir/graph data/test_fmllr $dnnMPEDir/decode_test_it$n || exit 1;
done

echo DNN success
# End of DNN
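
# The queue wrappers used above ($train_cmd, $decode_cmd, $cuda_cmd) come from
# cmd.sh. As a hedged sketch only, not part of the original recipe: a cmd.sh
# that runs every job on the local machine with Kaldi's run.pl wrapper could
# look like
#
#   export train_cmd="run.pl"
#   export decode_cmd="run.pl"
#   export cuda_cmd="run.pl"
#
# On a GridEngine cluster, queue.pl would typically be used instead, with the
# GPU stage requesting a GPU slot (e.g. export cuda_cmd="queue.pl -l gpu=1");
# the exact resource options depend on the local queue configuration.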