  #!/bin/bash
  
  # Copyright 2012-2013 Karel Vesely, Daniel Povey
  # Apache 2.0
  
  # Begin configuration section.  
  nnet= # Optionally pre-select network to use for getting state-likelihoods
  feature_transform= # Optionally pre-select feature transform (in front of nnet)
  model= # Optionally pre-select transition model
  class_frame_counts= # Optionally pre-select class-counts used to compute PDF priors 
  
  stage=0 # stage=1 skips lattice generation
  nj=4
  cmd=run.pl
  max_active=7000 # maximum number of active tokens
  max_mem=50000000 # limit the fst-size to 50MB (larger fsts are minimized)
  beam=13.0 # GMM:13.0
  latbeam=8.0 # GMM:6.0
  acwt=0.10 # GMM:0.0833, note: only really affects pruning (scoring is on lattices).
  scoring_opts="--min-lmwt 4 --max-lmwt 15"
  skip_scoring=false
  use_gpu_id=-1 # disable gpu
  parallel_opts="-pe smp 2" # use 2 CPUs (1 DNN-forward, 1 decoder)
  srcdir= # optionally select dir with DNN model
  # End configuration section.
  
  echo "$0 $@"  # Print the command line for logging
  
  [ -f ./path.sh ] && . ./path.sh; # source the path.
  . parse_options.sh || exit 1;
  
  if [ $# != 3 ]; then
     echo "Usage: $0 [options] <graph-dir> <data-dir> <decode-dir>"
     echo "... where <decode-dir> is assumed to be a sub-directory of the directory"
     echo " where the DNN + transition model is."
     echo "e.g.: $0 exp/dnn1/graph_tgpr data/test exp/dnn1/decode_tgpr"
     echo ""
     echo "This script works on plain or modified features (CMN,delta+delta-delta),"
     echo "which are then sent through feature-transform. It works out what type"
     echo "of features you used from content of srcdir."
     echo ""
     echo "main options (for others, see top of script file)"
     echo "  --config <config-file>                           # config containing options"
     echo "  --nj <nj>                                        # number of parallel jobs"
     echo "  --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs."
     echo ""
     echo "  --nnet <nnet>                                    # which nnet to use (opt.)"
     echo "  --feature-transform <nnet>                       # select transform in front of nnet (opt.)"
     echo "  --class-frame-counts <file>                      # file with frame counts (used to compute priors) (opt.)"
     echo "  --model <model>                                  # which transition model to use (opt.)"
     echo ""
     echo "  --acwt <float>                                   # select acoustic scale for decoding"
     echo "  --scoring-opts <opts>                            # options forwarded to local/score.sh"
     exit 1;
  fi
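  # Illustrative invocation with some options overridden (the exp/dnn1 paths are the
  # same placeholders as in the usage example above):
  #   steps/decode_nnet.sh --nj 8 --acwt 0.10 --scoring-opts "--min-lmwt 4 --max-lmwt 15" \
  #     exp/dnn1/graph_tgpr data/test exp/dnn1/decode_tgpr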
  
  
  graphdir=$1
  data=$2
  dir=$3
  [ -z "$srcdir" ] && srcdir=$(dirname $dir); # Default: assume the model directory is one level up from the decoding directory.
  sdata=$data/split$nj;
  
  mkdir -p $dir/log
  [[ -d $sdata && $data/feats.scp -ot $sdata ]] || split_data.sh $data $nj || exit 1;
  echo $nj > $dir/num_jobs
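  # split_data.sh leaves per-job subsets of the data (feats.scp, utt2spk, cmvn.scp, ...)
  # in $sdata/1 .. $sdata/$nj; the JOB placeholder used below is expanded to the job index by $cmd.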
  
  if [ -z "$nnet" ]; then # if --nnet <nnet> was not specified on the command line...
    nnet=$srcdir/final.nnet; 
  fi
  [ -z "$nnet" ] && echo "Error nnet '$nnet' does not exist!" && exit 1;
  
  if [ -z "$model" ]; then # if --model <mdl> was not specified on the command line...
    model=$srcdir/final.mdl;
  fi
  
  # find the feature_transform to use
  if [ -z "$feature_transform" ]; then
    feature_transform=$srcdir/final.feature_transform
  fi
  if [ ! -f $feature_transform ]; then
    echo "Missing feature_transform '$feature_transform'"
    exit 1
  fi
  
  # check that files exist
  for f in $sdata/1/feats.scp $nnet $model $graphdir/HCLG.fst; do
    [ ! -f $f ] && echo "$0: no such file $f" && exit 1;
  done
  
  # PREPARE THE LOG-POSTERIOR COMPUTATION PIPELINE
  if [ -z "$class_frame_counts" ]; then
    class_frame_counts=$srcdir/ali_train_pdf.counts
  else
    echo "Overriding class_frame_counts by $class_frame_counts"
  fi
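  # The frame counts serve as an estimate of the PDF priors P(s). nnet-forward uses them
  # to turn the DNN posteriors into pseudo log-likelihoods for decoding, roughly
  # log p(x|s) = log P(s|x) - log P(s) + const.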
  
  # Create the feature stream:
  feats="ark,s,cs:copy-feats scp:$sdata/JOB/feats.scp ark:- |"
  # Optionally add cmvn
  if [ -f $srcdir/norm_vars ]; then
    norm_vars=$(cat $srcdir/norm_vars 2>/dev/null)
    [ ! -f $sdata/1/cmvn.scp ] && echo "$0: cannot find cmvn stats $sdata/1/cmvn.scp" && exit 1
    feats="$feats apply-cmvn --norm-vars=$norm_vars --utt2spk=ark:$sdata/JOB/utt2spk scp:$sdata/JOB/cmvn.scp ark:- ark:- |"
  fi
  # Optionally add deltas
  if [ -f $srcdir/delta_order ]; then
    delta_order=$(cat $srcdir/delta_order)
    feats="$feats add-deltas --delta-order=$delta_order ark:- ark:- |"
  fi
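  # For illustration only: with both CMVN and deltas enabled by srcdir, the expanded
  # pipeline for job 1 would look roughly like (option values are placeholders):
  #   ark,s,cs:copy-feats scp:$sdata/1/feats.scp ark:- | \
  #     apply-cmvn --norm-vars=false --utt2spk=ark:$sdata/1/utt2spk scp:$sdata/1/cmvn.scp ark:- ark:- | \
  #     add-deltas --delta-order=2 ark:- ark:- |
  # The 'ark,s,cs:... |' rspecifier makes nnet-forward below read its features from this pipeline.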
  
  
  # Run the decoding in the queue
  if [ $stage -le 0 ]; then
    $cmd $parallel_opts JOB=1:$nj $dir/log/decode.JOB.log \
      nnet-forward --feature-transform=$feature_transform --no-softmax=true --class-frame-counts=$class_frame_counts --use-gpu-id=$use_gpu_id $nnet "$feats" ark:- \| \
      latgen-faster-mapped --max-active=$max_active --max-mem=$max_mem --beam=$beam --lattice-beam=$latbeam \
      --acoustic-scale=$acwt --allow-partial=true --word-symbol-table=$graphdir/words.txt \
      $model $graphdir/HCLG.fst ark:- "ark:|gzip -c > $dir/lat.JOB.gz" || exit 1;
  fi
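  # nnet-forward (with --no-softmax=true and --class-frame-counts) emits per-frame pseudo
  # log-likelihoods for each PDF; latgen-faster-mapped treats them as acoustic scores, uses
  # the transition model to map them onto HCLG.fst, and writes one gzipped lattice archive
  # per job ($dir/lat.JOB.gz), which the scoring step below reads.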
  
  # Run the scoring
  if ! $skip_scoring ; then
    [ ! -x local/score.sh ] && \
      echo "Not scoring because local/score.sh does not exist or is not executable." && exit 1;
    local/score.sh $scoring_opts --cmd "$cmd" $data $graphdir $dir || exit 1;
  fi
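  # local/score.sh is recipe-specific; typically it rescores the lattices over the LM-weight
  # range given in $scoring_opts and writes WER results under $dir.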
  
  exit 0;