#!/bin/bash

# Copyright 2016  Vimal Manohar
# Apache 2.0.

# This script does Viterbi decoding over a matrix of per-frame
# log-likelihoods whose columns correspond to the pdfs.
# It is a wrapper around the binary decode-faster.
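#
# Example invocation (directory names are hypothetical, for illustration):
#   steps/segmentation/decode_sad.sh --acwt 0.3 --nj 8 \
#     exp/sad_graph exp/sad_nnet_output exp/sad_decode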
  
set -e
set -o pipefail

cmd=run.pl
nj=4
acwt=0.1
beam=8
max_active=1000
transform=   # Transformation matrix to apply to the input archives read from output.scp
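# (A typical transform, though the script does not require this: a small
# matrix that rescales or combines the network's output posteriors, e.g. by
# dividing them by class priors.)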
  
. ./path.sh

. utils/parse_options.sh

if [ $# -ne 3 ]; then
  echo "Usage: $0 <graph-dir> <nnet_output_dir> <decode-dir>"
  echo " e.g.: $0 "
  exit 1
fi

graph_dir=$1
nnet_output_dir=$2
dir=$3

mkdir -p $dir/log
  
echo $nj > $dir/num_jobs
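# (Later stages read $dir/num_jobs to know how many ali.JOB.gz archives to
# expect.)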
  
# Check that the required input files exist.  ($extra_files is empty here
# unless it has been set in the calling environment.)
for f in $graph_dir/HCLG.fst $nnet_output_dir/output.scp $extra_files; do
  if [ ! -f $f ]; then
    echo "$0: Could not find file $f"
    exit 1
  fi
done
  
rspecifier="ark:utils/split_scp.pl -j $nj \$[JOB-1] $nnet_output_dir/output.scp | copy-feats scp:- ark:- |"
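# (run.pl/queue.pl substitutes JOB with the 1-based job index; split_scp.pl
# expects a 0-based index, hence the $[JOB-1].)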
  
# Apply a transformation to the input matrix to combine probabilities from
# the different columns into pseudo-likelihoods.
if [ ! -z "$transform" ]; then
  rspecifier="$rspecifier transform-feats $transform ark:- ark:- |"
fi
  
# Convert the pseudo-likelihoods to pseudo log-likelihoods, since
# decode-faster expects log-likelihoods as input.
rspecifier="$rspecifier copy-matrix --apply-log ark:- ark:- |"
  
decoder_opts+=(--acoustic-scale=$acwt --beam=$beam --max-active=$max_active)
  
$cmd JOB=1:$nj $dir/log/decode.JOB.log \
  decode-faster "${decoder_opts[@]}" \
  $graph_dir/HCLG.fst "$rspecifier" \
  ark:/dev/null "ark:| gzip -c > $dir/ali.JOB.gz"
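
# The word output of decode-faster is discarded (ark:/dev/null); only the
# frame-level alignments are kept, gzipped as $dir/ali.JOB.gz.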