run_nnet_cpu.sh
#!/bin/bash
. ./00_init_paths.sh
# I'm using basically the same setup as for Switchboard 100 hours,
# but slightly fewer parameters (8M -> 7M) as we have slightly less
# data (81 hours).
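# Train the neural net on top of the tri4b alignments
# (4 hidden layers, 16 parallel training jobs).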
steps/train_nnet_cpu.sh \
  --mix-up 8000 \
  --initial-learning-rate 0.01 --final-learning-rate 0.001 \
  --num-jobs-nnet 16 --num-hidden-layers 4 \
  --num-parameters 7000000 \
  data/train lang_train exp/tri4b_ali exp/nnet5c1 || exit 1
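
# Decode the dev set with the cross-entropy-trained net, re-using the
# speaker transforms from the earlier tri4b decode of dev.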
steps/decode_nnet_cpu.sh --nj 6 \
  --transform-dir exp/tri4b/decode_dev \
  exp/tri4b/graph data/dev exp/nnet5c1/decode_dev
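
# Boosted-MMI (sequence-discriminative) training on top of the nnet.
# The alignments and denominator lattices (exp/tri5c1_denlats) are assumed
# to have been generated in a separate step not shown in this script.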
steps/train_nnet_cpu_mmi.sh --boost 0.1 --initial-learning-rate 0.001 \
  --minibatch-size 128 --transform-dir exp/tri4b_ali \
  data/train lang_train exp/tri5c1_nnet exp/tri5c1_nnet exp/tri5c1_denlats exp/tri5c1_mmi_a
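
# Decode the dev set with the MMI-trained model, using the tri3b graph
# and transforms.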
steps/decode_nnet_cpu.sh --nj 6 \
  --transform-dir exp/tri3b/decode_dev \
  exp/tri3b/graph data/dev exp/tri5c1_mmi_a/decode_dev