// latbin/lattice-lmrescore-const-arpa.cc
// Copyright 2014 Guoguo Chen
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.

#include "base/kaldi-common.h"
#include "fstext/fstext-lib.h"
#include "lat/kaldi-lattice.h"
#include "lat/lattice-functions.h"
#include "lm/const-arpa-lm.h"
#include "util/common-utils.h"
int main(int argc, char *argv[]) {
try {
using namespace kaldi;
typedef kaldi::int32 int32;
typedef kaldi::int64 int64;
const char *usage =
"Rescores lattice with the ConstArpaLm format language model. The LM\n"
"will be wrapped into the DeterministicOnDemandFst interface and the\n"
"rescoring is done by composing with the wrapped LM using a special\n"
"type of composition algorithm. Determinization will be applied on\n"
"the composed lattice.\n"
"\n"
"Usage: lattice-lmrescore-const-arpa [options] lattice-rspecifier \\\n"
" const-arpa-in lattice-wspecifier\n"
" e.g.: lattice-lmrescore-const-arpa --lm-scale=-1.0 ark:in.lats \\\n"
" const_arpa ark:out.lats\n";
    ParseOptions po(usage);
    BaseFloat lm_scale = 1.0;

    po.Register("lm-scale", &lm_scale, "Scaling factor for language model "
                "costs; frequently 1.0 or -1.0");

    po.Read(argc, argv);

    if (po.NumArgs() != 3) {
      po.PrintUsage();
      exit(1);
    }

    std::string lats_rspecifier = po.GetArg(1),
        lm_rxfilename = po.GetArg(2),
        lats_wspecifier = po.GetArg(3);

    // Reads the language model in ConstArpaLm format.
    ConstArpaLm const_arpa;
    ReadKaldiObject(lm_rxfilename, &const_arpa);
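    // (The const-arpa file is typically built offline from an ARPA LM; a
    // sketch of the conversion, where the integer symbol ids are assumptions
    // that must match the words.txt of the decoding setup:
    //
    //   arpa-to-const-arpa --bos-symbol=<bos-id> --eos-symbol=<eos-id> \
    //     --unk-symbol=<unk-id> lm.arpa lm.carpa )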
    // Reads and writes as compact lattice.
    SequentialCompactLatticeReader compact_lattice_reader(lats_rspecifier);
    CompactLatticeWriter compact_lattice_writer(lats_wspecifier);

    int32 n_done = 0, n_fail = 0;
    for (; !compact_lattice_reader.Done(); compact_lattice_reader.Next()) {
      std::string key = compact_lattice_reader.Key();
      CompactLattice clat = compact_lattice_reader.Value();
      compact_lattice_reader.FreeCurrent();

      if (lm_scale != 0.0) {
        // Before composing with the LM FST, we scale the lattice weights
        // by the inverse of "lm_scale". We'll later scale by "lm_scale".
        // We do it this way so we can determinize and it will give the
        // right effect (taking the "best path" through the LM) regardless
        // of the sign of lm_scale.
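        // (Sketch of the algebra: if a path has graph/LM cost g and the
        // composition adds new-LM cost c, the pre-scaling gives g/lm_scale,
        // composition gives g/lm_scale + c, and the final scaling by
        // lm_scale below restores g while contributing lm_scale * c.)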
        fst::ScaleLattice(fst::GraphLatticeScale(1.0/lm_scale), &clat);
        ArcSort(&clat, fst::OLabelCompare<CompactLatticeArc>());

        // Wraps the ConstArpaLm format language model into FST. We re-create it
        // for each lattice to prevent memory usage increasing with time.
        ConstArpaLmDeterministicFst const_arpa_fst(const_arpa);
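        // (As a DeterministicOnDemandFst, const_arpa_fst computes its arcs
        // and LM scores lazily as the composition below queries them, so the
        // full LM is never expanded into an explicit FST.)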
        // Composes lattice with language model.
        CompactLattice composed_clat;
        ComposeCompactLatticeDeterministic(clat,
                                           &const_arpa_fst, &composed_clat);

        // Determinizes the composed lattice.
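        // (The conversion to Lattice plus Invert() puts the word labels on
        // the input side, which is the convention DeterminizeLattice()
        // expects; the effect is to keep only the best-scoring path for
        // each word sequence.)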
        Lattice composed_lat;
        ConvertLattice(composed_clat, &composed_lat);
        Invert(&composed_lat);
        CompactLattice determinized_clat;
        DeterminizeLattice(composed_lat, &determinized_clat);
        fst::ScaleLattice(fst::GraphLatticeScale(lm_scale), &determinized_clat);
        if (determinized_clat.Start() == fst::kNoStateId) {
          KALDI_WARN << "Empty lattice for utterance " << key
                     << " (incompatible LM?)";
          n_fail++;
        } else {
          compact_lattice_writer.Write(key, determinized_clat);
          n_done++;
        }
      } else {
        // Zero scale so nothing to do.
        n_done++;
        compact_lattice_writer.Write(key, clat);
      }
    }

    KALDI_LOG << "Done " << n_done << " lattices, failed for " << n_fail;
    return (n_done != 0 ? 0 : 1);
  } catch(const std::exception &e) {
    std::cerr << e.what();
    return -1;
  }
}