  // ivectorbin/logistic-regression-train.cc
  
  // Copyright 2014  David Snyder
  
  // See ../../COPYING for clarification regarding multiple authors
  //
  // Licensed under the Apache License, Version 2.0 (the "License");
  // you may not use this file except in compliance with the License.
  // You may obtain a copy of the License at
  //
  //  http://www.apache.org/licenses/LICENSE-2.0
  //
  // THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  // KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
  // WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
  // MERCHANTABLITY OR NON-INFRINGEMENT.
  // See the Apache 2 License for the specific language governing permissions and
  // limitations under the License.
  
  
  #include "base/kaldi-common.h"
  #include "util/common-utils.h"
  #include "ivector/logistic-regression.h"
  
  
  int main(int argc, char *argv[]) {
    using namespace kaldi;
    typedef kaldi::int32 int32;
    try {
      const char *usage =
          "Trains a model using Logistic Regression with L-BFGS from
  "
          "a set of vectors. The class labels in <classes-rspecifier>
  "
          "must be a set of integers such that there are no gaps in 
  "
          "its range and the smallest label must be 0.
  "
          "Usage: logistic-regression-train <vector-rspecifier>
  "
          "<classes-rspecifier> <model-out>
  ";
  
      ParseOptions po(usage);
  
      bool binary = true;
      LogisticRegressionConfig config;
      config.Register(&po);
      po.Register("binary", &binary, "Write output in binary mode");
      po.Read(argc, argv);
  
      if (po.NumArgs() != 3) {
        po.PrintUsage();
        exit(1);
      }
  
      std::string vector_rspecifier = po.GetArg(1),
          class_rspecifier = po.GetArg(2),
          model_out = po.GetArg(3);
  
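      // The class labels drive the iteration (sequential reader); the matching
      // training vectors are looked up by utterance key (random-access reader).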
      RandomAccessBaseFloatVectorReader vector_reader(vector_rspecifier);
      SequentialInt32Reader class_reader(class_rspecifier);
  
      std::vector<int32> ys;
      std::vector<std::string> utt_ids;
      std::vector<Vector<BaseFloat> > vectors;
  
      int32 num_utt_done = 0, num_utt_err = 0;
  
      int32 num_classes = 0;
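      // Read every (utterance, label) pair, skipping utterances that have
      // no matching vector, and track the largest label seen.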
      for (; !class_reader.Done(); class_reader.Next()) {
        std::string utt = class_reader.Key();
        int32 class_label = class_reader.Value();
        if (!vector_reader.HasKey(utt)) {
          KALDI_WARN << "No vector for utterance " << utt;
          num_utt_err++;
        } else {
          ys.push_back(class_label);
          const Vector<BaseFloat> &vector = vector_reader.Value(utt);
          vectors.push_back(vector);
  
          // Since there are no gaps in the class labels and we
          // start at 0, the largest label is the number of
          // classes minus 1.
          if (class_label > num_classes) {
            num_classes = class_label;
          }
          num_utt_done++;
        }
      }
  
      // The largest label is the number of classes minus 1,
      // so add 1 to get the class count.
      num_classes += 1;
  
      KALDI_LOG << "Retrieved " << num_utt_done << " vectors with "
                << num_utt_err << " missing. "
                << "There were " << num_classes << " class labels.";
  
      if (num_utt_done == 0)
        KALDI_ERR << "No vectors processed. Unable to train.";
  
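      // Pack the collected vectors into a single matrix, one training
      // example per row, and release the temporary storage.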
      Matrix<BaseFloat> xs(vectors.size(), vectors[0].Dim());
      for (int i = 0; i < vectors.size(); i++) {
        xs.Row(i).CopyFromVec(vectors[i]);
      }
      vectors.clear();
  
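      // Train the logistic-regression model (optimized with L-BFGS, as
      // described in the usage message) and write it to disk.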
      LogisticRegression classifier;
      classifier.Train(xs, ys, config);
      WriteKaldiObject(classifier, model_out, binary);
  
      return 0;
    } catch(const std::exception &e) {
      std::cerr << e.what();
      return -1;
    }
  }