nnet-train-simple.cc
// nnet2bin/nnet-train-simple.cc

// Copyright 2012  Johns Hopkins University (author: Daniel Povey)

// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//  http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.

#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/train-nnet.h"
#include "nnet2/am-nnet.h"


int main(int argc, char *argv[]) {
  try {
    using namespace kaldi;
    using namespace kaldi::nnet2;
    typedef kaldi::int32 int32;
    typedef kaldi::int64 int64;

    const char *usage =
        "Train the neural network parameters with backprop and stochastic\n"
        "gradient descent using minibatches. Training examples would be\n"
        "produced by nnet-get-egs.\n"
        "\n"
        "Usage: nnet-train-simple [options] <model-in> <training-examples-in> <model-out>\n"
        "\n"
        "e.g.:\n"
        "nnet-train-simple 1.nnet ark:1.egs 2.nnet\n";

    bool binary_write = true;
    bool zero_stats = true;
    int32 srand_seed = 0;
    std::string use_gpu = "yes";
    NnetSimpleTrainerConfig train_config;

    ParseOptions po(usage);
    po.Register("binary", &binary_write, "Write output in binary mode");
    po.Register("zero-stats", &zero_stats, "If true, zero occupation "
                "counts stored with the neural net (only affects mixing up).");
    po.Register("srand", &srand_seed, "Seed for random number generator "
                "(relevant if you have layers of type AffineComponentPreconditioned "
                "with l2-penalty != 0.0).");
    po.Register("use-gpu", &use_gpu,
                "yes|no|optional|wait, only has effect if compiled with CUDA");

    train_config.Register(&po);

    po.Read(argc, argv);

    if (po.NumArgs() != 3) {
      po.PrintUsage();
      exit(1);
    }
    srand(srand_seed);

#if HAVE_CUDA==1
    CuDevice::Instantiate().SelectGpuId(use_gpu);
#endif

    std::string nnet_rxfilename = po.GetArg(1),
        examples_rspecifier = po.GetArg(2),
        nnet_wxfilename = po.GetArg(3);

    int64 num_examples;

    {
      TransitionModel trans_model;
      AmNnet am_nnet;
      {
        bool binary_read;
        Input ki(nnet_rxfilename, &binary_read);
        trans_model.Read(ki.Stream(), binary_read);
        am_nnet.Read(ki.Stream(), binary_read);
      }

      if (zero_stats) am_nnet.GetNnet().ZeroStats();

      SequentialNnetExampleReader example_reader(examples_rspecifier);

      num_examples = TrainNnetSimple(train_config, &(am_nnet.GetNnet()),
                                     &example_reader);

      {
        Output ko(nnet_wxfilename, binary_write);
        trans_model.Write(ko.Stream(), binary_write);
        am_nnet.Write(ko.Stream(), binary_write);
      }
    }
#if HAVE_CUDA==1
    CuDevice::Instantiate().PrintProfile();
#endif

    KALDI_LOG << "Finished training, processed " << num_examples
              << " training examples. Wrote model to "
              << nnet_wxfilename;
    return (num_examples == 0 ? 1 : 0);
  } catch(const std::exception &e) {
    std::cerr << e.what() << '\n';
    return -1;
  }
}
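The call above passes only the first three arguments of TrainNnetSimple; its declaration in nnet2/train-nnet.h also lists two optional output pointers for the total frame weight and total log-probability. Below is a minimal sketch, not part of the binary, showing how a caller could supply those pointers to report an average log-probability per frame. It assumes the trailing arguments default to NULL (they are omitted in the binary) and reuses the placeholder filenames 1.nnet, ark:1.egs and 2.nnet from the usage example.

// Sketch only: programmatic use of TrainNnetSimple with the optional
// tot_weight / tot_logprob outputs.  Assumes these trailing arguments
// default to NULL in the declaration; filenames are placeholders taken
// from the usage string above.
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/train-nnet.h"
#include "nnet2/am-nnet.h"

int main() {
  using namespace kaldi;
  using namespace kaldi::nnet2;
  typedef kaldi::int64 int64;

  // Read the transition model and acoustic-model neural net, as the
  // binary does.
  TransitionModel trans_model;
  AmNnet am_nnet;
  {
    bool binary_read;
    Input ki("1.nnet", &binary_read);  // placeholder <model-in>
    trans_model.Read(ki.Stream(), binary_read);
    am_nnet.Read(ki.Stream(), binary_read);
  }

  NnetSimpleTrainerConfig train_config;  // defaults; the binary fills this via ParseOptions
  SequentialNnetExampleReader example_reader("ark:1.egs");  // placeholder <training-examples-in>

  // Train, additionally collecting the accumulated weight and log-prob.
  double tot_weight = 0.0, tot_logprob = 0.0;
  int64 num_examples = TrainNnetSimple(train_config, &(am_nnet.GetNnet()),
                                       &example_reader,
                                       &tot_weight, &tot_logprob);

  if (tot_weight > 0.0)
    KALDI_LOG << "Processed " << num_examples << " examples; average "
              << "log-prob per frame was " << (tot_logprob / tot_weight);

  // Write the (unchanged) transition model and the updated net.
  Output ko("2.nnet", true);  // placeholder <model-out>, binary mode
  trans_model.Write(ko.Stream(), true);
  am_nnet.Write(ko.Stream(), true);
  return 0;
}

In the binary itself the return value (the number of examples processed) is all that is needed: it drives the final log message and the nonzero exit status when no examples were read.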