nnet-train-parallel.cc
Go to the documentation of this file.
1 // nnet2bin/nnet-train-parallel.cc
2 
3 // Copyright 2012 Johns Hopkins University (author: Daniel Povey)
4 
5 // See ../../COPYING for clarification regarding multiple authors
6 //
7 // Licensed under the Apache License, Version 2.0 (the "License");
8 // you may not use this file except in compliance with the License.
9 // You may obtain a copy of the License at
10 //
11 // http://www.apache.org/licenses/LICENSE-2.0
12 //
13 // THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
15 // WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
16 // MERCHANTABLITY OR NON-INFRINGEMENT.
17 // See the Apache 2 License for the specific language governing permissions and
18 // limitations under the License.
19 
#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/nnet-update-parallel.h"
#include "nnet2/am-nnet.h"
25 
26 
27 int main(int argc, char *argv[]) {
28  try {
29  using namespace kaldi;
30  using namespace kaldi::nnet2;
31  typedef kaldi::int32 int32;
32  typedef kaldi::int64 int64;
33 
34  const char *usage =
35  "Train the neural network parameters with backprop and stochastic\n"
36  "gradient descent using minibatches. As nnet-train-simple, but\n"
37  "uses multiple threads in a Hogwild type of update (for CPU, not GPU).\n"
38  "\n"
39  "Usage: nnet-train-parallel [options] <model-in> <training-examples-in> <model-out>\n"
40  "\n"
41  "e.g.:\n"
42  "nnet-train-parallel --num-threads=8 1.nnet ark:1.1.egs 2.nnet\n";
43 
44  bool binary_write = true;
45  bool zero_stats = true;
46  int32 minibatch_size = 1024;
47  int32 srand_seed = 0;
48 
49  ParseOptions po(usage);
50  po.Register("binary", &binary_write, "Write output in binary mode");
51  po.Register("zero-stats", &zero_stats, "If true, zero stats "
52  "stored with the neural net (only affects mixing up).");
53  po.Register("srand", &srand_seed,
54  "Seed for random number generator (e.g., for dropout)");
55  po.Register("num-threads", &g_num_threads, "Number of training threads to use "
56  "in the parallel update. [Note: if you use a parallel "
57  "implementation of BLAS, the actual number of threads may be larger.]");
58  po.Register("minibatch-size", &minibatch_size, "Number of examples to use for "
59  "each minibatch during training.");
60 
61  po.Read(argc, argv);
62  srand(srand_seed);
63 
64  if (po.NumArgs() != 3) {
65  po.PrintUsage();
66  exit(1);
67  }
68 
69  std::string nnet_rxfilename = po.GetArg(1),
70  examples_rspecifier = po.GetArg(2),
71  nnet_wxfilename = po.GetArg(3);
72 
73  TransitionModel trans_model;
74  AmNnet am_nnet;
75  {
76  bool binary_read;
77  Input ki(nnet_rxfilename, &binary_read);
78  trans_model.Read(ki.Stream(), binary_read);
79  am_nnet.Read(ki.Stream(), binary_read);
80  }
81 
82  KALDI_ASSERT(minibatch_size > 0);
83 
84  if (zero_stats) am_nnet.GetNnet().ZeroStats();
85 
86  double num_examples = 0;
87  SequentialNnetExampleReader example_reader(examples_rspecifier);
88 
89 
90  DoBackpropParallel(am_nnet.GetNnet(),
91  minibatch_size,
92  &example_reader,
93  &num_examples,
94  &(am_nnet.GetNnet()));
95 
96  {
97  Output ko(nnet_wxfilename, binary_write);
98  trans_model.Write(ko.Stream(), binary_write);
99  am_nnet.Write(ko.Stream(), binary_write);
100  }
101 
102  KALDI_LOG << "Finished training, processed " << num_examples
103  << " training examples (weighted). Wrote model to "
104  << nnet_wxfilename;
105  return (num_examples == 0 ? 1 : 0);
106  } catch(const std::exception &e) {
107  std::cerr << e.what() << '\n';
108  return -1;
109  }
110 }
111 
112 
This code computes Goodness of Pronunciation (GOP) and extracts phone-level pronunciation feature for...
Definition: chain.dox:20
void PrintUsage(bool print_command_line=false)
Prints the usage documentation [provided in the constructor].
int32 g_num_threads
Definition: kaldi-thread.cc:25
void Read(std::istream &is, bool binary)
Definition: am-nnet.cc:39
kaldi::int32 int32
int main(int argc, char *argv[])
void Register(const std::string &name, bool *ptr, const std::string &doc)
std::istream & Stream()
Definition: kaldi-io.cc:826
The class ParseOptions is for parsing command-line options; see Parsing command-line options for more...
Definition: parse-options.h:36
std::ostream & Stream()
Definition: kaldi-io.cc:701
void Read(std::istream &is, bool binary)
void Write(std::ostream &os, bool binary) const
Definition: am-nnet.cc:31
A templated class for reading objects sequentially from an archive or script file; see The Table conc...
Definition: kaldi-table.h:287
int Read(int argc, const char *const *argv)
Parses the command line options and fills the ParseOptions-registered variables.
std::string GetArg(int param) const
Returns one of the positional parameters; 1-based indexing for argc/argv compatibility.
int NumArgs() const
Number of positional parameters (c.f. argc-1).
void Write(std::ostream &os, bool binary) const
#define KALDI_ASSERT(cond)
Definition: kaldi-error.h:185
#define KALDI_LOG
Definition: kaldi-error.h:153
double DoBackpropParallel(const Nnet &nnet, int32 minibatch_size, SequentialNnetExampleReader *examples_reader, double *tot_weight, Nnet *nnet_to_update)
This function is similar to "DoBackprop" in nnet-update.h This function computes the objective functi...
const Nnet & GetNnet() const
Definition: am-nnet.h:61