NnetTrainerOptions Struct Reference

#include <nnet-training.h>

Public Member Functions

 NnetTrainerOptions ()
 
void Register (OptionsItf *opts)
 

Public Attributes

bool zero_component_stats
 
bool store_component_stats
 
int32 print_interval
 
bool debug_computation
 
BaseFloat momentum
 
BaseFloat l2_regularize_factor
 
BaseFloat backstitch_training_scale
 
int32 backstitch_training_interval
 
BaseFloat batchnorm_stats_scale
 
std::string read_cache
 
std::string write_cache
 
bool binary_write_cache
 
BaseFloat max_param_change
 
NnetOptimizeOptions optimize_config
 
NnetComputeOptions compute_config
 
CachingOptimizingCompilerOptions compiler_config
 

Detailed Description

Definition at line 34 of file nnet-training.h.
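Doxygen attaches no prose description here; the struct simply bundles the command-line-configurable options for nnet3 training. As a rough, hypothetical sketch of how it is typically wired into a training binary (the ParseOptions flow is standard Kaldi; the NnetTrainer line is indicative only and is not documented on this page):

#include "nnet3/nnet-training.h"
#include "util/parse-options.h"

int main(int argc, char *argv[]) {
  using namespace kaldi;
  using namespace kaldi::nnet3;

  ParseOptions po("Sketch: expose NnetTrainerOptions flags on the command line.");

  NnetTrainerOptions train_opts;   // members start at the defaults shown below
  train_opts.Register(&po);        // adds --momentum, --max-param-change, ...
  po.Read(argc, argv);

  // ... load an Nnet 'nnet', then hand the options to the trainer, e.g.:
  // NnetTrainer trainer(train_opts, &nnet);
  return 0;
}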

Constructor & Destructor Documentation

◆ NnetTrainerOptions()

NnetTrainerOptions ( )
inline

Definition at line 51 of file nnet-training.h.

NnetTrainerOptions():
    zero_component_stats(true),
    store_component_stats(true),
    print_interval(100),
    debug_computation(false),
    momentum(0.0),
    l2_regularize_factor(1.0),
    backstitch_training_scale(0.0),
    backstitch_training_interval(1),
    batchnorm_stats_scale(0.8),
    binary_write_cache(true),
    max_param_change(2.0) { }
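
The constructor only fills in defaults; callers may also set fields directly, with or without command-line registration. A brief illustrative sketch (the values and cache path below are made up):

kaldi::nnet3::NnetTrainerOptions opts;            // defaults as listed above
opts.momentum = 0.9;                              // illustrative override
opts.max_param_change = 1.0;                      // illustrative override
opts.read_cache = "exp/cache/computation.cache";  // hypothetical path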

Member Function Documentation

◆ Register()

void Register (OptionsItf *opts)
inline

Definition at line 63 of file nnet-training.h.

References OptionsItf::Register(), NnetComputeOptions::Register(), NnetOptimizeOptions::Register(), and CachingOptimizingCompilerOptions::Register().

Referenced by main(), NnetChainTrainingOptions::Register(), and NnetDiscriminativeOptions::Register().

void Register(OptionsItf *opts) {
  opts->Register("store-component-stats", &store_component_stats,
                 "If true, store activations and derivatives for nonlinear "
                 "components during training.");
  opts->Register("zero-component-stats", &zero_component_stats,
                 "If both this and --store-component-stats are true, then "
                 "the component stats are zeroed before training.");
  opts->Register("print-interval", &print_interval, "Interval (measured in "
                 "minibatches) after which we print out objective function "
                 "during training\n");
  opts->Register("max-param-change", &max_param_change, "The maximum change in "
                 "parameters allowed per minibatch, measured in Euclidean norm "
                 "over the entire model (change will be clipped to this value).");
  opts->Register("momentum", &momentum, "Momentum constant to apply during "
                 "training (helps stabilize updates), e.g. 0.9. Note: we "
                 "automatically multiply the learning rate by (1-momentum) "
                 "so that the 'effective' learning rate is the same as "
                 "before (because momentum would normally increase the "
                 "effective learning rate by 1/(1-momentum)).");
  opts->Register("l2-regularize-factor", &l2_regularize_factor, "Factor that "
                 "affects the strength of l2 regularization on model "
                 "parameters. The primary way to specify this type of "
                 "l2 regularization is via the 'l2-regularize' "
                 "configuration value at the config-file level. "
                 "--l2-regularize-factor will be multiplied by the component-level "
                 "l2-regularize values and can be used to correct for effects "
                 "related to parallelization by model averaging.");
  opts->Register("batchnorm-stats-scale", &batchnorm_stats_scale,
                 "Factor by which we scale down the accumulated stats of batchnorm "
                 "layers after processing each minibatch, to ensure that the final "
                 "model we write out has batchnorm stats that are fairly fresh.");
  opts->Register("backstitch-training-scale", &backstitch_training_scale,
                 "Backstitch training factor; if 0, we are in normal training "
                 "mode. It is referred to as '\\alpha' in our publications.");
  opts->Register("backstitch-training-interval",
                 &backstitch_training_interval,
                 "Do backstitch training with the specified interval of "
                 "minibatches. It is referred to as 'n' in our publications.");
  opts->Register("read-cache", &read_cache, "The location from which to read "
                 "the cached computation.");
  opts->Register("write-cache", &write_cache, "The location to which to write "
                 "the cached computation.");
  opts->Register("binary-write-cache", &binary_write_cache, "Write "
                 "computation cache in binary mode.");

  // Register the optimization options with the prefix "optimization".
  ParseOptions optimization_opts("optimization", opts);
  optimize_config.Register(&optimization_opts);
  // Register the compiler options with the prefix "compiler".
  ParseOptions compiler_opts("compiler", opts);
  compiler_config.Register(&compiler_opts);
  // Register the compute options with the prefix "computation".
  ParseOptions compute_opts("computation", opts);
  compute_config.Register(&compute_opts);
}
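Note that the three nested option structs are not registered directly on opts: each is registered through a ParseOptions object constructed with a prefix, so their flags surface as --optimization.&lt;name&gt;, --compiler.&lt;name&gt; and --computation.&lt;name&gt; rather than colliding with the top-level names. A minimal, self-contained sketch of the same prefixing pattern (the InnerOptions struct and its flag are hypothetical, used only to illustrate the mechanism):

#include "base/kaldi-common.h"
#include "util/parse-options.h"

namespace {

// Hypothetical nested options struct, standing in for e.g. NnetOptimizeOptions.
struct InnerOptions {
  kaldi::int32 widget_count = 4;
  void Register(kaldi::OptionsItf *opts) {
    opts->Register("widget-count", &widget_count, "Illustrative nested flag.");
  }
};

}  // namespace

int main(int argc, char *argv[]) {
  kaldi::ParseOptions po("Sketch of prefixed option registration.");
  InnerOptions inner;
  // Wrapping 'po' with a prefix makes the flag visible as --inner.widget-count.
  kaldi::ParseOptions inner_po("inner", &po);
  inner.Register(&inner_po);
  po.Read(argc, argv);
  return 0;
}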

Member Data Documentation

◆ backstitch_training_interval

int32 backstitch_training_interval

◆ backstitch_training_scale

BaseFloat backstitch_training_scale

◆ batchnorm_stats_scale

BaseFloat batchnorm_stats_scale

◆ binary_write_cache

bool binary_write_cache

◆ compiler_config

CachingOptimizingCompilerOptions compiler_config

Definition at line 50 of file nnet-training.h.

◆ compute_config

NnetComputeOptions compute_config

Definition at line 49 of file nnet-training.h.

◆ debug_computation

bool debug_computation

Definition at line 38 of file nnet-training.h.

◆ l2_regularize_factor

BaseFloat l2_regularize_factor

◆ max_param_change

BaseFloat max_param_change

◆ momentum

BaseFloat momentum

◆ optimize_config

NnetOptimizeOptions optimize_config

Definition at line 48 of file nnet-training.h.

◆ print_interval

int32 print_interval

◆ read_cache

std::string read_cache

◆ store_component_stats

bool store_component_stats

◆ write_cache

std::string write_cache

◆ zero_component_stats

bool zero_component_stats


The documentation for this struct was generated from the following file:

nnet-training.h