#ifndef KALDI_NNET2_NNET_FIX_H_
#define KALDI_NNET2_NNET_FIX_H_

#include "nnet2/nnet-nnet.h"

namespace kaldi {
namespace nnet2 {

struct NnetFixConfig {
  BaseFloat min_average_deriv;
  BaseFloat max_average_deriv;
  BaseFloat parameter_factor;
  BaseFloat relu_bias_change;

  NnetFixConfig(): min_average_deriv(0.1), max_average_deriv(0.75),
                   parameter_factor(2.0), relu_bias_change(1.0) { }

  void Register(OptionsItf *opts) {
    opts->Register("min-average-deriv", &min_average_deriv,
                   "Minimum derivative, averaged over the training data, that we "
                   "allow for a nonlinearity, expressed relative to the maximum "
                   "derivative of the nonlinearity, i.e. 1.0 for tanh, 0.25 for "
                   "sigmoid, 1.0 for rectified linear.");
    opts->Register("max-average-deriv", &max_average_deriv,
                   "Maximum derivative, averaged over the training data, that we "
                   "allow for the nonlinearity associated with one neuron.");
    opts->Register("parameter-factor", &parameter_factor,
                   "Maximum factor by which we change the set of parameters "
                   "associated with a neuron.");
    opts->Register("relu-bias-change", &relu_bias_change,
                   "For ReLUs, change in bias when we identify a component that's "
                   "too frequently on or off.");
  }
};

// Fixes pathological neurons in *nnet (e.g. sigmoid/tanh units whose average
// derivative is too low, i.e. persistently saturated, or ReLUs that are on or
// off too much of the time), as controlled by the thresholds in "config".
void FixNnet(const NnetFixConfig &config, Nnet *nnet);

}  // namespace nnet2
}  // namespace kaldi

#endif  // KALDI_NNET2_NNET_FIX_H_
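For context, here is a minimal driver sketch showing how this config is typically used: it registers the options on a ParseOptions object, reads an acoustic model, applies FixNnet(), and writes the result. This follows Kaldi's standard command-line conventions (in the style of the nnet-am-fix binary) but is not copied from any particular binary; the program name nnet-fix-example is hypothetical.

#include "base/kaldi-common.h"
#include "util/common-utils.h"
#include "hmm/transition-model.h"
#include "nnet2/am-nnet.h"
#include "nnet2/nnet-fix.h"

int main(int argc, char *argv[]) {
  using namespace kaldi;
  using namespace kaldi::nnet2;

  const char *usage =
      "Fix pathological neurons in a neural net (sketch).\n"
      "Usage: nnet-fix-example [options] <model-in> <model-out>\n";
  ParseOptions po(usage);

  NnetFixConfig config;
  config.Register(&po);  // Exposes --min-average-deriv, --max-average-deriv,
                         // --parameter-factor, --relu-bias-change.
  po.Read(argc, argv);
  if (po.NumArgs() != 2) {
    po.PrintUsage();
    return 1;
  }
  std::string model_in = po.GetArg(1), model_out = po.GetArg(2);

  // Read the transition model and neural-net acoustic model.
  TransitionModel trans_model;
  AmNnet am_nnet;
  {
    bool binary_read;
    Input ki(model_in, &binary_read);
    trans_model.Read(ki.Stream(), binary_read);
    am_nnet.Read(ki.Stream(), binary_read);
  }

  // Apply the fix to the underlying Nnet, in place.
  FixNnet(config, &(am_nnet.GetNnet()));

  // Write the fixed model back out.
  {
    Output ko(model_out, true /* binary */);
    trans_model.Write(ko.Stream(), true);
    am_nnet.Write(ko.Stream(), true);
  }
  return 0;
}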
 
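The option documentation expresses the derivative thresholds relative to the nonlinearity's maximum derivative (1.0 for tanh and rectified linear, 0.25 for sigmoid). Below is a standalone conceptual sketch of that arithmetic only, not Kaldi's actual implementation; the observed average derivative is a hypothetical measured value, and the defaults are those from the constructor above.

#include <iostream>

int main() {
  float min_average_deriv = 0.1f;   // default from NnetFixConfig
  float max_average_deriv = 0.75f;  // default from NnetFixConfig
  float max_deriv_sigmoid = 0.25f;  // max of d/dx sigmoid(x), attained at x = 0

  // Hypothetical average derivative measured over training data for one
  // sigmoid neuron.
  float observed_avg_deriv = 0.01f;

  // Convert the relative thresholds into absolute ones for this nonlinearity.
  float lower = min_average_deriv * max_deriv_sigmoid;  // 0.1 * 0.25 = 0.025
  float upper = max_average_deriv * max_deriv_sigmoid;  // 0.75 * 0.25 = 0.1875

  if (observed_avg_deriv < lower)
    std::cout << "neuron is nearly always saturated; candidate for fixing\n";
  else if (observed_avg_deriv > upper)
    std::cout << "neuron rarely leaves its linear range; candidate for fixing\n";
  else
    std::cout << "neuron looks healthy\n";
  return 0;
}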