47   std::vector<std::string> configs;
    50   for (
size_t j = 0; 
j < configs.size(); 
j++) {
    51     KALDI_LOG << 
"Input config[" << 
j << 
"] is: " << configs[
j];
    52     std::istringstream is(configs[
j]);
    57   std::vector<Matrix<BaseFloat> > inputs;
    66     std::ostringstream os;
    67     computation.
Print(os, nnet);
    68     KALDI_LOG << 
"Generated computation with no optimization or shortcut is: " << os.str();
    81     std::ostringstream os;
    82     computation_opt.
Print(os, nnet);
    83     KALDI_LOG << 
"Optimized computation is: " << os.str();
    88     compute_opts.
debug = 
true;
    93   Nnet nnet_to_update(nnet);  
   109   Nnet nnet_opt_to_update(nnet_opt);
   117                             &nnet_opt_to_update);
   120   for (
size_t i = 0; 
i < request.
inputs.size(); 
i++) {
   131   KALDI_LOG << 
"Running non-optimized forward computation";
   135   KALDI_LOG << 
"Running optimized forward computation";
   141   KALDI_LOG << 
"Output sum (not optimized) is " << output.Sum();
   143   KALDI_LOG << 
"Output sum (optimized) is " << output_opt.Sum();
   145     KALDI_WARN << 
"Non-optimized and optimized versions of the computation give "   146                << 
"different outputs: " << output << 
" vs. " << output_opt;
   154   if (request.
outputs[0].has_deriv) {
   156     computer_opt.
AcceptInput(
"output", &output_deriv_opt);
   158     KALDI_LOG << 
"Running non-optimized backward computation";
   160     KALDI_LOG << 
"Running optimized backward computation";
   162     for (
size_t i = 0; 
i < request.
inputs.size(); 
i++) {
   163       if (request.
inputs[
i].has_deriv) {
   169                   << 
"' (non-optimized) is " << in_deriv.
Sum();
   171                   << 
"' (optimized) is " << in_deriv_opt.
Sum();
   173           KALDI_WARN << 
"Non-optimized and optimized versions of the "   174                      << 
"computation give different input-derivs.";
   182                                   nnet_opt_to_update, 1.0e-05)) {
   183     KALDI_WARN << 
"Neural networks differ after training, between "   184                << 
"optimized and non-optimized computation.";
   220   compiler = compiler_all;
   226   optimize = optimize_all;
   231   optimize = optimize_all;
   236   optimize = optimize_all;
   241   optimize = optimize_all;
   246   optimize = optimize_all;
   251   optimize = optimize_all;
   256   optimize = optimize_all;
   261   optimize = optimize_all;
   266   optimize = optimize_all;
   274   optimize = optimize_all;
   277 #define KALDI_SUCCFAIL(b) ((b) ? "SUCCESS" : "FAILURE")   279     << 
"Test failed with all optimizations enabled. Retried test with the "   280     << 
"following optimizations turned off:"   282     << 
"\n  propagate_in_place   ... " << 
KALDI_SUCCFAIL(succ_no_propagate_in_place)
   283     << 
"\n  backprop_in_place    ... " << 
KALDI_SUCCFAIL(succ_no_backprop_in_place)
   285     << 
"\n  convert_addition     ... " << 
KALDI_SUCCFAIL(succ_no_convert_addition)
   286     << 
"\n  remove_assignments   ... " << 
KALDI_SUCCFAIL(succ_no_remove_assignments)
   287     << 
"\n  initialize_undefined ... " << 
KALDI_SUCCFAIL(succ_no_initialize_undefined)
   288     << 
"\n  allocate_from_other  ... " << 
KALDI_SUCCFAIL(succ_no_allocate_from_other)
   289     << 
"\n  move_sizing_commands ... " << 
KALDI_SUCCFAIL(succ_no_move_sizing_commands)
   292 #undef KALDI_SUCCFAIL   296   for (
int32 srand_seed = 0; srand_seed < 40; srand_seed++) {
   297     KALDI_LOG << 
"About to run UnitTestNnetOptimizeInternal with srand_seed = "   309   using namespace kaldi;
   314   CuDevice::Instantiate().SetDebugStrideMode(
true);
   315   CuDevice::Instantiate().SelectGpuId(
"no");
   317   CuDevice::Instantiate().SelectGpuId(
"yes");
 static void UnitTestNnetOptimizeInternal(int32 srand_seed)
 
This code computes Goodness of Pronunciation (GOP) and extracts phone-level pronunciation features for...
 
void ScaleNnet(BaseFloat scale, Nnet *nnet)
Scales the nnet parameters and stats by this scale. 
 
void ReadConfig(std::istream &config_file)
 
bool NnetParametersAreIdentical(const Nnet &nnet1, const Nnet &nnet2, BaseFloat threshold=1.0e-05)
Used for testing that the updatable parameters in two networks are the same. 
 
int32 max_deriv_time_relative
 
#define KALDI_SUCCFAIL(b)
 
void ComputeCudaIndexes()
 
bool move_sizing_commands
 
This class enables you to do the compilation and optimization in one call, and also ensures that if t...
 
This file contains various routines that are useful in test code. 
 
void Print(std::ostream &os, const Nnet &nnet) const
 
This file contains utilities for analyzing and checking computations, which are used in the optimizat...
 
std::vector< IoSpecification > inputs
 
This class represents a matrix that's stored on the GPU if we have one, and in memory if not...
 
static bool UnitTestNnetOptimizeWithOptions(int32 srand_seed, NnetOptimizeOptions opt_config, CachingOptimizingCompilerOptions compiler_config)
 
void SetNnetAsGradient(Nnet *nnet)
Sets nnet as gradient by setting is_gradient_ to true and learning_rate_ to 1 for each UpdatableCompo...
 
void SetVerboseLevel(int32 i)
This should be rarely used, except by programs using Kaldi as library; command-line programs set the ...
 
void AcceptInput(const std::string &node_name, CuMatrix< BaseFloat > *input)
e.g. 
 
void ResetGenerators(Nnet *nnet)
This function calls 'ResetGenerator()' on all components in 'nnet' that inherit from class RandomComp...
 
void ComputeExampleComputationRequestSimple(const Nnet &nnet, ComputationRequest *request, std::vector< Matrix< BaseFloat > > *inputs)
This function computes an example computation request, for testing purposes. 
 
const CuMatrixBase< BaseFloat > & GetOutput(const std::string &node_name)
 
static void UnitTestNnetOptimize()
 
std::shared_ptr< const NnetComputation > Compile(const ComputationRequest &request)
Does the compilation and returns a const pointer to the result, which is owned by this class...
 
Matrix for CUDA computing. 
 
class NnetComputer is responsible for executing the computation described in the "computation" object...
 
void CreateComputation(const CompilerOptions &opts, NnetComputation *computation)
 
std::vector< IoSpecification > outputs
 
This class creates an initial version of the NnetComputation, without any optimization or sharing of ...
 
void GenerateConfigSequence(const NnetGenerationOptions &opts, std::vector< std::string > *configs)
Generates a sequence of at least one config file, output as strings, where the first in the sequence...
 
bool initialize_undefined
 
static bool ApproxEqual(float a, float b, float relative_tolerance=0.001)
return abs(a - b) <= relative_tolerance * (abs(a)+abs(b)). 
 
int32 RandInt(int32 min_val, int32 max_val, struct RandomState *state)
 
void Run()
This does either the forward or backward computation, depending when it is called (in a typical compu...