29 for (
int32 c = 0; c < nc; c++) {
31 if (dynamic_cast<const SoftmaxComponent*>(component) != NULL) {
32 if (index != -1)
return -1;
42 KALDI_ASSERT(c_to_insert >= 0 && c_to_insert <= dest_nnet->NumComponents());
44 std::vector<Component*> components(c_tot);
45 for (
int32 c = 0; c < c_to_insert; c++)
54 dest_nnet->
Init(&components);
61 KALDI_ASSERT(num_to_remove >= 0 && num_to_remove <= dest_nnet->NumComponents());
64 std::vector<Component*> components;
65 for (
int32 c = 0; c < c_orig; c++)
72 dest_nnet->
Init(&components);
This code computes Goodness of Pronunciation (GOP) and extracts phone-level pronunciation features for...
const Component & GetComponent(int32 c) const
int32 IndexOfSoftmaxLayer(const Nnet &nnet)
If "nnet" has exactly one softmax layer, this function will return its index; otherwise it will return -1.
Abstract class, basic element of the network: it is a box with defined inputs, outputs, and transformation functions interface.
virtual Component * Copy() const =0
Copy component (deep copy).
int32 NumComponents() const
Returns number of components — think of this as similar to the number of layers, but e.g.
void ReplaceLastComponents(const Nnet &src_nnet, int32 num_to_remove, Nnet *dest_nnet)
Removes the last "num_to_remove" components and adds the components from "src_nnet".
void InsertComponents(const Nnet &src_nnet, int32 c_to_insert, Nnet *dest_nnet)
Inserts the components of one neural network into a particular place in the other one...
#define KALDI_ASSERT(cond)
void Init(std::istream &is)
Initialize from config file.