#ifndef KALDI_DECODER_LATTICE_INCREMENTAL_DECODER_H_
#define KALDI_DECODER_LATTICE_INCREMENTAL_DECODER_H_

#include "fst/fstlib.h"
// ...

struct LatticeIncrementalDecoderConfig {
  // ... (member declarations elided)
  LatticeIncrementalDecoderConfig():
      // ...
      max_active(std::numeric_limits<int32>::max()),
      // ...
      determinize_max_delay(60),
      determinize_min_chunk_size(20) { }

  void Register(OptionsItf *opts) {
    opts->Register("beam", &beam, "Decoding beam.  Larger->slower, more accurate.");
    opts->Register("max-active", &max_active,
                   "Decoder max active states.  Larger->slower; "
                   "more accurate");
    opts->Register("min-active", &min_active, "Decoder minimum #active states.");
    opts->Register("lattice-beam", &lattice_beam,
                   "Lattice generation beam.  Larger->slower, "
                   "and deeper lattices");
    opts->Register("prune-interval", &prune_interval,
                   "Interval (in frames) at "
                   "which to prune tokens");
    opts->Register("beam-delta", &beam_delta,
                   "Increment used in decoding-- this "
                   "parameter is obscure and relates to a speedup in the way the "
                   "max-active constraint is applied.  Larger is more accurate.");
    opts->Register("hash-ratio", &hash_ratio,
                   "Setting used in decoder to "
                   "control hash behavior");
    opts->Register("determinize-max-delay", &determinize_max_delay,
                   "Maximum frames of delay between decoding a frame and "
                   "determinizing it");
    opts->Register("determinize-min-chunk-size", &determinize_min_chunk_size,
                   "Minimum chunk size used in determinization");
  }

  void Check() const {
    if (!(beam > 0.0 && max_active > 1 && lattice_beam > 0.0 &&
          min_active <= max_active && prune_interval > 0 &&
          beam_delta > 0.0 && hash_ratio >= 1.0 &&
          prune_scale > 0.0 && prune_scale < 1.0 &&
          determinize_max_delay > determinize_min_chunk_size &&
          determinize_min_chunk_size > 0))
      KALDI_ERR << "Invalid options given to decoder";
    // ... (checks on the determinization options elided)
    if (/* ... */)
      KALDI_ERR << "Invalid determinization options given to decoder.";
  }
};
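// Illustrative sketch (not part of the original header): the usual Kaldi
// pattern for filling in and validating the options registered above.  It
// assumes "util/parse-options.h" is available; ExampleParseDecoderConfig is a
// hypothetical helper, shown only for illustration.
inline LatticeIncrementalDecoderConfig ExampleParseDecoderConfig(int argc,
                                                                 char *argv[]) {
  LatticeIncrementalDecoderConfig config;
  ParseOptions po("Example usage string.");
  config.Register(&po);  // exposes --beam, --max-active, --lattice-beam, ...
  po.Read(argc, argv);   // fills the struct from the command line
  config.Check();        // KALDI_ERR if the resulting values are inconsistent
  return config;
}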
// This class is used inside LatticeIncrementalDecoderTpl; it handles some of
// the details of incremental determinization.
class LatticeIncrementalDeterminizer {
 public:
  LatticeIncrementalDeterminizer(const TransitionModel &trans_model,
                                 const LatticeIncrementalDecoderConfig &config):
      trans_model_(trans_model), config_(config) { }

  void InitializeRawLatticeChunk(
      Lattice *olat,
      unordered_map<Label, LatticeArc::StateId> *token_label2state);

  bool AcceptRawLatticeChunk(Lattice *raw_fst);

  void SetFinalCosts(
      const unordered_map<Label, BaseFloat> *token_label2final_cost = NULL);

  enum { kStateLabelOffset = (int)1e8, kTokenLabelOffset = (int)2e8,
         kMaxTokenLabel = (int)3e8 };
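  // Note on the constants above (an inference, not a comment from the original
  // header): labels at or above kStateLabelOffset but below kTokenLabelOffset
  // appear to encode redeterminized states, while token labels are presumably
  // allocated upward from kTokenLabelOffset and must stay below kMaxTokenLabel,
  // so a label's range tells you which kind of placeholder it represents.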
  void GetRawLatticeFinalCosts(
      const Lattice &raw_fst,
      std::unordered_map<Label, BaseFloat> *old_final_costs);

  void GetNonFinalRedetStates();

  bool ProcessArcsFromChunkStartState(
      const CompactLattice &chunk_clat,
      std::unordered_map<CompactLattice::StateId, CompactLattice::StateId> *state_map);

  void TransferArcsToClat(
      const CompactLattice &chunk_clat, /* ... */
      const std::unordered_map<CompactLattice::StateId, CompactLattice::StateId> &state_map,
      const std::unordered_map<CompactLattice::StateId, Label> &chunk_state_to_token,
      const std::unordered_map<Label, BaseFloat> &old_final_costs);

  void IdentifyTokenFinalStates(
      const CompactLattice &chunk_clat,
      std::unordered_map<CompactLattice::StateId, CompactLatticeArc::Label> *token_map) const;

  // ...
  std::vector<std::vector<std::pair<CompactLattice::StateId, int32> > > arcs_in_;
  // ...
};
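// Rough sketch of how the helpers above fit together (an informal reading, not
// a comment from the original header): AcceptRawLatticeChunk() records the
// final-costs attached to token-final states of the raw chunk
// (GetRawLatticeFinalCosts), determinizes the chunk, works out which states of
// the determinized chunk correspond to token labels (IdentifyTokenFinalStates),
// splices the chunk onto the lattice determinized so far via the arcs leaving
// the chunk's start state (ProcessArcsFromChunkStartState), and copies the
// remaining arcs across (TransferArcsToClat).  arcs_in_, indexed by state,
// lists the (source-state, arc-index) pairs entering that state, which is what
// lets later chunks and SetFinalCosts() patch costs in place.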
// This is an extension to the "normal" lattice-generating decoder.
template <typename FST, typename Token = decoder::StdToken>
class LatticeIncrementalDecoderTpl {
  // ...

  /// says whether a final-state was active on the last frame.
  bool ReachedFinal() const {
    return FinalRelativeCost() != std::numeric_limits<BaseFloat>::infinity();
  }

  // ...
  const CompactLattice &GetLattice(int32 num_frames_to_include,
                                   bool use_final_probs = false);

  // ...
  void FinalizeDecoding();

  // ...
  inline static void DeleteForwardLinks(Token *tok);

  // ...
  struct TokenList {
    // ...
    TokenList(): toks(NULL), must_prune_forward_links(true),
                 must_prune_tokens(true) { }
  };

  // ...
  void PossiblyResizeHash(size_t num_toks);
  inline Token *FindOrAddToken(StateId state, int32 frame_plus_one,
                               BaseFloat tot_cost, Token *backpointer,
                               bool *changed);
  void PruneForwardLinks(int32 frame_plus_one, bool *extra_costs_changed,
                         bool *links_pruned, BaseFloat delta);
  void ComputeFinalCosts(unordered_map<Token *, BaseFloat> *final_costs,
                         BaseFloat *final_relative_cost,
                         BaseFloat *final_best_cost) const;
  void PruneForwardLinksFinal();
  void PruneTokensForFrame(int32 frame_plus_one);

  // ...
  void ProcessNonemitting(BaseFloat cost_cutoff);

  // ...
  void DeleteElems(Elem *list);
  void ClearActiveTokens();

  // ...
  void UpdateLatticeDeterminization();
};

typedef LatticeIncrementalDecoderTpl<fst::StdFst, decoder::StdToken>
    LatticeIncrementalDecoder;
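// Illustrative usage sketch (not part of the original header): a minimal
// incremental decoding loop.  It assumes a constructed LatticeIncrementalDecoder,
// a DecodableInterface (from "itf/decodable-itf.h") that is being fed features,
// and the usual InitDecoding()/AdvanceDecoding() interface of the Kaldi online
// decoders; ExampleIncrementalDecode is a hypothetical helper, shown only to
// make the chunked GetLattice() pattern concrete.
inline void ExampleIncrementalDecode(LatticeIncrementalDecoder &decoder,
                                     DecodableInterface &decodable) {
  decoder.InitDecoding();
  while (!decodable.IsLastFrame(decoder.NumFramesDecoded() - 1)) {
    decoder.AdvanceDecoding(&decodable);
    // Ask for the lattice up to the frames decoded so far; chunks that were
    // already determinized are reused, so calling this periodically is cheap.
    const CompactLattice &partial_clat =
        decoder.GetLattice(decoder.NumFramesDecoded(),
                           /*use_final_probs=*/false);
    (void)partial_clat;  // e.g. hand off to endpointing or partial results
  }
  decoder.FinalizeDecoding();
  // Final lattice, now including final-probs.
  const CompactLattice &clat =
      decoder.GetLattice(decoder.NumFramesDecoded(), /*use_final_probs=*/true);
  (void)clat;
}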
 