From 77ffb5556c825adcf22138d387967c627d54c415 Mon Sep 17 00:00:00 2001
From: Ilia Dzenzeliuk <43926347+dzen03@users.noreply.github.com>
Date: Sun, 21 Jan 2024 18:08:34 +0300
Subject: [PATCH] Fix unused but set variable

---
 src/base/kaldi-error.h                    | 8 ++++----
 src/bin/matrix-sum.cc                     | 6 ++----
 src/bin/vector-sum.cc                     | 3 +--
 src/chainbin/nnet3-chain-copy-egs.cc      | 4 +---
 src/fstext/pre-determinize-inl.h          | 3 ---
 src/gmm/mle-diag-gmm-test.cc              | 7 ++-----
 src/gmm/mle-full-gmm-test.cc              | 7 ++-----
 src/gmmbin/gmm-acc-mllt-global.cc         | 4 +---
 src/ivector/ivector-extractor-test.cc     | 3 +--
 src/kwsbin/kws-search.cc                  | 2 --
 src/latbin/lattice-oracle.cc              | 3 +--
 src/latbin/lattice-prune.cc               | 3 +--
 src/latbin/lattice-to-mpe-post.cc         | 3 +--
 src/latbin/lattice-to-smbr-post.cc        | 3 +--
 src/matrix/matrix-functions.cc            | 2 --
 src/nnet2/nnet-compute-discriminative.cc  | 3 +--
 src/nnet3/nnet-example-utils.cc           | 2 --
 src/online2bin/apply-cmvn-online.cc       | 3 +--
 src/online2bin/ivector-extract-online2.cc | 3 +--
 src/tree/build-tree-utils.cc              | 2 --
 20 files changed, 21 insertions(+), 53 deletions(-)

diff --git a/src/base/kaldi-error.h b/src/base/kaldi-error.h
index a9904a752cd..572cbb4effd 100644
--- a/src/base/kaldi-error.h
+++ b/src/base/kaldi-error.h
@@ -185,12 +185,12 @@ class MessageLogger {
 #define KALDI_ASSERT(cond)                                                     \
   do {                                                                         \
     if (cond)                                                                  \
-      (void)0;                                                                 \
+      (void)(cond);                                                            \
     else                                                                       \
       ::kaldi::KaldiAssertFailure_(__func__, __FILE__, __LINE__, #cond);       \
   } while (0)
 #else
-#define KALDI_ASSERT(cond) (void)0
+#define KALDI_ASSERT(cond) (void)(cond)
 #endif
 
 // Some more expensive asserts only checked if this defined.
@@ -198,12 +198,12 @@ class MessageLogger {
 #define KALDI_PARANOID_ASSERT(cond)                                            \
   do {                                                                         \
     if (cond)                                                                  \
-      (void)0;                                                                 \
+      (void)(cond);                                                            \
     else                                                                       \
       ::kaldi::KaldiAssertFailure_(__func__, __FILE__, __LINE__, #cond);       \
   } while (0)
 #else
-#define KALDI_PARANOID_ASSERT(cond) (void)0
+#define KALDI_PARANOID_ASSERT(cond) (void)(cond)
 #endif
 
 /***** THIRD-PARTY LOG-HANDLER *****/
diff --git a/src/bin/matrix-sum.cc b/src/bin/matrix-sum.cc
index 3c93dfd0d39..6aee0c5ce78 100644
--- a/src/bin/matrix-sum.cc
+++ b/src/bin/matrix-sum.cc
@@ -49,7 +49,7 @@ int32 TypeOneUsage(const ParseOptions &po,
   }
 
   int32 n_utts = 0, n_total_matrices = 0,
-      n_success = 0, n_missing = 0, n_other_errors = 0;
+      n_success = 0, n_missing = 0;
 
   for (; !matrix_reader1.Done(); matrix_reader1.Next()) {
     std::string key = matrix_reader1.Key();
@@ -78,7 +78,6 @@ int32 TypeOneUsage(const ParseOptions &po,
                      << matrix_in_fns[i] << " vs " << matrix_out.NumRows()
                      << " by " << matrix_out.NumCols()
                      << " primary matrix, rspecifier:" << matrix_in_fn1;
-          n_other_errors++;
         }
       } else {
         KALDI_WARN << "No matrix found for utterance " << key << " for "
@@ -124,7 +123,7 @@ int32 TypeOneUsageAverage(const ParseOptions &po) {
   }
 
   int32 n_utts = 0, n_total_matrices = 0,
-      n_success = 0, n_missing = 0, n_other_errors = 0;
+      n_success = 0, n_missing = 0;
 
   for (; !matrix_reader1.Done(); matrix_reader1.Next()) {
     std::string key = matrix_reader1.Key();
@@ -151,7 +150,6 @@ int32 TypeOneUsageAverage(const ParseOptions &po) {
                      << matrix_in_fns[i] << " vs " << matrix_out.NumRows()
                      << " by " << matrix_out.NumCols()
                      << " primary matrix, rspecifier:" << matrix_in_fn1;
-          n_other_errors++;
         }
       } else {
         KALDI_WARN << "No matrix found for utterance " << key << " for "
diff --git a/src/bin/vector-sum.cc b/src/bin/vector-sum.cc
index 3e622cafdc7..d03bf671245 100644
--- a/src/bin/vector-sum.cc
+++ b/src/bin/vector-sum.cc
@@ -52,7 +52,7 @@ int32 TypeOneUsage(const ParseOptions &po) {
   }
 
   int32 n_utts = 0, n_total_vectors = 0,
-      n_success = 0, n_missing = 0, n_other_errors = 0;
+      n_success = 0, n_missing = 0;
 
   for (; !vector_reader1.Done(); vector_reader1.Next()) {
     std::string key = vector_reader1.Key();
@@ -75,7 +75,6 @@ int32 TypeOneUsage(const ParseOptions &po) {
                      << "system " << (i + 2) << ", rspecifier: "
                      << vector_in_fns[i] << " vs " << vector_out.Dim()
                      << " primary vector, rspecifier:" << vector_in_fn1;
-          n_other_errors++;
        }
      } else {
        KALDI_WARN << "No vector found for utterance " << key << " for "
diff --git a/src/chainbin/nnet3-chain-copy-egs.cc b/src/chainbin/nnet3-chain-copy-egs.cc
index 0117fe2200f..60a2645b31b 100644
--- a/src/chainbin/nnet3-chain-copy-egs.cc
+++ b/src/chainbin/nnet3-chain-copy-egs.cc
@@ -347,7 +347,7 @@ int main(int argc, char *argv[]) {
     // not configurable for now.
     exclude_names.push_back(std::string("ivector"));
 
-    int64 num_read = 0, num_written = 0, num_err = 0;
+    int64 num_read = 0, num_written = 0;
     for (; !example_reader.Done(); example_reader.Next(), num_read++) {
       const std::string &key = example_reader.Key();
       NnetChainExample &eg = example_reader.Value();
@@ -361,7 +361,6 @@ int main(int argc, char *argv[]) {
         BaseFloat weight = 1.0;
         if (!egs_weight_reader.HasKey(key)) {
           KALDI_WARN << "No weight for example key " << key;
-          num_err++;
           continue;
         }
         weight = egs_weight_reader.Value(key);
@@ -371,7 +370,6 @@ int main(int argc, char *argv[]) {
       if (!eg_output_name_rspecifier.empty()) {
         if (!output_name_reader.HasKey(key)) {
           KALDI_WARN << "No new output-name for example key " << key;
-          num_err++;
           continue;
         }
         std::string new_output_name = output_name_reader.Value(key);
diff --git a/src/fstext/pre-determinize-inl.h b/src/fstext/pre-determinize-inl.h
index b67b0ba6fa6..ea6608ce38a 100644
--- a/src/fstext/pre-determinize-inl.h
+++ b/src/fstext/pre-determinize-inl.h
@@ -411,8 +411,6 @@ void PreDeterminize(MutableFst<Arc> *fst,
 
   std::vector<bool> d_vec(max_state+1, false);  // "done vector".  Purely for debugging.
 
-  size_t num_extra_det_states = 0;
-
   // (D)(v)
   while (Q.size() != 0) {
 
@@ -491,7 +489,6 @@ void PreDeterminize(MutableFst<Arc> *fst,
           assert(m_map.count(this_pr.first) == 0);
           m_map[this_pr.first] = k;
           k++;
-          num_extra_det_states++;
         }
       } else {  // Create the set V_t.
        V_t.insert(this_pr.second);
diff --git a/src/gmm/mle-diag-gmm-test.cc b/src/gmm/mle-diag-gmm-test.cc
index d1af7725d20..a91832cd254 100644
--- a/src/gmm/mle-diag-gmm-test.cc
+++ b/src/gmm/mle-diag-gmm-test.cc
@@ -139,12 +139,10 @@ void test_flags_driven_update(const DiagGmm &gmm,
 
   // now both models gmm_all_update, gmm_all_update have the same params updated
   // compute loglike for models for check
-  double loglike0 = 0.0;
   double loglike1 = 0.0;
   double loglike2 = 0.0;
   for (int32 i = 0; i < feats.NumRows(); i++) {
-    loglike0 += static_cast<double>(
-        gmm.LogLikelihood(feats.Row(i)));
+    gmm.LogLikelihood(feats.Row(i));
     loglike1 += static_cast<double>(
         gmm_all_update.LogLikelihood(feats.Row(i)));
     loglike2 += static_cast<double>(
@@ -366,9 +364,8 @@ UnitTestEstimateDiagGmm() {
   est_gmm.Resize(gmm->NumGauss(), gmm->Dim(), flags_all);
   est_gmm.SetZero(flags_all);
 
-  float loglike = 0.0;
   for (size_t i = 0; i < counter; i++) {
-    loglike += est_gmm.AccumulateFromDiag(*gmm, feats.Row(i), 1.0F);
+    est_gmm.AccumulateFromDiag(*gmm, feats.Row(i), 1.0F);
   }
   test_io(*gmm, est_gmm, false, feats);  // ASCII mode
   test_io(*gmm, est_gmm, true, feats);  // Binary mode
diff --git a/src/gmm/mle-full-gmm-test.cc b/src/gmm/mle-full-gmm-test.cc
index 472db88d501..26c5460f024 100644
--- a/src/gmm/mle-full-gmm-test.cc
+++ b/src/gmm/mle-full-gmm-test.cc
@@ -200,12 +200,10 @@ void test_flags_driven_update(const FullGmm &gmm,
 
   // now both models gmm_all_update, gmm_all_update have the same params updated
   // compute loglike for models for check
-  double loglike0 = 0.0;
   double loglike1 = 0.0;
   double loglike2 = 0.0;
   for (int32 i = 0; i < feats.NumRows(); i++) {
-    loglike0 += static_cast<double>(
-        gmm.LogLikelihood(feats.Row(i)));
+    gmm.LogLikelihood(feats.Row(i));
     loglike1 += static_cast<double>(
         gmm_all_update.LogLikelihood(feats.Row(i)));
     loglike2 += static_cast<double>(
@@ -462,9 +460,8 @@ UnitTestEstimateFullGmm() {
   est_gmm.Resize(gmm->NumGauss(), gmm->Dim(), flags_all);
   est_gmm.SetZero(flags_all);
 
-  float loglike = 0.0;
   for (int32 i = 0; i < counter; i++) {
-    loglike += est_gmm.AccumulateFromFull(*gmm, feats.Row(i), 1.0F);
+    est_gmm.AccumulateFromFull(*gmm, feats.Row(i), 1.0F);
   }
   test_io(*gmm, est_gmm, false, feats);
   test_io(*gmm, est_gmm, true, feats);
diff --git a/src/gmmbin/gmm-acc-mllt-global.cc b/src/gmmbin/gmm-acc-mllt-global.cc
index bed91c053d3..b6b7a2b5635 100644
--- a/src/gmmbin/gmm-acc-mllt-global.cc
+++ b/src/gmmbin/gmm-acc-mllt-global.cc
@@ -72,7 +72,7 @@ int main(int argc, char *argv[]) {
     SequentialBaseFloatMatrixReader feature_reader(feature_rspecifier);
     RandomAccessInt32VectorVectorReader gselect_reader(gselect_rspecifier);
 
-    int32 num_done = 0, num_err = 0;
+    int32 num_done = 0;
     for (; !feature_reader.Done(); feature_reader.Next()) {
      std::string utt = feature_reader.Key();
      const Matrix<BaseFloat> &mat = feature_reader.Value();
@@ -88,7 +88,6 @@ int main(int argc, char *argv[]) {
      } else {
        if (!gselect_reader.HasKey(utt)) {
          KALDI_WARN << "No gselect information for utterance " << utt;
-          num_err++;
          continue;
        }
        const std::vector<std::vector<int32> > &gselect= gselect_reader.Value(utt);
@@ -96,7 +95,6 @@ int main(int argc, char *argv[]) {
          KALDI_WARN << "Gselect information has wrong size for utterance "
                     << utt << ", " << gselect.size() << " vs. "
" << mat.NumRows(); - num_err++; continue; } diff --git a/src/ivector/ivector-extractor-test.cc b/src/ivector/ivector-extractor-test.cc index cb08464fbe8..ffd5a2561cc 100644 --- a/src/ivector/ivector-extractor-test.cc +++ b/src/ivector/ivector-extractor-test.cc @@ -94,11 +94,10 @@ void TestIvectorExtraction(const IvectorExtractor &extractor, ivector_dim = extractor.IvectorDim(); Posterior post(num_frames); - double tot_log_like = 0.0; for (int32 t = 0; t < num_frames; t++) { SubVector frame(feats, t); Vector posterior(fgmm.NumGauss(), kUndefined); - tot_log_like += fgmm.ComponentPosteriors(frame, &posterior); + fgmm.ComponentPosteriors(frame, &posterior); for (int32 i = 0; i < posterior.Dim(); i++) post[t].push_back(std::make_pair(i, posterior(i))); } diff --git a/src/kwsbin/kws-search.cc b/src/kwsbin/kws-search.cc index 8e2b2a84def..c76a5d46eb9 100644 --- a/src/kwsbin/kws-search.cc +++ b/src/kwsbin/kws-search.cc @@ -287,7 +287,6 @@ int main(int argc, char *argv[]) { ArcSort(&index, fst::ILabelCompare()); int32 n_done = 0; - int32 n_fail = 0; for (; !keyword_reader.Done(); keyword_reader.Next()) { std::string key = keyword_reader.Key(); VectorFst keyword = keyword_reader.Value(); @@ -336,7 +335,6 @@ int main(int argc, char *argv[]) { if (result_fst.Final(arc.nextstate) != Weight::One()) { KALDI_WARN << "The resulting FST does not have " << "the expected structure for key " << key; - n_fail++; continue; } diff --git a/src/latbin/lattice-oracle.cc b/src/latbin/lattice-oracle.cc index 5f2513131d7..054a0676e37 100644 --- a/src/latbin/lattice-oracle.cc +++ b/src/latbin/lattice-oracle.cc @@ -257,7 +257,7 @@ int main(int argc, char *argv[]) { } int32 n_done = 0, n_fail = 0; - int32 tot_correct = 0, tot_substitutions = 0, + int32 tot_substitutions = 0, tot_insertions = 0, tot_deletions = 0, tot_words = 0; for (; !lattice_reader.Done(); lattice_reader.Next()) { @@ -320,7 +320,6 @@ int main(int argc, char *argv[]) { KALDI_LOG << "%WER " << (100.*tot_errs) / num_words << " [ " << tot_errs << " / " << num_words << ", " << insertions << " insertions, " << deletions << " deletions, " << substitutions << " sub ]"; - tot_correct += correct; tot_substitutions += substitutions; tot_insertions += insertions; tot_deletions += deletions; diff --git a/src/latbin/lattice-prune.cc b/src/latbin/lattice-prune.cc index 49399f748e4..d87f5ded28f 100644 --- a/src/latbin/lattice-prune.cc +++ b/src/latbin/lattice-prune.cc @@ -68,7 +68,7 @@ int main(int argc, char *argv[]) { SequentialCompactLatticeReader compact_lattice_reader(lats_rspecifier); CompactLatticeWriter compact_lattice_writer(lats_wspecifier); - int32 n_done = 0, n_err = 0; + int32 n_done = 0; int64 n_arcs_in = 0, n_arcs_out = 0, n_states_in = 0, n_states_out = 0; @@ -86,7 +86,6 @@ int main(int argc, char *argv[]) { CompactLattice pruned_clat(clat); if (!PruneLattice(beam, &pruned_clat)) { KALDI_WARN << "Error pruning lattice for utterance " << key; - n_err++; } int64 pruned_narcs = NumArcs(pruned_clat), pruned_nstates = pruned_clat.NumStates(); diff --git a/src/latbin/lattice-to-mpe-post.cc b/src/latbin/lattice-to-mpe-post.cc index 7961cc5c438..771399a32a4 100644 --- a/src/latbin/lattice-to-mpe-post.cc +++ b/src/latbin/lattice-to-mpe-post.cc @@ -94,7 +94,7 @@ int main(int argc, char *argv[]) { trans_model.Read(ki.Stream(), binary); } - int32 num_done = 0, num_err = 0; + int32 num_done = 0; double total_lat_frame_acc = 0.0, lat_frame_acc; double total_time = 0, lat_time; @@ -114,7 +114,6 @@ int main(int argc, char *argv[]) { if 
      if (!alignments_reader.HasKey(key)) {
        KALDI_WARN << "No alignment for utterance " << key;
-        num_err++;
      } else {
        const std::vector<int32> &alignment = alignments_reader.Value(key);
        Posterior post;
diff --git a/src/latbin/lattice-to-smbr-post.cc b/src/latbin/lattice-to-smbr-post.cc
index e2772316954..6b2861b395f 100644
--- a/src/latbin/lattice-to-smbr-post.cc
+++ b/src/latbin/lattice-to-smbr-post.cc
@@ -95,7 +95,7 @@ int main(int argc, char *argv[]) {
      trans_model.Read(ki.Stream(), binary);
    }
 
-    int32 num_done = 0, num_err = 0;
+    int32 num_done = 0;
    double total_lat_frame_acc = 0.0, lat_frame_acc;
    double total_time = 0, lat_time;
 
@@ -115,7 +115,6 @@ int main(int argc, char *argv[]) {
 
      if (!alignments_reader.HasKey(key)) {
        KALDI_WARN << "No alignment for utterance " << key;
-        num_err++;
      } else {
        const std::vector<int32> &alignment = alignments_reader.Value(key);
        Posterior post;
diff --git a/src/matrix/matrix-functions.cc b/src/matrix/matrix-functions.cc
index 496c09f5344..6942b220da6 100644
--- a/src/matrix/matrix-functions.cc
+++ b/src/matrix/matrix-functions.cc
@@ -669,12 +669,10 @@ void ComputePca(const MatrixBase<Real> &X,
    Nsp.TopEigs(&l, &Vtmp);
  }
 
-  MatrixIndexT num_zeroed = 0;
  for (MatrixIndexT g = 0; g < G; g++) {
    if (l(g) < 0.0) {
      KALDI_WARN << "In PCA, setting element " << l(g) << " to zero.";
      l(g) = 0.0;
-      num_zeroed++;
    }
  }
  SortSvd(&l, &Vtmp); // Make sure zero elements are last, this
diff --git a/src/nnet2/nnet-compute-discriminative.cc b/src/nnet2/nnet-compute-discriminative.cc
index 65c48097bf9..16d34160508 100644
--- a/src/nnet2/nnet-compute-discriminative.cc
+++ b/src/nnet2/nnet-compute-discriminative.cc
@@ -296,7 +296,7 @@ void NnetDiscriminativeUpdater::LatticeComputations() {
 
  ScalePosterior(eg_.weight, &post);
 
-  double tot_num_post = 0.0, tot_den_post = 0.0;
+  double tot_num_post = 0.0;
  std::vector<MatrixElement<BaseFloat> > sv_labels;
  sv_labels.reserve(answers.size());
  for (int32 t = 0; t < post.size(); t++) {
@@ -304,7 +304,6 @@ void NnetDiscriminativeUpdater::LatticeComputations() {
      int32 pdf_id = post[t][i].first;
      BaseFloat weight = post[t][i].second;
      if (weight > 0.0) { tot_num_post += weight; }
-      else { tot_den_post -= weight; }
      MatrixElement<BaseFloat> elem = {t, pdf_id, weight};
      sv_labels.push_back(elem);
    }
diff --git a/src/nnet3/nnet-example-utils.cc b/src/nnet3/nnet-example-utils.cc
index facbbb19be0..06278610553 100644
--- a/src/nnet3/nnet-example-utils.cc
+++ b/src/nnet3/nnet-example-utils.cc
@@ -673,11 +673,9 @@ void UtteranceSplitter::InitSplits(std::vector<std::vector<int32> > *splits) con
      vec.push_back(config_.num_frames[i]);
      if (j > 0)
        vec.push_back(config_.num_frames[j]);
-      int32 n = 0;
      while (DefaultDurationOfSplit(vec) <= default_duration_ceiling) {
        if (!vec.empty()) // Don't allow the empty vector as a split.
          splits_set.insert(vec);
-        n++;
        vec.push_back(primary_length);
        std::sort(vec.begin(), vec.end());
      }
diff --git a/src/online2bin/apply-cmvn-online.cc b/src/online2bin/apply-cmvn-online.cc
index 06157d0fcdf..615941f760a 100644
--- a/src/online2bin/apply-cmvn-online.cc
+++ b/src/online2bin/apply-cmvn-online.cc
@@ -68,7 +68,7 @@ int main(int argc, char *argv[]) {
 
    BaseFloatMatrixWriter feature_writer(feature_wspecifier);
 
-    int32 num_done = 0, num_err = 0;
+    int32 num_done = 0;
    int64 tot_t = 0;
 
    if (spk2utt_rspecifier != "") {
@@ -82,7 +82,6 @@ int main(int argc, char *argv[]) {
        std::string utt = uttlist[i];
        if (!feature_reader.HasKey(utt)) {
          KALDI_WARN << "No features for utterance " << utt;
-          num_err++;
          continue;
        }
        const Matrix<BaseFloat> &feats = feature_reader.Value(utt);
diff --git a/src/online2bin/ivector-extract-online2.cc b/src/online2bin/ivector-extract-online2.cc
index e697de6d15a..eafc0e64124 100644
--- a/src/online2bin/ivector-extract-online2.cc
+++ b/src/online2bin/ivector-extract-online2.cc
@@ -82,7 +82,7 @@ int main(int argc, char *argv[]) {
        feature_rspecifier = po.GetArg(2),
        ivectors_wspecifier = po.GetArg(3);
 
-    double tot_ubm_loglike = 0.0, tot_objf_impr = 0.0, tot_t = 0.0,
+    double tot_objf_impr = 0.0, tot_t = 0.0,
        tot_length = 0.0, tot_length_utt_end = 0.0;
    int32 num_done = 0, num_err = 0;
 
@@ -166,7 +166,6 @@ int main(int argc, char *argv[]) {
      }
 
      // Update diagnostics.
-      tot_ubm_loglike += T * ivector_feature.UbmLogLikePerFrame();
      tot_objf_impr += T * ivector_feature.ObjfImprPerFrame();
      tot_length_utt_end += T * ivectors.Row(num_ivectors - 1).Norm(2.0);
      for (int32 i = 0; i < num_ivectors; i++)
diff --git a/src/tree/build-tree-utils.cc b/src/tree/build-tree-utils.cc
index 254d7ec36d8..cf88a408fcb 100644
--- a/src/tree/build-tree-utils.cc
+++ b/src/tree/build-tree-utils.cc
@@ -538,7 +538,6 @@ EventMap *SplitDecisionTree(const EventMap &input_map,
                            BaseFloat *obj_impr_out,
                            BaseFloat *smallest_split_change_out) {
  KALDI_ASSERT(num_leaves != NULL && *num_leaves > 0);  // can't be 0 or input_map would be empty.
-  int32 num_empty_leaves = 0;
  BaseFloat like_impr = 0.0;
  BaseFloat smallest_split_change = 1.0e+20;
  std::vector<DecisionTreeSplitter*> builders;
@@ -550,7 +549,6 @@ EventMap *SplitDecisionTree(const EventMap &input_map,
    builders.resize(split_stats.size());  // size == #leaves.
    for (size_t i = 0;i < split_stats.size();i++) {
      EventAnswerType leaf = static_cast<EventAnswerType>(i);
-      if (split_stats[i].size() == 0) num_empty_leaves++;
      builders[i] = new DecisionTreeSplitter(leaf, split_stats[i], q_opts);
    }
  }