diff --git a/include/cppjieba/DictTrie.hpp b/include/cppjieba/DictTrie.hpp
index 38a5dd0..c0e75c4 100644
--- a/include/cppjieba/DictTrie.hpp
+++ b/include/cppjieba/DictTrie.hpp
@@ -97,7 +97,7 @@ class DictTrie {
     size_t lineno = 0;
     for (size_t i = 0; i < files.size(); i++) {
       ifstream ifs(files[i].c_str());
-      CHECK(ifs.is_open()) << "open " << files[i] << " failed";
+      XCHECK(ifs.is_open()) << "open " << files[i] << " failed";
       string line;
       DictUnit node_info;
       vector<string> buf;
@@ -118,7 +118,7 @@ class DictTrie {
         }
       }
     }
-    LOG(INFO) << "load userdicts " << filePaths << ", lines: " << lineno;
+    XLOG(INFO) << "load userdicts " << filePaths << ", lines: " << lineno;
   }
 
   bool MakeNodeInfo(DictUnit& node_info,
@@ -126,7 +126,7 @@ class DictTrie {
         double weight,
         const string& tag) {
     if (!TransCode::Decode(word, node_info.word)) {
-      LOG(ERROR) << "Decode " << word << " failed.";
+      XLOG(ERROR) << "Decode " << word << " failed.";
       return false;
     }
     node_info.weight = weight;
@@ -136,14 +136,14 @@ class DictTrie {
 
   void LoadDict(const string& filePath) {
     ifstream ifs(filePath.c_str());
-    CHECK(ifs.is_open()) << "open " << filePath << " failed.";
+    XCHECK(ifs.is_open()) << "open " << filePath << " failed.";
     string line;
     vector<string> buf;
 
     DictUnit node_info;
     for (size_t lineno = 0; getline(ifs, line); lineno++) {
       Split(line, buf, " ");
-      CHECK(buf.size() == DICT_COLUMN_NUM) << "split result illegal, line:" << line;
+      XCHECK(buf.size() == DICT_COLUMN_NUM) << "split result illegal, line:" << line;
       MakeNodeInfo(node_info,
             buf[0],
             atof(buf[1].c_str()),
@@ -157,7 +157,7 @@ class DictTrie {
   }
 
   void SetStaticWordWeights(UserWordWeightOption option) {
-    CHECK(!static_node_infos_.empty());
+    XCHECK(!static_node_infos_.empty());
     vector<DictUnit> x = static_node_infos_;
     sort(x.begin(), x.end(), WeightCompare);
     min_weight_ = x[0].weight;
diff --git a/include/cppjieba/HMMModel.hpp b/include/cppjieba/HMMModel.hpp
index e14b956..d83a45a 100644
--- a/include/cppjieba/HMMModel.hpp
+++ b/include/cppjieba/HMMModel.hpp
@@ -33,43 +33,43 @@ struct HMMModel {
   }
   void LoadModel(const string& filePath) {
     ifstream ifile(filePath.c_str());
-    CHECK(ifile.is_open()) << "open " << filePath << " failed";
+    XCHECK(ifile.is_open()) << "open " << filePath << " failed";
     string line;
     vector<string> tmp;
     vector<string> tmp2;
     //Load startProb
-    CHECK(GetLine(ifile, line));
+    XCHECK(GetLine(ifile, line));
     Split(line, tmp, " ");
-    CHECK(tmp.size() == STATUS_SUM);
+    XCHECK(tmp.size() == STATUS_SUM);
     for (size_t j = 0; j< tmp.size(); j++) {
       startProb[j] = atof(tmp[j].c_str());
     }
 
     //Load transProb
     for (size_t i = 0; i < STATUS_SUM; i++) {
-      CHECK(GetLine(ifile, line));
+      XCHECK(GetLine(ifile, line));
       Split(line, tmp, " ");
-      CHECK(tmp.size() == STATUS_SUM);
+      XCHECK(tmp.size() == STATUS_SUM);
       for (size_t j =0; j < STATUS_SUM; j++) {
         transProb[i][j] = atof(tmp[j].c_str());
       }
     }
 
     //Load emitProbB
-    CHECK(GetLine(ifile, line));
-    CHECK(LoadEmitProb(line, emitProbB));
+    XCHECK(GetLine(ifile, line));
+    XCHECK(LoadEmitProb(line, emitProbB));
 
     //Load emitProbE
-    CHECK(GetLine(ifile, line));
-    CHECK(LoadEmitProb(line, emitProbE));
+    XCHECK(GetLine(ifile, line));
+    XCHECK(LoadEmitProb(line, emitProbE));
 
     //Load emitProbM
-    CHECK(GetLine(ifile, line));
-    CHECK(LoadEmitProb(line, emitProbM));
+    XCHECK(GetLine(ifile, line));
+    XCHECK(LoadEmitProb(line, emitProbM));
 
     //Load emitProbS
-    CHECK(GetLine(ifile, line));
-    CHECK(LoadEmitProb(line, emitProbS));
+    XCHECK(GetLine(ifile, line));
+    XCHECK(LoadEmitProb(line, emitProbS));
   }
   double GetEmitProb(const EmitProbMap* ptMp, Rune key,
                 double defVal)const {
@@ -102,11 +102,11 @@ struct HMMModel {
     for (size_t i = 0; i < tmp.size(); i++) {
       Split(tmp[i], tmp2, ":");
       if (2 != tmp2.size()) {
-        LOG(ERROR) << "emitProb illegal.";
+        XLOG(ERROR) << "emitProb illegal.";
         return false;
       }
       if (!TransCode::Decode(tmp2[0], unicode) || unicode.size() != 1) {
-        LOG(ERROR) << "TransCode failed.";
+        XLOG(ERROR) << "TransCode failed.";
         return false;
       }
       mp[unicode[0]] = atof(tmp2[1].c_str());
diff --git a/include/cppjieba/KeywordExtractor.hpp b/include/cppjieba/KeywordExtractor.hpp
index 02c2efb..e8cf18c 100644
--- a/include/cppjieba/KeywordExtractor.hpp
+++ b/include/cppjieba/KeywordExtractor.hpp
@@ -86,7 +86,7 @@ class KeywordExtractor {
 private:
   void LoadIdfDict(const string& idfPath) {
     ifstream ifs(idfPath.c_str());
-    CHECK(ifs.is_open()) << "open " << idfPath << " failed";
+    XCHECK(ifs.is_open()) << "open " << idfPath << " failed";
     string line ;
     vector<string> buf;
     double idf = 0.0;
@@ -95,12 +95,12 @@ class KeywordExtractor {
     for (; getline(ifs, line); lineno++) {
       buf.clear();
       if (line.empty()) {
-        LOG(ERROR) << "lineno: " << lineno << " empty. skipped.";
+        XLOG(ERROR) << "lineno: " << lineno << " empty. skipped.";
         continue;
       }
       Split(line, buf, " ");
       if (buf.size() != 2) {
-        LOG(ERROR) << "line: " << line << ", lineno: " << lineno << " empty. skipped.";
+        XLOG(ERROR) << "line: " << line << ", lineno: " << lineno << " empty. skipped.";
         continue;
       }
       idf = atof(buf[1].c_str());
@@ -115,7 +115,7 @@ class KeywordExtractor {
   }
   void LoadStopWordDict(const string& filePath) {
     ifstream ifs(filePath.c_str());
-    CHECK(ifs.is_open()) << "open " << filePath << " failed";
+    XCHECK(ifs.is_open()) << "open " << filePath << " failed";
     string line ;
     while (getline(ifs, line)) {
       stopWords_.insert(line);
diff --git a/include/cppjieba/PosTagger.hpp b/include/cppjieba/PosTagger.hpp
index e64108e..26941da 100644
--- a/include/cppjieba/PosTagger.hpp
+++ b/include/cppjieba/PosTagger.hpp
@@ -35,7 +35,7 @@ class PosTagger {
     assert(dict != NULL);
     for (vector<string>::iterator itr = CutRes.begin(); itr != CutRes.end(); ++itr) {
       if (!TransCode::Decode(*itr, unico)) {
-        LOG(ERROR) << "Decode failed.";
+        XLOG(ERROR) << "Decode failed.";
         return false;
       }
       tmp = dict->Find(unico.begin(), unico.end());