diff --git a/src/main/java/org/wltea/analyzer/help/Sleep.java b/src/main/java/org/wltea/analyzer/help/Sleep.java index 703b816..484a8ef 100644 --- a/src/main/java/org/wltea/analyzer/help/Sleep.java +++ b/src/main/java/org/wltea/analyzer/help/Sleep.java @@ -1,6 +1,12 @@ package org.wltea.analyzer.help; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; + public class Sleep { + + private static final ESLogger logger = Loggers.getLogger("ik-analyzer"); + public enum Type{MSEC,SEC,MIN,HOUR}; public static void sleep(Type type,int num){ try { @@ -18,7 +24,7 @@ public class Sleep { Thread.sleep(num*60*60*1000); return; default: - System.err.println("输入类型错误,应为MSEC,SEC,MIN,HOUR之一"); + logger.error("输入类型错误,应为MSEC,SEC,MIN,HOUR之一"); return; } } catch (InterruptedException e) { diff --git a/src/main/java/org/wltea/analyzer/query/IKQueryExpressionParser.java b/src/main/java/org/wltea/analyzer/query/IKQueryExpressionParser.java index 679ec12..47f3187 100644 --- a/src/main/java/org/wltea/analyzer/query/IKQueryExpressionParser.java +++ b/src/main/java/org/wltea/analyzer/query/IKQueryExpressionParser.java @@ -28,6 +28,8 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.*; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; import java.util.ArrayList; import java.util.LinkedList; @@ -44,7 +46,9 @@ import java.util.Stack; * */ public class IKQueryExpressionParser { - + + private static final ESLogger logger = Loggers.getLogger("ik-analyzer"); + //public static final String LUCENE_SPECIAL_CHAR = "&&||-()':={}[],"; private List elements = new ArrayList(); @@ -705,7 +709,7 @@ public class IKQueryExpressionParser { //String ikQueryExp = "newsTitle:'的两款《魔兽世界》插件Bigfoot和月光宝盒'"; String ikQueryExp = "(id='ABcdRf' && date:{'20010101','20110101'} && keyword:'魔兽中国') || (content:'KSHT-KSH-A001-18' || 
ulr='www.ik.com') - name:'林良益'"; Query result = parser.parseExp(ikQueryExp , true); - System.out.println(result); + logger.info("{}", result); } diff --git a/src/main/java/org/wltea/analyzer/sample/IKAnalzyerDemo.java b/src/main/java/org/wltea/analyzer/sample/IKAnalzyerDemo.java index ea26e3c..72d9df4 100644 --- a/src/main/java/org/wltea/analyzer/sample/IKAnalzyerDemo.java +++ b/src/main/java/org/wltea/analyzer/sample/IKAnalzyerDemo.java @@ -33,6 +33,8 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; import org.wltea.analyzer.lucene.IKAnalyzer; /** @@ -41,7 +43,9 @@ import org.wltea.analyzer.lucene.IKAnalyzer; * */ public class IKAnalzyerDemo { - + + private static final ESLogger logger = Loggers.getLogger("ik-analyzer"); + public static void main(String[] args){ //构建IK分词器,使用smart分词模式 Analyzer analyzer = new IKAnalyzer(true); @@ -63,7 +67,7 @@ public class IKAnalzyerDemo { ts.reset(); //迭代获取分词结果 while (ts.incrementToken()) { - System.out.println(offset.startOffset() + " - " + offset.endOffset() + " : " + term.toString() + " | " + type.type()); + logger.info("{} - {} : {} | {}", offset.startOffset(), offset.endOffset(), term.toString(), type.type()); } //关闭TokenStream(关闭StringReader) ts.end(); // Perform end-of-stream operations, e.g. set the final offset. 
diff --git a/src/main/java/org/wltea/analyzer/sample/LuceneIndexAndSearchDemo.java b/src/main/java/org/wltea/analyzer/sample/LuceneIndexAndSearchDemo.java index 93f32c9..cf863f5 100644 --- a/src/main/java/org/wltea/analyzer/sample/LuceneIndexAndSearchDemo.java +++ b/src/main/java/org/wltea/analyzer/sample/LuceneIndexAndSearchDemo.java @@ -48,6 +48,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.Version; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; import org.wltea.analyzer.lucene.IKAnalyzer; @@ -61,7 +63,8 @@ import org.wltea.analyzer.lucene.IKAnalyzer; * */ public class LuceneIndexAndSearchDemo { - + + private static final ESLogger logger = Loggers.getLogger("ik-analyzer"); /** * 模拟: @@ -107,16 +110,16 @@ public class LuceneIndexAndSearchDemo { QueryParser qp = new QueryParser(fieldName, analyzer); qp.setDefaultOperator(QueryParser.AND_OPERATOR); Query query = qp.parse(keyword); - System.out.println("Query = " + query); + logger.info("Query = {}", query); //搜索相似度最高的5条记录 TopDocs topDocs = isearcher.search(query , 5); - System.out.println("命中:" + topDocs.totalHits); + logger.info("命中:{}", topDocs.totalHits); //输出结果 ScoreDoc[] scoreDocs = topDocs.scoreDocs; for (int i = 0; i < topDocs.totalHits; i++){ Document targetDoc = isearcher.doc(scoreDocs[i].doc); - System.out.println("内容:" + targetDoc.toString()); + logger.info("内容:{}", targetDoc); } } catch (CorruptIndexException e) {