Merge pull request #141 from DevFactory/release/use-log-instead-of-standard-output-fix-1

Replace the usage of System.out and System.err with a logger
This commit is contained in:
Medcl 2016-01-26 09:41:42 +08:00
commit afe9345ba5
4 changed files with 26 additions and 9 deletions

View File

@ -1,6 +1,12 @@
package org.wltea.analyzer.help; package org.wltea.analyzer.help;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
public class Sleep { public class Sleep {
public static ESLogger logger= Loggers.getLogger("ik-analyzer");
public enum Type{MSEC,SEC,MIN,HOUR}; public enum Type{MSEC,SEC,MIN,HOUR};
public static void sleep(Type type,int num){ public static void sleep(Type type,int num){
try { try {
@ -18,7 +24,7 @@ public class Sleep {
Thread.sleep(num*60*60*1000); Thread.sleep(num*60*60*1000);
return; return;
default: default:
System.err.println("输入类型错误应为MSEC,SEC,MIN,HOUR之一"); logger.error("输入类型错误应为MSEC,SEC,MIN,HOUR之一");
return; return;
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {

View File

@ -28,6 +28,8 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.*; import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedList; import java.util.LinkedList;
@ -44,7 +46,9 @@ import java.util.Stack;
* *
*/ */
public class IKQueryExpressionParser { public class IKQueryExpressionParser {
public static ESLogger logger= Loggers.getLogger("ik-analyzer");
//public static final String LUCENE_SPECIAL_CHAR = "&&||-()':={}[],"; //public static final String LUCENE_SPECIAL_CHAR = "&&||-()':={}[],";
private List<Element> elements = new ArrayList<Element>(); private List<Element> elements = new ArrayList<Element>();
@ -705,7 +709,7 @@ public class IKQueryExpressionParser {
//String ikQueryExp = "newsTitle:'的两款《魔兽世界》插件Bigfoot和月光宝盒'"; //String ikQueryExp = "newsTitle:'的两款《魔兽世界》插件Bigfoot和月光宝盒'";
String ikQueryExp = "(id='ABcdRf' && date:{'20010101','20110101'} && keyword:'魔兽中国') || (content:'KSHT-KSH-A001-18' || ulr='www.ik.com') - name:'林良益'"; String ikQueryExp = "(id='ABcdRf' && date:{'20010101','20110101'} && keyword:'魔兽中国') || (content:'KSHT-KSH-A001-18' || ulr='www.ik.com') - name:'林良益'";
Query result = parser.parseExp(ikQueryExp , true); Query result = parser.parseExp(ikQueryExp , true);
System.out.println(result); logger.info(result.toString());
} }

View File

@ -33,6 +33,8 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.wltea.analyzer.lucene.IKAnalyzer; import org.wltea.analyzer.lucene.IKAnalyzer;
/** /**
@ -41,7 +43,9 @@ import org.wltea.analyzer.lucene.IKAnalyzer;
* *
*/ */
public class IKAnalzyerDemo { public class IKAnalzyerDemo {
public static ESLogger logger= Loggers.getLogger("ik-analyzer");
public static void main(String[] args){ public static void main(String[] args){
//构建IK分词器使用smart分词模式 //构建IK分词器使用smart分词模式
Analyzer analyzer = new IKAnalyzer(true); Analyzer analyzer = new IKAnalyzer(true);
@ -63,7 +67,7 @@ public class IKAnalzyerDemo {
ts.reset(); ts.reset();
//迭代获取分词结果 //迭代获取分词结果
while (ts.incrementToken()) { while (ts.incrementToken()) {
System.out.println(offset.startOffset() + " - " + offset.endOffset() + " : " + term.toString() + " | " + type.type()); logger.info(offset.startOffset() + " - " + offset.endOffset() + " : " + term.toString() + " | " + type.type());
} }
//关闭TokenStream关闭StringReader //关闭TokenStream关闭StringReader
ts.end(); // Perform end-of-stream operations, e.g. set the final offset. ts.end(); // Perform end-of-stream operations, e.g. set the final offset.

View File

@ -48,6 +48,8 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version; import org.apache.lucene.util.Version;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.wltea.analyzer.lucene.IKAnalyzer; import org.wltea.analyzer.lucene.IKAnalyzer;
@ -61,7 +63,8 @@ import org.wltea.analyzer.lucene.IKAnalyzer;
* *
*/ */
public class LuceneIndexAndSearchDemo { public class LuceneIndexAndSearchDemo {
public static ESLogger logger= Loggers.getLogger("ik-analyzer");
/** /**
* 模拟 * 模拟
@ -107,16 +110,16 @@ public class LuceneIndexAndSearchDemo {
QueryParser qp = new QueryParser(fieldName, analyzer); QueryParser qp = new QueryParser(fieldName, analyzer);
qp.setDefaultOperator(QueryParser.AND_OPERATOR); qp.setDefaultOperator(QueryParser.AND_OPERATOR);
Query query = qp.parse(keyword); Query query = qp.parse(keyword);
System.out.println("Query = " + query); logger.info("Query = " + query);
//搜索相似度最高的5条记录 //搜索相似度最高的5条记录
TopDocs topDocs = isearcher.search(query , 5); TopDocs topDocs = isearcher.search(query , 5);
System.out.println("命中:" + topDocs.totalHits); logger.info("命中:" + topDocs.totalHits);
//输出结果 //输出结果
ScoreDoc[] scoreDocs = topDocs.scoreDocs; ScoreDoc[] scoreDocs = topDocs.scoreDocs;
for (int i = 0; i < topDocs.totalHits; i++){ for (int i = 0; i < topDocs.totalHits; i++){
Document targetDoc = isearcher.doc(scoreDocs[i].doc); Document targetDoc = isearcher.doc(scoreDocs[i].doc);
System.out.println("内容:" + targetDoc.toString()); logger.info("内容:" + targetDoc.toString());
} }
} catch (CorruptIndexException e) { } catch (CorruptIndexException e) {