Add IK tokenizer support and remove the unused class IkAnalyzer.
This commit is contained in:
parent
fb1aa9b9ad
commit
963fac4771
@ -12,4 +12,11 @@ public class IkAnalysisBinderProcessor extends AnalysisModule.AnalysisBinderProc
|
||||
analyzersBindings.processAnalyzer("ik", IkAnalyzerProvider.class);
|
||||
super.processAnalyzers(analyzersBindings);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void processTokenizers(TokenizersBindings tokenizersBindings) {
|
||||
tokenizersBindings.processTokenizer("ik", IkTokenizerFactory.class);
|
||||
super.processTokenizers(tokenizersBindings);
|
||||
}
|
||||
}
|
||||
|
@ -1,33 +0,0 @@
|
||||
package org.elasticsearch.index.analysis;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.Tokenizer;
|
||||
import org.wltea.analyzer.lucene.IKTokenizer;
|
||||
//import org.wltea.lucene.IKTokenizer;
|
||||
|
||||
import java.io.Reader;
|
||||
|
||||
|
||||
public class IkAnalyzer extends Analyzer {
|
||||
// private boolean isMaxWordLength = false;
|
||||
// @Override public TokenStream tokenStream(String fieldName, Reader reader) {
|
||||
// return new IKTokenizer(reader,true);
|
||||
// }
|
||||
|
||||
|
||||
public IkAnalyzer() {
|
||||
super();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TokenStreamComponents createComponents(String s, Reader reader) {
|
||||
// new TokenStreamComponents
|
||||
Tokenizer tokenizer = new IKTokenizer(reader, true);
|
||||
return new TokenStreamComponents(tokenizer, null); //To change body of implemented methods use File | Settings | File Templates.
|
||||
}
|
||||
|
||||
// public boolean isMaxWordLength() {
|
||||
// return isMaxWordLength;
|
||||
// }
|
||||
}
|
@ -0,0 +1,28 @@
|
||||
package org.elasticsearch.index.analysis;
|
||||
|
||||
import java.io.Reader;
|
||||
|
||||
import org.apache.lucene.analysis.Tokenizer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.wltea.analyzer.dic.Dictionary;
|
||||
import org.wltea.analyzer.lucene.IKTokenizer;
|
||||
|
||||
public class IkTokenizerFactory extends AbstractTokenizerFactory {
|
||||
private boolean useSmart = false;
|
||||
|
||||
public IkTokenizerFactory(Index index, Settings indexSettings, String name, Settings settings) {
|
||||
super(index, indexSettings, name, settings);
|
||||
Dictionary.getInstance().Init(indexSettings);
|
||||
|
||||
if (settings.get("use_smart", "true").equals("true")) {
|
||||
useSmart = true;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Tokenizer create(Reader reader) {
|
||||
return new IKTokenizer(reader, useSmart);
|
||||
}
|
||||
|
||||
}
|
Loading…
x
Reference in New Issue
Block a user