fix #1742 replace with synonym_graph

Shinsuke Sugaya 2018-07-05 19:52:03 +09:00
parent 4d8bd5aa73
commit f6c5cf0909
2 changed files with 21 additions and 27 deletions

@@ -29,15 +29,6 @@
 <param name="plugin.version" value="6.3.1" />
 <param name="plugin.zip.version" value="6.3.1" />
 </antcall>
-<!-- analysis-synonym -->
-<antcall target="install.plugin">
-<param name="repo.url" value="${maven.release.repo.url}" />
-<param name="plugin.groupId" value="org/codelibs" />
-<param name="plugin.name.prefix" value="elasticsearch-" />
-<param name="plugin.name" value="analysis-synonym" />
-<param name="plugin.version" value="6.3.1" />
-<param name="plugin.zip.version" value="6.3.1" />
-</antcall>
 <!-- configsync -->
 <antcall target="install.plugin">
 <param name="repo.url" value="${maven.release.repo.url}" />

@@ -530,6 +530,10 @@
 "seed" : 1,
 "bit" : 2,
 "size" : 64
 },
+"synonym_filter" : {
+"type" : "synonym_graph",
+"synonyms_path": "${fess.dictionary.path}synonym.txt"
+}
 },
 "tokenizer": {
@@ -554,19 +558,15 @@
 "sentence_detector": false,
 "ambiguities_resolved": false
 },
-"unigram_synonym_tokenizer": {
-"type": "ngram_synonym",
-"n": "1",
-"synonyms_path": "${fess.dictionary.path}synonym.txt",
-"dynamic_reload":true,
-"reload_interval":"1m"
+"unigram_tokenizer": {
+"type": "ngram",
+"min_gram": "1",
+"max_gram": "1"
 },
-"bigram_synonym_tokenizer": {
-"type": "ngram_synonym",
-"n": "2",
-"synonyms_path": "${fess.dictionary.path}synonym.txt",
-"dynamic_reload":true,
-"reload_interval":"1m"
+"bigram_tokenizer": {
+"type": "ngram",
+"min_gram": "2",
+"max_gram": "2"
 }
 },
 "analyzer": {
@@ -976,12 +976,13 @@
 "char_filter": [
 "mapping_filter"
 ],
-"tokenizer": "unigram_synonym_tokenizer",
+"tokenizer": "unigram_tokenizer",
 "filter": [
 "alphanum_word_filter",
-"cjk_bigram",
-"stopword_en_filter",
 "lowercase",
+"stopword_en_filter",
+"synonym_filter",
+"cjk_bigram",
 "english_keywords",
 "stemmer_en_filter"
 ]
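
Note: the filter order changes as well: lowercase and stopword_en_filter now run before synonym_filter, and cjk_bigram runs after it. One reason the lowercase/synonym order matters, sketched with an illustrative inline rule:

POST /_analyze
{
  "tokenizer": "standard",
  "filter": [
    "lowercase",
    { "type": "synonym_graph", "synonyms": ["tv, television"] }
  ],
  "text": "TV"
}

With lowercase applied first, "TV" is normalized to "tv", the rule matches, and "television" is emitted alongside "tv"; with the order reversed, the uppercase token would not match the lowercased rule. Presumably cjk_bigram is placed after synonym_filter for a similar reason, so that synonym rules operate on the raw tokens before they are merged into CJK bigrams.
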
@@ -998,12 +999,14 @@
 "char_filter": [
 "mapping_ja_filter"
 ],
-"tokenizer": "unigram_synonym_tokenizer",
+"tokenizer": "unigram_tokenizer",
 "filter": [
 "alphanum_word_filter",
-"cjk_bigram",
-"stopword_en_filter",
 "lowercase",
+"stopword_en_filter",
+"synonym_filter",
+"cjk_bigram",
 "english_keywords",
 "stemmer_en_filter",
 "minhash_filter"
 ]