remove ar/de normalizer from suggest analyzer
parent ecd54ef16c
commit 557c19f915
1 changed file with 6 additions and 6 deletions
@@ -137,12 +137,12 @@
         "type" : "custom",
         "tokenizer" : "keyword",
         "char_filter" : ["mapping_char"],
-        "filter" : ["lowercase", "arabic_normalization"]
+        "filter" : ["lowercase"]
       },
       "contents_analyzer_ar" : {
         "type" : "custom",
         "tokenizer" : "standard",
-        "filter" : ["lowercase", "stopword_en_filter", "content_length_filter", "limit_token_count_filter", "arabic_stop", "arabic_normalization", "arabic_keywords"]
+        "filter" : ["lowercase", "stopword_en_filter", "content_length_filter", "limit_token_count_filter", "arabic_stop", "arabic_keywords"]
       },
       "contents_reading_analyzer_ar" : {
         "type" : "custom",
@@ -343,13 +343,13 @@
         "type" : "custom",
         "tokenizer" : "keyword",
         "char_filter" : ["mapping_char"],
-        "filter" : ["lowercase", "german_normalization"]
+        "filter" : ["lowercase"]
       },
       "contents_analyzer_de" : {
         "type" : "custom",
         "tokenizer" : "standard",
         "char_filter" : ["mapping_char"],
-        "filter" : ["lowercase", "stopword_en_filter", "content_length_filter", "limit_token_count_filter", "german_stop", "german_keywords", "german_normalization"]
+        "filter" : ["lowercase", "stopword_en_filter", "content_length_filter", "limit_token_count_filter", "german_stop", "german_keywords"]
       },
       "contents_reading_analyzer_de" : {
         "type" : "custom",
@@ -552,13 +552,13 @@
         "type" : "custom",
         "tokenizer" : "keyword",
         "char_filter" : ["mapping_char"],
-        "filter" : ["lowercase", "arabic_normalization", "persian_normalization"]
+        "filter" : ["lowercase"]
       },
       "contents_analyzer_fa" : {
         "type" : "custom",
         "tokenizer" : "standard",
         "char_filter" : ["mapping_char"],
-        "filter" : ["lowercase", "stopword_en_filter", "content_length_filter", "limit_token_count_filter", "arabic_normalization", "persian_normalization", "persian_stop"]
+        "filter" : ["lowercase", "stopword_en_filter", "content_length_filter", "limit_token_count_filter", "persian_stop"]
       },
       "contents_reading_analyzer_fa" : {
         "type" : "custom",
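Verification sketch (not part of the commit): one way to spot-check the effect of dropping arabic_normalization from contents_analyzer_ar is Elasticsearch's _analyze API. The index name my_index, the sample text, and the assumption that the custom content_length_filter and limit_token_count_filter pass the token through are all placeholders/assumptions, not taken from this repository.

# Sketch only: check that teh marbuta (ة) is no longer folded to heh (ه)
# now that arabic_normalization has been removed from contents_analyzer_ar.
# "my_index" is a placeholder index created from the updated settings file.
curl -s -X POST "localhost:9200/my_index/_analyze" \
  -H 'Content-Type: application/json' \
  -d '{
        "analyzer" : "contents_analyzer_ar",
        "text"     : "المكتبة"
      }'
# Expected (assuming the custom length/limit filters do not drop the token):
# the emitted token keeps the original ة; with the old filter chain the
# arabic_normalization step would have rewritten it to المكتبه.

The same check applies to the de and fa analyzers, substituting the corresponding analyzer names and a German or Persian sample text.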