Mirror of https://gitlab.com/SIGBUS/nyaa.git
Fix weird offset error with word_delimiter_graph
yet another es7-ism i guess
parent 093eabf158
commit 72521c40c7
@@ -10,7 +10,6 @@ settings:
         char_filter:
           - my_char_filter
         filter:
-          - standard
           - lowercase
       my_index_analyzer:
         type: custom
@@ -66,9 +65,13 @@ settings:
         type: pattern_capture
         patterns: ["0*([0-9]*)"]
       word_delimit:
-        type: word_delimiter
+        type: word_delimiter_graph
         preserve_original: true
         split_on_numerics: false
+        # https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-word-delimiter-graph-tokenfilter.html#word-delimiter-graph-tokenfilter-configure-parms
+        # since we're using "trim" filters downstream, otherwise
+        # you get weird lucene errors about startOffset
+        adjust_offsets: false
     char_filter:
       my_char_filter:
         type: mapping
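A quick way to sanity-check the changed analyzer chain is Elasticsearch's _analyze API. The sketch below is a minimal example, not part of this commit: it assumes the elasticsearch-py client (7.x-style body argument) and a node on localhost, and the index name "nyaa" plus the sample string are placeholders. The analyzer name "my_index_analyzer" comes from the settings file touched above.

# Minimal verification sketch (assumptions: elasticsearch-py 7.x-style client,
# local node, "nyaa" as the index name, made-up sample text).
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Run a sample string through the custom analyzer. Per the comment added in
# this commit, word_delimiter plus the downstream "trim" filters could yield
# token offsets that Lucene rejects with startOffset errors; switching to
# word_delimiter_graph with adjust_offsets: false avoids that.
resp = es.indices.analyze(
    index="nyaa",
    body={
        "analyzer": "my_index_analyzer",
        "text": "[SubGroup] Some_Show - 01v2 (1080p).mkv",
    },
)

for tok in resp["tokens"]:
    print(tok["token"], tok["start_offset"], tok["end_offset"])

If the analysis chain were still emitting inconsistent offsets, the call above would fail with the same Lucene startOffset complaint instead of returning a token list.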