NGram Tokenizer
The ngram tokenizer breaks text into N-grams: contiguous character sequences of a configurable minimum and maximum length.

Example output
POST _analyze
{
  "tokenizer": "ngram",
  "text": "Quick Fox"
}

The above sentence produces the following terms:

[ Q, Qu, u, ui, i, ic, c, ck, k, "k ", " ", " F", F, Fo, o, ox, x ]

Configuration
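The request above relies on the tokenizer's defaults: a min_gram of 1, a max_gram of 2, and an empty token_chars list, so all characters are kept and the whole input is treated as a single token (which is why grams such as "k " and " F" span the space). Restricting token_chars, as the example further below does with letter and digit, makes every other character act as a token boundary. As a minimal sketch, assuming your Elasticsearch version accepts an inline tokenizer definition in _analyze, the same defaults can be written out explicitly and should produce the same terms:

POST _analyze
{
  "tokenizer": {
    "type": "ngram",
    "min_gram": 1,
    "max_gram": 2,
    "token_chars": []
  },
  "text": "Quick Fox"
}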
Example configuration
PUT my_index
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "my_tokenizer"
        }
      },
      "tokenizer": {
        "my_tokenizer": {
          "type": "ngram",
          "min_gram": 3,
          "max_gram": 3,
          "token_chars": [
            "letter",
            "digit"
          ]
        }
      }
    }
  }
}
POST my_index/_analyze
{
  "analyzer": "my_analyzer",
  "text": "2 Quick Foxes."
}

The above example produces the following terms:

[ Qui, uic, ick, Fox, oxe, xes ]
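As a follow-on illustration (not part of the original example), the custom analyzer can be applied to a field mapping so that documents are indexed as 3-character grams. The index name my_index_2 and the title field below are hypothetical, and the typeless mapping syntax assumes Elasticsearch 7.x or later:

PUT my_index_2
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "my_tokenizer"
        }
      },
      "tokenizer": {
        "my_tokenizer": {
          "type": "ngram",
          "min_gram": 3,
          "max_gram": 3,
          "token_chars": [ "letter", "digit" ]
        }
      }
    }
  },
  "mappings": {
    "properties": {
      "title": {
        "type": "text",
        "analyzer": "my_analyzer"
      }
    }
  }
}

PUT my_index_2/_doc/1
{
  "title": "2 Quick Foxes."
}

GET my_index_2/_search
{
  "query": {
    "match": {
      "title": "Fox"
    }
  }
}

Because the match query analyzes the query string with the field's analyzer by default, "Fox" becomes the single gram Fox and matches one of the grams indexed for "Foxes.". For partial-match or autocomplete use cases, a separate search_analyzer is often configured on the field so that the query text is not itself broken into grams.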