示例:
# Custom analyzer components (testable via the _analyze API before adding to a mapping)
## char_filter
# html_strip: strips HTML tags from the input before tokenization.
# With the "keyword" tokenizer the whole text becomes a single token,
# so the output shows "hello world" with the <b></b> tags removed.
POST _analyze
{
"tokenizer": "keyword",
"char_filter": ["html_strip"],
"text": "<b>hello world</b>"
}
# mapping char filter: replaces characters per the "mappings" rules
# before tokenization. Here "-" is rewritten to "_", so the text
# produces tokens like "123_456" and "i_test" instead of being split on "-".
POST _analyze
{
"tokenizer": "standard",
"char_filter": [
{
"type": "mapping",
"mappings": ["-=>_"]
}
],
"text": "123-456, i-test"
}
# pattern_replace char filter: rewrites the input with a regular expression.
# "http://(.*)" captures everything after the scheme and "$1" keeps only
# the captured group, so "http://www.elastic.co" becomes "www.elastic.co".
POST _analyze
{
"tokenizer": "standard",
"char_filter": [
{
"type": "pattern_replace",
"pattern": "http://(.*)",
"replacement": "$1"
}
],
"text": "http://www.elastic.co"
}
## tokenizer
# path_hierarchy: emits one token per path prefix, e.g.
# "/usr", "/usr/abc", "/usr/abc/efg" — useful for filesystem-like paths.
POST _analyze
{
"tokenizer": "path_hierarchy",
"text": "/usr/abc/efg"
}
## filter (token filters, applied after the tokenizer)
# whitespace tokenizer + stop filter: removes stopwords such as "in",
# "on", "the". Note the leading capitalized "The" is NOT removed here,
# because the stop filter matches lowercase stopwords only.
POST _analyze
{
"tokenizer": "whitespace",
"filter": ["stop"],
"text": "The rain in Spain falls mainly on the plain."
}
# Same text, but lowercase runs before stop: "The" is lowercased to "the"
# first, so it is now removed as a stopword — filter order matters.
POST _analyze
{
"tokenizer": "whitespace",
"filter": ["lowercase", "stop"],
"text": "The rain in Spain falls mainly on the plain."
}
来源:oschina
链接:https://my.oschina.net/lemos/blog/3196165
