# demo: Elasticsearch _analyze API examples (Kibana Dev Tools console)
# Analyze a single string with the built-in "standard" analyzer.
GET _analyze
{
"analyzer": "standard",
"text": "this is a test"
}
# "text" also accepts an array of strings; each element is analyzed in turn.
GET _analyze
{
"analyzer": "standard",
"text": [
"This is a test",
"the second text"
]
}
# Build an ad-hoc analyzer from parts: "keyword" tokenizer (whole input as
# one token) plus the "uppercase" token filter.
GET _analyze
{
"tokenizer" : "keyword",
"filter" : ["uppercase"],
"text" : "this is a test"
}
# Same ad-hoc setup with "lowercase"; no char_filter is given, so the
# <b>...</b> markup in the text is kept in the output token.
GET _analyze
{
"tokenizer" : "keyword",
"filter" : ["lowercase"],
"text" : "this is a <b>test</b>"
}
# Adds the "html_strip" char filter, which removes the HTML tags before
# tokenization — contrast with the previous request.
GET _analyze
{
"tokenizer" : "keyword",
"filter" : ["lowercase"],
"char_filter" : ["html_strip"],
"text" : "this is a <b>test</b>"
}
# NOTE(review): this request is a byte-for-byte duplicate of the one above —
# it adds nothing; confirm whether it was meant to demonstrate a variation
# (e.g. a different char_filter) or can be removed.
GET _analyze
{
"tokenizer" : "keyword",
"filter" : ["lowercase"],
"char_filter" : ["html_strip"],
"text" : "this is a <b>test</b>"
}
# Token filters can be defined inline as objects instead of by name:
# here a custom "stop" filter with an explicit stopword list is combined
# with the built-in "lowercase" filter.
GET _analyze
{
"tokenizer": "whitespace",
"filter": [
"lowercase",
{
"type": "stop",
"stopwords": [
"a",
"is",
"this"
]
}
],
"text": "this is a test"
}
# Index-scoped analyze: no analyzer specified, so the index's default
# analyzer for the "twitter" index is used.
GET twitter/_analyze
{
"text" : "this is a test"
}
# Index-scoped analyze with an explicit analyzer, overriding the index default.
GET twitter/_analyze
{
"analyzer": "whitespace",
"text": "this is a test"
}
# Inspect the index (settings + mappings), then analyze using the analyzer
# configured on a specific field ("title") of that index.
GET twitter
GET twitter/_analyze
{
"field" : "title",
"text" : "this is a test"
}
# "explain": true returns per-step detail for each tokenizer/filter;
# "attributes" restricts the detailed output to the listed token attributes
# (here only "keyword").
GET _analyze
{
"tokenizer": "standard",
"filter": [
"snowball"
],
"text": "detailed output",
"explain": true,
"attributes": [
"keyword"
]
}
# Create the "twitter" index with a raised limit on tokens produced by a
# single _analyze call (index.analyze.max_token_count).
# NOTE(review): earlier requests in this file already target "twitter" —
# if the index exists, this PUT (index creation) will fail; confirm the
# intended execution order.
PUT twitter
{
"settings" : {
"index.analyze.max_token_count" : 20000
}
}