
[[analysis-standard-tokenizer]]
=== Standard Tokenizer

The `standard` tokenizer provides grammar-based tokenization (based on the
Unicode Text Segmentation algorithm, as specified in
http://unicode.org/reports/tr29/[Unicode Standard Annex #29]) and works well
for most languages.

[float]
=== Example output

[source,console]
---------------------------
POST _analyze
{
  "tokenizer": "standard",
  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
}
---------------------------

/////////////////////

[source,console-result]
----------------------------
{
  "tokens": [
    {
      "token": "The",
      "start_offset": 0,
      "end_offset": 3,
      "type": "<ALPHANUM>",
      "position": 0
    },
    {
      "token": "2",
      "start_offset": 4,
      "end_offset": 5,
      "type": "<NUM>",
      "position": 1
    },
    {
      "token": "QUICK",
      "start_offset": 6,
      "end_offset": 11,
      "type": "<ALPHANUM>",
      "position": 2
    },
    {
      "token": "Brown",
      "start_offset": 12,
      "end_offset": 17,
      "type": "<ALPHANUM>",
      "position": 3
    },
    {
      "token": "Foxes",
      "start_offset": 18,
      "end_offset": 23,
      "type": "<ALPHANUM>",
      "position": 4
    },
    {
      "token": "jumped",
      "start_offset": 24,
      "end_offset": 30,
      "type": "<ALPHANUM>",
      "position": 5
    },
    {
      "token": "over",
      "start_offset": 31,
      "end_offset": 35,
      "type": "<ALPHANUM>",
      "position": 6
    },
    {
      "token": "the",
      "start_offset": 36,
      "end_offset": 39,
      "type": "<ALPHANUM>",
      "position": 7
    },
    {
      "token": "lazy",
      "start_offset": 40,
      "end_offset": 44,
      "type": "<ALPHANUM>",
      "position": 8
    },
    {
      "token": "dog's",
      "start_offset": 45,
      "end_offset": 50,
      "type": "<ALPHANUM>",
      "position": 9
    },
    {
      "token": "bone",
      "start_offset": 51,
      "end_offset": 55,
      "type": "<ALPHANUM>",
      "position": 10
    }
  ]
}
----------------------------
/////////////////////

The above sentence would produce the following terms:

[source,text]
---------------------------
[ The, 2, QUICK, Brown, Foxes, jumped, over, the, lazy, dog's, bone ]
---------------------------
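
Note that punctuation such as the hyphen in `Brown-Foxes` and the trailing
period act as word boundaries and are discarded, while the apostrophe in
`dog's` is kept, because Annex #29 treats a mid-word apostrophe as part of
the word. The same `_analyze` request can be pointed at any input to inspect
how it is segmented. For instance (this sample text is our own addition, not
part of the original example), text without spaces, such as Chinese, is still
segmented:

[source,console]
---------------------------
POST _analyze
{
  "tokenizer": "standard",
  "text": "你好世界"
}
---------------------------

This should return one token per ideograph (`你`, `好`, `世`, `界`), each with
type `<IDEOGRAPHIC>`.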

[float]
=== Configuration

The `standard` tokenizer accepts the following parameters:

[horizontal]
`max_token_length`::

    The maximum token length. If a token exceeds this length, it is split at
    `max_token_length` intervals. Defaults to `255`.

[float]
=== Example configuration

In this example, we configure the `standard` tokenizer to have a
`max_token_length` of 5 (for demonstration purposes):

[source,console]
----------------------------
PUT my_index
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "my_tokenizer"
        }
      },
      "tokenizer": {
        "my_tokenizer": {
          "type": "standard",
          "max_token_length": 5
        }
      }
    }
  }
}

POST my_index/_analyze
{
  "analyzer": "my_analyzer",
  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
}
----------------------------

/////////////////////

[source,console-result]
----------------------------
{
  "tokens": [
    {
      "token": "The",
      "start_offset": 0,
      "end_offset": 3,
      "type": "<ALPHANUM>",
      "position": 0
    },
    {
      "token": "2",
      "start_offset": 4,
      "end_offset": 5,
      "type": "<NUM>",
      "position": 1
    },
    {
      "token": "QUICK",
      "start_offset": 6,
      "end_offset": 11,
      "type": "<ALPHANUM>",
      "position": 2
    },
    {
      "token": "Brown",
      "start_offset": 12,
      "end_offset": 17,
      "type": "<ALPHANUM>",
      "position": 3
    },
    {
      "token": "Foxes",
      "start_offset": 18,
      "end_offset": 23,
      "type": "<ALPHANUM>",
      "position": 4
    },
    {
      "token": "jumpe",
      "start_offset": 24,
      "end_offset": 29,
      "type": "<ALPHANUM>",
      "position": 5
    },
    {
      "token": "d",
      "start_offset": 29,
      "end_offset": 30,
      "type": "<ALPHANUM>",
      "position": 6
    },
    {
      "token": "over",
      "start_offset": 31,
      "end_offset": 35,
      "type": "<ALPHANUM>",
      "position": 7
    },
    {
      "token": "the",
      "start_offset": 36,
      "end_offset": 39,
      "type": "<ALPHANUM>",
      "position": 8
    },
    {
      "token": "lazy",
      "start_offset": 40,
      "end_offset": 44,
      "type": "<ALPHANUM>",
      "position": 9
    },
    {
      "token": "dog's",
      "start_offset": 45,
      "end_offset": 50,
      "type": "<ALPHANUM>",
      "position": 10
    },
    {
      "token": "bone",
      "start_offset": 51,
      "end_offset": 55,
      "type": "<ALPHANUM>",
      "position": 11
    }
  ]
}
----------------------------
/////////////////////

The above example produces the following terms:

[source,text]
---------------------------
[ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ]
---------------------------
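
To use the custom analyzer at index time, reference it from a field mapping.
A minimal sketch (the field name `my_text` is hypothetical, not part of the
original example):

[source,console]
----------------------------
PUT my_index/_mapping
{
  "properties": {
    "my_text": {
      "type": "text",
      "analyzer": "my_analyzer" <1>
    }
  }
}
----------------------------
<1> `my_text` is a hypothetical field name; `my_analyzer` is the analyzer
defined in the index settings above.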