[[analysis-uaxurlemail-tokenizer]]
=== UAX URL email tokenizer
++++
<titleabbrev>UAX URL email</titleabbrev>
++++

The `uax_url_email` tokenizer is like the <<analysis-standard-tokenizer,`standard` tokenizer>> except that it
recognises URLs and email addresses as single tokens.
[discrete]
=== Example output

[source,console]
---------------------------
POST _analyze
{
  "tokenizer": "uax_url_email",
  "text": "Email me at john.smith@global-international.com"
}
---------------------------
/////////////////////

[source,console-result]
----------------------------
{
  "tokens": [
    {
      "token": "Email",
      "start_offset": 0,
      "end_offset": 5,
      "type": "<ALPHANUM>",
      "position": 0
    },
    {
      "token": "me",
      "start_offset": 6,
      "end_offset": 8,
      "type": "<ALPHANUM>",
      "position": 1
    },
    {
      "token": "at",
      "start_offset": 9,
      "end_offset": 11,
      "type": "<ALPHANUM>",
      "position": 2
    },
    {
      "token": "john.smith@global-international.com",
      "start_offset": 12,
      "end_offset": 47,
      "type": "<EMAIL>",
      "position": 3
    }
  ]
}
----------------------------

/////////////////////
The above sentence would produce the following terms:

[source,text]
---------------------------
[ Email, me, at, john.smith@global-international.com ]
---------------------------

while the `standard` tokenizer would produce:

[source,text]
---------------------------
[ Email, me, at, john.smith, global, international.com ]
---------------------------
[discrete]
=== Configuration

The `uax_url_email` tokenizer accepts the following parameters:

[horizontal]
`max_token_length`::

    The maximum token length. If a token is seen that exceeds this length then
    it is split at `max_token_length` intervals. Defaults to `255`.

[discrete]
=== Example configuration

In this example, we configure the `uax_url_email` tokenizer to have a
`max_token_length` of 5 (for demonstration purposes):
[source,console]
----------------------------
PUT my-index-000001
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "my_tokenizer"
        }
      },
      "tokenizer": {
        "my_tokenizer": {
          "type": "uax_url_email",
          "max_token_length": 5
        }
      }
    }
  }
}

POST my-index-000001/_analyze
{
  "analyzer": "my_analyzer",
  "text": "john.smith@global-international.com"
}
----------------------------
/////////////////////

[source,console-result]
----------------------------
{
  "tokens": [
    {
      "token": "john",
      "start_offset": 0,
      "end_offset": 4,
      "type": "<ALPHANUM>",
      "position": 0
    },
    {
      "token": "smith",
      "start_offset": 5,
      "end_offset": 10,
      "type": "<ALPHANUM>",
      "position": 1
    },
    {
      "token": "globa",
      "start_offset": 11,
      "end_offset": 16,
      "type": "<ALPHANUM>",
      "position": 2
    },
    {
      "token": "l",
      "start_offset": 16,
      "end_offset": 17,
      "type": "<ALPHANUM>",
      "position": 3
    },
    {
      "token": "inter",
      "start_offset": 18,
      "end_offset": 23,
      "type": "<ALPHANUM>",
      "position": 4
    },
    {
      "token": "natio",
      "start_offset": 23,
      "end_offset": 28,
      "type": "<ALPHANUM>",
      "position": 5
    },
    {
      "token": "nal.c",
      "start_offset": 28,
      "end_offset": 33,
      "type": "<ALPHANUM>",
      "position": 6
    },
    {
      "token": "om",
      "start_offset": 33,
      "end_offset": 35,
      "type": "<ALPHANUM>",
      "position": 7
    }
  ]
}
----------------------------

/////////////////////
The above example produces the following terms:

[source,text]
---------------------------
[ john, smith, globa, l, inter, natio, nal.c, om ]
---------------------------