// infer-api-ingest-pipeline.asciidoc
////
[source,console]
----
DELETE _ingest/pipeline/*_embeddings
----
// TEST
// TEARDOWN
////
// tag::cohere[]
[source,console]
--------------------------------------------------
PUT _ingest/pipeline/cohere_embeddings
{
  "processors": [
    {
      "inference": {
        "model_id": "cohere_embeddings", <1>
        "input_output": { <2>
          "input_field": "content",
          "output_field": "content_embedding"
        }
      }
    }
  ]
}
--------------------------------------------------
<1> The name of the inference endpoint you created by using the
<<put-inference-api>>; it is referred to as the `inference_id` in that step.
<2> Configuration object that defines the `input_field` for the {infer} process
and the `output_field` that will contain the {infer} results.
// end::cohere[]
// tag::openai[]
[source,console]
--------------------------------------------------
PUT _ingest/pipeline/openai_embeddings
{
  "processors": [
    {
      "inference": {
        "model_id": "openai_embeddings", <1>
        "input_output": { <2>
          "input_field": "content",
          "output_field": "content_embedding"
        }
      }
    }
  ]
}
--------------------------------------------------
<1> The name of the inference endpoint you created by using the
<<put-inference-api>>; it is referred to as the `inference_id` in that step.
<2> Configuration object that defines the `input_field` for the {infer} process
and the `output_field` that will contain the {infer} results.
// end::openai[]