
Commit

Auto-generated API code
elasticmachine committed Dec 10, 2024
1 parent 00675bf commit 6806939
Showing 104 changed files with 3,020 additions and 25,641 deletions.
@@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
     {
       attachment: {
         field: "data",
-        remove_binary: false,
+        remove_binary: true,
       },
     },
   ],
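This hunk flips `remove_binary` from `false` to `true`, so the attachment processor now drops the base64 `data` field once it has been parsed. A minimal sketch of the effect, reusing the index name and sample payload from the keep-binary example added later in this commit (the sketch itself is not part of the diff):

[source, js]
----
// Index a base64-encoded document through the "attachment" pipeline above.
const indexResponse = await client.index({
  index: "my-index-000001",
  id: "my_id",
  pipeline: "attachment",
  document: {
    data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
  },
});
console.log(indexResponse);

// With remove_binary: true, the stored document keeps the extracted
// attachment.* fields but no longer contains the original `data` field.
const getResponse = await client.get({
  index: "my-index-000001",
  id: "my_id",
});
console.log(getResponse);
----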
35 changes: 35 additions & 0 deletions docs/doc_examples/2a67608dadbf220a2f040f3a79d3677d.asciidoc
@@ -0,0 +1,35 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.putPipeline({
  id: "attachment",
  description: "Extract attachment information including original binary",
  processors: [
    {
      attachment: {
        field: "data",
        remove_binary: false,
      },
    },
  ],
});
console.log(response);

const response1 = await client.index({
  index: "my-index-000001",
  id: "my_id",
  pipeline: "attachment",
  document: {
    data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
  },
});
console.log(response1);

const response2 = await client.get({
  index: "my-index-000001",
  id: "my_id",
});
console.log(response2);
----
@@ -10,7 +10,8 @@ const response = await client.inference.put({
     service: "openai",
     service_settings: {
       api_key: "<api_key>",
-      model_id: "text-embedding-ada-002",
+      model_id: "text-embedding-3-small",
+      dimensions: 128,
     },
   },
 });
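This endpoint now targets `text-embedding-3-small` with 128-dimensional output. A sketch of calling it once it exists; the `openai-embeddings` inference ID is an assumption, since the hunk does not show the ID the endpoint is registered under:

[source, js]
----
// Assumed endpoint ID: "openai-embeddings" (not visible in the hunk above).
const response = await client.inference.inference({
  inference_id: "openai-embeddings",
  input: ["The quick brown fox jumps over the lazy dog"],
});
// Each returned embedding should have 128 dimensions, per the service_settings above.
console.log(response);
----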
17 changes: 0 additions & 17 deletions docs/doc_examples/49b31e23f8b9667b6a7b2734d55fb6ed.asciidoc

This file was deleted.

18 changes: 18 additions & 0 deletions docs/doc_examples/53d9d2ec9cb8d211772d764e76fe6890.asciidoc
@@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.simulate({
  id: "query_helper_pipeline",
  docs: [
    {
      _source: {
        content:
          "artificial intelligence in medicine articles published in the last 12 months",
      },
    },
  ],
});
console.log(response);
----
22 changes: 0 additions & 22 deletions docs/doc_examples/5a70db31f587b7ffed5e9bc1445430cb.asciidoc

This file was deleted.

@@ -12,7 +12,7 @@ const response = await client.ingest.putPipeline({
         field: "data",
         indexed_chars: 11,
         indexed_chars_field: "max_size",
-        remove_binary: false,
+        remove_binary: true,
       },
     },
   ],
22 changes: 0 additions & 22 deletions docs/doc_examples/8cad5d95a0e7c103f08be53d0b172558.asciidoc

This file was deleted.

32 changes: 32 additions & 0 deletions docs/doc_examples/931817b168e055ecf738785c721125dd.asciidoc
@@ -0,0 +1,32 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.putPipeline({
  id: "query_helper_pipeline",
  processors: [
    {
      script: {
        source:
          "ctx.prompt = 'Please generate an elasticsearch search query on index `articles_index` for the following natural language query. Dates are in the field `@timestamp`, document types are in the field `type` (options are `news`, `publication`), categories in the field `category` and can be multiple (options are `medicine`, `pharmaceuticals`, `technology`), and document names are in the field `title` which should use a fuzzy match. Ignore fields which cannot be determined from the natural language query context: ' + ctx.content",
      },
    },
    {
      inference: {
        model_id: "openai_chat_completions",
        input_output: {
          input_field: "prompt",
          output_field: "query",
        },
      },
    },
    {
      remove: {
        field: "prompt",
      },
    },
  ],
});
console.log(response);
----
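The inference processor above refers to an `openai_chat_completions` endpoint, which has to exist before the pipeline runs. A sketch of creating it with the OpenAI completion service; the task type, model, and API key placeholder are assumptions, as the commit does not show this setup step:

[source, js]
----
// Assumed setup: a completion endpoint whose ID matches the pipeline's model_id.
const response = await client.inference.put({
  task_type: "completion",
  inference_id: "openai_chat_completions",
  inference_config: {
    service: "openai",
    service_settings: {
      api_key: "<api_key>",
      model_id: "gpt-4o", // assumed model; any OpenAI chat completion model would do
    },
  },
});
console.log(response);
----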
@@ -9,7 +9,7 @@ const response = await client.indices.create({
     properties: {
       infer_field: {
         type: "semantic_text",
-        inference_id: "my-elser-endpoint",
+        inference_id: ".elser-2-elasticsearch",
       },
       source_field: {
         type: "text",
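For context, a sketch of exercising this mapping: text written to `infer_field` is chunked and embedded by the `.elser-2-elasticsearch` endpoint and can then be searched with a `semantic` query. The index name and query text below are illustrative, since the hunk does not show them:

[source, js]
----
// Index plain text directly into the semantic_text field (assumed index name).
const indexResponse = await client.index({
  index: "test-index",
  document: {
    infer_field: "Elasticsearch ingest pipelines can extract text from attachments.",
  },
});
console.log(indexResponse);

// Query the semantic_text field with natural-language text.
const searchResponse = await client.search({
  index: "test-index",
  query: {
    semantic: {
      field: "infer_field",
      query: "How do I extract text from attachments?",
    },
  },
});
console.log(searchResponse);
----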
@@ -14,7 +14,7 @@ const response = await client.ingest.putPipeline({
           attachment: {
             target_field: "_ingest._value.attachment",
             field: "_ingest._value.data",
-            remove_binary: false,
+            remove_binary: true,
           },
         },
       },
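Because this attachment processor operates on `_ingest._value`, it runs inside a `foreach` over an array of base64 blobs. A sketch of feeding such a pipeline; the pipeline ID, index name, and `attachments` field name are assumptions inferred from the `_ingest._value.data` paths, not shown in the hunk:

[source, js]
----
// Assumed pipeline ID "attachment" and array field "attachments".
const response = await client.index({
  index: "my-index-000001",
  id: "my_id",
  pipeline: "attachment",
  document: {
    attachments: [
      { data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=" },
      { data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=" },
    ],
  },
});
// With remove_binary: true, each array element keeps its extracted
// attachment fields but loses the original data blob.
console.log(response);
----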
24 changes: 24 additions & 0 deletions docs/doc_examples/d29031409016b2b798148ef173a196ae.asciidoc
@@ -0,0 +1,24 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "test-index",
  query: {
    semantic: {
      field: "my_semantic_field",
    },
  },
  highlight: {
    fields: {
      my_semantic_field: {
        type: "semantic",
        number_of_fragments: 2,
        order: "score",
      },
    },
  },
});
console.log(response);
----
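The `semantic` highlighter only applies to a `semantic_text` field, so `my_semantic_field` needs a mapping along these lines. The index name and inference ID in this sketch are assumptions, mirroring the `.elser-2-elasticsearch` default used elsewhere in this commit:

[source, js]
----
// Assumed mapping for the field highlighted above.
const response = await client.indices.create({
  index: "test-index",
  mappings: {
    properties: {
      my_semantic_field: {
        type: "semantic_text",
        inference_id: ".elser-2-elasticsearch",
      },
    },
  },
});
console.log(response);
----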
@@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
     {
       attachment: {
         field: "data",
-        remove_binary: false,
+        remove_binary: true,
       },
     },
   ],
@@ -11,7 +11,7 @@ const response = await client.ingest.putPipeline({
       attachment: {
         field: "data",
         properties: ["content", "title"],
-        remove_binary: false,
+        remove_binary: true,
       },
     },
   ],
@@ -12,7 +12,7 @@ const response = await client.ingest.putPipeline({
         field: "data",
         indexed_chars: 11,
         indexed_chars_field: "max_size",
-        remove_binary: false,
+        remove_binary: true,
       },
     },
   ],
