Commit
Merge branch 'main' into update-service-in-tutorial
kosabogi authored Dec 10, 2024
2 parents 3df60ee + 0d57e10 commit dcdad72
Showing 280 changed files with 2,378 additions and 933 deletions.
5 changes: 0 additions & 5 deletions .buildkite/pipelines/periodic-packaging.template.yml
@@ -7,19 +7,14 @@ steps:
matrix:
setup:
image:
- debian-11
- debian-12
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8
- sles-12
- sles-15
- ubuntu-1804
- ubuntu-2004
- ubuntu-2204
- rocky-8
- rocky-9
- rhel-7
- rhel-8
- rhel-9
- almalinux-8
5 changes: 0 additions & 5 deletions .buildkite/pipelines/periodic-packaging.yml
@@ -8,19 +8,14 @@ steps:
matrix:
setup:
image:
- debian-11
- debian-12
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8
- sles-12
- sles-15
- ubuntu-1804
- ubuntu-2004
- ubuntu-2204
- rocky-8
- rocky-9
- rhel-7
- rhel-8
- rhel-9
- almalinux-8
6 changes: 0 additions & 6 deletions .buildkite/pipelines/periodic-platform-support.yml
@@ -7,19 +7,14 @@ steps:
matrix:
setup:
image:
- debian-11
- debian-12
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8
- sles-12
- sles-15
- ubuntu-1804
- ubuntu-2004
- ubuntu-2204
- rocky-8
- rocky-9
- rhel-7
- rhel-8
- rhel-9
- almalinux-8
@@ -90,7 +85,6 @@ steps:
setup:
image:
- amazonlinux-2023
- amazonlinux-2
agents:
provider: aws
imagePrefix: elasticsearch-{{matrix.image}}
5 changes: 0 additions & 5 deletions .buildkite/pipelines/pull-request/packaging-tests-unix.yml
@@ -10,19 +10,14 @@ steps:
matrix:
setup:
image:
- debian-11
- debian-12
- opensuse-leap-15
- oraclelinux-7
- oraclelinux-8
- sles-12
- sles-15
- ubuntu-1804
- ubuntu-2004
- ubuntu-2204
- rocky-8
- rocky-9
- rhel-7
- rhel-8
- rhel-9
- almalinux-8
@@ -13,8 +13,8 @@
* This class models the different Docker base images that are used to build Docker distributions of Elasticsearch.
*/
public enum DockerBase {
// "latest" here is intentional, since the image name specifies "8"
DEFAULT("docker.elastic.co/ubi8/ubi-minimal:latest", "", "microdnf"),
// "latest" here is intentional, since the image name specifies "9"
DEFAULT("docker.elastic.co/ubi9/ubi-minimal:latest", "", "microdnf"),

// The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build
IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"),
2 changes: 1 addition & 1 deletion build-tools-internal/version.properties
@@ -35,7 +35,7 @@ commonscodec = 1.15
protobuf = 3.25.5

# test dependencies
randomizedrunner = 2.8.0
randomizedrunner = 2.8.2
junit = 4.13.2
junit5 = 5.7.1
hamcrest = 2.1
4 changes: 2 additions & 2 deletions dev-tools/publish_zstd_binaries.sh
@@ -79,8 +79,8 @@ build_linux_jar() {
}

echo 'Building Linux jars...'
LINUX_ARM_JAR=$(build_linux_jar "linux/amd64" "x86-64")
LINUX_X86_JAR=$(build_linux_jar "linux/arm64" "aarch64")
LINUX_ARM_JAR=$(build_linux_jar "linux/arm64" "aarch64")
LINUX_X86_JAR=$(build_linux_jar "linux/amd64" "x86-64")

build_windows_jar() {
ARTIFACT="$TEMP/zstd-$VERSION-windows-x86-64.jar"
6 changes: 3 additions & 3 deletions distribution/build.gradle
@@ -70,10 +70,10 @@ tasks.register("generateDependenciesReport", ConcatFilesTask) {
// Explicitly add the dependency on the RHEL UBI Docker base image
String[] rhelUbiFields = [
'Red Hat Universal Base Image minimal',
'8',
'https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8',
'9',
'https://catalog.redhat.com/software/containers/ubi9-minimal/61832888c0d15aff4912fe0d',
'Custom;https://www.redhat.com/licenses/EULA_Red_Hat_Universal_Base_Image_English_20190422.pdf',
'https://oss-dependencies.elastic.co/red-hat-universal-base-image-minimal/8/ubi-minimal-8-source.tar.gz'
'https://oss-dependencies.elastic.co/red-hat-universal-base-image-minimal/9/ubi-minimal-9-source.tar.gz'
]
additionalLines << rhelUbiFields.join(',')
}
5 changes: 5 additions & 0 deletions docs/changelog/118114.yaml
@@ -0,0 +1,5 @@
pr: 118114
summary: Enable physical plan verification
area: ES|QL
type: enhancement
issues: []
16 changes: 11 additions & 5 deletions docs/reference/inference/service-openai.asciidoc
@@ -76,6 +76,12 @@ https://platform.openai.com/api-keys[API keys section].
include::inference-shared.asciidoc[tag=api-key-admonition]
--

`dimensions`:::
(Optional, integer)
The number of dimensions the resulting output embeddings should have.
Only supported in `text-embedding-3` and later models.
If not set, the OpenAI-defined default for the model is used.

`model_id`:::
(Required, string)
The name of the model to use for the {infer} task.
@@ -134,8 +140,8 @@ Specifies the user issuing the request, which can be used for abuse detection.
[[inference-example-openai]]
==== OpenAI service example

The following example shows how to create an {infer} endpoint called
`openai-embeddings` to perform a `text_embedding` task type.
The following example shows how to create an {infer} endpoint called `openai-embeddings` to perform a `text_embedding` task type.
The embeddings created by requests to this endpoint will have 128 dimensions.

[source,console]
------------------------------------------------------------
@@ -144,14 +150,14 @@ PUT _inference/text_embedding/openai-embeddings
"service": "openai",
"service_settings": {
"api_key": "<api_key>",
"model_id": "text-embedding-ada-002"
"model_id": "text-embedding-3-small",
"dimensions": 128
}
}
------------------------------------------------------------
// TEST[skip:TBD]
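
As a usage sketch (not part of this change): once an endpoint such as `openai-embeddings` exists, it can be queried through the {infer} API. The example below assumes the standard `POST _inference/<task_type>/<inference_id>` request shape with an `input` field; the input text is illustrative only.

[source,console]
------------------------------------------------------------
POST _inference/text_embedding/openai-embeddings
{
  "input": "Sample sentence to embed with the 128-dimension endpoint."
}
------------------------------------------------------------
// TEST[skip:illustrative example, not part of this change]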

The next example shows how to create an {infer} endpoint called
`openai-completion` to perform a `completion` task type.
The next example shows how to create an {infer} endpoint called `openai-completion` to perform a `completion` task type.

[source,console]
------------------------------------------------------------
15 changes: 10 additions & 5 deletions gradle/verification-metadata.xml
@@ -184,6 +184,11 @@
<sha256 value="3180617871494fe5585e93d0986fc3eb556ade2e64076730917d3a67e3928a24" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="com.carrotsearch.randomizedtesting" name="randomizedtesting-runner" version="2.8.2">
<artifact name="randomizedtesting-runner-2.8.2.jar">
<sha256 value="01a3551f40b56265ba117125311b4fe6865355c179f2dfc354d67f09839bc7a9" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="com.cedarsoftware" name="java-util" version="1.9.0">
<artifact name="java-util-1.9.0.jar">
<sha256 value="b81bdfc4ba11bbc88742ea14e8832d0a1031dd628868f5abbdabb8f8b98705dc" origin="Generated by Gradle"/>
@@ -4478,11 +4483,11 @@
<sha256 value="6e24913b021ffacfe8e7e053d6e0ccc731941148cfa078d4f1ed3d96904530f8" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.ow2.asm" name="asm-util" version="9.7.1">
<artifact name="asm-util-9.7.1.jar">
<sha256 value="f885be71b5c90556f5f1ad1c4f9276b29b96057c497d46666fe4ddbec3cb43c6" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.ow2.asm" name="asm-util" version="9.7.1">
<artifact name="asm-util-9.7.1.jar">
<sha256 value="f885be71b5c90556f5f1ad1c4f9276b29b96057c497d46666fe4ddbec3cb43c6" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.reactivestreams" name="reactive-streams" version="1.0.4">
<artifact name="reactive-streams-1.0.4.jar">
<sha256 value="f75ca597789b3dac58f61857b9ac2e1034a68fa672db35055a8fb4509e325f28" origin="Generated by Gradle"/>
@@ -30,7 +30,7 @@ public class ASCIIFoldingTokenFilterFactory extends AbstractTokenFilterFactory i
private final boolean preserveOriginal;

public ASCIIFoldingTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
preserveOriginal = settings.getAsBoolean(PRESERVE_ORIGINAL.getPreferredName(), DEFAULT_PRESERVE_ORIGINAL);
}

@@ -30,7 +30,7 @@ public abstract class AbstractCompoundWordTokenFilterFactory extends AbstractTok
protected final CharArraySet wordList;

protected AbstractCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);

minWordSize = settings.getAsInt("min_word_size", CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE);
minSubwordSize = settings.getAsInt("min_subword_size", CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE);
@@ -21,7 +21,7 @@
public class ApostropheFilterFactory extends AbstractTokenFilterFactory {

ApostropheFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
}

@Override
@@ -22,7 +22,7 @@ public class ArabicAnalyzerProvider extends AbstractIndexAnalyzerProvider<Arabic
private final ArabicAnalyzer arabicAnalyzer;

ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
arabicAnalyzer = new ArabicAnalyzer(
Analysis.parseStopWords(env, settings, ArabicAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
@@ -19,7 +19,7 @@
public class ArabicNormalizationFilterFactory extends AbstractTokenFilterFactory implements NormalizingTokenFilterFactory {

ArabicNormalizationFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
}

@Override
@@ -19,7 +19,7 @@
public class ArabicStemTokenFilterFactory extends AbstractTokenFilterFactory {

ArabicStemTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
}

@Override
@@ -22,7 +22,7 @@ public class ArmenianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Arme
private final ArmenianAnalyzer analyzer;

ArmenianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
analyzer = new ArmenianAnalyzer(
Analysis.parseStopWords(env, settings, ArmenianAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
@@ -22,7 +22,7 @@ public class BasqueAnalyzerProvider extends AbstractIndexAnalyzerProvider<Basque
private final BasqueAnalyzer analyzer;

BasqueAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
analyzer = new BasqueAnalyzer(
Analysis.parseStopWords(env, settings, BasqueAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
@@ -22,7 +22,7 @@ public class BengaliAnalyzerProvider extends AbstractIndexAnalyzerProvider<Benga
private final BengaliAnalyzer analyzer;

BengaliAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
analyzer = new BengaliAnalyzer(
Analysis.parseStopWords(env, settings, BengaliAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
@@ -22,7 +22,7 @@
public class BengaliNormalizationFilterFactory extends AbstractTokenFilterFactory implements NormalizingTokenFilterFactory {

BengaliNormalizationFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
}

@Override
@@ -22,7 +22,7 @@ public class BrazilianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Bra
private final BrazilianAnalyzer analyzer;

BrazilianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
analyzer = new BrazilianAnalyzer(
Analysis.parseStopWords(env, settings, BrazilianAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
@@ -24,7 +24,7 @@ public class BrazilianStemTokenFilterFactory extends AbstractTokenFilterFactory
private final CharArraySet exclusions;

BrazilianStemTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
this.exclusions = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET);
}

@@ -22,7 +22,7 @@ public class BulgarianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Bul
private final BulgarianAnalyzer analyzer;

BulgarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
analyzer = new BulgarianAnalyzer(
Analysis.parseStopWords(env, settings, BulgarianAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
@@ -44,7 +44,7 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory {

@SuppressWarnings("HiddenField")
CJKBigramFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
outputUnigrams = settings.getAsBoolean("output_unigrams", false);
final List<String> asArray = settings.getAsList("ignored_scripts");
Set<String> scripts = new HashSet<>(Arrays.asList("han", "hiragana", "katakana", "hangul"));
@@ -20,7 +20,7 @@
public final class CJKWidthFilterFactory extends AbstractTokenFilterFactory implements NormalizingTokenFilterFactory {

CJKWidthFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
}

@Override
@@ -22,7 +22,7 @@ public class CatalanAnalyzerProvider extends AbstractIndexAnalyzerProvider<Catal
private final CatalanAnalyzer analyzer;

CatalanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
analyzer = new CatalanAnalyzer(
Analysis.parseStopWords(env, settings, CatalanAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
@@ -33,7 +33,7 @@ public class CharGroupTokenizerFactory extends AbstractTokenizerFactory {
private boolean tokenizeOnSymbol = false;

public CharGroupTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, settings, name);
super(name);

maxTokenLength = settings.getAsInt(MAX_TOKEN_LENGTH, CharTokenizer.DEFAULT_MAX_WORD_LEN);

@@ -24,7 +24,7 @@ public class ChineseAnalyzerProvider extends AbstractIndexAnalyzerProvider<Stand
private final StandardAnalyzer analyzer;

ChineseAnalyzerProvider(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
// old index: best effort
analyzer = new StandardAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET);

@@ -22,7 +22,7 @@ public class CjkAnalyzerProvider extends AbstractIndexAnalyzerProvider<CJKAnalyz
private final CJKAnalyzer analyzer;

CjkAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name, settings);
super(name);
CharArraySet stopWords = Analysis.parseStopWords(env, settings, CJKAnalyzer.getDefaultStopSet());

analyzer = new CJKAnalyzer(stopWords);
@@ -21,7 +21,7 @@
public class ClassicFilterFactory extends AbstractTokenFilterFactory {

ClassicFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name, settings);
super(name);
}

@Override
@@ -25,7 +25,7 @@ public class ClassicTokenizerFactory extends AbstractTokenizerFactory {
private final int maxTokenLength;

ClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, settings, name);
super(name);
maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
}

