
Commit

Merge branch 'main' into disable_ml_on_macos_x86_64
edsavage authored Nov 21, 2024
2 parents e965efd + 4e04a7b commit 4c8ae24
Showing 60 changed files with 3,539 additions and 524 deletions.
@@ -34,6 +34,7 @@
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.plugins.PluginsLoader;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.DocReader;
@@ -76,8 +77,7 @@ public class ScriptScoreBenchmark {
private final PluginsService pluginsService = new PluginsService(
Settings.EMPTY,
null,
null,
Path.of(System.getProperty("plugins.dir"))
new PluginsLoader(null, Path.of(System.getProperty("plugins.dir")))
);
private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList());

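For downstream callers of this constructor, the hunk above captures the whole migration: the plugins directory is no longer handed to PluginsService directly but is wrapped in a PluginsLoader first. Below is a minimal sketch of the updated construction, using only the arguments visible in this diff (the null arguments and the plugins.dir system property are taken verbatim from the benchmark; the wrapper class name is purely illustrative and no other PluginsLoader overloads are implied):

import java.nio.file.Path;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.PluginsLoader;
import org.elasticsearch.plugins.PluginsService;

// Sketch of the before/after construction seen in ScriptScoreBenchmark.
class PluginsServiceConstructionSketch {

    // Old form: the plugins path was passed straight to PluginsService:
    //   new PluginsService(Settings.EMPTY, null, null, Path.of(System.getProperty("plugins.dir")))

    // New form: the path is wrapped in a PluginsLoader, which PluginsService now consumes.
    static PluginsService build() {
        Path pluginsDir = Path.of(System.getProperty("plugins.dir"));
        return new PluginsService(Settings.EMPTY, null, new PluginsLoader(null, pluginsDir));
    }
}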
3 changes: 3 additions & 0 deletions branches.json
@@ -7,6 +7,9 @@
{
"branch": "8.16"
},
{
"branch": "8.17"
},
{
"branch": "8.x"
},
5 changes: 5 additions & 0 deletions docs/changelog/116765.yaml
@@ -0,0 +1,5 @@
pr: 116765
summary: Metrics for incremental bulk splits
area: Distributed
type: enhancement
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/117182.yaml
@@ -0,0 +1,6 @@
pr: 117182
summary: Change synthetic source logic for `constant_keyword`
area: Mapping
type: bug
issues:
- 117083
@@ -0,0 +1,152 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/

package org.elasticsearch.datastreams.action;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder;
import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction;
import org.elasticsearch.action.datastreams.CreateDataStreamAction;
import org.elasticsearch.action.datastreams.ReindexDataStreamAction;
import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest;
import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.datastreams.DataStreamsPlugin;
import org.elasticsearch.datastreams.task.ReindexDataStreamTask;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xcontent.XContentType;

import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class ReindexDataStreamTransportActionIT extends ESIntegTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(DataStreamsPlugin.class);
}

public void testNonExistentDataStream() {
String nonExistentDataStreamName = randomAlphaOfLength(50);
ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(nonExistentDataStreamName);
assertThrows(
ResourceNotFoundException.class,
() -> client().execute(new ActionType<ReindexDataStreamResponse>(ReindexDataStreamAction.NAME), reindexDataStreamRequest)
.actionGet()
);
}

public void testAlreadyUpToDateDataStream() throws Exception {
String dataStreamName = randomAlphaOfLength(50).toLowerCase(Locale.ROOT);
ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(dataStreamName);
createDataStream(dataStreamName);
ReindexDataStreamResponse response = client().execute(
new ActionType<ReindexDataStreamResponse>(ReindexDataStreamAction.NAME),
reindexDataStreamRequest
).actionGet();
String persistentTaskId = response.getTaskId();
assertThat(persistentTaskId, equalTo("reindex-data-stream-" + dataStreamName));
AtomicReference<ReindexDataStreamTask> runningTask = new AtomicReference<>();
for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
TaskManager taskManager = transportService.getTaskManager();
Map<Long, CancellableTask> tasksMap = taskManager.getCancellableTasks();
Optional<Map.Entry<Long, CancellableTask>> optionalTask = taskManager.getCancellableTasks()
.entrySet()
.stream()
.filter(entry -> entry.getValue().getType().equals("persistent"))
.filter(
entry -> entry.getValue() instanceof ReindexDataStreamTask
&& persistentTaskId.equals((((ReindexDataStreamTask) entry.getValue()).getPersistentTaskId()))
)
.findAny();
optionalTask.ifPresent(
longCancellableTaskEntry -> runningTask.compareAndSet(null, (ReindexDataStreamTask) longCancellableTaskEntry.getValue())
);
}
ReindexDataStreamTask task = runningTask.get();
assertNotNull(task);
assertThat(task.getStatus().complete(), equalTo(true));
assertNull(task.getStatus().exception());
assertThat(task.getStatus().pending(), equalTo(0));
assertThat(task.getStatus().inProgress(), equalTo(0));
assertThat(task.getStatus().errors().size(), equalTo(0));
}

private void createDataStream(String dataStreamName) {
final TransportPutComposableIndexTemplateAction.Request putComposableTemplateRequest =
new TransportPutComposableIndexTemplateAction.Request("my-template");
putComposableTemplateRequest.indexTemplate(
ComposableIndexTemplate.builder()
.indexPatterns(List.of(dataStreamName))
.dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false))
.template(Template.builder().build())
.build()
);
final AcknowledgedResponse putComposableTemplateResponse = safeGet(
client().execute(TransportPutComposableIndexTemplateAction.TYPE, putComposableTemplateRequest)
);
assertThat(putComposableTemplateResponse.isAcknowledged(), is(true));

final CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
dataStreamName
);
final AcknowledgedResponse createDataStreamResponse = safeGet(
client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest)
);
assertThat(createDataStreamResponse.isAcknowledged(), is(true));
indexDocs(dataStreamName);
safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute());
indexDocs(dataStreamName);
safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute());
}

private void indexDocs(String dataStreamName) {
int docs = randomIntBetween(5, 10);
CountDownLatch countDownLatch = new CountDownLatch(docs);
for (int i = 0; i < docs; i++) {
var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE);
final String doc = "{ \"@timestamp\": \"2099-05-06T16:21:15.000Z\", \"message\": \"something cool happened\" }";
indexRequest.source(doc, XContentType.JSON);
client().index(indexRequest, new ActionListener<>() {
@Override
public void onResponse(DocWriteResponse docWriteResponse) {
countDownLatch.countDown();
}

@Override
public void onFailure(Exception e) {
fail("Indexing request should have succeeded eventually, failed with " + e.getMessage());
}
});
}
safeAwait(countDownLatch);
}

}
@@ -19,19 +19,23 @@
import org.elasticsearch.action.datastreams.MigrateToDataStreamAction;
import org.elasticsearch.action.datastreams.ModifyDataStreamsAction;
import org.elasticsearch.action.datastreams.PromoteDataStreamAction;
import org.elasticsearch.action.datastreams.ReindexDataStreamAction;
import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction;
import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction;
import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction;
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.client.internal.OriginSettingClient;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.datastreams.action.CreateDataStreamTransportAction;
@@ -40,6 +44,7 @@
import org.elasticsearch.datastreams.action.MigrateToDataStreamTransportAction;
import org.elasticsearch.datastreams.action.ModifyDataStreamsTransportAction;
import org.elasticsearch.datastreams.action.PromoteDataStreamTransportAction;
import org.elasticsearch.datastreams.action.ReindexDataStreamTransportAction;
import org.elasticsearch.datastreams.action.TransportGetDataStreamsAction;
import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore;
import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService;
@@ -73,14 +78,27 @@
import org.elasticsearch.datastreams.rest.RestMigrateToDataStreamAction;
import org.elasticsearch.datastreams.rest.RestModifyDataStreamsAction;
import org.elasticsearch.datastreams.rest.RestPromoteDataStreamAction;
import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskExecutor;
import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskState;
import org.elasticsearch.datastreams.task.ReindexDataStreamStatus;
import org.elasticsearch.datastreams.task.ReindexDataStreamTask;
import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.health.HealthIndicatorService;
import org.elasticsearch.index.IndexSettingProvider;
import org.elasticsearch.persistent.PersistentTaskParams;
import org.elasticsearch.persistent.PersistentTaskState;
import org.elasticsearch.persistent.PersistentTasksExecutor;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.HealthPlugin;
import org.elasticsearch.plugins.PersistentTaskPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.ParseField;

import java.io.IOException;
import java.time.Clock;
@@ -93,7 +111,7 @@

import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN;

public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin {
public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin, PersistentTaskPlugin {

public static final Setting<TimeValue> TIME_SERIES_POLL_INTERVAL = Setting.timeSetting(
"time_series.poll_interval",
@@ -244,6 +262,7 @@ public Collection<?> createComponents(PluginServices services) {
actions.add(new ActionHandler<>(PutDataStreamOptionsAction.INSTANCE, TransportPutDataStreamOptionsAction.class));
actions.add(new ActionHandler<>(DeleteDataStreamOptionsAction.INSTANCE, TransportDeleteDataStreamOptionsAction.class));
}
actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class));
return actions;
}

@@ -302,4 +321,48 @@ public void close() throws IOException {
public Collection<HealthIndicatorService> getHealthIndicatorServices() {
return List.of(dataStreamLifecycleHealthIndicatorService.get());
}

@Override
public List<NamedXContentRegistry.Entry> getNamedXContent() {
return List.of(
new NamedXContentRegistry.Entry(
PersistentTaskState.class,
new ParseField(ReindexDataStreamPersistentTaskState.NAME),
ReindexDataStreamPersistentTaskState::fromXContent
),
new NamedXContentRegistry.Entry(
PersistentTaskParams.class,
new ParseField(ReindexDataStreamTaskParams.NAME),
ReindexDataStreamTaskParams::fromXContent
)
);
}

@Override
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
return List.of(
new NamedWriteableRegistry.Entry(
PersistentTaskState.class,
ReindexDataStreamPersistentTaskState.NAME,
ReindexDataStreamPersistentTaskState::new
),
new NamedWriteableRegistry.Entry(
PersistentTaskParams.class,
ReindexDataStreamTaskParams.NAME,
ReindexDataStreamTaskParams::new
),
new NamedWriteableRegistry.Entry(Task.Status.class, ReindexDataStreamStatus.NAME, ReindexDataStreamStatus::new)
);
}

@Override
public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(
ClusterService clusterService,
ThreadPool threadPool,
Client client,
SettingsModule settingsModule,
IndexNameExpressionResolver expressionResolver
) {
return List.of(new ReindexDataStreamPersistentTaskExecutor(client, clusterService, ReindexDataStreamTask.TASK_NAME, threadPool));
}
}
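The getNamedXContent and getNamedWriteables entries above refer to ReindexDataStreamTaskParams and ReindexDataStreamPersistentTaskState, which are not rendered on this page. As a rough guide to the contract such a registration expects, below is a hypothetical, stripped-down PersistentTaskParams implementation; the class name, its single field, and the use of TransportVersion.current() are illustrative assumptions, not the implementation added by this commit:

import java.io.IOException;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.persistent.PersistentTaskParams;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

// Hypothetical params class showing the pieces the plugin wires up above:
// a NAME shared by both registries, a StreamInput constructor for the
// NamedWriteable entry, and a fromXContent factory for the NamedXContent entry.
public class ExampleReindexTaskParams implements PersistentTaskParams {

    public static final String NAME = "example-reindex-task"; // illustrative name only

    private static final ConstructingObjectParser<ExampleReindexTaskParams, Void> PARSER = new ConstructingObjectParser<>(
        NAME,
        args -> new ExampleReindexTaskParams((String) args[0])
    );
    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("source_data_stream"));
    }

    private final String sourceDataStream; // assumed single field, for illustration only

    public ExampleReindexTaskParams(String sourceDataStream) {
        this.sourceDataStream = sourceDataStream;
    }

    // Wire deserialization, the shape referenced by the ::new method handle above.
    public ExampleReindexTaskParams(StreamInput in) throws IOException {
        this(in.readString());
    }

    // XContent deserialization, the shape referenced by the ::fromXContent method handle above.
    public static ExampleReindexTaskParams fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public TransportVersion getMinimalSupportedVersion() {
        return TransportVersion.current(); // assumption: no wire BWC constraint in this sketch
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(sourceDataStream);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject().field("source_data_stream", sourceDataStream).endObject();
    }
}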
