diff --git a/docs/user/admin/settings.rst b/docs/user/admin/settings.rst
index dfce554e2b..686116636a 100644
--- a/docs/user/admin/settings.rst
+++ b/docs/user/admin/settings.rst
@@ -317,9 +317,9 @@ plugins.query.executionengine.spark.session.enabled
 Description
 -----------
 
-By default, execution engine is executed in session mode. You can disable session mode by this setting.
+By default, execution engine is executed in job mode. You can enable session mode by this setting.
 
-1. The default value is true.
+1. The default value is false.
 2. This setting is node scope.
 3. This setting can be updated dynamically.
 
@@ -328,7 +328,7 @@ You can update the setting with a new value like this.
 SQL query::
 
     sh$ curl -sS -H 'Content-Type: application/json' -X PUT localhost:9200/_plugins/_query/settings \
-    ... -d '{"transient":{"plugins.query.executionengine.spark.session.enabled":"false"}}'
+    ... -d '{"transient":{"plugins.query.executionengine.spark.session.enabled":"true"}}'
     {
       "acknowledged": true,
       "persistent": {},
@@ -338,7 +338,7 @@ SQL query::
             "executionengine": {
               "spark": {
                 "session": {
-                  "enabled": "false"
+                  "enabled": "true"
                 }
               }
             }
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java
index 6554ef7f61..f80b576fe0 100644
--- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java
+++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java
@@ -138,7 +138,7 @@ public class OpenSearchSettings extends Settings {
   public static final Setting<Boolean> SPARK_EXECUTION_SESSION_ENABLED_SETTING =
       Setting.boolSetting(
           Key.SPARK_EXECUTION_SESSION_ENABLED.getKeyValue(),
-          true,
+          false,
           Setting.Property.NodeScope,
           Setting.Property.Dynamic);
 
diff --git a/spark/src/main/java/org/opensearch/sql/spark/execution/statestore/StateStore.java b/spark/src/main/java/org/opensearch/sql/spark/execution/statestore/StateStore.java
index f471d79c22..e6bad9fc26 100644
--- a/spark/src/main/java/org/opensearch/sql/spark/execution/statestore/StateStore.java
+++ b/spark/src/main/java/org/opensearch/sql/spark/execution/statestore/StateStore.java
@@ -60,9 +60,7 @@ public class StateStore {
   public static String SETTINGS_FILE_NAME = "query_execution_request_settings.yml";
   public static String MAPPING_FILE_NAME = "query_execution_request_mapping.yml";
   public static Function<String, String> DATASOURCE_TO_REQUEST_INDEX =
-      datasourceName ->
-          String.format(
-              "%s_%s", SPARK_REQUEST_BUFFER_INDEX_NAME, datasourceName.toLowerCase(Locale.ROOT));
+      datasourceName -> String.format("%s_%s", SPARK_REQUEST_BUFFER_INDEX_NAME, datasourceName);
 
   private static final Logger LOG = LogManager.getLogger();
 
diff --git a/spark/src/test/java/org/opensearch/sql/spark/asyncquery/AsyncQueryExecutorServiceImplSpecTest.java b/spark/src/test/java/org/opensearch/sql/spark/asyncquery/AsyncQueryExecutorServiceImplSpecTest.java
index 4bc894c1b2..cf638effc6 100644
--- a/spark/src/test/java/org/opensearch/sql/spark/asyncquery/AsyncQueryExecutorServiceImplSpecTest.java
+++ b/spark/src/test/java/org/opensearch/sql/spark/asyncquery/AsyncQueryExecutorServiceImplSpecTest.java
@@ -37,7 +37,6 @@
 import lombok.Getter;
 import org.junit.After;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.action.search.SearchResponse;
@@ -213,6 +212,9 @@ public void withSessionCreateAsyncQueryThenGetResultThenCancel() {
     AsyncQueryExecutorService asyncQueryExecutorService =
         createAsyncQueryExecutorService(emrsClient);
 
+    // enable session
+    enableSession(true);
+
     // 1. create async query.
     CreateAsyncQueryResponse response =
         asyncQueryExecutorService.createAsyncQuery(
@@ -313,9 +315,6 @@ public void interactiveQueryNoTimeout() {
     assertEquals(0L, (long) emrsClient.getJobRequest().executionTimeout());
   }
 
-  @Ignore(
-      "flaky test, java.lang.IllegalArgumentException: Right now only AES/GCM/NoPadding is"
-          + " supported")
   @Test
   public void datasourceWithBasicAuth() {
     Map<String, String> properties = new HashMap<>();
@@ -481,42 +480,6 @@ public void submitQueryInInvalidSessionWillCreateNewSession() {
     assertNotEquals(invalidSessionId.getSessionId(), asyncQuery.getSessionId());
   }
 
-  @Test
-  public void datasourceNameIncludeUppercase() {
-    dataSourceService.createDataSource(
-        new DataSourceMetadata(
-            "TESTS3",
-            DataSourceType.S3GLUE,
-            ImmutableList.of(),
-            ImmutableMap.of(
-                "glue.auth.type",
-                "iam_role",
-                "glue.auth.role_arn",
-                "arn:aws:iam::924196221507:role/FlintOpensearchServiceRole",
-                "glue.indexstore.opensearch.uri",
-                "http://localhost:9200",
-                "glue.indexstore.opensearch.auth",
-                "noauth"),
-            null));
-
-    LocalEMRSClient emrsClient = new LocalEMRSClient();
-    AsyncQueryExecutorService asyncQueryExecutorService =
-        createAsyncQueryExecutorService(emrsClient);
-
-    // enable session
-    enableSession(true);
-
-    CreateAsyncQueryResponse response =
-        asyncQueryExecutorService.createAsyncQuery(
-            new CreateAsyncQueryRequest("select 1", "TESTS3", LangType.SQL, null));
-    String params = emrsClient.getJobRequest().getSparkSubmitParams();
-
-    assertNotNull(response.getSessionId());
-    assertTrue(
-        params.contains(
-            "--conf spark.sql.catalog.TESTS3=org.opensearch.sql.FlintDelegatingSessionCatalog"));
-  }
-
   private DataSourceServiceImpl createDataSourceService() {
     String masterKey = "a57d991d9b573f75b9bba1df";
     DataSourceMetadataStorage dataSourceMetadataStorage =
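
To summarize the combined effect of the OpenSearchSettings and StateStore hunks above: with plugins.query.executionengine.spark.session.enabled now defaulting to false, async queries run in job mode unless the setting is turned on (the enableSession(true) call added to the test opts that test back in), and the request-buffer index name is derived from the datasource name without the previous toLowerCase(Locale.ROOT) normalization. Below is a minimal, self-contained Java sketch of that index-name mapping, not the plugin class itself; the SPARK_REQUEST_BUFFER_INDEX_NAME value and the Function<String, String> type parameters are assumptions made for illustration.

import java.util.function.Function;

/** Standalone sketch; mirrors the shape of the new DATASOURCE_TO_REQUEST_INDEX for illustration only. */
public class RequestIndexNameSketch {

  // Assumed value for illustration; the real constant is defined elsewhere in the plugin.
  static final String SPARK_REQUEST_BUFFER_INDEX_NAME = "query_execution_request";

  // After this change the datasource name is appended as-is, with no lower-casing step.
  static final Function<String, String> DATASOURCE_TO_REQUEST_INDEX =
      datasourceName -> String.format("%s_%s", SPARK_REQUEST_BUFFER_INDEX_NAME, datasourceName);

  public static void main(String[] args) {
    System.out.println(DATASOURCE_TO_REQUEST_INDEX.apply("mys3"));   // query_execution_request_mys3
    System.out.println(DATASOURCE_TO_REQUEST_INDEX.apply("TESTS3")); // query_execution_request_TESTS3
  }
}

Under the new false default, tests that exercise session mode must enable it explicitly, which is what the enableSession(true) additions in the test hunks above do.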