Skip to content

Commit

Permalink
Merge branch 'main' of github.com:opensearch-project/sql into patch
Browse files Browse the repository at this point in the history
  • Loading branch information
derek-ho committed Oct 17, 2023
2 parents b98f0b3 + 501cf91 commit ad97a7a
Show file tree
Hide file tree
Showing 34 changed files with 1,190 additions and 90 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@
public class DataSourceServiceImpl implements DataSourceService {

private static String DATASOURCE_NAME_REGEX = "[@*A-Za-z]+?[*a-zA-Z_\\-0-9]*";
public static final Set<String> CONFIDENTIAL_AUTH_KEYS =
Set.of("auth.username", "auth.password", "auth.access_key", "auth.secret_key");

private final DataSourceLoaderCache dataSourceLoaderCache;

Expand Down Expand Up @@ -197,7 +199,12 @@ private void removeAuthInfo(Set<DataSourceMetadata> dataSourceMetadataSet) {

private void removeAuthInfo(DataSourceMetadata dataSourceMetadata) {
  // Work on a mutable copy so the metadata's original property map is never mutated in place.
  HashMap<String, String> sanitizedProperties = new HashMap<>(dataSourceMetadata.getProperties());
  // Drop every property whose key ends with one of the known confidential auth suffixes
  // (username/password/access_key/secret_key); non-sensitive auth settings such as
  // "auth.type" are intentionally preserved.
  sanitizedProperties
      .keySet()
      .removeIf(propertyKey -> CONFIDENTIAL_AUTH_KEYS.stream().anyMatch(propertyKey::endsWith));
  dataSourceMetadata.setProperties(sanitizedProperties);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ void testGetDataSourceMetadataSet() {
assertEquals(1, dataSourceMetadataSet.size());
DataSourceMetadata dataSourceMetadata = dataSourceMetadataSet.iterator().next();
assertTrue(dataSourceMetadata.getProperties().containsKey("prometheus.uri"));
assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.type"));
assertTrue(dataSourceMetadata.getProperties().containsKey("prometheus.auth.type"));
assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.username"));
assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.password"));
assertFalse(
Expand Down Expand Up @@ -392,11 +392,72 @@ void testRemovalOfAuthorizationInfo() {
DataSourceMetadata dataSourceMetadata1 = dataSourceService.getDataSourceMetadata("testDS");
assertEquals("testDS", dataSourceMetadata1.getName());
assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata1.getConnector());
assertFalse(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.type"));
assertTrue(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.type"));
assertFalse(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.username"));
assertFalse(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.password"));
}

@Test
void testRemovalOfAuthorizationInfoForAccessKeyAndSecretKey() {
  // Fix: method name previously misspelled "SecretKye".
  // Verifies that AWS SigV4 credentials (access_key / secret_key) are stripped from the
  // metadata returned by getDataSourceMetadata, while the non-sensitive auth.type survives.
  HashMap<String, String> properties = new HashMap<>();
  properties.put("prometheus.uri", "https://localhost:9090");
  properties.put("prometheus.auth.type", "awssigv4");
  properties.put("prometheus.auth.access_key", "access_key");
  properties.put("prometheus.auth.secret_key", "secret_key");
  DataSourceMetadata dataSourceMetadata =
      new DataSourceMetadata(
          "testDS",
          DataSourceType.PROMETHEUS,
          Collections.singletonList("prometheus_access"),
          properties,
          null);
  when(dataSourceMetadataStorage.getDataSourceMetadata("testDS"))
      .thenReturn(Optional.of(dataSourceMetadata));

  DataSourceMetadata dataSourceMetadata1 = dataSourceService.getDataSourceMetadata("testDS");
  assertEquals("testDS", dataSourceMetadata1.getName());
  assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata1.getConnector());
  // auth.type is not confidential and must be retained.
  assertTrue(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.type"));
  // The actual credentials must be removed before the metadata is exposed.
  assertFalse(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.access_key"));
  assertFalse(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.secret_key"));
}

@Test
void testRemovalOfAuthorizationInfoForGlueWithRoleARN() {
  // Checks that for a Glue data source only the confidential index-store credentials are
  // scrubbed: auth.type, auth.role_arn and the index-store auth mode all remain visible.
  HashMap<String, String> glueProperties = new HashMap<>();
  glueProperties.put("glue.auth.type", "iam_role");
  glueProperties.put("glue.auth.role_arn", "role_arn");
  glueProperties.put("glue.indexstore.opensearch.uri", "http://localhost:9200");
  glueProperties.put("glue.indexstore.opensearch.auth", "basicauth");
  glueProperties.put("glue.indexstore.opensearch.auth.username", "username");
  glueProperties.put("glue.indexstore.opensearch.auth.password", "password");
  DataSourceMetadata storedMetadata =
      new DataSourceMetadata(
          "testGlue",
          DataSourceType.S3GLUE,
          Collections.singletonList("glue_access"),
          glueProperties,
          null);
  when(dataSourceMetadataStorage.getDataSourceMetadata("testGlue"))
      .thenReturn(Optional.of(storedMetadata));

  DataSourceMetadata fetchedMetadata = dataSourceService.getDataSourceMetadata("testGlue");
  assertEquals("testGlue", fetchedMetadata.getName());
  assertEquals(DataSourceType.S3GLUE, fetchedMetadata.getConnector());
  // Non-confidential auth settings survive sanitization.
  assertTrue(fetchedMetadata.getProperties().containsKey("glue.auth.type"));
  assertTrue(fetchedMetadata.getProperties().containsKey("glue.auth.role_arn"));
  assertTrue(fetchedMetadata.getProperties().containsKey("glue.indexstore.opensearch.uri"));
  assertTrue(fetchedMetadata.getProperties().containsKey("glue.indexstore.opensearch.auth"));
  // Index-store credentials are confidential and must have been removed.
  assertFalse(
      fetchedMetadata.getProperties().containsKey("glue.indexstore.opensearch.auth.username"));
  assertFalse(
      fetchedMetadata.getProperties().containsKey("glue.indexstore.opensearch.auth.password"));
}

@Test
void testGetDataSourceMetadataForNonExistingDataSource() {
when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")).thenReturn(Optional.empty());
Expand All @@ -421,7 +482,7 @@ void testGetDataSourceMetadataForSpecificDataSourceName() {
"testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), properties)));
DataSourceMetadata dataSourceMetadata = this.dataSourceService.getDataSourceMetadata("testDS");
assertTrue(dataSourceMetadata.getProperties().containsKey("prometheus.uri"));
assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.type"));
assertTrue(dataSourceMetadata.getProperties().containsKey("prometheus.auth.type"));
assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.username"));
assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.password"));
verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata("testDS");
Expand Down
2 changes: 1 addition & 1 deletion integ-test/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ apply plugin: 'java'
apply plugin: 'io.freefair.lombok'
apply plugin: 'com.wiredforcode.spawn'

String baseVersion = "2.11.0"
String baseVersion = "2.12.0"
String bwcVersion = baseVersion + ".0";
String baseName = "sqlBwcCluster"
String bwcFilePath = "src/test/resources/bwc/"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,10 @@ public void createDataSourceAPITest() {
new Gson().fromJson(getResponseString, DataSourceMetadata.class);
Assert.assertEquals(
"https://localhost:9090", dataSourceMetadata.getProperties().get("prometheus.uri"));
Assert.assertEquals(
"basicauth", dataSourceMetadata.getProperties().get("prometheus.auth.type"));
Assert.assertNull(dataSourceMetadata.getProperties().get("prometheus.auth.username"));
Assert.assertNull(dataSourceMetadata.getProperties().get("prometheus.auth.password"));
Assert.assertEquals("Prometheus Creation for Integ test", dataSourceMetadata.getDescription());
}

Expand Down Expand Up @@ -268,6 +272,10 @@ public void issue2196() {
new Gson().fromJson(getResponseString, DataSourceMetadata.class);
Assert.assertEquals(
"https://localhost:9090", dataSourceMetadata.getProperties().get("prometheus.uri"));
Assert.assertEquals(
"basicauth", dataSourceMetadata.getProperties().get("prometheus.auth.type"));
Assert.assertNull(dataSourceMetadata.getProperties().get("prometheus.auth.username"));
Assert.assertNull(dataSourceMetadata.getProperties().get("prometheus.auth.password"));
Assert.assertEquals("Prometheus Creation for Integ test", dataSourceMetadata.getDescription());
}
}
2 changes: 1 addition & 1 deletion legacy/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ dependencies {
}
}
implementation group: 'com.google.guava', name: 'guava', version: '32.0.1-jre'
implementation group: 'org.json', name: 'json', version:'20230227'
implementation group: 'org.json', name: 'json', version:'20231013'
implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.12.0'
implementation group: 'org.opensearch', name: 'opensearch', version: "${opensearch_version}"
// add geo module as dependency. https://github.com/opensearch-project/OpenSearch/pull/4180/.
Expand Down
2 changes: 1 addition & 1 deletion opensearch/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ dependencies {
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: "${versions.jackson}"
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: "${versions.jackson_databind}"
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-cbor', version: "${versions.jackson}"
implementation group: 'org.json', name: 'json', version:'20230227'
implementation group: 'org.json', name: 'json', version:'20231013'
compileOnly group: 'org.opensearch.client', name: 'opensearch-rest-high-level-client', version: "${opensearch_version}"
implementation group: 'org.opensearch', name:'opensearch-ml-client', version: "${opensearch_build}"

Expand Down
34 changes: 21 additions & 13 deletions plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,8 @@
import org.opensearch.sql.spark.client.EMRServerlessClient;
import org.opensearch.sql.spark.client.EmrServerlessClientImpl;
import org.opensearch.sql.spark.config.SparkExecutionEngineConfig;
import org.opensearch.sql.spark.config.SparkExecutionEngineConfigSupplier;
import org.opensearch.sql.spark.config.SparkExecutionEngineConfigSupplierImpl;
import org.opensearch.sql.spark.dispatcher.SparkQueryDispatcher;
import org.opensearch.sql.spark.flint.FlintIndexMetadataReaderImpl;
import org.opensearch.sql.spark.response.JobExecutionResponseReader;
Expand Down Expand Up @@ -214,15 +216,21 @@ public Collection<Object> createComponents(
dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata());
LocalClusterState.state().setClusterService(clusterService);
LocalClusterState.state().setPluginSettings((OpenSearchSettings) pluginSettings);
if (StringUtils.isEmpty(this.pluginSettings.getSettingValue(SPARK_EXECUTION_ENGINE_CONFIG))) {
SparkExecutionEngineConfigSupplier sparkExecutionEngineConfigSupplier =
new SparkExecutionEngineConfigSupplierImpl(pluginSettings);
SparkExecutionEngineConfig sparkExecutionEngineConfig =
sparkExecutionEngineConfigSupplier.getSparkExecutionEngineConfig();
if (StringUtils.isEmpty(sparkExecutionEngineConfig.getRegion())) {
LOGGER.warn(
String.format(
"Async Query APIs are disabled as %s is not configured in cluster settings. "
"Async Query APIs are disabled as %s is not configured properly in cluster settings. "
+ "Please configure and restart the domain to enable Async Query APIs",
SPARK_EXECUTION_ENGINE_CONFIG.getKeyValue()));
this.asyncQueryExecutorService = new AsyncQueryExecutorServiceImpl();
} else {
this.asyncQueryExecutorService = createAsyncQueryExecutorService();
this.asyncQueryExecutorService =
createAsyncQueryExecutorService(
sparkExecutionEngineConfigSupplier, sparkExecutionEngineConfig);
}

ModulesBuilder modules = new ModulesBuilder();
Expand Down Expand Up @@ -293,10 +301,13 @@ private DataSourceServiceImpl createDataSourceService() {
dataSourceUserAuthorizationHelper);
}

private AsyncQueryExecutorService createAsyncQueryExecutorService() {
private AsyncQueryExecutorService createAsyncQueryExecutorService(
SparkExecutionEngineConfigSupplier sparkExecutionEngineConfigSupplier,
SparkExecutionEngineConfig sparkExecutionEngineConfig) {
AsyncQueryJobMetadataStorageService asyncQueryJobMetadataStorageService =
new OpensearchAsyncQueryJobMetadataStorageService(client, clusterService);
EMRServerlessClient emrServerlessClient = createEMRServerlessClient();
EMRServerlessClient emrServerlessClient =
createEMRServerlessClient(sparkExecutionEngineConfig.getRegion());
JobExecutionResponseReader jobExecutionResponseReader = new JobExecutionResponseReader(client);
SparkQueryDispatcher sparkQueryDispatcher =
new SparkQueryDispatcher(
Expand All @@ -307,21 +318,18 @@ private AsyncQueryExecutorService createAsyncQueryExecutorService() {
new FlintIndexMetadataReaderImpl(client),
client);
return new AsyncQueryExecutorServiceImpl(
asyncQueryJobMetadataStorageService, sparkQueryDispatcher, pluginSettings);
asyncQueryJobMetadataStorageService,
sparkQueryDispatcher,
sparkExecutionEngineConfigSupplier);
}

private EMRServerlessClient createEMRServerlessClient() {
String sparkExecutionEngineConfigString =
this.pluginSettings.getSettingValue(SPARK_EXECUTION_ENGINE_CONFIG);
private EMRServerlessClient createEMRServerlessClient(String region) {
return AccessController.doPrivileged(
(PrivilegedAction<EMRServerlessClient>)
() -> {
SparkExecutionEngineConfig sparkExecutionEngineConfig =
SparkExecutionEngineConfig.toSparkExecutionEngineConfig(
sparkExecutionEngineConfigString);
AWSEMRServerless awsemrServerless =
AWSEMRServerlessClientBuilder.standard()
.withRegion(sparkExecutionEngineConfig.getRegion())
.withRegion(region)
.withCredentials(new DefaultAWSCredentialsProviderChain())
.build();
return new EmrServerlessClientImpl(awsemrServerless);
Expand Down
2 changes: 1 addition & 1 deletion ppl/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ dependencies {

implementation "org.antlr:antlr4-runtime:4.7.1"
implementation group: 'com.google.guava', name: 'guava', version: '32.0.1-jre'
api group: 'org.json', name: 'json', version: '20230227'
api group: 'org.json', name: 'json', version: '20231013'
implementation group: 'org.apache.logging.log4j', name: 'log4j-core', version:'2.20.0'
api project(':common')
api project(':core')
Expand Down
2 changes: 1 addition & 1 deletion prometheus/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ dependencies {
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: "${versions.jackson}"
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: "${versions.jackson_databind}"
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-cbor', version: "${versions.jackson}"
implementation group: 'org.json', name: 'json', version: '20230227'
implementation group: 'org.json', name: 'json', version: '20231013'

testImplementation('org.junit.jupiter:junit-jupiter:5.6.2')
testImplementation group: 'org.hamcrest', name: 'hamcrest-library', version: '2.1'
Expand Down
55 changes: 55 additions & 0 deletions release-notes/opensearch-sql.release-notes-2.11.0.0.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
Compatible with OpenSearch and OpenSearch Dashboards Version 2.11.0

### Features

### Enhancements
* Enable PPL lang and add datasource to async query API in https://github.com/opensearch-project/sql/pull/2195
* Refactor Flint Auth in https://github.com/opensearch-project/sql/pull/2201
* Add conf for spark structured streaming job in https://github.com/opensearch-project/sql/pull/2203
* Submit long running job only when auto_refresh = false in https://github.com/opensearch-project/sql/pull/2209
* Bug Fix, handle DESC TABLE response in https://github.com/opensearch-project/sql/pull/2213
* Drop Index Implementation in https://github.com/opensearch-project/sql/pull/2217
* Enable PPL Queries in https://github.com/opensearch-project/sql/pull/2223
* Read extra Spark submit parameters from cluster settings in https://github.com/opensearch-project/sql/pull/2236
* Spark Execution Engine Config Refactor in https://github.com/opensearch-project/sql/pull/2266
* Provide auth.type and auth.role_arn parameters in GET Datasource API response. in https://github.com/opensearch-project/sql/pull/2283
* Add support for `date_nanos` and tests. (#337) in https://github.com/opensearch-project/sql/pull/2020
* Applied formatting improvements to Antlr files based on spotless changes (#2017) by @MitchellGale in https://github.com/opensearch-project/sql/pull/2023
* Revert "Guarantee datasource read api is strong consistent read (#1815)" in https://github.com/opensearch-project/sql/pull/2031
* Add _primary preference only for segment replication enabled indices in https://github.com/opensearch-project/sql/pull/2045
* Changed allowlist config to denylist ip config for datasource uri hosts in https://github.com/opensearch-project/sql/pull/2058

### Bug Fixes
* fix broken link for connectors doc in https://github.com/opensearch-project/sql/pull/2199
* Fix response codes returned by JSON formatting them in https://github.com/opensearch-project/sql/pull/2200
* Bug fix, datasource API should be case sensitive in https://github.com/opensearch-project/sql/pull/2202
* Minor fix in dropping covering index in https://github.com/opensearch-project/sql/pull/2240
* Fix Unit tests for FlintIndexReader in https://github.com/opensearch-project/sql/pull/2242
* Bug Fix , delete OpenSearch index when DROP INDEX in https://github.com/opensearch-project/sql/pull/2252
* Correctly Set query status in https://github.com/opensearch-project/sql/pull/2232
* Exclude generated files from spotless in https://github.com/opensearch-project/sql/pull/2024
* Fix mockito core conflict. in https://github.com/opensearch-project/sql/pull/2131
* Fix `ASCII` function and groom UT for text functions. (#301) in https://github.com/opensearch-project/sql/pull/2029
* Fixed response codes For Requests With security exception. in https://github.com/opensearch-project/sql/pull/2036

### Documentation
* Datasource description in https://github.com/opensearch-project/sql/pull/2138
* Add documentation for S3GlueConnector. in https://github.com/opensearch-project/sql/pull/2234

### Infrastructure
* bump aws-encryption-sdk-java to 1.71 in https://github.com/opensearch-project/sql/pull/2057
* Run IT tests with security plugin (#335) #1986 by @MitchellGale in https://github.com/opensearch-project/sql/pull/2022

### Refactoring
* Merging Async Query APIs feature branch into main. in https://github.com/opensearch-project/sql/pull/2163
* Removed Domain Validation in https://github.com/opensearch-project/sql/pull/2136
* Check for existence of security plugin in https://github.com/opensearch-project/sql/pull/2069
* Always use snapshot version for security plugin download in https://github.com/opensearch-project/sql/pull/2061
* Add customized result index in data source etc in https://github.com/opensearch-project/sql/pull/2220

### Security
* bump okhttp to 4.10.0 (#2043) by @joshuali925 in https://github.com/opensearch-project/sql/pull/2044
* bump okio to 3.4.0 by @joshuali925 in https://github.com/opensearch-project/sql/pull/2047

---
**Full Changelog**: https://github.com/opensearch-project/sql/compare/2.3.0.0...v.2.11.0.0
Loading

0 comments on commit ad97a7a

Please sign in to comment.