From d5f236d27ef88d2ba54fb7fc29651c7a9ce6268c Mon Sep 17 00:00:00 2001
From: Tomoyuki MORITA
Date: Thu, 7 Nov 2024 10:38:05 -0800
Subject: [PATCH] Bump Flint version to 0.7.0 (#867)

Signed-off-by: Tomoyuki Morita
---
 README.md                                                 | 5 +++--
 build.sbt                                                 | 2 +-
 docs/index.md                                             | 6 +++---
 docs/ppl-lang/PPL-on-Spark.md                             | 4 ++--
 .../scala/org/opensearch/flint/common/FlintVersion.scala  | 3 ++-
 5 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/README.md b/README.md
index 4c470e98b..592b2645d 100644
--- a/README.md
+++ b/README.md
@@ -31,6 +31,7 @@ Version compatibility:
 | 0.4.0 | 11+ | 3.3.2 | 2.12.14 | 2.13+ |
 | 0.5.0 | 11+ | 3.5.1 | 2.12.14 | 2.17+ |
 | 0.6.0 | 11+ | 3.5.1 | 2.12.14 | 2.17+ |
+| 0.7.0 | 11+ | 3.5.1 | 2.12.14 | 2.17+ |
 
 ## Flint Extension Usage
 
@@ -62,7 +63,7 @@ sbt clean standaloneCosmetic/publishM2
 ```
 then add org.opensearch:opensearch-spark-standalone_2.12 when run spark application, for example,
 ```
-bin/spark-shell --packages "org.opensearch:opensearch-spark-standalone_2.12:0.6.0-SNAPSHOT" \
+bin/spark-shell --packages "org.opensearch:opensearch-spark-standalone_2.12:0.7.0-SNAPSHOT" \
     --conf "spark.sql.extensions=org.opensearch.flint.spark.FlintSparkExtensions" \
     --conf "spark.sql.catalog.dev=org.apache.spark.opensearch.catalog.OpenSearchCatalog"
 ```
@@ -76,7 +77,7 @@ sbt clean sparkPPLCosmetic/publishM2
 ```
 then add org.opensearch:opensearch-spark-ppl_2.12 when run spark application, for example,
 ```
-bin/spark-shell --packages "org.opensearch:opensearch-spark-ppl_2.12:0.6.0-SNAPSHOT" \
+bin/spark-shell --packages "org.opensearch:opensearch-spark-ppl_2.12:0.7.0-SNAPSHOT" \
     --conf "spark.sql.extensions=org.opensearch.flint.spark.FlintPPLSparkExtensions" \
     --conf "spark.sql.catalog.dev=org.apache.spark.opensearch.catalog.OpenSearchCatalog"
 
diff --git a/build.sbt b/build.sbt
index 507938542..73d70c404 100644
--- a/build.sbt
+++ b/build.sbt
@@ -21,7 +21,7 @@ val sparkMinorVersion = sparkVersion.split("\\.").take(2).mkString(".")
 
 ThisBuild / organization := "org.opensearch"
 
-ThisBuild / version := "0.6.0-SNAPSHOT"
+ThisBuild / version := "0.7.0-SNAPSHOT"
 
 ThisBuild / scalaVersion := scala212
 
diff --git a/docs/index.md b/docs/index.md
index e76cb387a..82c147de2 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -60,7 +60,7 @@ Currently, Flint metadata is only static configuration without version control a
 
 ```json
 {
-  "version": "0.6.0",
+  "version": "0.7.0",
   "name": "...",
   "kind": "skipping",
   "source": "...",
@@ -698,7 +698,7 @@ For now, only single or conjunct conditions (conditions connected by AND) in WHE
 ### AWS EMR Spark Integration - Using execution role
 Flint use [DefaultAWSCredentialsProviderChain](https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html). When running in EMR Spark, Flint use executionRole credentials
 ```
---conf spark.jars.packages=org.opensearch:opensearch-spark-standalone_2.12:0.6.0-SNAPSHOT \
+--conf spark.jars.packages=org.opensearch:opensearch-spark-standalone_2.12:0.7.0-SNAPSHOT \
 --conf spark.jars.repositories=https://aws.oss.sonatype.org/content/repositories/snapshots \
 --conf spark.emr-serverless.driverEnv.JAVA_HOME=/usr/lib/jvm/java-17-amazon-corretto.x86_64 \
 --conf spark.executorEnv.JAVA_HOME=/usr/lib/jvm/java-17-amazon-corretto.x86_64 \
@@ -740,7 +740,7 @@ Flint use [DefaultAWSCredentialsProviderChain](https://docs.aws.amazon.com/AWSJa
 ```
 3. Set the spark.datasource.flint.customAWSCredentialsProvider property with value as com.amazonaws.emr.AssumeRoleAWSCredentialsProvider. Set the environment variable ASSUME_ROLE_CREDENTIALS_ROLE_ARN with the ARN value of CrossAccountRoleB.
 ```
---conf spark.jars.packages=org.opensearch:opensearch-spark-standalone_2.12:0.6.0-SNAPSHOT \
+--conf spark.jars.packages=org.opensearch:opensearch-spark-standalone_2.12:0.7.0-SNAPSHOT \
 --conf spark.jars.repositories=https://aws.oss.sonatype.org/content/repositories/snapshots \
 --conf spark.emr-serverless.driverEnv.JAVA_HOME=/usr/lib/jvm/java-17-amazon-corretto.x86_64 \
 --conf spark.executorEnv.JAVA_HOME=/usr/lib/jvm/java-17-amazon-corretto.x86_64 \
diff --git a/docs/ppl-lang/PPL-on-Spark.md b/docs/ppl-lang/PPL-on-Spark.md
index 3b260bd37..1b057572b 100644
--- a/docs/ppl-lang/PPL-on-Spark.md
+++ b/docs/ppl-lang/PPL-on-Spark.md
@@ -34,7 +34,7 @@ sbt clean sparkPPLCosmetic/publishM2
 ```
 then add org.opensearch:opensearch-spark_2.12 when run spark application, for example,
 ```
-bin/spark-shell --packages "org.opensearch:opensearch-spark-ppl_2.12:0.6.0-SNAPSHOT"
+bin/spark-shell --packages "org.opensearch:opensearch-spark-ppl_2.12:0.7.0-SNAPSHOT"
 ```
 
 ### PPL Extension Usage
@@ -46,7 +46,7 @@ spark-sql --conf "spark.sql.extensions=org.opensearch.flint.spark.FlintPPLSparkE
 ```
 
 ### Running With both Flint & PPL Extensions
-In order to make use of both flint and ppl extension, one can simply add both jars (`org.opensearch:opensearch-spark-ppl_2.12:0.6.0-SNAPSHOT`,`org.opensearch:opensearch-spark_2.12:0.6.0-SNAPSHOT`) to the cluster's
+In order to make use of both flint and ppl extension, one can simply add both jars (`org.opensearch:opensearch-spark-ppl_2.12:0.7.0-SNAPSHOT`,`org.opensearch:opensearch-spark_2.12:0.7.0-SNAPSHOT`) to the cluster's
 classpath.
 
 Next need to configure both extensions :
diff --git a/flint-commons/src/main/scala/org/opensearch/flint/common/FlintVersion.scala b/flint-commons/src/main/scala/org/opensearch/flint/common/FlintVersion.scala
index 1203ea7ef..53574b770 100644
--- a/flint-commons/src/main/scala/org/opensearch/flint/common/FlintVersion.scala
+++ b/flint-commons/src/main/scala/org/opensearch/flint/common/FlintVersion.scala
@@ -20,6 +20,7 @@ object FlintVersion {
   val V_0_4_0: FlintVersion = FlintVersion("0.4.0")
   val V_0_5_0: FlintVersion = FlintVersion("0.5.0")
   val V_0_6_0: FlintVersion = FlintVersion("0.6.0")
+  val V_0_7_0: FlintVersion = FlintVersion("0.7.0")
 
-  def current(): FlintVersion = V_0_6_0
+  def current(): FlintVersion = V_0_7_0
}
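A quick local sanity check of the bump is to exercise the new constant from the `flint-commons` module. This is a minimal sketch and not part of the change set: it assumes the patched module is on the classpath, and the `VersionCheck` object name is purely illustrative.

```scala
import org.opensearch.flint.common.FlintVersion

// Illustrative check only (not part of this patch): after the bump,
// current() should return the newly added V_0_7_0 constant.
object VersionCheck extends App {
  val v = FlintVersion.current()
  assert(v == FlintVersion.V_0_7_0, s"unexpected Flint version: $v")
  println(s"Flint version is now $v")
}
```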