Skip to content

Commit

Permalink
add optimizer support for flint based acceleration queries - also using the PPL
Browse files Browse the repository at this point in the history

add ppl-spark-extension jar mvn publish

Signed-off-by: YANGDB <[email protected]>
  • Loading branch information
YANG-DB committed Oct 2, 2023
1 parent e3210f0 commit 00192ba
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 9 deletions.
1 change: 1 addition & 0 deletions .github/workflows/snapshot-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ jobs:
- name: Publish to Local Maven
run: |
sbt standaloneCosmetic/publishM2
sbt sparkPPLCosmetic/publishM2
sbt sparkSqlApplicationCosmetic/publishM2
- uses: actions/checkout@v3
Expand Down
19 changes: 10 additions & 9 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -61,13 +61,16 @@ lazy val flintCore = (project in file("flint-core"))
exclude ("com.fasterxml.jackson.core", "jackson-databind")),
publish / skip := true)

lazy val pplSparkIntegration = (project in file("ppl-spark-integration"))
lazy val flintSparkIntegration = (project in file("flint-spark-integration"))
.dependsOn(flintCore)
.enablePlugins(AssemblyPlugin, Antlr4Plugin)
.settings(
commonSettings,
name := "ppl-spark-integration",
name := "flint-spark-integration",
scalaVersion := scala212,
libraryDependencies ++= Seq(
"com.amazonaws" % "aws-java-sdk" % "1.12.397" % "provided"
exclude ("com.fasterxml.jackson.core", "jackson-databind"),
"org.scalactic" %% "scalactic" % "3.2.15" % "test",
"org.scalatest" %% "scalatest" % "3.2.15" % "test",
"org.scalatest" %% "scalatest-flatspec" % "3.2.15" % "test",
Expand All @@ -77,7 +80,7 @@ lazy val pplSparkIntegration = (project in file("ppl-spark-integration"))
libraryDependencies ++= deps(sparkVersion),
// ANTLR settings
Antlr4 / antlr4Version := "4.8",
Antlr4 / antlr4PackageName := Some("org.opensearch.flint.spark.ppl"),
Antlr4 / antlr4PackageName := Some("org.opensearch.flint.spark.sql"),
Antlr4 / antlr4GenListener := true,
Antlr4 / antlr4GenVisitor := true,
// Assembly settings
Expand All @@ -97,16 +100,14 @@ lazy val pplSparkIntegration = (project in file("ppl-spark-integration"))
},
assembly / test := (Test / test).value)

lazy val flintSparkIntegration = (project in file("flint-spark-integration"))
.dependsOn(flintCore)
lazy val pplSparkIntegration = (project in file("ppl-spark-integration"))
.enablePlugins(AssemblyPlugin, Antlr4Plugin)
.dependsOn(flintSparkIntegration)
.settings(
commonSettings,
name := "flint-spark-integration",
name := "ppl-spark-integration",
scalaVersion := scala212,
libraryDependencies ++= Seq(
"com.amazonaws" % "aws-java-sdk" % "1.12.397" % "provided"
exclude ("com.fasterxml.jackson.core", "jackson-databind"),
"org.scalactic" %% "scalactic" % "3.2.15" % "test",
"org.scalatest" %% "scalatest" % "3.2.15" % "test",
"org.scalatest" %% "scalatest-flatspec" % "3.2.15" % "test",
Expand All @@ -116,7 +117,7 @@ lazy val flintSparkIntegration = (project in file("flint-spark-integration"))
libraryDependencies ++= deps(sparkVersion),
// ANTLR settings
Antlr4 / antlr4Version := "4.8",
Antlr4 / antlr4PackageName := Some("org.opensearch.flint.spark.sql"),
Antlr4 / antlr4PackageName := Some("org.opensearch.flint.spark.ppl"),
Antlr4 / antlr4GenListener := true,
Antlr4 / antlr4GenVisitor := true,
// Assembly settings
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

package org.opensearch.flint.spark

import org.opensearch.flint.spark.function.TumbleFunction
import org.opensearch.flint.spark.ppl.FlintSparkPPLParser

import org.apache.spark.sql.SparkSessionExtensions
Expand All @@ -18,5 +19,11 @@ class FlintPPLSparkExtensions extends (SparkSessionExtensions => Unit) {
extensions.injectParser { (spark, parser) =>
new FlintSparkPPLParser(parser)
}

extensions.injectFunction(TumbleFunction.description)

extensions.injectOptimizerRule { spark =>
new FlintSparkOptimizer(spark)
}
}
}

0 comments on commit 00192ba

Please sign in to comment.