Enable parallelExecution for integration test suites (#934)
* Split integration test to multiple groups and enable parallelExecution

Signed-off-by: Heng Qian <[email protected]>

* Fix spark-warehouse conflict

Signed-off-by: Heng Qian <[email protected]>

* Test with 3 groups

Signed-off-by: Heng Qian <[email protected]>

* Random shuffle tests before splitting groups

Signed-off-by: Heng Qian <[email protected]>

* reset group number to 4

Signed-off-by: Heng Qian <[email protected]>

* revert shuffle

Signed-off-by: Heng Qian <[email protected]>

---------

Signed-off-by: Heng Qian <[email protected]>
qianheng-aws authored Nov 22, 2024
1 parent 31fae14 commit 3ff2ef2
Showing 2 changed files with 26 additions and 4 deletions.
build.sbt: 28 changes (24 additions, 4 deletions)
@@ -2,8 +2,7 @@
  * Copyright OpenSearch Contributors
  * SPDX-License-Identifier: Apache-2.0
  */
-import Dependencies._
-import sbtassembly.AssemblyPlugin.autoImport.ShadeRule
+import Dependencies.*
 
 lazy val scala212 = "2.12.14"
 lazy val sparkVersion = "3.5.1"
@@ -38,6 +37,11 @@ ThisBuild / scalastyleConfig := baseDirectory.value / "scalastyle-config.xml"
  */
 ThisBuild / Test / parallelExecution := false
 
+/**
+ * Set the parallelism of forked tests to 4 to accelerate integration test
+ */
+concurrentRestrictions in Global := Seq(Tags.limit(Tags.ForkedTestGroup, 4))
+
 // Run as part of compile task.
 lazy val compileScalastyle = taskKey[Unit]("compileScalastyle")
 
@@ -274,13 +278,29 @@ lazy val integtest = (project in file("integ-test"))
     IntegrationTest / javaSource := baseDirectory.value / "src/integration/java",
     IntegrationTest / scalaSource := baseDirectory.value / "src/integration/scala",
     IntegrationTest / resourceDirectory := baseDirectory.value / "src/integration/resources",
-    IntegrationTest / parallelExecution := false,
+    IntegrationTest / parallelExecution := true, // enable parallel execution
+    IntegrationTest / testForkedParallel := false, // disable forked parallel execution to avoid duplicate spark context in the same JVM
     IntegrationTest / fork := true,
+    IntegrationTest / testGrouping := {
+      val tests = (IntegrationTest / definedTests).value
+      val forkOptions = ForkOptions()
+      val groups = tests.grouped(tests.size / 4 + 1).zipWithIndex.map { case (group, index) =>
+        val groupName = s"group-${index + 1}"
+        new Tests.Group(
+          name = groupName,
+          tests = group,
+          runPolicy = Tests.SubProcess(
+            forkOptions.withRunJVMOptions(forkOptions.runJVMOptions ++
+              Seq(s"-Djava.io.tmpdir=${baseDirectory.value}/integ-test/target/tmp/$groupName")))
+        )
+      }
+      groups.toSeq
+    }
   )),
   inConfig(AwsIntegrationTest)(Defaults.testSettings ++ Seq(
     AwsIntegrationTest / javaSource := baseDirectory.value / "src/aws-integration/java",
     AwsIntegrationTest / scalaSource := baseDirectory.value / "src/aws-integration/scala",
-    AwsIntegrationTest / parallelExecution := false,
+    AwsIntegrationTest / parallelExecution := true,
     AwsIntegrationTest / fork := true,
   )),
   libraryDependencies ++= Seq(
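For readers unfamiliar with sbt's test grouping, the standalone sketch below restates the mechanism the diff above relies on. It is a minimal illustration, not part of this commit: the plain Test configuration, the group count of 4, and the target/tmp layout are assumptions chosen to mirror the change. Each Tests.Group with a SubProcess run policy is launched in its own forked JVM with a private java.io.tmpdir, while Tags.limit(Tags.ForkedTestGroup, 4) caps how many of those JVMs run at once.

// build.sbt -- illustrative sketch only, not the project's actual build definition
import sbt._          // auto-imported in .sbt files; shown here for completeness
import sbt.Keys._

// Allow at most 4 forked test groups to run concurrently.
Global / concurrentRestrictions := Seq(Tags.limit(Tags.ForkedTestGroup, 4))

Test / fork := true

Test / testGrouping := {
  val suites = (Test / definedTests).value
  val baseFork = ForkOptions()
  // Chunk the discovered suites into roughly equal groups; each group becomes one forked JVM.
  suites.grouped(suites.size / 4 + 1).zipWithIndex.map { case (group, idx) =>
    val groupName = s"group-${idx + 1}"
    Tests.Group(
      name = groupName,
      tests = group,
      // SubProcess: the group runs in its own JVM with a private tmp dir,
      // so suites in different groups cannot collide on temp files.
      runPolicy = Tests.SubProcess(
        baseFork.withRunJVMOptions(
          baseFork.runJVMOptions :+ s"-Djava.io.tmpdir=target/tmp/$groupName")))
  }.toSeq
}

Note that the commit keeps testForkedParallel disabled, so suites inside one group still run sequentially within their JVM; that avoids creating two SparkContexts in the same process while the four group JVMs run in parallel.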
FlintSuite.scala: 2 changes (2 additions, 0 deletions)
@@ -12,6 +12,7 @@ import org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation
 import org.apache.spark.sql.flint.config.{FlintConfigEntry, FlintSparkConf}
 import org.apache.spark.sql.flint.config.FlintSparkConf.{EXTERNAL_SCHEDULER_ENABLED, HYBRID_SCAN_ENABLED, METADATA_CACHE_WRITE}
 import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.internal.StaticSQLConf.WAREHOUSE_PATH
 import org.apache.spark.sql.test.SharedSparkSession
 
 trait FlintSuite extends SharedSparkSession {
@@ -30,6 +31,7 @@ trait FlintSuite extends SharedSparkSession {
       .set(
         FlintSparkConf.CUSTOM_FLINT_SCHEDULER_CLASS.key,
         "org.opensearch.flint.core.scheduler.AsyncQuerySchedulerBuilderTest$AsyncQuerySchedulerForLocalTest")
+      .set(WAREHOUSE_PATH.key, s"spark-warehouse/${suiteName}")
     conf
   }
 
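The second change gives every suite its own warehouse directory, which is what resolves the "spark-warehouse conflict" mentioned in the commit message. A rough standalone sketch of why that matters follows; it is not taken from the commit, and the object name, suite name, and table name are made up for illustration. StaticSQLConf.WAREHOUSE_PATH corresponds to the static config spark.sql.warehouse.dir, the directory where managed tables are written, so suites that share the default spark-warehouse path can collide on the same table directories once they run in parallel.

// WarehouseIsolationExample.scala -- illustrative sketch only
import org.apache.spark.sql.SparkSession

object WarehouseIsolationExample extends App {
  // In the real trait this value comes from ScalaTest's suiteName; hard-coded here.
  val suiteName = "ExampleSuite"

  val spark = SparkSession.builder()
    .master("local[1]")
    // "spark.sql.warehouse.dir" is the key behind StaticSQLConf.WAREHOUSE_PATH.
    // It is a static conf, so it must be set before the first session is created.
    .config("spark.sql.warehouse.dir", s"spark-warehouse/$suiteName")
    .getOrCreate()

  // The managed table's files land under spark-warehouse/ExampleSuite/t,
  // isolated from any other suite running at the same time.
  spark.sql("CREATE TABLE t (id INT) USING parquet")
  spark.sql("DROP TABLE t")
  spark.stop()
}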
