#661 Update Scala, Spark, and sbt patch versions.
yruslan committed Mar 26, 2024
1 parent f2c9544 commit ac94f37
Showing 6 changed files with 43 additions and 33 deletions.
22 changes: 12 additions & 10 deletions .github/workflows/build.yml
@@ -18,25 +18,27 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        scala: [ 2.11.12, 2.12.18, 2.13.12 ]
-        spark: [ 2.4.8, 3.4.1, 3.5.0 ]
+        scala: [ 2.11.12, 2.12.19, 2.13.13 ]
+        spark: [ 2.4.8, 3.4.2, 3.5.1 ]
         exclude:
           - scala: 2.11.12
-            spark: 3.4.1
+            spark: 3.4.2
           - scala: 2.11.12
-            spark: 3.5.0
-          - scala: 2.12.18
+            spark: 3.5.1
+          - scala: 2.12.19
             spark: 2.4.8
-          - scala: 2.13.12
+          - scala: 2.13.13
             spark: 2.4.8
     name: Spark ${{matrix.spark}} on Scala ${{matrix.scala}}
     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
       - uses: coursier/cache-action@v5
-      - name: Setup Scala
-        uses: olafurpg/setup-scala@v10
+      - name: Setup JDK
+        uses: actions/setup-java@v4
         with:
-          java-version: "[email protected]"
+          distribution: temurin
+          java-version: 8
+          cache: sbt
       - name: Build and run tests
         run: sbt ++${{matrix.scala}} test -DSPARK_VERSION=${{matrix.spark}}
2 changes: 1 addition & 1 deletion .github/workflows/fossa.yml
@@ -16,7 +16,7 @@ jobs:

     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

       - name: Run FOSSA scan and upload build data
         uses: fossa-contrib/fossa-action@v2
14 changes: 8 additions & 6 deletions .github/workflows/jacoco_check.yml
@@ -37,9 +37,9 @@ jobs:
       # Scala 2.12 is chosen since it is supported by the most wide range of Spark versions and
       # vendor distributions.
       include:
-        - scala: 2.12.18
+        - scala: 2.12.19
           scalaShort: "2.12"
-          spark: 3.3.3
+          spark: 3.3.4
           overall: 0.0
           changed: 80.0

@@ -49,11 +49,13 @@
           changedCobolParserOverride: 20
     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Setup Scala
-        uses: olafurpg/setup-scala@v10
+        uses: actions/checkout@v4
+      - name: Setup JDK
+        uses: actions/setup-java@v4
         with:
-          java-version: "[email protected]"
+          distribution: temurin
+          java-version: 8
+          cache: sbt
       - name: Build and run tests
         run: sbt ++${{matrix.scala}} jacoco -DSPARK_VERSION=${{matrix.spark}}
       - name: Add coverage of 'cobol-parser' to PR
22 changes: 11 additions & 11 deletions README.md
@@ -302,24 +302,24 @@ Creating an uber jar for Cobrix is very easy. Steps to build:
 sbt -DSPARK_VERSION="2.4.8" ++2.11.12 assembly

 # For Scala 2.12
-sbt -DSPARK_VERSION="2.4.8" ++2.12.17 assembly
-sbt -DSPARK_VERSION="3.1.3" ++2.12.17 assembly
-sbt -DSPARK_VERSION="3.2.3" ++2.12.17 assembly
-sbt -DSPARK_VERSION="3.3.2" ++2.12.17 assembly
-sbt -DSPARK_VERSION="3.4.0" ++2.12.17 assembly
+sbt -DSPARK_VERSION="2.4.8" ++2.12.19 assembly
+sbt -DSPARK_VERSION="3.1.3" ++2.12.19 assembly
+sbt -DSPARK_VERSION="3.2.3" ++2.12.19 assembly
+sbt -DSPARK_VERSION="3.3.2" ++2.12.19 assembly
+sbt -DSPARK_VERSION="3.4.0" ++2.12.19 assembly

 # For Scala 2.13
-sbt -DSPARK_VERSION="3.3.2" ++2.13.10 assembly
-sbt -DSPARK_VERSION="3.4.0" ++2.13.10 assembly
+sbt -DSPARK_VERSION="3.3.2" ++2.13.13 assembly
+sbt -DSPARK_VERSION="3.4.0" ++2.13.13 assembly
 ```

 You can collect the uber jar of `spark-cobol` either at
 `spark-cobol/target/scala-2.11/` or in `spark-cobol/target/scala-2.12/` depending on the Scala version you used.
-The fat jar will have '-bundle' suffix. You can also download pre-built bundles from https://github.com/AbsaOSS/cobrix/releases/tag/v2.6.5
+The fat jar will have '-bundle' suffix. You can also download pre-built bundles from https://github.com/AbsaOSS/cobrix/releases/tag/v2.6.10

 Then, run `spark-shell` or `spark-submit` adding the fat jar as the option.
 ```sh
-$ spark-shell --jars spark-cobol_2.12_3.3.2-2.6.11-SNAPSHOT-bundle.jar
+$ spark-shell --jars spark-cobol_2.12_3.3-2.6.11-SNAPSHOT-bundle.jar
 ```

 > <b>A note for building and running tests on Windows</b>

@@ -330,8 +330,8 @@ $ spark-shell --jars spark-cobol_2.12_3.3.2-2.6.11-SNAPSHOT-bundle.jar
 > You can work around it by using default Spark version for a given Scala version:
 > ```sh
 > sbt ++2.11.12 assembly
-> sbt ++2.12.17 assembly
-> sbt ++2.13.10 assembly
+> sbt ++2.12.19 assembly
+> sbt ++2.13.13 assembly
 > ```

 ## Other Features
6 changes: 3 additions & 3 deletions build.sbt
@@ -20,8 +20,8 @@ import ScalacOptions._
 import com.github.sbt.jacoco.report.JacocoReportSettings

 lazy val scala211 = "2.11.12"
-lazy val scala212 = "2.12.18"
-lazy val scala213 = "2.13.12"
+lazy val scala212 = "2.12.19"
+lazy val scala213 = "2.13.13"

 ThisBuild / organization := "za.co.absa.cobrix"

@@ -139,7 +139,7 @@ lazy val assemblySettings = Seq(
     // The SLF4j API and implementation are provided by Spark
     ShadeRule.zap("org.slf4j.**").inAll
   ),
-  assembly / assemblyJarName := s"${name.value}_${scalaBinaryVersion.value}_${sparkVersion(scalaVersion.value)}-${version.value}-bundle.jar",
+  assembly / assemblyJarName := s"${name.value}_${scalaBinaryVersion.value}_${sparkVersionShort(scalaVersion.value)}-${version.value}-bundle.jar",
   assembly / logLevel := Level.Info,
   assembly / test := {}
 )
10 changes: 8 additions & 2 deletions project/Dependencies.scala
@@ -28,8 +28,8 @@ object Dependencies {
   private val mockitoVersion = "4.11.0"

   private val defaultSparkVersionForScala211 = "2.4.8"
-  private val defaultSparkVersionForScala212 = "3.4.1"
-  private val defaultSparkVersionForScala213 = "3.5.0"
+  private val defaultSparkVersionForScala212 = "3.4.2"
+  private val defaultSparkVersionForScala213 = "3.5.1"

   def sparkFallbackVersion(scalaVersion: String): String = {
     if (scalaVersion.startsWith("2.11.")) {

@@ -45,6 +45,12 @@

   def sparkVersion(scalaVersion: String): String = sys.props.getOrElse("SPARK_VERSION", sparkFallbackVersion(scalaVersion))

+  def sparkVersionShort(scalaVersion: String): String = {
+    val fullVersion = sparkVersion(scalaVersion)
+
+    fullVersion.split('.').take(2).mkString(".")
+  }
+
   def getScalaDependency(scalaVersion: String): ModuleID = "org.scala-lang" % "scala-library" % scalaVersion % Provided

   def SparkCobolDependencies(scalaVersion: String): Seq[ModuleID] = Seq(
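Taken together, the build.sbt and Dependencies.scala changes mean the assembly jar is now named after the Spark major.minor version rather than the full patch version. A minimal sketch of what the new `sparkVersionShort` helper computes (the standalone function and sample version strings below are illustrative only):

```scala
// Illustrative re-statement of the sparkVersionShort logic:
// keep only the major.minor part of a Spark version string.
def shortVersion(fullVersion: String): String =
  fullVersion.split('.').take(2).mkString(".")

shortVersion("3.4.2") // returns "3.4"
shortVersion("2.4.8") // returns "2.4"
```

For Scala 2.12 and Spark 3.3.2, for example, the bundle name changes from `spark-cobol_2.12_3.3.2-2.6.11-SNAPSHOT-bundle.jar` to `spark-cobol_2.12_3.3-2.6.11-SNAPSHOT-bundle.jar`, matching the updated `spark-shell` example in the README above.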
