Make delta-lake shim dependencies parametrizable [databricks] #11697

Merged: 22 commits, merged Nov 8, 2024
559 changes: 22 additions & 537 deletions aggregator/pom.xml

Large diffs are not rendered by default.
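Since the aggregator/pom.xml diff is collapsed here, a rough sketch of what the parametrization enables: instead of maintaining a hard-coded delta-lake shim dependency per Spark profile, the aggregator can declare dependencies against the new rapids.delta.artifactId1/2/3 properties that each release profile sets in the parent pom.xml below. The property names come from this PR's pom.xml diff; the groupId, version, and classifier wiring shown next are illustrative assumptions, not the exact aggregator change.

<!-- Hypothetical sketch only: a parametrized delta-lake shim dependency.
     The artifactId property is from this PR; groupId/version/classifier
     values are assumed for illustration. -->
<dependency>
  <groupId>com.nvidia</groupId>
  <artifactId>${rapids.delta.artifactId1}</artifactId>
  <version>${project.version}</version>
  <classifier>${spark.version.classifier}</classifier>
</dependency>

With the artifactId resolved from the per-profile property, adding a new Spark shim should only require setting the property in its release profile rather than editing the aggregator dependency list.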

14 changes: 14 additions & 0 deletions build/make-scala-version-build-files.sh
@@ -18,6 +18,20 @@

set -e

trap_func() {
  rv=$?
  if [[ $rv == 0 ]]; then
    echo DONE scala2.13 poms generated: exit code = $rv
  else
    echo ERROR generating scala2.13 poms, re-execute with:
    echo " bash -x $*"
    echo to inspect the error output
    exit $rv
  fi
}

trap "trap_func" EXIT

VALID_VERSIONS=( 2.13 )
declare -A DEFAULT_SPARK
DEFAULT_SPARK[2.12]="spark320"
54 changes: 54 additions & 0 deletions pom.xml
@@ -105,6 +105,7 @@
<spark.version>${spark320.version}</spark.version>
<spark.test.version>${spark320.version}</spark.test.version>
<parquet.hadoop.version>1.12.1</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
</properties>
<modules>
<module>delta-lake/delta-20x</module>
@@ -125,6 +126,7 @@
<spark.version>${spark321.version}</spark.version>
<spark.test.version>${spark321.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
</properties>
<modules>
<module>delta-lake/delta-20x</module>
@@ -145,6 +147,7 @@
<spark.version>${spark321cdh.version}</spark.version>
<spark.test.version>${spark321cdh.version}</spark.test.version>
<parquet.hadoop.version>1.10.1</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
<cloudera.repo.enabled>true</cloudera.repo.enabled>
<!-- #endif scala-2.12 -->
<!-- Keeping the scala plugin version 4.3.0 for details
@@ -172,6 +175,7 @@
<spark.version>${spark322.version}</spark.version>
<spark.test.version>${spark322.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
</properties>
<modules>
<module>delta-lake/delta-20x</module>
@@ -192,6 +196,7 @@
<spark.version>${spark323.version}</spark.version>
<spark.test.version>${spark323.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
</properties>
<modules>
<module>delta-lake/delta-20x</module>
@@ -212,6 +217,7 @@
<spark.version>${spark324.version}</spark.version>
<spark.test.version>${spark324.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-20x</rapids.delta.artifactId1>
</properties>
<modules>
<module>delta-lake/delta-20x</module>
@@ -234,6 +240,9 @@
<spark.version>${spark330.version}</spark.version>
<spark.test.version>${spark330.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
<rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
<rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
</properties>
<modules>
@@ -255,6 +264,9 @@
<spark.version>${spark331.version}</spark.version>
<spark.test.version>${spark331.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
<rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
<rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
</properties>
<modules>
@@ -276,6 +288,9 @@
<spark.version>${spark332.version}</spark.version>
<spark.test.version>${spark332.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
<rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
<rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
</properties>
<modules>
@@ -297,6 +312,9 @@
<spark.version>${spark333.version}</spark.version>
<spark.test.version>${spark333.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
<rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
<rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
</properties>
<modules>
@@ -318,6 +336,9 @@
<spark.version>${spark334.version}</spark.version>
<spark.test.version>${spark334.version}</spark.test.version>
<parquet.hadoop.version>1.12.2</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
<rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
<rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
</properties>
<modules>
@@ -339,6 +360,7 @@
<spark.version>${spark340.version}</spark.version>
<spark.test.version>${spark340.version}</spark.test.version>
<parquet.hadoop.version>1.12.3</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.6</slf4j.version>
</properties>
@@ -359,6 +381,7 @@
<spark.version>${spark341.version}</spark.version>
<spark.test.version>${spark341.version}</spark.test.version>
<parquet.hadoop.version>1.12.3</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.6</slf4j.version>
</properties>
@@ -379,6 +402,7 @@
<spark.version>${spark342.version}</spark.version>
<spark.test.version>${spark342.version}</spark.test.version>
<parquet.hadoop.version>1.12.3</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.6</slf4j.version>
</properties>
@@ -399,6 +423,7 @@
<spark.version>${spark343.version}</spark.version>
<spark.test.version>${spark343.version}</spark.test.version>
<parquet.hadoop.version>1.12.3</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.6</slf4j.version>
</properties>
@@ -419,6 +444,7 @@
<spark.version>${spark344.version}</spark.version>
<spark.test.version>${spark344.version}</spark.test.version>
<parquet.hadoop.version>1.12.3</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-24x</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.6</slf4j.version>
</properties>
@@ -439,6 +465,9 @@
<spark.version>${spark330cdh.version}</spark.version>
<spark.test.version>${spark330cdh.version}</spark.test.version>
<parquet.hadoop.version>1.10.99.7.1.8.0-801</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
<rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
<rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<cloudera.repo.enabled>true</cloudera.repo.enabled>
<!-- Keeping the scala plugin version 4.3.0 for details
@@ -465,6 +494,9 @@
<spark.version>${spark332cdh.version}</spark.version>
<spark.test.version>${spark332cdh.version}</spark.test.version>
<parquet.hadoop.version>1.10.99.7.1.9.0-387</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-21x</rapids.delta.artifactId1>
<rapids.delta.artifactId2>rapids-4-spark-delta-22x</rapids.delta.artifactId2>
<rapids.delta.artifactId3>rapids-4-spark-delta-23x</rapids.delta.artifactId3>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<cloudera.repo.enabled>true</cloudera.repo.enabled>
<!-- Keeping the scala plugin version 4.3.0 for details
@@ -495,6 +527,7 @@
<hadoop.client.version>3.3.1</hadoop.client.version>
<rat.consoleOutput>true</rat.consoleOutput>
<parquet.hadoop.version>1.12.0</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-${spark.version.classifier}</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
</properties>
<modules>
@@ -519,6 +552,7 @@
<hadoop.client.version>3.3.1</hadoop.client.version>
<rat.consoleOutput>true</rat.consoleOutput>
<parquet.hadoop.version>1.12.0</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-${spark.version.classifier}</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
</properties>
<modules>
@@ -542,6 +576,7 @@
<hadoop.client.version>3.3.1</hadoop.client.version>
<rat.consoleOutput>true</rat.consoleOutput>
<parquet.hadoop.version>1.12.0</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-${spark.version.classifier}</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
</properties>
<modules>
@@ -562,6 +597,7 @@
<spark.version>${spark350.version}</spark.version>
<spark.test.version>${spark350.version}</spark.test.version>
<parquet.hadoop.version>1.13.1</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.7</slf4j.version>
</properties>
@@ -582,6 +618,7 @@
<spark.version>${spark351.version}</spark.version>
<spark.test.version>${spark351.version}</spark.test.version>
<parquet.hadoop.version>1.13.1</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.7</slf4j.version>
</properties>
@@ -602,6 +639,7 @@
<spark.version>${spark352.version}</spark.version>
<spark.test.version>${spark352.version}</spark.test.version>
<parquet.hadoop.version>1.13.1</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.7</slf4j.version>
</properties>
@@ -622,6 +660,7 @@
<spark.version>${spark353.version}</spark.version>
<spark.test.version>${spark353.version}</spark.test.version>
<parquet.hadoop.version>1.13.1</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.7</slf4j.version>
</properties>
@@ -643,6 +682,7 @@
<spark.version>${spark400.version}</spark.version>
<spark.test.version>${spark400.version}</spark.test.version>
<parquet.hadoop.version>1.13.1</parquet.hadoop.version>
<rapids.delta.artifactId1>rapids-4-spark-delta-stub</rapids.delta.artifactId1>
<iceberg.version>${spark330.iceberg.version}</iceberg.version>
<slf4j.version>2.0.7</slf4j.version>
</properties>
@@ -789,6 +829,15 @@
<rapids.shade.package>${spark.version.classifier}.com.nvidia.shaded.spark</rapids.shade.package>
<rapids.shim.jar.phase>none</rapids.shim.jar.phase>
<rapids.shim.jar.test.phase>package</rapids.shim.jar.test.phase>

<!--
Dummy value just to pass the Maven artifactId format check.
The Enforcer Plugin checks that each releaseXYZ profile overrides it with a proper value.
-->
<rapids.delta.artifactId1>DEFINE_FOR_EVERY_SPARK_SHIM</rapids.delta.artifactId1>

<rapids.delta.artifactId2>${rapids.delta.artifactId1}</rapids.delta.artifactId2>
<rapids.delta.artifactId3>${rapids.delta.artifactId1}</rapids.delta.artifactId3>
<test.include.tags/>
<rapids.shuffle.manager.override>true</rapids.shuffle.manager.override>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@@ -1509,6 +1558,11 @@ This will force full Scala code rebuild in downstream modules.
<message>Minimum Maven version 3.6.x required</message>
<version>[3.6,)</version>
</requireMavenVersion>
<requireProperty>
<regexMessage>At least one of rapids.delta.artifactId1, rapids.delta.artifactId2 ... is required in the POM profile "release${buildver}"</regexMessage>
<property>rapids.delta.artifactId1</property>
<regex>^rapids-4-spark-delta-.*</regex>
</requireProperty>
<!-- #if scala-2.12 -->
<requireJavaVersion>
<message>Only Java 8, 11, and 17 are supported!</message>