From c75e847fc6ad2143b82f545be4da8c512842eb6d Mon Sep 17 00:00:00 2001 From: Ilias Xenogiannis Date: Tue, 6 Aug 2024 18:09:19 +0300 Subject: [PATCH] Update pr_tests_spark.yml --- .github/workflows/pr_tests_spark.yml | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/.github/workflows/pr_tests_spark.yml b/.github/workflows/pr_tests_spark.yml index 3085d4ea..27b81cec 100644 --- a/.github/workflows/pr_tests_spark.yml +++ b/.github/workflows/pr_tests_spark.yml @@ -141,6 +141,8 @@ jobs: spark.hadoop.com.amazonaws.services.s3.enableV4 true spark.hadoop.fs.s3a.aws.credentials.provider org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider spark.hadoop.hive.metastore.client.factory.class com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory + spark.driver.extraClassPath /spark/jars/* + spark.executor.extraClassPath /spark/jars/* EOF - name: Create docker-compose.yml @@ -192,7 +194,7 @@ jobs: - spark-network thrift-server: build: . - command: ["/bin/bash", "-c", "sleep 30 && /spark/sbin/start-thriftserver.sh --master spark://spark-master:7077 --driver-memory 2g --executor-memory 3g --hiveconf hive.server2.thrift.port=10000 --hiveconf hive.server2.thrift.bind.host=0.0.0.0 --conf spark.sql.hive.thriftServer.async=true --conf spark.sql.hive.thriftServer.workerQueue.size=2000 --conf spark.sql.hive.thriftServer.maxWorkerThreads=100 --conf spark.sql.hive.thriftServer.minWorkerThreads=50 --jars /spark/jars/iceberg-spark-runtime-3.5_2.12-1.3.1.jar,/spark/jars/iceberg-aws-bundle-1.3.1.jar && tail -f /spark/logs/spark--org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-*.out"] + command: ["/bin/bash", "-c", "sleep 30 && /spark/sbin/start-thriftserver.sh --master spark://spark-master:7077 --driver-memory 2g --executor-memory 3g --hiveconf hive.server2.thrift.port=10000 --hiveconf hive.server2.thrift.bind.host=0.0.0.0 --conf spark.sql.hive.thriftServer.async=true --conf spark.sql.hive.thriftServer.workerQueue.size=2000 --conf spark.sql.hive.thriftServer.maxWorkerThreads=100 --conf spark.sql.hive.thriftServer.minWorkerThreads=50 --jars /spark/jars/iceberg-spark-runtime-3.5_2.12-1.3.1.jar,/spark/jars/iceberg-aws-bundle-1.3.1.jar --driver-class-path /spark/jars/* --executor-class-path /spark/jars/* && tail -f /spark/logs/spark--org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-*.out"] ports: - '10000:10000' depends_on: @@ -269,7 +271,7 @@ jobs: - name: Test ThriftServer connection with Beeline run: | docker-compose exec -T thrift-server bash -c ' - /spark/bin/beeline -u "jdbc:hive2://localhost:10000" -e "SHOW DATABASES;" + /spark/bin/beeline -u "jdbc:hive2://localhost:10000" -e "SHOW DATABASES;" --hiveconf hive.cli.errors.ignore=true ' - name: Verify AWS Credentials in Spark @@ -293,14 +295,4 @@ jobs: - name: 'Pre-test: Drop ci schemas' run: | - dbt run-operation post_ci_cleanup --target spark - - - name: Run tests - run: | - echo "Running DBT tests..." - ./.scripts/integration_tests.sh -d spark - echo "DBT tests completed." - - - name: 'Post-test: Drop ci schemas' - run: | - dbt run-operation post_ \ No newline at end of file + dbt run-operation post_ci_cleanup --target spark \ No newline at end of file