From 5ba67b51d42190a1ea773fbca84e99e3852a731f Mon Sep 17 00:00:00 2001
From: Peng Huo
Date: Wed, 15 May 2024 11:12:53 -0700
Subject: [PATCH] update format

Signed-off-by: Peng Huo
---
 .../scala/org/apache/spark/sql/flint/package.scala | 14 ++++++--------
 .../spark/FlintSparkSkippingIndexSqlITSuite.scala  |  1 +
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/flint-spark-integration/src/main/scala/org/apache/spark/sql/flint/package.scala b/flint-spark-integration/src/main/scala/org/apache/spark/sql/flint/package.scala
index cb07e2f86..d71bc5d12 100644
--- a/flint-spark-integration/src/main/scala/org/apache/spark/sql/flint/package.scala
+++ b/flint-spark-integration/src/main/scala/org/apache/spark/sql/flint/package.scala
@@ -147,24 +147,22 @@ package object flint {
   def resolveCatalogName(spark: SparkSession, catalog: CatalogPlugin): String = {
 
     /**
-     * Flint use FlintDelegatingSessionCatalog to customized catalog name.
-     * FlintDelegatingSessionCatalog name is spark_catalog which can not change. @see issue319
+     * Check if the provided catalog is a session catalog.
      */
     if (CatalogV2Util.isSessionCatalog(catalog)) {
       val defaultCatalog = spark.conf.get(DEFAULT_CATALOG)
-      // defaultCatalog name is as same as customized catalog name.
       if (spark.sessionState.catalogManager.isCatalogRegistered(defaultCatalog)) {
         defaultCatalog
       } else {
+
         /**
-         * It may happen when spark.sql.defaultCatalog is configured, but there's no implementation.
-         * For instance, spark.sql.defaultCatalog = "unknown"
+         * It may happen when spark.sql.defaultCatalog is configured, but there's no
+         * implementation. For instance, spark.sql.defaultCatalog = "unknown"
          */
-        throw new RuntimeException("Unknown catalog name")
+        throw new IllegalStateException(s"Unknown catalog name: $defaultCatalog")
       }
     } else {
-      // Works for customized non Spark V2SessionCatalog implementation.
+      // Return the name for non-session catalogs
      catalog.name()
     }
   }
diff --git a/integ-test/src/test/scala/org/opensearch/flint/spark/FlintSparkSkippingIndexSqlITSuite.scala b/integ-test/src/test/scala/org/opensearch/flint/spark/FlintSparkSkippingIndexSqlITSuite.scala
index d48768f89..947316980 100644
--- a/integ-test/src/test/scala/org/opensearch/flint/spark/FlintSparkSkippingIndexSqlITSuite.scala
+++ b/integ-test/src/test/scala/org/opensearch/flint/spark/FlintSparkSkippingIndexSqlITSuite.scala
@@ -330,6 +330,7 @@ class FlintSparkSkippingIndexSqlITSuite extends FlintSparkSuite {
       flint.describeIndex(s"flint_${catalogName}_sample_test1_skipping_index") shouldBe defined
       flint.describeIndex(s"flint_${catalogName}_sample_test2_skipping_index") shouldBe defined
     } finally {
+
       /**
        * TODO: REMOVE DROP TABLE when iceberg support CASCADE. More reading at
        * https://github.com/apache/iceberg/pull/7275.
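
Note on the first hunk: after this patch, resolveCatalogName resolves the session catalog
(spark_catalog) to the configured spark.sql.defaultCatalog name when that catalog is registered,
fails fast with an IllegalStateException that names the offending catalog otherwise, and returns
catalog.name() for any non-session catalog. The sketch below is a minimal, self-contained
illustration of that decision flow only; the Catalog trait, SessionCatalogName constant,
registeredCatalogs parameter, and CatalogNameResolutionSketch object are hypothetical stand-ins
for the Spark-internal pieces (CatalogPlugin, CatalogV2Util, the session catalog manager, and the
DEFAULT_CATALOG conf entry) that the real method in the org.apache.spark.sql.flint package object
uses.

object CatalogNameResolutionSketch {

  // Stand-in for Spark's CatalogPlugin; only the name matters for this sketch.
  trait Catalog { def name: String }

  // The delegating session catalog is always registered under this fixed name.
  val SessionCatalogName = "spark_catalog"

  def resolveCatalogName(
      catalog: Catalog,
      defaultCatalog: String, // value of spark.sql.defaultCatalog
      registeredCatalogs: Set[String] // catalogs known to the catalog manager
  ): String = {
    if (catalog.name == SessionCatalogName) {
      // Session catalog: prefer the configured default catalog name if it is registered.
      if (registeredCatalogs.contains(defaultCatalog)) {
        defaultCatalog
      } else {
        // spark.sql.defaultCatalog points at a catalog that has no implementation.
        throw new IllegalStateException(s"Unknown catalog name: $defaultCatalog")
      }
    } else {
      // Non-session catalogs already carry their own name.
      catalog.name
    }
  }

  def main(args: Array[String]): Unit = {
    val session = new Catalog { val name = "spark_catalog" } // e.g. FlintDelegatingSessionCatalog
    val custom = new Catalog { val name = "iceberg" } // a custom Spark V2 catalog

    val registered = Set("spark_catalog", "mycatalog")
    println(resolveCatalogName(session, "mycatalog", registered)) // prints: mycatalog
    println(resolveCatalogName(custom, "mycatalog", registered)) // prints: iceberg
    // resolveCatalogName(session, "unknown", registered) now throws
    // IllegalStateException("Unknown catalog name: unknown") instead of a bare RuntimeException.
  }
}

Switching from RuntimeException to IllegalStateException with the resolved name in the message is
the main behavioral refinement of the hunk: a misconfigured spark.sql.defaultCatalog (for instance
"unknown") now surfaces with the offending value instead of an anonymous failure.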