diff --git a/integ-test/src/integration/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLBasicITSuite.scala b/integ-test/src/integration/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLBasicITSuite.scala
index c1bb1cd24..34b25f5e8 100644
--- a/integ-test/src/integration/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLBasicITSuite.scala
+++ b/integ-test/src/integration/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLBasicITSuite.scala
@@ -588,12 +588,6 @@ class FlintSparkPPLBasicITSuite
     val t4Parts = "`spark_catalog`.default.`startTime:1,endTime:2`.`this(is:['a/name'])`"
     val t5Parts =
       "`spark_catalog`.default.`startTime:1,endTime:2`.`this(is:['sub/name'])`.`this(is:['sub-sub/name'])`"
-    Seq(t7, t4Parts, t5Parts).foreach { table =>
-      val ex = intercept[AnalysisException](sql(s"""
-           | source = $table| head 2
-           | """.stripMargin))
-      assert(ex.getMessage().contains("TABLE_OR_VIEW_NOT_FOUND"))
-    }
 
     Seq(t7, t4Parts, t5Parts).foreach { table =>
       val ex = intercept[AnalysisException](sql(s"""
diff --git a/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystQueryPlanVisitor.java b/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystQueryPlanVisitor.java
index debd37376..000c16b92 100644
--- a/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystQueryPlanVisitor.java
+++ b/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystQueryPlanVisitor.java
@@ -151,8 +151,12 @@ public LogicalPlan visitRelation(Relation node, CatalystPlanContext context) {
         }
         //regular sql algebraic relations
        node.getQualifiedNames().forEach(q ->
-            // Resolving the qualifiedName which is composed of a datasource.schema.table
-            context.withRelation(new UnresolvedRelation(getTableIdentifier(q).nameParts(), CaseInsensitiveStringMap.empty(), false))
+            // TODO Do not support 4+ part table identifiers in the future (this PR may be reverted in 0.8.0)
+            // node.getQualifiedNames().getParts().size() > 3
+            // A Spark TableIdentifier should only contain 3 parts: tableName, databaseName and catalogName.
+            // If the qualifiedName has more than 3 parts,
+            // we merge all parts from the 3rd to the last into the tableName as one whole
+            context.withRelation(new UnresolvedRelation(seq(q.getParts()), CaseInsensitiveStringMap.empty(), false))
        );
        return context.getPlan();
     }
diff --git a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
index f33b1578a..f380332e4 100644
--- a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
+++ b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
@@ -67,9 +67,11 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
 
   test("test read table with backticks and more then 3 parts") {
     val context = new CatalystPlanContext
-    val logPlan =
+    val logPlan = {
       planTransformer.visit(plan(pplParser, "source=`t`.b.`c.d`.`e.f`"), context)
-    val table = UnresolvedRelation(Seq("t", "b", "c.d.e.f"))
+    }
+
+    val table = UnresolvedRelation(Seq("t", "b", "c.d", "e.f"))
     val expectedPlan = Project(Seq(UnresolvedStar(None)), table)
     comparePlans(expectedPlan, logPlan, false)
   }
@@ -106,7 +108,8 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
       Seq(
         "_Basic",
         "default",
-        "startTime:0,endTime:1.logGroups(logGroupIdentifier:['hello/service_log'])"))
+        "startTime:0,endTime:1",
+        "logGroups(logGroupIdentifier:['hello/service_log'])"))
     val expectedPlan = Project(Seq(UnresolvedStar(None)), table)
     comparePlans(expectedPlan, logPlan, false)
   }
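Note: the sketch below is a minimal illustration, not part of the patch. It contrasts the old behaviour, where getTableIdentifier(q).nameParts() folded every part from the 3rd onward into a single table name, with the new behaviour, where seq(q.getParts()) forwards every part to UnresolvedRelation unchanged. The object name MultiPartRelationSketch and the mergeExtraParts helper are hypothetical; the expected Seq values are taken from the test changes above.

import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation

object MultiPartRelationSketch {
  def main(args: Array[String]): Unit = {
    // Name parts as produced by the PPL parser for: source=`t`.b.`c.d`.`e.f`
    val parts = Seq("t", "b", "c.d", "e.f")

    // Old behaviour (approximation of getTableIdentifier(q).nameParts()):
    // parts 3..n were merged into a single table name, yielding a 3-part identifier.
    def mergeExtraParts(p: Seq[String]): Seq[String] =
      if (p.size <= 3) p else p.take(2) :+ p.drop(2).mkString(".")

    val oldRelation = UnresolvedRelation(mergeExtraParts(parts)) // Seq("t", "b", "c.d.e.f")

    // New behaviour (seq(q.getParts())): all parts are forwarded as-is,
    // leaving multi-part identifier resolution to Spark's analyzer.
    val newRelation = UnresolvedRelation(parts)                  // Seq("t", "b", "c.d", "e.f")

    println(oldRelation.multipartIdentifier) // List(t, b, c.d.e.f)
    println(newRelation.multipartIdentifier) // List(t, b, c.d, e.f)
  }
}

Because the parts are now passed through unresolved, a 4+ part name that is not registered in any catalog still fails analysis, which is consistent with the integration test above asserting TABLE_OR_VIEW_NOT_FOUND for t7, t4Parts and t5Parts.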