Temp fix for table name more than 3 parts
Signed-off-by: Louis Chu <[email protected]>
noCharger committed Nov 15, 2024
Parent: c37b3bd · Commit: 15d8d2a
Showing 3 changed files with 21 additions and 9 deletions.
@@ -588,11 +588,16 @@ class FlintSparkPPLBasicITSuite
     val t4Parts = "`spark_catalog`.default.`startTime:1,endTime:2`.`this(is:['a/name'])`"
     val t5Parts =
       "`spark_catalog`.default.`startTime:1,endTime:2`.`this(is:['sub/name'])`.`this(is:['sub-sub/name'])`"
+
     Seq(t7, t4Parts, t5Parts).foreach { table =>
       val ex = intercept[AnalysisException](sql(s"""
-           | source = $table| head 2
-           | """.stripMargin))
-      assert(ex.getMessage().contains("TABLE_OR_VIEW_NOT_FOUND"))
+           | source = $table| head 2
+           | """.stripMargin))
+      // Expected since V2SessionCatalog only supports 3 parts
+      assert(
+        ex.getMessage()
+          .contains(
+            "[REQUIRES_SINGLE_PART_NAMESPACE] spark_catalog requires a single-part namespace"))
     }

     Seq(t7, t4Parts, t5Parts).foreach { table =>
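The updated assertion matches how Spark's built-in session catalog reports this case: spark_catalog only resolves catalog.database.table, so a namespace with more than one part is rejected before any table lookup happens. A minimal standalone sketch of the same check, assuming a ScalaTest context with an active SparkSession named spark (both the session name and the sample identifier below are illustrative):

import org.apache.spark.sql.AnalysisException
import org.scalatest.Assertions.intercept

// A 4-part identifier under spark_catalog leaves a 2-part namespace
// ("some.db", "extra"), which V2SessionCatalog refuses to resolve.
val ex = intercept[AnalysisException] {
  spark.sql("SELECT * FROM spark_catalog.`some.db`.`extra`.`tbl`")
}
assert(ex.getMessage.contains("REQUIRES_SINGLE_PART_NAMESPACE"))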
@@ -151,8 +151,12 @@ public LogicalPlan visitRelation(Relation node, CatalystPlanContext context) {
         }
         //regular sql algebraic relations
         node.getQualifiedNames().forEach(q ->
-            // Resolving the qualifiedName which is composed of a datasource.schema.table
-            context.withRelation(new UnresolvedRelation(getTableIdentifier(q).nameParts(), CaseInsensitiveStringMap.empty(), false))
+            // TODO Do not support 4+ parts table identifier in future (may be reverted this PR in 0.8.0)
+            //  node.getQualifiedNames.getParts().size() > 3
+            // A Spark TableIdentifier should only contain 3 parts: tableName, databaseName and catalogName.
+            // If the qualifiedName has more than 3 parts,
+            // we merge all parts from 3 to last parts into the tableName as one whole
+            context.withRelation(new UnresolvedRelation(seq(q.getParts()), CaseInsensitiveStringMap.empty(), false))
         );
         return context.getPlan();
     }
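The comment kept in this change describes a merge strategy for identifiers with more than three parts: keep the first two parts and fold everything from the third part onward into a single table-name part. A rough sketch of what that merging would look like, as an illustrative helper only (mergeTailIntoTableName is not the repository's getTableIdentifier):

// Illustrative only: Seq("spark_catalog", "default", "a", "b") becomes
// Seq("spark_catalog", "default", "a.b"); shorter names pass through untouched.
def mergeTailIntoTableName(parts: Seq[String]): Seq[String] =
  if (parts.size <= 3) parts
  else parts.take(2) :+ parts.drop(2).mkString(".")

The line actually committed here forwards q.getParts() unchanged, so no merging happens at this layer and Spark's analyzer decides whether the catalog can resolve the full multi-part name; the test updates below reflect that.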
@@ -67,9 +67,11 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite

   test("test read table with backticks and more then 3 parts") {
     val context = new CatalystPlanContext
-    val logPlan =
+    val logPlan = {
       planTransformer.visit(plan(pplParser, "source=`t`.b.`c.d`.`e.f`"), context)
-    val table = UnresolvedRelation(Seq("t", "b", "c.d.e.f"))
+    }
+
+    val table = UnresolvedRelation(Seq("t", "b", "c.d", "e.f"))
     val expectedPlan = Project(Seq(UnresolvedStar(None)), table)
     comparePlans(expectedPlan, logPlan, false)
   }
@@ -100,13 +102,14 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
       planTransformer.visit(
         plan(
           pplParser,
-          "source=`_Basic`.default.`startTime:0,endTime:1`.`logGroups(logGroupIdentifier:['hello/service_log'])`"),
+          "source=`_Basic`.default.`startTime:0,endTime:1`.`123.logGroups(logGroupIdentifier:['hello.world/service_log'])`"),
         context)
     val table = UnresolvedRelation(
       Seq(
         "_Basic",
         "default",
-        "startTime:0,endTime:1.logGroups(logGroupIdentifier:['hello/service_log'])"))
+        "startTime:0,endTime:1",
+        "123.logGroups(logGroupIdentifier:['hello.world/service_log'])"))
     val expectedPlan = Project(Seq(UnresolvedStar(None)), table)
     comparePlans(expectedPlan, logPlan, false)
   }
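Both updated expectations hinge on backticks delimiting parts: dots outside backticks separate parts, while dots inside a backtick-quoted segment stay within that single part. PPL uses its own grammar, but Spark's Catalyst parser follows the same convention for quoted identifiers, so a rough cross-check (assuming spark-catalyst on the classpath) looks like:

import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

// "`t`.b.`c.d`.`e.f`" keeps the dots inside the quoted segments,
// yielding four parts rather than a merged three-part name.
val parts = CatalystSqlParser.parseMultipartIdentifier("`t`.b.`c.d`.`e.f`")
assert(parts == Seq("t", "b", "c.d", "e.f"))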