diff --git a/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystQueryPlanVisitor.java b/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystQueryPlanVisitor.java
index 1e31e9c6f..be00796a8 100644
--- a/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystQueryPlanVisitor.java
+++ b/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystQueryPlanVisitor.java
@@ -65,6 +65,7 @@
 import org.opensearch.sql.ast.tree.Relation;
 import org.opensearch.sql.ast.tree.Sort;
 import org.opensearch.sql.ast.tree.TopAggregation;
+import org.opensearch.sql.common.antlr.SyntaxCheckException;
 import org.opensearch.sql.ppl.utils.AggregatorTranslator;
 import org.opensearch.sql.ppl.utils.BuiltinFunctionTranslator;
 import org.opensearch.sql.ppl.utils.ComparatorTransformer;
@@ -238,7 +239,16 @@ public LogicalPlan visitAlias(Alias node, CatalystPlanContext context) {
 
     @Override
     public LogicalPlan visitProject(Project node, CatalystPlanContext context) {
-        if (!node.isExcluded()) {
+        if (node.isExcluded()) {
+            List<UnresolvedExpression> intersect = context.getProjectedFields().stream()
+                .filter(node.getProjectList()::contains)
+                .collect(Collectors.toList());
+            if (!intersect.isEmpty()) {
+                // Fields present in the parent projection, but they have been excluded in the child. For example,
+                // source=t | fields - A, B | fields A, B, C will throw "[Field A, Field B] can't be resolved"
+                throw new SyntaxCheckException(intersect + " can't be resolved");
+            }
+        } else {
             context.withProjectedFields(node.getProjectList());
         }
         LogicalPlan child = node.getChild().get(0).accept(this, context);
diff --git a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
index a5deac0f0..7b9fdcf1e 100644
--- a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
+++ b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
@@ -6,6 +6,7 @@
 package org.opensearch.flint.spark.ppl
 
 import org.opensearch.flint.spark.ppl.PlaneUtils.plan
+import org.opensearch.sql.common.antlr.SyntaxCheckException
 import org.opensearch.sql.ppl.{CatalystPlanContext, CatalystQueryPlanVisitor}
 import org.scalatest.matchers.should.Matchers
 
@@ -314,4 +315,33 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
     val expectedPlan = Project(Seq(UnresolvedStar(None)), planWithLimit)
     comparePlans(expectedPlan, logPlan, false)
   }
+
+  test("test fields + then - field list") {
+    val context = new CatalystPlanContext
+    val logPlan = planTransformer.visit(
+      plan(pplParser, "source=t | fields + A, B, C | fields - A, B"),
+      context)
+
+    val table = UnresolvedRelation(Seq("t"))
+    val projectABC = Project(
+      Seq(UnresolvedAttribute("A"), UnresolvedAttribute("B"), UnresolvedAttribute("C")),
+      table)
+    val dropList = Seq(UnresolvedAttribute("A"), UnresolvedAttribute("B"))
+    val dropAB = DataFrameDropColumns(dropList, projectABC)
+
+    val expectedPlan = Project(Seq(UnresolvedStar(None)), dropAB)
+    comparePlans(expectedPlan, logPlan, false)
+  }
+
+  test("test fields - then + field list") {
+    val context = new CatalystPlanContext
+    val thrown = intercept[SyntaxCheckException] {
+      planTransformer.visit(
+        plan(pplParser, "source=t | fields - A, B | fields + A, B, C"),
+        context)
+    }
+    assert(
+      thrown.getMessage
+        === "[Field(field=A, fieldArgs=[]), Field(field=B, fieldArgs=[])] can't be resolved")
+  }
 }