diff --git a/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLAggregationsITSuite.scala b/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLAggregationsITSuite.scala
index 745c354eb..3bc227e7d 100644
--- a/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLAggregationsITSuite.scala
+++ b/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLAggregationsITSuite.scala
@@ -46,7 +46,6 @@ class FlintSparkPPLAggregationsITSuite
// Define the expected results
val expectedResults: Array[Row] = Array(Row(36.25))
- // Compare the results
// Compare the results
implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, Double](_.getAs[Double](0))
assert(results.sorted.sameElements(expectedResults.sorted))
@@ -76,7 +75,6 @@ class FlintSparkPPLAggregationsITSuite
// Define the expected results
val expectedResults: Array[Row] = Array(Row(25))
- // Compare the results
// Compare the results
implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, Double](_.getAs[Double](0))
assert(results.sorted.sameElements(expectedResults.sorted))
diff --git a/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLBasicITSuite.scala b/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLBasicITSuite.scala
index ba925339e..fc77b7156 100644
--- a/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLBasicITSuite.scala
+++ b/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLBasicITSuite.scala
@@ -37,32 +37,34 @@ class FlintSparkPPLBasicITSuite
}
test("create ppl simple query test") {
- val frame = sql(s"""
- | source = $testTable
- | """.stripMargin)
-
- // Retrieve the results
- val results: Array[Row] = frame.collect()
- // Define the expected results
- val expectedResults: Array[Row] = Array(
- Row("Jake", 70, "California", "USA", 2023, 4),
- Row("Hello", 30, "New York", "USA", 2023, 4),
- Row("John", 25, "Ontario", "Canada", 2023, 4),
- Row("Jane", 20, "Quebec", "Canada", 2023, 4))
- // Compare the results
- // Compare the results
- implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, String](_.getAs[String](0))
- assert(results.sorted.sameElements(expectedResults.sorted))
-
- // Retrieve the logical plan
- val logicalPlan: LogicalPlan = frame.queryExecution.logical
- // Define the expected logical plan
- val expectedPlan: LogicalPlan =
- Project(
- Seq(UnresolvedStar(None)),
- UnresolvedRelation(Seq("spark_catalog", "default", "flint_ppl_test")))
- // Compare the two plans
- assert(expectedPlan === logicalPlan)
+ val testTableQuoted = "`spark_catalog`.`default`.`flint_ppl_test`"
+ Seq(testTable, testTableQuoted).foreach { table =>
+ val frame = sql(s"""
+ | source = $table
+ | """.stripMargin)
+
+ // Retrieve the results
+ val results: Array[Row] = frame.collect()
+ // Define the expected results
+ val expectedResults: Array[Row] = Array(
+ Row("Jake", 70, "California", "USA", 2023, 4),
+ Row("Hello", 30, "New York", "USA", 2023, 4),
+ Row("John", 25, "Ontario", "Canada", 2023, 4),
+ Row("Jane", 20, "Quebec", "Canada", 2023, 4))
+ // Compare the results
+ implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, String](_.getAs[String](0))
+ assert(results.sorted.sameElements(expectedResults.sorted))
+
+ // Retrieve the logical plan
+ val logicalPlan: LogicalPlan = frame.queryExecution.logical
+ // Define the expected logical plan
+ val expectedPlan: LogicalPlan =
+ Project(
+ Seq(UnresolvedStar(None)),
+ UnresolvedRelation(Seq("spark_catalog", "default", "flint_ppl_test")))
+ // Compare the two plans
+ assert(expectedPlan === logicalPlan)
+ }
}
test("create ppl simple query with head (limit) 3 test") {
@@ -90,7 +92,6 @@ class FlintSparkPPLBasicITSuite
| source = $testTable| sort name | head 2
| """.stripMargin)
- // Retrieve the results
// Retrieve the results
val results: Array[Row] = frame.collect()
assert(results.length == 2)
@@ -187,27 +188,29 @@ class FlintSparkPPLBasicITSuite
}
test("create ppl simple query two with fields and head (limit) with sorting test") {
- val frame = sql(s"""
- | source = $testTable| fields name, age | head 1 | sort age
- | """.stripMargin)
-
- // Retrieve the results
- val results: Array[Row] = frame.collect()
- assert(results.length == 1)
-
- // Retrieve the logical plan
- val logicalPlan: LogicalPlan = frame.queryExecution.logical
- val project = Project(
- Seq(UnresolvedAttribute("name"), UnresolvedAttribute("age")),
- UnresolvedRelation(Seq("spark_catalog", "default", "flint_ppl_test")))
- // Define the expected logical plan
- val limitPlan: LogicalPlan = Limit(Literal(1), project)
- val sortedPlan: LogicalPlan =
- Sort(Seq(SortOrder(UnresolvedAttribute("age"), Ascending)), global = true, limitPlan)
-
- val expectedPlan = Project(Seq(UnresolvedStar(None)), sortedPlan);
- // Compare the two plans
- assert(compareByString(expectedPlan) === compareByString(logicalPlan))
+ Seq(("name, age", "age"), ("`name`, `age`", "`age`")).foreach {
+ case (selectFields, sortField) =>
+ val frame = sql(s"""
+ | source = $testTable| fields $selectFields | head 1 | sort $sortField
+ | """.stripMargin)
+
+ // Retrieve the results
+ val results: Array[Row] = frame.collect()
+ assert(results.length == 1)
+
+ // Retrieve the logical plan
+ val logicalPlan: LogicalPlan = frame.queryExecution.logical
+ val project = Project(
+ Seq(UnresolvedAttribute("name"), UnresolvedAttribute("age")),
+ UnresolvedRelation(Seq("spark_catalog", "default", "flint_ppl_test")))
+ // Define the expected logical plan
+ val limitPlan: LogicalPlan = Limit(Literal(1), project)
+ val sortedPlan: LogicalPlan =
+ Sort(Seq(SortOrder(UnresolvedAttribute("age"), Ascending)), global = true, limitPlan)
+
+ val expectedPlan = Project(Seq(UnresolvedStar(None)), sortedPlan);
+ // Compare the two plans
+ assert(compareByString(expectedPlan) === compareByString(logicalPlan))
+ }
}
-
}
diff --git a/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLFiltersITSuite.scala b/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLFiltersITSuite.scala
index 236c216cf..9a21bb45a 100644
--- a/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLFiltersITSuite.scala
+++ b/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLFiltersITSuite.scala
@@ -45,7 +45,6 @@ class FlintSparkPPLFiltersITSuite
// Define the expected results
val expectedResults: Array[Row] = Array(Row("John", 25))
// Compare the results
- // Compare the results
implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, String](_.getAs[String](0))
assert(results.sorted.sameElements(expectedResults.sorted))
@@ -72,7 +71,6 @@ class FlintSparkPPLFiltersITSuite
// Define the expected results
val expectedResults: Array[Row] = Array(Row("John", 25), Row("Jane", 20))
// Compare the results
- // Compare the results
implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, String](_.getAs[String](0))
assert(results.sorted.sameElements(expectedResults.sorted))
@@ -182,7 +180,6 @@ class FlintSparkPPLFiltersITSuite
// Define the expected results
val expectedResults: Array[Row] = Array(Row("Jake", 70), Row("Hello", 30))
// Compare the results
- // Compare the results
implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, String](_.getAs[String](0))
assert(results.sorted.sameElements(expectedResults.sorted))
@@ -209,7 +206,6 @@ class FlintSparkPPLFiltersITSuite
// Define the expected results
val expectedResults: Array[Row] = Array(Row("Hello", 30), Row("John", 25), Row("Jane", 20))
// Compare the results
- // Compare the results
implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, String](_.getAs[String](0))
assert(results.sorted.sameElements(expectedResults.sorted))
@@ -287,7 +283,6 @@ class FlintSparkPPLFiltersITSuite
// Define the expected results
val expectedResults: Array[Row] = Array(Row("Hello", 30), Row("John", 25), Row("Jane", 20))
- // Compare the results
// Compare the results
implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, String](_.getAs[String](0))
assert(results.sorted.sameElements(expectedResults.sorted))
diff --git a/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLTimeWindowITSuite.scala b/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLTimeWindowITSuite.scala
index fbae03fff..08be40ac2 100644
--- a/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLTimeWindowITSuite.scala
+++ b/integ-test/src/test/scala/org/opensearch/flint/spark/ppl/FlintSparkPPLTimeWindowITSuite.scala
@@ -25,7 +25,6 @@ class FlintSparkPPLTimeWindowITSuite
override def beforeAll(): Unit = {
super.beforeAll()
// Create test table
- // Update table creation
createTimeSeriesTransactionTable(testTable)
}
@@ -39,16 +38,6 @@ class FlintSparkPPLTimeWindowITSuite
}
test("create ppl query count sales by days window test") {
- /*
- val dataFrame = spark.read.table(testTable)
- val query = dataFrame
- .groupBy(
- window(
- col("transactionDate"), " 1 days")
- ).agg(sum(col("productsAmount")))
-
- query.show(false)
- */
val frame = sql(s"""
| source = $testTable| stats sum(productsAmount) by span(transactionDate, 1d) as age_date
| """.stripMargin)
diff --git a/ppl-spark-integration/src/main/java/org/opensearch/sql/common/utils/StringUtils.java b/ppl-spark-integration/src/main/java/org/opensearch/sql/common/utils/StringUtils.java
new file mode 100644
index 000000000..074c5a48e
--- /dev/null
+++ b/ppl-spark-integration/src/main/java/org/opensearch/sql/common/utils/StringUtils.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.sql.common.utils;
+
+import com.google.common.base.Strings;
+
+import java.util.IllegalFormatException;
+import java.util.Locale;
+
+public class StringUtils {
+ /**
+ * Unquote Identifier which has " or ' as mark. Strings quoted by ' or " with two of these quotes
+ * appearing next to each other in the quote acts as an escape
+ * Example: 'Test''s' will result in 'Test's', similar with those single quotes being replaced
+ * with double quote. Supports escaping quotes (single/double) and escape characters using the `\`
+ * characters.
+ *
+ * @param text string
+ * @return An unquoted string whose outer pair of (single/double) quotes have been removed
+ */
+ public static String unquoteText(String text) {
+ if (text.length() < 2) {
+ return text;
+ }
+
+ char enclosingQuote = 0;
+ char firstChar = text.charAt(0);
+ char lastChar = text.charAt(text.length() - 1);
+
+ if (firstChar != lastChar) {
+ return text;
+ }
+
+ if (firstChar == '`') {
+ return text.substring(1, text.length() - 1);
+ }
+
+ if (firstChar == lastChar && (firstChar == '\'' || firstChar == '"')) {
+ enclosingQuote = firstChar;
+ } else {
+ return text;
+ }
+
+ char currentChar;
+ char nextChar;
+
+ StringBuilder textSB = new StringBuilder();
+
+ // Ignores first and last character as they are the quotes that should be removed
+ for (int chIndex = 1; chIndex < text.length() - 1; chIndex++) {
+ currentChar = text.charAt(chIndex);
+ nextChar = text.charAt(chIndex + 1);
+
+ if ((currentChar == '\\' && (nextChar == '"' || nextChar == '\\' || nextChar == '\''))
+ || (currentChar == nextChar && currentChar == enclosingQuote)) {
+ chIndex++;
+ currentChar = nextChar;
+ }
+ textSB.append(currentChar);
+ }
+ return textSB.toString();
+ }
+
+ /**
+ * Unquote Identifier which has ` as mark.
+ *
+ * @param identifier identifier that possibly enclosed by backticks
+ * @return An unquoted string whose outer pair of backticks have been removed
+ */
+ public static String unquoteIdentifier(String identifier) {
+ if (isQuoted(identifier, "`")) {
+ return identifier.substring(1, identifier.length() - 1);
+ } else {
+ return identifier;
+ }
+ }
+
+ /**
+ * Returns a formatted string using the specified format string and arguments, as well as the
+ * {@link Locale#ROOT} locale.
+ *
+ * @param format format string
+ * @param args arguments referenced by the format specifiers in the format string
+ * @return A formatted string
+ * @throws IllegalFormatException If a format string contains an illegal syntax, a format
+ * specifier that is incompatible with the given arguments, insufficient arguments given the
+ * format string, or other illegal conditions.
+ * @see String#format(Locale, String, Object...)
+ */
+ public static String format(final String format, Object... args) {
+ return String.format(Locale.ROOT, format, args);
+ }
+
+ private static boolean isQuoted(String text, String mark) {
+ return !Strings.isNullOrEmpty(text) && text.startsWith(mark) && text.endsWith(mark);
+ }
+}
diff --git a/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java b/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java
index 3344cd7c2..b265047b5 100644
--- a/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java
+++ b/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java
@@ -32,6 +32,7 @@
import org.opensearch.sql.ast.expression.UnresolvedArgument;
import org.opensearch.sql.ast.expression.UnresolvedExpression;
import org.opensearch.sql.ast.expression.Xor;
+import org.opensearch.sql.common.utils.StringUtils;
import org.opensearch.sql.ppl.utils.ArgumentFactory;
import java.util.Arrays;
@@ -322,7 +323,7 @@ public UnresolvedExpression visitIntervalLiteral(OpenSearchPPLParser.IntervalLit
@Override
public UnresolvedExpression visitStringLiteral(OpenSearchPPLParser.StringLiteralContext ctx) {
- return new Literal(ctx.getText(), DataType.STRING);
+ return new Literal(StringUtils.unquoteText(ctx.getText()), DataType.STRING);
}
@Override
@@ -349,7 +350,7 @@ public UnresolvedExpression visitBySpanClause(OpenSearchPPLParser.BySpanClauseCo
String name = ctx.spanClause().getText();
return ctx.alias != null
? new Alias(
- name, visit(ctx.spanClause()), ctx.alias.getText())
+ name, visit(ctx.spanClause()), StringUtils.unquoteIdentifier(ctx.alias.getText()))
: new Alias(name, visit(ctx.spanClause()));
}
@@ -363,6 +364,7 @@ private QualifiedName visitIdentifiers(List<? extends ParserRuleContext> ctx) {
return new QualifiedName(
ctx.stream()
.map(RuleContext::getText)
+ .map(StringUtils::unquoteIdentifier)
.collect(Collectors.toList()));
}
@@ -373,10 +375,10 @@ private List<UnresolvedArgument> singleFieldRelevanceArguments(
ImmutableList.Builder builder = ImmutableList.builder();
builder.add(
new UnresolvedArgument(
- "field", new QualifiedName(ctx.field.getText())));
+ "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
builder.add(
new UnresolvedArgument(
- "query", new Literal(ctx.query.getText(), DataType.STRING)));
+ "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
ctx.relevanceArg()
.forEach(
v ->
@@ -384,7 +386,7 @@ private List<UnresolvedArgument> singleFieldRelevanceArguments(
new UnresolvedArgument(
v.relevanceArgName().getText().toLowerCase(),
new Literal(
- v.relevanceArgValue().getText(),
+ StringUtils.unquoteText(v.relevanceArgValue().getText()),
DataType.STRING))));
return builder.build();
}
diff --git a/ppl-spark-integration/src/test/java/org/opensearch/sql/common/utils/StringUtilsTest.java b/ppl-spark-integration/src/test/java/org/opensearch/sql/common/utils/StringUtilsTest.java
new file mode 100644
index 000000000..4a942d067
--- /dev/null
+++ b/ppl-spark-integration/src/test/java/org/opensearch/sql/common/utils/StringUtilsTest.java
@@ -0,0 +1,54 @@
+package org.opensearch.sql.common.utils;
+
+import static org.junit.Assert.assertEquals;
+import static org.opensearch.sql.common.utils.StringUtils.unquoteIdentifier;
+import static org.opensearch.sql.common.utils.StringUtils.unquoteText;
+
+import org.junit.Test;
+
+public class StringUtilsTest {
+ @Test
+ public void unquoteIdentifierTest() {
+ assertEquals("test", unquoteIdentifier("test"));
+ assertEquals("test", unquoteIdentifier("`test`"));
+
+ assertEquals("\"test\"", unquoteIdentifier("\"test\""));
+ assertEquals("\'test\'", unquoteIdentifier("\'test\'"));
+ }
+
+ @Test
+ public void unquoteTextTest() {
+ assertEquals("test", unquoteText("test"));
+ assertEquals("test", unquoteText("'test'"));
+
+ assertEquals("test'", unquoteText("'test'''"));
+ assertEquals("test\"", unquoteText("\"test\"\"\""));
+
+ assertEquals("te``st", unquoteText("'te``st'"));
+ assertEquals("te``st", unquoteText("\"te``st\""));
+
+ assertEquals("te'st", unquoteText("'te''st'"));
+ assertEquals("te''st", unquoteText("\"te''st\""));
+
+ assertEquals("te\"\"st", unquoteText("'te\"\"st'"));
+ assertEquals("te\"st", unquoteText("\"te\"\"st\""));
+
+ assertEquals("''", unquoteText("''''''"));
+ assertEquals("\"\"", unquoteText("\"\"\"\"\"\""));
+
+ assertEquals("test'", unquoteText("'test''"));
+
+ assertEquals("", unquoteText(""));
+ assertEquals("'", unquoteText("'"));
+ assertEquals("\"", unquoteText("\""));
+
+ assertEquals("hello'", unquoteText("'hello''"));
+ assertEquals("don't", unquoteText("'don't'"));
+ assertEquals("don\"t", unquoteText("\"don\"t\""));
+
+ assertEquals("hel\\lo'", unquoteText("'hel\\lo''"));
+ assertEquals("hel'lo", unquoteText("'hel'lo'"));
+ assertEquals("hel\"lo", unquoteText("\"hel\"lo\""));
+ assertEquals("hel\\'\\lo", unquoteText("'hel\\\\''\\\\lo'"));
+ }
+}
diff --git a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanAggregationQueriesTranslatorTestSuite.scala b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanAggregationQueriesTranslatorTestSuite.scala
index 87f7e5b28..1fdd20c74 100644
--- a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanAggregationQueriesTranslatorTestSuite.scala
+++ b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanAggregationQueriesTranslatorTestSuite.scala
@@ -20,14 +20,14 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
with LogicalPlanTestUtils
with Matchers {
- private val planTrnasformer = new CatalystQueryPlanVisitor()
+ private val planTransformer = new CatalystQueryPlanVisitor()
private val pplParser = new PPLSyntaxParser()
test("test average price ") {
// if successful build ppl logical plan and translate to catalyst logical plan
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source = table | stats avg(price) ", false), context)
+ planTransformer.visit(plan(pplParser, "source = table | stats avg(price) ", false), context)
// SQL: SELECT avg(price) as avg_price FROM table
val star = Seq(UnresolvedStar(None))
@@ -44,7 +44,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
ignore("test average price with Alias") {
// if successful build ppl logical plan and translate to catalyst logical plan
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(pplParser, "source = table | stats avg(price) as avg_price", false),
context)
// SQL: SELECT avg(price) as avg_price FROM table
@@ -63,7 +63,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
test("test average price group by product ") {
// if successful build ppl logical plan and translate to catalyst logical plan
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(pplParser, "source = table | stats avg(price) by product", false),
context)
// SQL: SELECT product, AVG(price) AS avg_price FROM table GROUP BY product
@@ -87,7 +87,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
test("test average price group by product and filter") {
// if successful build ppl logical plan and translate to catalyst logical plan
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(pplParser, "source = table country ='USA' | stats avg(price) by product", false),
context)
// SQL: SELECT product, AVG(price) AS avg_price FROM table GROUP BY product
@@ -115,7 +115,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
test("test average price group by product and filter sorted") {
// if successful build ppl logical plan and translate to catalyst logical plan
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(
pplParser,
"source = table country ='USA' | stats avg(price) by product | sort product",
@@ -148,7 +148,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
}
test("create ppl simple avg age by span of interval of 10 years query test ") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(pplParser, "source = table | stats avg(age) by span(age, 10) as age_span", false),
context)
// Define the expected logical plan
@@ -169,7 +169,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
test("create ppl simple avg age by span of interval of 10 years query with sort test ") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(
pplParser,
"source = table | stats avg(age) by span(age, 10) as age_span | sort age",
@@ -195,7 +195,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
test("create ppl simple avg age by span of interval of 10 years by country query test ") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(
pplParser,
"source = table | stats avg(age) by span(age, 10) as age_span, country",
@@ -223,7 +223,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
}
test("create ppl query count sales by weeks window and productId with sorting test") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(
pplParser,
"source = table | stats sum(productsAmount) by span(transactionDate, 1w) as age_date | sort age_date",
@@ -262,7 +262,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
test("create ppl query count sales by days window and productId with sorting test") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(
pplParser,
"source = table | stats sum(productsAmount) by span(transactionDate, 1d) as age_date, productId | sort age_date",
@@ -300,7 +300,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
}
test("create ppl query count status amount by day window and group by status test") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(
pplParser,
"source = table | stats sum(status) by span(@timestamp, 1d) as status_count_by_day, status | head 100",
@@ -336,7 +336,7 @@ class PPLLogicalPlanAggregationQueriesTranslatorTestSuite
test(
"create ppl query count only error (status >= 400) status amount by day window and group by status test") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(
pplParser,
"source = table | where status >= 400 | stats sum(status) by span(@timestamp, 1d) as status_count_by_day, status | head 100",
diff --git a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
index 1b04189db..bc31691d0 100644
--- a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
+++ b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanBasicQueriesTranslatorTestSuite.scala
@@ -21,24 +21,33 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
with LogicalPlanTestUtils
with Matchers {
- private val planTrnasformer = new CatalystQueryPlanVisitor()
+ private val planTransformer = new CatalystQueryPlanVisitor()
private val pplParser = new PPLSyntaxParser()
test("test simple search with only one table and no explicit fields (defaults to all fields)") {
// if successful build ppl logical plan and translate to catalyst logical plan
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(plan(pplParser, "source=table", false), context)
+ val logPlan = planTransformer.visit(plan(pplParser, "source=table", false), context)
val projectList: Seq[NamedExpression] = Seq(UnresolvedStar(None))
val expectedPlan = Project(projectList, UnresolvedRelation(Seq("table")))
assertEquals(expectedPlan, logPlan)
+ }
+
+ test("test simple search with escaped table name") {
+ // if successful build ppl logical plan and translate to catalyst logical plan
+ val context = new CatalystPlanContext
+ val logPlan = planTransformer.visit(plan(pplParser, "source=`table`", false), context)
+ val projectList: Seq[NamedExpression] = Seq(UnresolvedStar(None))
+ val expectedPlan = Project(projectList, UnresolvedRelation(Seq("table")))
+ assertEquals(expectedPlan, logPlan)
}
test("test simple search with schema.table and no explicit fields (defaults to all fields)") {
// if successful build ppl logical plan and translate to catalyst logical plan
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(plan(pplParser, "source=schema.table", false), context)
+ val logPlan = planTransformer.visit(plan(pplParser, "source=schema.table", false), context)
val projectList: Seq[NamedExpression] = Seq(UnresolvedStar(None))
val expectedPlan = Project(projectList, UnresolvedRelation(Seq("schema", "table")))
@@ -49,7 +58,7 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
test("test simple search with schema.table and one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=schema.table | fields A", false), context)
+ planTransformer.visit(plan(pplParser, "source=schema.table | fields A", false), context)
val projectList: Seq[NamedExpression] = Seq(UnresolvedAttribute("A"))
val expectedPlan = Project(projectList, UnresolvedRelation(Seq("schema", "table")))
@@ -59,7 +68,7 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
test("test simple search with only one table with one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=table | fields A", false), context)
+ planTransformer.visit(plan(pplParser, "source=table | fields A", false), context)
val projectList: Seq[NamedExpression] = Seq(UnresolvedAttribute("A"))
val expectedPlan = Project(projectList, UnresolvedRelation(Seq("table")))
@@ -68,7 +77,7 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
test("test simple search with only one table with two fields projected") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(plan(pplParser, "source=t | fields A, B", false), context)
+ val logPlan = planTransformer.visit(plan(pplParser, "source=t | fields A, B", false), context)
val table = UnresolvedRelation(Seq("t"))
val projectList = Seq(UnresolvedAttribute("A"), UnresolvedAttribute("B"))
@@ -79,7 +88,7 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
test("test simple search with one table with two fields projected sorted by one field") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t | sort A | fields A, B", false), context)
+ planTransformer.visit(plan(pplParser, "source=t | sort A | fields A, B", false), context)
val table = UnresolvedRelation(Seq("t"))
val projectList = Seq(UnresolvedAttribute("A"), UnresolvedAttribute("B"))
@@ -95,7 +104,7 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
"test simple search with only one table with two fields with head (limit ) command projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t | fields A, B | head 5", false), context)
+ planTransformer.visit(plan(pplParser, "source=t | fields A, B | head 5", false), context)
val table = UnresolvedRelation(Seq("t"))
val projectList = Seq(UnresolvedAttribute("A"), UnresolvedAttribute("B"))
@@ -108,7 +117,7 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
test(
"test simple search with only one table with two fields with head (limit ) command projected sorted by one descending field") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(pplParser, "source=t | sort - A | fields A, B | head 5", false),
context)
@@ -127,7 +136,7 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
test(
"Search multiple tables - translated into union call - fields expected to exist in both tables ") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(pplParser, "search source = table1, table2 | fields A, B", false),
context)
@@ -149,7 +158,7 @@ class PPLLogicalPlanBasicQueriesTranslatorTestSuite
test("Search multiple tables - translated into union call with fields") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source = table1, table2 ", false), context)
+ planTransformer.visit(plan(pplParser, "source = table1, table2 ", false), context)
val table1 = UnresolvedRelation(Seq("table1"))
val table2 = UnresolvedRelation(Seq("table2"))
diff --git a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanFiltersTranslatorTestSuite.scala b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanFiltersTranslatorTestSuite.scala
index fe9485f4b..27dd972fc 100644
--- a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanFiltersTranslatorTestSuite.scala
+++ b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanFiltersTranslatorTestSuite.scala
@@ -28,12 +28,12 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
with LogicalPlanTestUtils
with Matchers {
- private val planTrnasformer = new CatalystQueryPlanVisitor()
+ private val planTransformer = new CatalystQueryPlanVisitor()
private val pplParser = new PPLSyntaxParser()
test("test simple search with only one table with one field literal filtered ") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(plan(pplParser, "source=t a = 1 ", false), context)
+ val logPlan = planTransformer.visit(plan(pplParser, "source=t a = 1 ", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterExpr = EqualTo(UnresolvedAttribute("a"), Literal(1))
@@ -46,7 +46,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
test("test simple search with only one table with two field with 'and' filtered ") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t a = 1 AND b != 2", false), context)
+ planTransformer.visit(plan(pplParser, "source=t a = 1 AND b != 2", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterAExpr = EqualTo(UnresolvedAttribute("a"), Literal(1))
@@ -60,7 +60,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
test("test simple search with only one table with two field with 'or' filtered ") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t a = 1 OR b != 2", false), context)
+ planTransformer.visit(plan(pplParser, "source=t a = 1 OR b != 2", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterAExpr = EqualTo(UnresolvedAttribute("a"), Literal(1))
@@ -74,7 +74,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
test("test simple search with only one table with two field with 'not' filtered ") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t not a = 1 or b != 2 ", false), context)
+ planTransformer.visit(plan(pplParser, "source=t not a = 1 or b != 2 ", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterAExpr = Not(EqualTo(UnresolvedAttribute("a"), Literal(1)))
@@ -89,7 +89,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
"test simple search with only one table with one field literal int equality filtered and one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t a = 1 | fields a", false), context)
+ planTransformer.visit(plan(pplParser, "source=t a = 1 | fields a", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterExpr = EqualTo(UnresolvedAttribute("a"), Literal(1))
@@ -103,7 +103,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
"test simple search with only one table with one field literal string equality filtered and one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, """source=t a = 'hi' | fields a""", false), context)
+ planTransformer.visit(plan(pplParser, """source=t a = 'hi' | fields a""", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterExpr = EqualTo(UnresolvedAttribute("a"), Literal("hi"))
@@ -117,7 +117,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
test(
"test simple search with only one table with one field literal string none equality filtered and one field projected") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(pplParser, """source=t a != 'bye' | fields a""", false),
context)
@@ -134,7 +134,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
"test simple search with only one table with one field greater than filtered and one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t a > 1 | fields a", false), context)
+ planTransformer.visit(plan(pplParser, "source=t a > 1 | fields a", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterExpr = GreaterThan(UnresolvedAttribute("a"), Literal(1))
@@ -148,7 +148,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
"test simple search with only one table with one field greater than equal filtered and one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t a >= 1 | fields a", false), context)
+ planTransformer.visit(plan(pplParser, "source=t a >= 1 | fields a", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterExpr = GreaterThanOrEqual(UnresolvedAttribute("a"), Literal(1))
@@ -162,7 +162,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
"test simple search with only one table with one field lower than filtered and one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t a < 1 | fields a", false), context)
+ planTransformer.visit(plan(pplParser, "source=t a < 1 | fields a", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterExpr = LessThan(UnresolvedAttribute("a"), Literal(1))
@@ -176,7 +176,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
"test simple search with only one table with one field lower than equal filtered and one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t a <= 1 | fields a", false), context)
+ planTransformer.visit(plan(pplParser, "source=t a <= 1 | fields a", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterExpr = LessThanOrEqual(UnresolvedAttribute("a"), Literal(1))
@@ -190,7 +190,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
"test simple search with only one table with one field not equal filtered and one field projected") {
val context = new CatalystPlanContext
val logPlan =
- planTrnasformer.visit(plan(pplParser, "source=t a != 1 | fields a", false), context)
+ planTransformer.visit(plan(pplParser, "source=t a != 1 | fields a", false), context)
val table = UnresolvedRelation(Seq("t"))
val filterExpr = Not(EqualTo(UnresolvedAttribute("a"), Literal(1)))
@@ -203,7 +203,7 @@ class PPLLogicalPlanFiltersTranslatorTestSuite
test(
"test simple search with only one table with one field not equal filtered and one field projected and sorted") {
val context = new CatalystPlanContext
- val logPlan = planTrnasformer.visit(
+ val logPlan = planTransformer.visit(
plan(pplParser, "source=t a != 1 | fields a | sort a", false),
context)