diff --git a/docs/ppl-lang/PPL-Example-Commands.md b/docs/ppl-lang/PPL-Example-Commands.md
index e780f688d..409b128c9 100644
--- a/docs/ppl-lang/PPL-Example-Commands.md
+++ b/docs/ppl-lang/PPL-Example-Commands.md
@@ -58,17 +58,10 @@ _- **Limitation: new field added by eval command with a function cannot be dropp
 - `source = table | where a not in (1, 2, 3) | fields a,b,c`
 - `source = table | where a between 1 and 4` - Note: This returns a >= 1 and a <= 4, i.e. [1, 4]
 - `source = table | where b not between '2024-09-10' and '2025-09-10'` - Note: This returns b >= '2024-09-10' and b <= '2025-09-10'
-- `source = table | where cidrmatch(ip, '192.169.1.0/24')` 
+- `source = table | where cidrmatch(ip, '192.169.1.0/24')`
 - `source = table | where cidrmatch(ipv6, '2003:db8::/32')`
 - `source = table | trendline sma(2, temperature) as temp_trend`
 
-#### **IP related queries**
-[See additional command details](functions/ppl-ip.md)
-
-- `source = table | where cidrmatch(ip, '192.169.1.0/24')`
-- `source = table | where isV6 = false and isValid = true and cidrmatch(ipAddress, '192.168.1.0/24')`
-- `source = table | where isV6 = true | eval inRange = case(cidrmatch(ipAddress, '2003:db8::/32'), 'in' else 'out') | fields ip, inRange`
-
 ```sql
 source = table | eval status_category =
     case(a >= 200 AND a < 300, 'Success',
diff --git a/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystExpressionVisitor.java b/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystExpressionVisitor.java
index a0506ceee..69a89b83a 100644
--- a/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystExpressionVisitor.java
+++ b/ppl-spark-integration/src/main/java/org/opensearch/sql/ppl/CatalystExpressionVisitor.java
@@ -5,7 +5,6 @@
 
 package org.opensearch.sql.ppl;
 
-import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute;
 import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute$;
 import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation;
 import org.apache.spark.sql.catalyst.analysis.UnresolvedStar$;
@@ -16,7 +15,6 @@
 import org.apache.spark.sql.catalyst.expressions.GreaterThanOrEqual;
 import org.apache.spark.sql.catalyst.expressions.In$;
 import org.apache.spark.sql.catalyst.expressions.InSubquery$;
-import org.apache.spark.sql.catalyst.expressions.LambdaFunction$;
 import org.apache.spark.sql.catalyst.expressions.LessThan;
 import org.apache.spark.sql.catalyst.expressions.LessThanOrEqual;
 import org.apache.spark.sql.catalyst.expressions.ListQuery$;
@@ -26,8 +24,6 @@
 import org.apache.spark.sql.catalyst.expressions.RowFrame$;
 import org.apache.spark.sql.catalyst.expressions.ScalaUDF;
 import org.apache.spark.sql.catalyst.expressions.ScalarSubquery$;
-import org.apache.spark.sql.catalyst.expressions.UnresolvedNamedLambdaVariable;
-import org.apache.spark.sql.catalyst.expressions.UnresolvedNamedLambdaVariable$;
 import org.apache.spark.sql.catalyst.expressions.SpecifiedWindowFrame;
 import org.apache.spark.sql.catalyst.expressions.WindowExpression;
 import org.apache.spark.sql.catalyst.expressions.WindowSpecDefinition;
@@ -51,7 +47,6 @@
 import org.opensearch.sql.ast.expression.Literal;
 import org.opensearch.sql.ast.expression.Not;
 import org.opensearch.sql.ast.expression.Or;
-import org.opensearch.sql.ast.expression.LambdaFunction;
 import org.opensearch.sql.ast.expression.QualifiedName;
 import org.opensearch.sql.ast.expression.Span;
 import org.opensearch.sql.ast.expression.UnresolvedExpression;
@@ -73,9 +68,7 @@
 import org.opensearch.sql.ppl.utils.AggregatorTransformer;
 import org.opensearch.sql.ppl.utils.BuiltinFunctionTransformer;
 import org.opensearch.sql.ppl.utils.ComparatorTransformer;
-import org.opensearch.sql.ppl.utils.JavaToScalaTransformer;
 import scala.Option;
-import scala.PartialFunction;
 import scala.Tuple2;
 import scala.collection.Seq;
 
@@ -439,23 +432,6 @@ public Expression visitCidr(org.opensearch.sql.ast.expression.Cidr node, Catalys
         return context.getNamedParseExpressions().push(udf);
     }
 
-    @Override
-    public Expression visitLambdaFunction(LambdaFunction node, CatalystPlanContext context) {
-        PartialFunction<Expression, Expression> transformer = JavaToScalaTransformer.toPartialFunction(
-            expr -> expr instanceof UnresolvedAttribute,
-            expr -> {
-                UnresolvedAttribute attr = (UnresolvedAttribute) expr;
-                return new UnresolvedNamedLambdaVariable(attr.nameParts());
-            }
-        );
-        Expression functionResult = node.getFunction().accept(this, context).transformUp(transformer);
-        context.popNamedParseExpressions();
-        List<NamedExpression> argsResult = node.getFuncArgs().stream()
-                .map(arg -> UnresolvedNamedLambdaVariable$.MODULE$.apply(seq(arg.getParts())))
-                .collect(Collectors.toList());
-        return context.getNamedParseExpressions().push(LambdaFunction$.MODULE$.apply(functionResult, seq(argsResult), false));
-    }
-
     private List<Expression> visitExpressionList(List<UnresolvedExpression> expressionList, CatalystPlanContext context) {
         return expressionList.isEmpty()
                 ? emptyList()