Skip to content

Commit

Permalink
Isempty (#676)
Browse files Browse the repository at this point in the history
* isempty implementation

Signed-off-by: Kacper Trochimiak <[email protected]>

* isempty implementation v2

Signed-off-by: Kacper Trochimiak <[email protected]>

* Is empty function returns correct column list.

Signed-off-by: Lukasz Soszynski <[email protected]>

* Tests for is empty function

Signed-off-by: Lukasz Soszynski <[email protected]>

* Test and documentation related to the isempty function

Signed-off-by: Lukasz Soszynski <[email protected]>

---------

Signed-off-by: Kacper Trochimiak <[email protected]>
Signed-off-by: Lukasz Soszynski <[email protected]>
Co-authored-by: Kacper Trochimiak <[email protected]>
  • Loading branch information
lukasz-soszynski-eliatra and kt-eliatra authored Sep 19, 2024
1 parent 7ff1512 commit a7fe6e6
Show file tree
Hide file tree
Showing 12 changed files with 242 additions and 25 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@ import java.sql.{Date, Time, Timestamp}
import org.opensearch.sql.ppl.utils.DataTypeTransformer.seq

import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction, UnresolvedRelation}
import org.apache.spark.sql.catalyst.expressions.{EqualTo, GreaterThan, Literal}
import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, Project}
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction, UnresolvedRelation, UnresolvedStar}
import org.apache.spark.sql.catalyst.expressions.{Alias, CaseWhen, EqualTo, GreaterThan, Literal, NamedExpression}
import org.apache.spark.sql.catalyst.plans.logical.{Filter, GlobalLimit, LocalLimit, LogicalPlan, Project}
import org.apache.spark.sql.streaming.StreamTest
import org.apache.spark.sql.types.DoubleType

Expand All @@ -25,6 +25,7 @@ class FlintSparkPPLBuiltinFunctionITSuite
/** Test table and index name */
private val testTable = "spark_catalog.default.flint_ppl_test"
private val testNullTable = "spark_catalog.default.flint_ppl_test_null"
private val testTextSizeTable = "spark_catalog.default.flint_ppl_text_size"

override def beforeAll(): Unit = {
super.beforeAll()
Expand Down Expand Up @@ -270,6 +271,96 @@ class FlintSparkPPLBuiltinFunctionITSuite
comparePlans(logicalPlan, expectedPlan, checkAnalysis = false)
}

test("test string functions - isempty eval") {
  val frame = sql(s"""
       | source = $testNullTable | head 1 | eval a = isempty('full'), b = isempty(''), c = isempty(' ') | fields a, b, c
       | """.stripMargin)

  // isempty(x) is true exactly when trim(x) has length 0:
  // 'full' -> false, '' -> true, ' ' (whitespace only) -> true.
  val results: Array[Row] = frame.collect()
  val expectedResults: Array[Row] = Array(Row(false, true, true))
  assert(results.sameElements(expectedResults))

  val logicalPlan: LogicalPlan = frame.queryExecution.logical
  val table = UnresolvedRelation(Seq("spark_catalog", "default", "flint_ppl_test_null"))
  val localLimit = LocalLimit(Literal(1), table)
  val globalLimit = GlobalLimit(Literal(1), localLimit)

  // Expected lowering of isempty(value) AS alias:
  //   CASE WHEN length(trim(value)) = 0 THEN true ELSE false END AS alias
  // Extracted once instead of hand-building the same tree three times.
  def expectedIsEmpty(value: String, alias: String): NamedExpression = {
    val condition = EqualTo(
      UnresolvedFunction(
        "length",
        Seq(UnresolvedFunction("trim", Seq(Literal(value)), isDistinct = false)),
        isDistinct = false),
      Literal(0))
    Alias(CaseWhen(Seq((condition, Literal(true))), Literal(false)), alias)()
  }

  val projectList = Seq(
    UnresolvedStar(None),
    expectedIsEmpty("full", "a"),
    expectedIsEmpty("", "b"),
    expectedIsEmpty(" ", "c"))
  val innerProject = Project(projectList, globalLimit)

  val expectedPlan = Project(
    Seq(UnresolvedAttribute("a"), UnresolvedAttribute("b"), UnresolvedAttribute("c")),
    innerProject)
  comparePlans(logicalPlan, expectedPlan, checkAnalysis = false)
}

test("test string functions - isempty where") {
  // A non-blank literal is never "empty", so the filter must eliminate every row.
  val frame = sql(s"""
       | source = $testNullTable | where isempty('I am not empty');
       | """.stripMargin)
  val collected: Array[Row] = frame.collect()
  assert(collected.length == 0)

  // Expected plan: Filter(CASE WHEN length(trim('I am not empty')) = 0
  //                       THEN true ELSE false END) under a SELECT *.
  val relation = UnresolvedRelation(Seq("spark_catalog", "default", "flint_ppl_test_null"))
  val trimmed =
    UnresolvedFunction("trim", Seq(Literal("I am not empty")), isDistinct = false)
  val trimmedLength = UnresolvedFunction("length", Seq(trimmed), isDistinct = false)
  val isEmptyCondition =
    CaseWhen(Seq((EqualTo(trimmedLength, Literal(0)), Literal(true))), Literal(false))
  val expectedPlan =
    Project(Seq(UnresolvedStar(None)), Filter(isEmptyCondition, relation))

  comparePlans(frame.queryExecution.logical, expectedPlan, checkAnalysis = false)
}

test("test math functions - abs") {
val frame = sql(s"""
| source = $testTable |where age = abs(-30) | fields name, age
Expand Down
4 changes: 3 additions & 1 deletion ppl-spark-integration/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -244,6 +244,7 @@ See the next samples of PPL queries :
- `source = table | where c = 'test' | fields a,b,c | head 3`
- `source = table | where ispresent(b)`
- `source = table | where isnull(coalesce(a, b)) | fields a,b,c | head 3`
- `source = table | where isempty(a)`

**Filters With Logical Conditions**
- `source = table | where c = 'test' AND a = 1 | fields a,b,c`
Expand All @@ -262,7 +263,8 @@ Assumptions: `a`, `b`, `c` are existing fields in `table`
- `source = table | eval f = a * 2, h = f * 2 | fields a,f,h`
- `source = table | eval f = a * 2, h = b | stats avg(f) by h`
- `source = table | eval f = ispresent(a)`
- `source = table | eval r = coalesce(a, b, c) | fields r
- `source = table | eval r = coalesce(a, b, c) | fields r`
- `source = table | eval e = isempty(a) | fields e`

Limitation: Overriding existing field is unsupported, following queries throw exceptions with "Reference 'a' is ambiguous"
- `source = table | eval a = 10 | fields a,b,c`
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -338,6 +338,7 @@ LOCATE: 'LOCATE';
REPLACE: 'REPLACE';
REVERSE: 'REVERSE';
CAST: 'CAST';
ISEMPTY: 'ISEMPTY';

// BOOL FUNCTIONS
LIKE: 'LIKE';
Expand Down
6 changes: 6 additions & 0 deletions ppl-spark-integration/src/main/antlr4/OpenSearchPPLParser.g4
Original file line number Diff line number Diff line change
Expand Up @@ -299,6 +299,7 @@ logicalExpression
| left = logicalExpression (AND)? right = logicalExpression # logicalAnd
| left = logicalExpression XOR right = logicalExpression # logicalXor
| booleanExpression # booleanExpr
| isEmptyExpression # isEmptyExpr
;

comparisonExpression
Expand Down Expand Up @@ -328,6 +329,10 @@ booleanExpression
: booleanFunctionCall
;

// Matches the call form isempty(<functionArg>); parsed as its own rule
// (rather than only via textFunctionName) so it can appear directly in
// logicalExpression as the isEmptyExpr alternative.
isEmptyExpression
: ISEMPTY LT_PRTHS functionArg RT_PRTHS
;

relevanceExpression
: singleFieldRelevanceFunction
| multiFieldRelevanceFunction
Expand Down Expand Up @@ -688,6 +693,7 @@ textFunctionName
| LOCATE
| REPLACE
| REVERSE
| ISEMPTY
;

positionFunctionName
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import org.opensearch.sql.ast.expression.Function;
import org.opensearch.sql.ast.expression.In;
import org.opensearch.sql.ast.expression.Interval;
import org.opensearch.sql.ast.expression.IsEmpty;
import org.opensearch.sql.ast.expression.Let;
import org.opensearch.sql.ast.expression.Literal;
import org.opensearch.sql.ast.expression.Map;
Expand Down Expand Up @@ -166,6 +167,10 @@ public T visitFunction(Function node, C context) {
return visitChildren(node, context);
}

/**
 * Visit an {@code IsEmpty} AST node.
 *
 * <p>Default implementation delegates to {@code visitChildren}; concrete
 * visitors override this to give the isempty expression specific handling.
 */
public T visitIsEmpty(IsEmpty node, C context) {
return visitChildren(node, context);
}

public T visitWindowFunction(WindowFunction node, C context) {
return visitChildren(node, context);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,18 @@ public Case(UnresolvedExpression caseValue, List<When> whenClauses, UnresolvedEx
this.elseClause = elseClause;
}

/**
 * @return the value the WHEN clauses are compared against; null for the
 *     searched-CASE form (callers such as the plan visitor check for null)
 */
public UnresolvedExpression getCaseValue() {
return caseValue;
}

/** @return the ordered WHEN clauses of this CASE expression */
public List<When> getWhenClauses() {
return whenClauses;
}

/** @return the ELSE result expression of this CASE expression */
public UnresolvedExpression getElseClause() {
return elseClause;
}

@Override
public List<? extends Node> getChild() {
ImmutableList.Builder<Node> children = ImmutableList.builder();
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
package org.opensearch.sql.ast.expression;

import com.google.common.collect.ImmutableList;
import org.opensearch.sql.ast.AbstractNodeVisitor;

import java.util.List;

/**
 * AST node for the PPL {@code isempty(expr)} function.
 *
 * <p>Wraps the equivalent searched {@link Case} expression the call is lowered
 * to (CASE WHEN length(trim(expr)) = 0 THEN true ELSE false).
 * NOTE(review): the lowering itself happens outside this class — confirm
 * against the AST builder that constructs it.
 */
public class IsEmpty extends UnresolvedExpression {
    // Immutable after construction: only assigned in the constructor,
    // so declared final.
    private final Case caseValue;

    public IsEmpty(Case caseValue) {
        this.caseValue = caseValue;
    }

    @Override
    public List<UnresolvedExpression> getChild() {
        return ImmutableList.of(this.caseValue);
    }

    @Override
    public <R, C> R accept(AbstractNodeVisitor<R, C> nodeVisitor, C context) {
        return nodeVisitor.visitIsEmpty(this, context);
    }

    /** @return the CASE expression equivalent of this isempty call */
    public Case getCaseValue() {
        return caseValue;
    }

    @Override
    public String toString() {
        return String.format(
                "isempty(%s)",
                caseValue.toString());
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,14 @@ public When(UnresolvedExpression condition, UnresolvedExpression result) {
this.result = result;
}

/** @return the boolean condition of this WHEN clause */
public UnresolvedExpression getCondition() {
return condition;
}

/** @return the result expression produced when the condition holds */
public UnresolvedExpression getResult() {
return result;
}

@Override
public List<? extends Node> getChild() {
return ImmutableList.of(condition, result);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,11 @@ public Stack<Expression> getNamedParseExpressions() {
return namedParseExpressions;
}

/**
 * Replaces the contents of this context's expression stack with the given one.
 *
 * <p>Mutates the existing stack in place (clear + addAll) rather than swapping
 * the reference, so any caller holding the stack from
 * {@code getNamedParseExpressions()} observes the new contents.
 * {@code addAll} pushes in iteration order (bottom of the given stack first),
 * preserving the given stack's ordering.
 */
public void setNamedParseExpressions(Stack<org.apache.spark.sql.catalyst.expressions.Expression> namedParseExpressions) {
this.namedParseExpressions.clear();
this.namedParseExpressions.addAll(namedParseExpressions);
}

public Optional<Expression> popNamedParseExpressions() {
return namedParseExpressions.isEmpty() ? Optional.empty() : Optional.of(namedParseExpressions.pop());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation;
import org.apache.spark.sql.catalyst.analysis.UnresolvedStar$;
import org.apache.spark.sql.catalyst.expressions.Ascending$;
import org.apache.spark.sql.catalyst.expressions.CaseWhen;
import org.apache.spark.sql.catalyst.expressions.Descending$;
import org.apache.spark.sql.catalyst.expressions.Expression;
import org.apache.spark.sql.catalyst.expressions.NamedExpression;
Expand Down Expand Up @@ -37,6 +38,7 @@
import org.opensearch.sql.ast.expression.Function;
import org.opensearch.sql.ast.expression.In;
import org.opensearch.sql.ast.expression.Interval;
import org.opensearch.sql.ast.expression.IsEmpty;
import org.opensearch.sql.ast.expression.Let;
import org.opensearch.sql.ast.expression.Literal;
import org.opensearch.sql.ast.expression.Not;
Expand All @@ -45,6 +47,7 @@
import org.opensearch.sql.ast.expression.QualifiedName;
import org.opensearch.sql.ast.expression.Span;
import org.opensearch.sql.ast.expression.UnresolvedExpression;
import org.opensearch.sql.ast.expression.When;
import org.opensearch.sql.ast.expression.WindowFunction;
import org.opensearch.sql.ast.expression.Xor;
import org.opensearch.sql.ast.statement.Explain;
Expand Down Expand Up @@ -74,17 +77,16 @@
import org.opensearch.sql.ppl.utils.SortUtils;
import scala.Option;
import scala.Option$;
import scala.Tuple2;
import scala.collection.Seq;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.*;
import java.util.function.BiFunction;
import java.util.stream.Collectors;

import static java.util.Collections.emptyList;
import static java.util.List.of;
import static org.opensearch.sql.expression.function.BuiltinFunctionName.EQUAL;
import static org.opensearch.sql.ppl.CatalystPlanContext.findRelation;
import static org.opensearch.sql.ppl.utils.DataTypeTransformer.seq;
import static org.opensearch.sql.ppl.utils.DataTypeTransformer.translate;
Expand Down Expand Up @@ -351,11 +353,6 @@ public LogicalPlan visitIn(In node, CatalystPlanContext context) {
throw new IllegalStateException("Not Supported operation : In");
}

@Override
public LogicalPlan visitCase(Case node, CatalystPlanContext context) {
throw new IllegalStateException("Not Supported operation : Case");
}

@Override
public LogicalPlan visitRareTopN(RareTopN node, CatalystPlanContext context) {
throw new IllegalStateException("Not Supported operation : RareTopN");
Expand Down Expand Up @@ -580,6 +577,18 @@ public Expression visitFunction(Function node, CatalystPlanContext context) {
return context.getNamedParseExpressions().push(function);
}

/**
 * Translate an isempty() node by lowering its wrapped CASE expression.
 *
 * <p>Snapshots the context's expression stack before delegating to
 * {@code visitCase}, which pushes intermediate expressions while analyzing the
 * CASE form; the snapshot plus the final CASE expression is then written back,
 * discarding those intermediates.
 * NOTE(review): relies on setNamedParseExpressions replacing contents in place —
 * confirm the snapshot/restore is still needed if visitCase's stack handling changes.
 */
@Override
public Expression visitIsEmpty(IsEmpty node, CatalystPlanContext context) {
// Copy of the stack as it was before visiting the CASE expression.
Stack<Expression> namedParseExpressions = new Stack<>();
namedParseExpressions.addAll(context.getNamedParseExpressions());
Expression expression = visitCase(node.getCaseValue(), context);
// Restore the snapshot and append only the final lowered expression.
namedParseExpressions.add(expression);
context.setNamedParseExpressions(namedParseExpressions);
return expression;
}



@Override
public Expression visitInterval(Interval node, CatalystPlanContext context) {
throw new IllegalStateException("Not Supported operation : Interval");
Expand All @@ -602,7 +611,29 @@ public Expression visitKmeans(Kmeans node, CatalystPlanContext context) {

/**
 * Translates a PPL CASE expression into a Catalyst {@code CaseWhen}.
 *
 * <p>The ELSE clause is analyzed first and popped as the default value. For a
 * simple CASE (non-null case value) each WHEN condition is first rewritten as
 * an equality {@code caseValue = condition}; for a searched CASE (null case
 * value) the condition is analyzed as-is.
 * NOTE(review): assumes the ELSE clause is always present — a null elseClause
 * would break the analyze/pop pair; confirm the parser guarantees a default.
 */
@Override
public Expression visitCase(Case node, CatalystPlanContext context) {
    // Removed a stale unreachable `throw new IllegalStateException("Not Supported
    // operation : Case")` left over from the previous stub implementation.
    analyze(node.getElseClause(), context);
    Expression elseValue = context.getNamedParseExpressions().pop();
    List<Tuple2<Expression, Expression>> whens = new ArrayList<>();
    for (When when : node.getWhenClauses()) {
        if (node.getCaseValue() == null) {
            // Searched CASE: WHEN <condition> THEN <result>
            whens.add(
                new Tuple2<>(
                    analyze(when.getCondition(), context),
                    analyze(when.getResult(), context)
                )
            );
        } else {
            // Simple CASE: merge case value and compare value into a single
            // equality condition, i.e. WHEN caseValue = <value> THEN <result>
            Compare compare = new Compare(EQUAL.getName().getFunctionName(), node.getCaseValue(), when.getCondition());
            whens.add(
                new Tuple2<>(
                    analyze(compare, context), analyze(when.getResult(), context)
                )
            );
        }
        // NOTE(review): presumably prunes intermediates analyze() pushed onto the
        // stack — confirm retainAllNamedParseExpressions(e -> e) semantics.
        context.retainAllNamedParseExpressions(e -> e);
    }
    return context.getNamedParseExpressions().push(new CaseWhen(seq(whens), Option.apply(elseValue)));
}

@Override
Expand Down
Loading

0 comments on commit a7fe6e6

Please sign in to comment.