update tests & scala-fmt
Signed-off-by: YANGDB <[email protected]>
YANG-DB committed Dec 9, 2024
1 parent b7b0713 commit af8144a
Showing 4 changed files with 102 additions and 15 deletions.
6 changes: 3 additions & 3 deletions docs/ppl-lang/functions/ppl-json.md
@@ -269,7 +269,7 @@ Example:
| {"a":["valueA", "valueB", "valueC"]} |
+-------------------------------------------------+

- os> source=people | eval append = json_append(`{"a":["valueA", "valueB"]}`, {"a":["valueC"]})
+ os> source=people | eval append = json_append(`{"a":["valueA", "valueB"]}`, ['a', {"a":["valueC"]}])
fetched rows / total rows = 1/1
+-----------------------------------------------+
| append |
@@ -304,15 +304,15 @@ Extend arrays as individual values separates the `json_extend` functionality from

Example:

- os> source=people | eval extend = json_extend(`{"a":["valueA", "valueB"]}`, ["valueC","valueD"])
+ os> source=people | eval extend = json_extend(`{"a":["valueA", "valueB"]}`,['a', ["valueC","valueD"]])
fetched rows / total rows = 1/1
+-------------------------------------------------+
| extend |
+-------------------------------------------------+
| {"a":["valueA", "valueB", "valueC", "valueD"]} |
+-------------------------------------------------+

- os> source=people | eval extend = json_extend(`{"a":["valueA", "valueB"]}`, {"b":["valueC","valueD"]})
+ os> source=people | eval extend = json_extend(`{"a":["valueA", "valueB"]}`,['a',[{"b":["valueC","valueD"]}]])
fetched rows / total rows = 1/1
+-------------------------------------------------------------+
| extend |
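The distinction the doc draws here — `json_append` inserts the supplied value as a single element, while `json_extend` flattens an array argument into individual elements — is easy to reproduce with plain Jackson. A minimal illustrative sketch (not the plugin's implementation; the class and variable names are invented for the example):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

// Contrast of the two behaviors on the doc's own example document.
public class AppendVsExtend {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode appendDoc = (ObjectNode) mapper.readTree("{\"a\":[\"valueA\",\"valueB\"]}");
        ObjectNode extendDoc = appendDoc.deepCopy();
        ArrayNode values = (ArrayNode) mapper.readTree("[\"valueC\",\"valueD\"]");

        // json_append semantics: the argument goes in as ONE nested element.
        ((ArrayNode) appendDoc.get("a")).add(values);

        // json_extend semantics: each element of the argument is added individually.
        ArrayNode target = (ArrayNode) extendDoc.get("a");
        values.forEach(target::add);

        System.out.println(mapper.writeValueAsString(appendDoc));
        // {"a":["valueA","valueB",["valueC","valueD"]]}
        System.out.println(mapper.writeValueAsString(extendDoc));
        // {"a":["valueA","valueB","valueC","valueD"]}
    }
}
```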
@@ -183,4 +183,70 @@ public void testJsonExtendFunctionWithNonArrayPath() {

assertEquals(expectedJson, result);
}

@Test
public void testJsonExtendFunctionAddValuesToExistingArray() {
// Initial JSON string
String jsonStr = "{\"key1\":\"value1\",\"key2\":[\"value2\"]}";

// Path-value pairs to extend
List<Map.Entry<String, List<String>>> pathValuePairs = new ArrayList<>();
pathValuePairs.add(Map.entry("key2", Arrays.asList("value3", "value4")));

// Expected JSON after extension
String expectedJson = "{\"key1\":\"value1\",\"key2\":[\"value2\",\"value3\",\"value4\"]}";

// Apply the function
String result = jsonExtendFunction.apply(jsonStr, pathValuePairs);

// Assert that the result matches the expected JSON
assertEquals(expectedJson, result);
}

@Test
public void testJsonExtendFunctionAddNewArray() {
// Initial JSON string
String jsonStr = "{\"key1\":\"value1\"}";

// Path-value pairs to add
List<Map.Entry<String, List<String>>> pathValuePairs = new ArrayList<>();
pathValuePairs.add(Map.entry("key2", Arrays.asList("value2", "value3")));

// Expected JSON after adding new array
String expectedJson = "{\"key1\":\"value1\",\"key2\":[\"value2\",\"value3\"]}";

// Apply the function
String result = jsonExtendFunction.apply(jsonStr, pathValuePairs);

// Assert that the result matches the expected JSON
assertEquals(expectedJson, result);
}

@Test
public void testJsonExtendFunctionHandleEmptyValues() {
// Initial JSON string
String jsonStr = "{\"key1\":\"value1\",\"key2\":[\"value2\"]}";

// Path-value pairs with an empty list of values to add
List<Map.Entry<String, List<String>>> pathValuePairs = new ArrayList<>();
pathValuePairs.add(Map.entry("key2", Collections.emptyList()));

// Expected JSON should remain unchanged
String expectedJson = "{\"key1\":\"value1\",\"key2\":[\"value2\"]}";

// Apply the function
String result = jsonExtendFunction.apply(jsonStr, pathValuePairs);

// Assert that the result matches the expected JSON
assertEquals(expectedJson, result);
}

@Test
public void testJsonExtendFunctionHandleNullInput() {
// Apply the function with null input
String result = jsonExtendFunction.apply(null, Collections.singletonList(Map.entry("key2", List.of("value2"))));

// Assert that the result is null
assertEquals(null, result);
}
}
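Taken together, the tests above pin down the extend contract: values are appended to an existing array at the path, a new array is created when the path is absent, an empty value list leaves the document unchanged, and a null document yields null. A minimal sketch of a function meeting that contract, assuming Jackson on the classpath; `JsonExtendSketch` is a hypothetical stand-in, not the repository's `SerializableUdf` code:

```java
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

// Hypothetical stand-in mirroring the apply(String, List<Map.Entry<String, List<String>>>)
// shape used by the tests above.
public final class JsonExtendSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static String apply(String jsonStr, List<Map.Entry<String, List<String>>> pathValuePairs) {
        if (jsonStr == null) {
            return null; // null document -> null result, per testJsonExtendFunctionHandleNullInput
        }
        try {
            ObjectNode root = (ObjectNode) MAPPER.readTree(jsonStr);
            for (Map.Entry<String, List<String>> pair : pathValuePairs) {
                JsonNode existing = root.get(pair.getKey());
                // Reuse an existing array; otherwise create one at the path (sketch choice --
                // the non-array-path behavior is fixed by the test truncated above).
                ArrayNode array = existing instanceof ArrayNode
                    ? (ArrayNode) existing
                    : root.putArray(pair.getKey());
                // An empty value list adds nothing, leaving the document unchanged.
                pair.getValue().forEach(array::add);
            }
            return MAPPER.writeValueAsString(root);
        } catch (Exception e) {
            return null; // sketch choice: malformed input is not covered by these tests
        }
    }
}
```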
@@ -5,20 +5,21 @@

package org.opensearch.flint.spark.ppl

+ import java.util
+
import org.opensearch.flint.spark.ppl.PlaneUtils.plan
+ import org.opensearch.sql.expression.function.SerializableUdf
+ import org.opensearch.sql.expression.function.SerializableUdf.visit
import org.opensearch.sql.ppl.{CatalystPlanContext, CatalystQueryPlanVisitor}
+ import org.opensearch.sql.ppl.utils.DataTypeTransformer.seq
import org.scalatest.matchers.should.Matchers

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction, UnresolvedRelation, UnresolvedStar}
import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.{Filter, Project}
import org.apache.spark.sql.types.DataTypes
- import org.opensearch.sql.expression.function.SerializableUdf
- import org.opensearch.sql.expression.function.SerializableUdf.visit
- import org.opensearch.sql.ppl.utils.DataTypeTransformer.seq
-
- import java.util

class PPLLogicalPlanJsonFunctionsTranslatorTestSuite
extends SparkFunSuite
@@ -194,11 +195,11 @@ class PPLLogicalPlanJsonFunctionsTranslatorTestSuite
val context = new CatalystPlanContext
val logPlan =
planTransformer.visit(
- plan(pplParser, """source=t a = json_delete('{"a":[{"b":1},{"c":2}]}', ["a.b"])"""),
+ plan(pplParser, """source=t a = json_delete('{"a":[{"b":1},{"c":2}]}', '["a.b"]')"""),
context)

val table = UnresolvedRelation(Seq("t"))
- val keysExpression = Literal("[a.b]")
+ val keysExpression = Literal("""["a.b"]""")
val jsonObjExp = Literal("""{"a":[{"b":1},{"c":2}]}""")
val jsonFunc = visit("json_delete", util.List.of(jsonObjExp, keysExpression))
val filterExpr = EqualTo(UnresolvedAttribute("a"), jsonFunc)
@@ -208,18 +209,37 @@
comparePlans(expectedPlan, logPlan, false)
}

test("test json_append()") {
val context = new CatalystPlanContext
val logPlan =
planTransformer.visit(
- plan(pplParser, """source=t a = json_append('{"a":[{"b":1},{"c":2}]}', 'a.b')"""),
+ plan(pplParser, """source=t a = json_append('{"a":[1,2]}', '["a",3]')"""),
context)

val table = UnresolvedRelation(Seq("t"))
val keysExpression = Literal("""["a",3]""")
val jsonObjExp = Literal("""{"a":[1,2]}""")
val jsonFunc = visit("json_append", util.List.of(jsonObjExp, keysExpression))
val filterExpr = EqualTo(UnresolvedAttribute("a"), jsonFunc)
val filterPlan = Filter(filterExpr, table)
val projectList = Seq(UnresolvedStar(None))
val expectedPlan = Project(projectList, filterPlan)
comparePlans(expectedPlan, logPlan, false)
}

test("test json_extend()") {
val context = new CatalystPlanContext
val logPlan =
planTransformer.visit(
plan(
pplParser,
"""source=t a = json_extend('{"a":[{"b":1},{"c":2}]}', '["a",{"c":2}]')"""),
context)

val table = UnresolvedRelation(Seq("t"))
- val keysExpression = Literal("a.b")
+ val keysExpression = Literal("""["a",{"c":2}]""")
val jsonObjExp = Literal("""{"a":[{"b":1},{"c":2}]}""")
val jsonFunc = visit("json_delete", util.List.of(jsonObjExp, keysExpression))
val jsonFunc = visit("json_extend", util.List.of(jsonObjExp, keysExpression))
val filterExpr = EqualTo(UnresolvedAttribute("a"), jsonFunc)
val filterPlan = Filter(filterExpr, table)
val projectList = Seq(UnresolvedStar(None))
@@ -7,16 +7,17 @@ package org.opensearch.flint.spark.ppl

import org.opensearch.flint.spark.ppl.PlaneUtils.plan
import org.opensearch.sql.expression.function.SerializableUdf
+ import org.opensearch.sql.expression.function.SerializableUdf.visit
import org.opensearch.sql.ppl.{CatalystPlanContext, CatalystQueryPlanVisitor}
import org.opensearch.sql.ppl.utils.DataTypeTransformer.seq
import org.scalatest.matchers.should.Matchers

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction, UnresolvedRelation, UnresolvedStar}
import org.apache.spark.sql.catalyst.expressions.{Alias, And, Ascending, CaseWhen, Descending, EqualTo, GreaterThan, Literal, NullsFirst, NullsLast, RegExpExtract, SortOrder}
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.types.DataTypes
- import org.opensearch.sql.expression.function.SerializableUdf.visit

class PPLLogicalPlanParseCidrmatchTestSuite
extends SparkFunSuite
