diff --git a/docs/ppl-lang/functions/ppl-json.md b/docs/ppl-lang/functions/ppl-json.md
index 27cc33d30..627eb110c 100644
--- a/docs/ppl-lang/functions/ppl-json.md
+++ b/docs/ppl-lang/functions/ppl-json.md
@@ -269,7 +269,7 @@ Example:
     | {"a":["valueA", "valueB", "valueC"]}           |
     +-------------------------------------------------+
-    os> source=people | eval append = json_append(`{"a":["valueA", "valueB"]}`, {"a":["valueC"]})
+    os> source=people | eval append = json_append(`{"a":["valueA", "valueB"]}`, ['a', {"a":["valueC"]}])
     fetched rows / total rows = 1/1
     +-----------------------------------------------+
     | append                                        |
     +-----------------------------------------------+
@@ -304,7 +304,7 @@ Extend arrays as individual values separates the `json_extend` functionality fro
 Example:
 
-    os> source=people | eval extend = json_extend(`{"a":["valueA", "valueB"]}`, ["valueC","valueD"])
+    os> source=people | eval extend = json_extend(`{"a":["valueA", "valueB"]}`,['a', ["valueC","valueD"]])
     fetched rows / total rows = 1/1
     +-------------------------------------------------+
     | extend                                          |
     +-------------------------------------------------+
@@ -312,7 +312,7 @@ Example:
     | {"a":["valueA", "valueB", "valueC", "valueD"]}  |
     +-------------------------------------------------+
-    os> source=people | eval extend = json_extend(`{"a":["valueA", "valueB"]}`, {"b":["valueC","valueD"]})
+    os> source=people | eval extend = json_extend(`{"a":["valueA", "valueB"]}`,['a',[{"b":["valueC","valueD"]}]])
     fetched rows / total rows = 1/1
     +-------------------------------------------------------------+
     | extend                                                      |
     +-------------------------------------------------------------+
diff --git a/ppl-spark-integration/src/test/java/org/opensearch/sql/expression/function/SerializableJsonUdfTest.java b/ppl-spark-integration/src/test/java/org/opensearch/sql/expression/function/SerializableJsonUdfTest.java
index 2c7080622..996569611 100644
--- a/ppl-spark-integration/src/test/java/org/opensearch/sql/expression/function/SerializableJsonUdfTest.java
+++ b/ppl-spark-integration/src/test/java/org/opensearch/sql/expression/function/SerializableJsonUdfTest.java
@@ -183,4 +183,70 @@ public void testJsonExtendFunctionWithNonArrayPath() {
         assertEquals(expectedJson, result);
     }
+
+    @Test
+    public void testJsonExtendFunctionAddValuesToExistingArray() {
+        // Initial JSON string
+        String jsonStr = "{\"key1\":\"value1\",\"key2\":[\"value2\"]}";
+
+        // Path-value pairs to extend
+        List<Map.Entry<String, List<String>>> pathValuePairs = new ArrayList<>();
+        pathValuePairs.add(Map.entry("key2", Arrays.asList("value3", "value4")));
+
+        // Expected JSON after extension
+        String expectedJson = "{\"key1\":\"value1\",\"key2\":[\"value2\",\"value3\",\"value4\"]}";
+
+        // Apply the function
+        String result = jsonExtendFunction.apply(jsonStr, pathValuePairs);
+
+        // Assert that the result matches the expected JSON
+        assertEquals(expectedJson, result);
+    }
+
+    @Test
+    public void testJsonExtendFunctionAddNewArray() {
+        // Initial JSON string
+        String jsonStr = "{\"key1\":\"value1\"}";
+
+        // Path-value pairs to add
+        List<Map.Entry<String, List<String>>> pathValuePairs = new ArrayList<>();
+        pathValuePairs.add(Map.entry("key2", Arrays.asList("value2", "value3")));
+
+        // Expected JSON after adding new array
+        String expectedJson = "{\"key1\":\"value1\",\"key2\":[\"value2\",\"value3\"]}";
+
+        // Apply the function
+        String result = jsonExtendFunction.apply(jsonStr, pathValuePairs);
+
+        // Assert that the result matches the expected JSON
+        assertEquals(expectedJson, result);
+    }
+
+    @Test
+    public void testJsonExtendFunctionHandleEmptyValues() {
+        // Initial JSON string
+        String jsonStr = "{\"key1\":\"value1\",\"key2\":[\"value2\"]}";
+
+        // Path-value pairs with an empty list of values to add
+        List<Map.Entry<String, List<String>>> pathValuePairs = new ArrayList<>();
+        pathValuePairs.add(Map.entry("key2", Collections.emptyList()));
+
+        // Expected JSON should remain unchanged
+        String expectedJson = "{\"key1\":\"value1\",\"key2\":[\"value2\"]}";
+
+        // Apply the function
+        String result = jsonExtendFunction.apply(jsonStr, pathValuePairs);
+
+        // Assert that the result matches the expected JSON
+        assertEquals(expectedJson, result);
+    }
+
+    @Test
+    public void testJsonExtendFunctionHandleNullInput() {
+        // Apply the function with null input
+        String result = jsonExtendFunction.apply(null, Collections.singletonList(Map.entry("key2", List.of("value2"))));
+
+        // Assert that the result is null
+        assertEquals(null, result);
+    }
 
 }
diff --git a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanJsonFunctionsTranslatorTestSuite.scala b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanJsonFunctionsTranslatorTestSuite.scala
index 34d0133e0..8e36395b3 100644
--- a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanJsonFunctionsTranslatorTestSuite.scala
+++ b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanJsonFunctionsTranslatorTestSuite.scala
@@ -5,20 +5,21 @@
 package org.opensearch.flint.spark.ppl
 
+import java.util
+
 import org.opensearch.flint.spark.ppl.PlaneUtils.plan
+import org.opensearch.sql.expression.function.SerializableUdf
+import org.opensearch.sql.expression.function.SerializableUdf.visit
 import org.opensearch.sql.ppl.{CatalystPlanContext, CatalystQueryPlanVisitor}
+import org.opensearch.sql.ppl.utils.DataTypeTransformer.seq
 import org.scalatest.matchers.should.Matchers
+
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction, UnresolvedRelation, UnresolvedStar}
 import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
 import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.catalyst.plans.logical.{Filter, Project}
 import org.apache.spark.sql.types.DataTypes
-import org.opensearch.sql.expression.function.SerializableUdf
-import org.opensearch.sql.expression.function.SerializableUdf.visit
-import org.opensearch.sql.ppl.utils.DataTypeTransformer.seq
-
-import java.util
 
 class PPLLogicalPlanJsonFunctionsTranslatorTestSuite
   extends SparkFunSuite
@@ -194,11 +195,11 @@ class PPLLogicalPlanJsonFunctionsTranslatorTestSuite
     val context = new CatalystPlanContext
     val logPlan =
       planTransformer.visit(
-        plan(pplParser, """source=t a = json_delete('{"a":[{"b":1},{"c":2}]}', ["a.b"])"""),
+        plan(pplParser, """source=t a = json_delete('{"a":[{"b":1},{"c":2}]}', '["a.b"]')"""),
         context)
 
     val table = UnresolvedRelation(Seq("t"))
-    val keysExpression = Literal("[a.b]")
+    val keysExpression = Literal("""["a.b"]""")
     val jsonObjExp = Literal("""{"a":[{"b":1},{"c":2}]}""")
     val jsonFunc = visit("json_delete", util.List.of(jsonObjExp, keysExpression))
     val filterExpr = EqualTo(UnresolvedAttribute("a"), jsonFunc)
@@ -208,18 +209,37 @@ class PPLLogicalPlanJsonFunctionsTranslatorTestSuite
     comparePlans(expectedPlan, logPlan, false)
   }
 
-
   test("test json_append()") {
     val context = new CatalystPlanContext
     val logPlan =
       planTransformer.visit(
-        plan(pplParser, """source=t a = json_append('{"a":[{"b":1},{"c":2}]}', 'a.b')"""),
+        plan(pplParser, """source=t a = json_append('{"a":[1,2]}', '["a",3]')"""),
+        context)
+
+    val table = UnresolvedRelation(Seq("t"))
+    val keysExpression = Literal("""["a",3]""")
+    val jsonObjExp = Literal("""{"a":[1,2]}""")
Literal("""{"a":[1,2]}""") + val jsonFunc = visit("json_append", util.List.of(jsonObjExp, keysExpression)) + val filterExpr = EqualTo(UnresolvedAttribute("a"), jsonFunc) + val filterPlan = Filter(filterExpr, table) + val projectList = Seq(UnresolvedStar(None)) + val expectedPlan = Project(projectList, filterPlan) + comparePlans(expectedPlan, logPlan, false) + } + + test("test json_extend()") { + val context = new CatalystPlanContext + val logPlan = + planTransformer.visit( + plan( + pplParser, + """source=t a = json_extend('{"a":[{"b":1},{"c":2}]}', '["a",{"c":2}]')"""), context) val table = UnresolvedRelation(Seq("t")) - val keysExpression = Literal("a.b") + val keysExpression = Literal("""["a",{"c":2}]""") val jsonObjExp = Literal("""{"a":[{"b":1},{"c":2}]}""") - val jsonFunc = visit("json_delete", util.List.of(jsonObjExp, keysExpression)) + val jsonFunc = visit("json_extend", util.List.of(jsonObjExp, keysExpression)) val filterExpr = EqualTo(UnresolvedAttribute("a"), jsonFunc) val filterPlan = Filter(filterExpr, table) val projectList = Seq(UnresolvedStar(None)) diff --git a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanParseCidrmatchTestSuite.scala b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanParseCidrmatchTestSuite.scala index d14b0fee1..c8a8a67ad 100644 --- a/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanParseCidrmatchTestSuite.scala +++ b/ppl-spark-integration/src/test/scala/org/opensearch/flint/spark/ppl/PPLLogicalPlanParseCidrmatchTestSuite.scala @@ -7,16 +7,17 @@ package org.opensearch.flint.spark.ppl import org.opensearch.flint.spark.ppl.PlaneUtils.plan import org.opensearch.sql.expression.function.SerializableUdf +import org.opensearch.sql.expression.function.SerializableUdf.visit import org.opensearch.sql.ppl.{CatalystPlanContext, CatalystQueryPlanVisitor} import org.opensearch.sql.ppl.utils.DataTypeTransformer.seq import org.scalatest.matchers.should.Matchers + import org.apache.spark.SparkFunSuite import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction, UnresolvedRelation, UnresolvedStar} import org.apache.spark.sql.catalyst.expressions.{Alias, And, Ascending, CaseWhen, Descending, EqualTo, GreaterThan, Literal, NullsFirst, NullsLast, RegExpExtract, SortOrder} import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.types.DataTypes -import org.opensearch.sql.expression.function.SerializableUdf.visit class PPLLogicalPlanParseCidrmatchTestSuite extends SparkFunSuite