Skip to content

Commit

Permalink
[SPARK-49208][SQL][TESTS] Add more tests for negative MONTH intervals
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?
Adding more tests.

### Why are the changes needed?
To cover particular values of negative months that might be susceptible to bugs due to modulo operations on negative numbers.

### Does this PR introduce _any_ user-facing change?
No. Test only.

### How was this patch tested?
Ran the tests.

### Was this patch authored or co-authored using generative AI tooling?
No.

Closes #47687 from nemanjapetr-db/monthinterval.

Authored-by: Nemanja Petrovic <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
  • Loading branch information
nemanjapetr-db authored and MaxGekk committed Aug 12, 2024
1 parent 654a997 commit 7aa83ad
Show file tree
Hide file tree
Showing 2 changed files with 77 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,9 @@ import org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToMicros
import org.apache.spark.sql.catalyst.util.IntervalStringStyles.{ANSI_STYLE, HIVE_STYLE}
import org.apache.spark.sql.catalyst.util.IntervalUtils._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.DayTimeIntervalType
import org.apache.spark.sql.types.{YearMonthIntervalType => YM}
import org.apache.spark.sql.types.DayTimeIntervalType._
import org.apache.spark.sql.types.YearMonthIntervalType._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}

class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
Expand Down Expand Up @@ -447,7 +449,6 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
}

test("from day-time string") {
import org.apache.spark.sql.types.DayTimeIntervalType._
def check(input: String, from: Byte, to: Byte, expected: String): Unit = {
withClue(s"from = $from, to = $to") {
val expectedUtf8 = UTF8String.fromString(expected)
Expand Down Expand Up @@ -554,6 +555,15 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
test("SPARK-34615: period to months") {
assert(periodToMonths(Period.ZERO) === 0)
assert(periodToMonths(Period.of(0, -1, 0)) === -1)
assert(periodToMonths(Period.of(0, -11, 0)) === -11)
assert(periodToMonths(Period.of(0, -12, 0)) === -12)
assert(periodToMonths(Period.of(0, -13, 0)) === -13)
assert(periodToMonths(Period.of(0, 11, 0), YM.YEAR) === 0)
assert(periodToMonths(Period.of(0, -11, 0), YM.YEAR) === 0)
assert(periodToMonths(Period.of(0, 12, 0), YM.YEAR) === 12)
assert(periodToMonths(Period.of(0, -12, 0), YM.YEAR) === -12)
assert(periodToMonths(Period.of(0, 13, 0), YM.YEAR) === 12)
assert(periodToMonths(Period.of(0, -13, 0), YM.YEAR) === -12)
assert(periodToMonths(Period.of(-1, 0, 10)) === -12) // ignore days
assert(periodToMonths(Period.of(178956970, 7, 0)) === Int.MaxValue)
assert(periodToMonths(Period.of(-178956970, -8, 123)) === Int.MinValue)
Expand Down Expand Up @@ -637,11 +647,12 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
}

test("SPARK-35016: format year-month intervals") {
import org.apache.spark.sql.types.YearMonthIntervalType._
Seq(
0 -> ("0-0", "INTERVAL '0-0' YEAR TO MONTH"),
-11 -> ("-0-11", "INTERVAL '-0-11' YEAR TO MONTH"),
11 -> ("0-11", "INTERVAL '0-11' YEAR TO MONTH"),
-12 -> ("-1-0", "INTERVAL '-1-0' YEAR TO MONTH"),
12 -> ("1-0", "INTERVAL '1-0' YEAR TO MONTH"),
-13 -> ("-1-1", "INTERVAL '-1-1' YEAR TO MONTH"),
13 -> ("1-1", "INTERVAL '1-1' YEAR TO MONTH"),
-24 -> ("-2-0", "INTERVAL '-2-0' YEAR TO MONTH"),
Expand All @@ -654,8 +665,22 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
}
}

// NOTE(review): this test body is identical to "SPARK-49208: format negative month
// intervals" below — consider consolidating into a single test.
test("SPARK-49208: format year-month intervals") {
  // Month counts around the year boundary (|months| in {0, 11, 12, 13}) exercise the
  // division/modulo on negative values. For a MONTH-to-MONTH field range the ANSI
  // style prints the raw month count, while the HIVE style always prints the
  // canonical year-month ("y-m") form regardless of the requested fields.
  Seq(
    0 -> ("0-0", "INTERVAL '0' MONTH"),
    -11 -> ("-0-11", "INTERVAL '-11' MONTH"),
    11 -> ("0-11", "INTERVAL '11' MONTH"),
    -12 -> ("-1-0", "INTERVAL '-12' MONTH"),
    12 -> ("1-0", "INTERVAL '12' MONTH"),
    -13 -> ("-1-1", "INTERVAL '-13' MONTH"),
    13 -> ("1-1", "INTERVAL '13' MONTH")
  ).foreach { case (months, (hiveIntervalStr, ansiIntervalStr)) =>
    assert(toYearMonthIntervalString(months, ANSI_STYLE, MONTH, MONTH) === ansiIntervalStr)
    assert(toYearMonthIntervalString(months, HIVE_STYLE, MONTH, MONTH) === hiveIntervalStr)
  }
}

test("SPARK-35016: format day-time intervals") {
import DayTimeIntervalType._
Seq(
0L -> ("0 00:00:00.000000000", "INTERVAL '0 00:00:00' DAY TO SECOND"),
-1L -> ("-0 00:00:00.000001000", "INTERVAL '-0 00:00:00.000001' DAY TO SECOND"),
Expand All @@ -670,8 +695,22 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
}
}

test("SPARK-49208: format negative month intervals") {
  // Each row: month count -> (expected HIVE-style string, expected ANSI-style string).
  // HIVE style always renders the year-month ("y-m") form; ANSI style with a
  // MONTH-to-MONTH field range renders the plain month count.
  val cases = Seq(
    (0, "0-0", "INTERVAL '0' MONTH"),
    (-11, "-0-11", "INTERVAL '-11' MONTH"),
    (11, "0-11", "INTERVAL '11' MONTH"),
    (-12, "-1-0", "INTERVAL '-12' MONTH"),
    (12, "1-0", "INTERVAL '12' MONTH"),
    (-13, "-1-1", "INTERVAL '-13' MONTH"),
    (13, "1-1", "INTERVAL '13' MONTH"))
  for ((monthCount, hiveStr, ansiStr) <- cases) {
    assert(toYearMonthIntervalString(monthCount, HIVE_STYLE, MONTH, MONTH) === hiveStr)
    assert(toYearMonthIntervalString(monthCount, ANSI_STYLE, MONTH, MONTH) === ansiStr)
  }
}

test("SPARK-35734: Format day-time intervals using type fields") {
import DayTimeIntervalType._
Seq(
0L ->
("INTERVAL '0 00:00:00' DAY TO SECOND",
Expand Down Expand Up @@ -777,12 +816,13 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
}

test("SPARK-35771: Format year-month intervals using type fields") {
import org.apache.spark.sql.types.YearMonthIntervalType._
Seq(
0 ->
("INTERVAL '0-0' YEAR TO MONTH", "INTERVAL '0' YEAR", "INTERVAL '0' MONTH"),
-11 -> ("INTERVAL '-0-11' YEAR TO MONTH", "INTERVAL '-0' YEAR", "INTERVAL '-11' MONTH"),
11 -> ("INTERVAL '0-11' YEAR TO MONTH", "INTERVAL '0' YEAR", "INTERVAL '11' MONTH"),
-12 -> ("INTERVAL '-1-0' YEAR TO MONTH", "INTERVAL '-1' YEAR", "INTERVAL '-12' MONTH"),
12 -> ("INTERVAL '1-0' YEAR TO MONTH", "INTERVAL '1' YEAR", "INTERVAL '12' MONTH"),
-13 -> ("INTERVAL '-1-1' YEAR TO MONTH", "INTERVAL '-1' YEAR", "INTERVAL '-13' MONTH"),
13 -> ("INTERVAL '1-1' YEAR TO MONTH", "INTERVAL '1' YEAR", "INTERVAL '13' MONTH"),
-24 -> ("INTERVAL '-2-0' YEAR TO MONTH", "INTERVAL '-2' YEAR", "INTERVAL '-24' MONTH"),
Expand All @@ -803,7 +843,6 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
}

test("SPARK-38324: The second range is not [0, 59] in the day time ANSI interval") {
import org.apache.spark.sql.types.DayTimeIntervalType._
Seq(
("10 12:40:60", 60, DAY, SECOND),
("10 12:40:60.999999999", 60, DAY, SECOND),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ import org.apache.spark.sql.connector.catalog.InMemoryTableCatalog
import org.apache.spark.sql.execution.HiveResult._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT, SharedSparkSession}
import org.apache.spark.sql.types.{YearMonthIntervalType => YM}
import org.apache.spark.sql.types.YearMonthIntervalType


class HiveResultSuite extends SharedSparkSession {
import testImplicits._
Expand Down Expand Up @@ -119,6 +122,34 @@ class HiveResultSuite extends SharedSparkSession {
assert(hiveResultString(plan2) === Seq("[-10-1]"))
}

test("SPARK-49208: negative month intervals") {
  // Each row: expected Hive output string -> (month count, interval start field,
  // interval end field). Covers positive/negative months around the year boundary
  // (|months| in {11, 12, 13}) across the YEAR/MONTH field-range combinations.
  val cases = Seq(
    "0-0" -> (11, YM.YEAR, YM.YEAR),
    "0-0" -> (-11, YM.YEAR, YM.YEAR),
    "0-11" -> (11, YM.YEAR, YM.MONTH),
    "-0-11" -> (-11, YM.YEAR, YM.MONTH),
    "0-11" -> (11, YM.MONTH, YM.MONTH),
    "-0-11" -> (-11, YM.MONTH, YM.MONTH),
    "1-0" -> (12, YM.YEAR, YM.YEAR),
    "-1-0" -> (-12, YM.YEAR, YM.YEAR),
    "1-0" -> (12, YM.YEAR, YM.MONTH),
    "-1-0" -> (-12, YM.YEAR, YM.MONTH),
    "1-0" -> (12, YM.MONTH, YM.MONTH),
    "-1-0" -> (-12, YM.MONTH, YM.MONTH),
    "1-0" -> (13, YM.YEAR, YM.YEAR),
    "-1-0" -> (-13, YM.YEAR, YM.YEAR),
    "1-1" -> (13, YM.YEAR, YM.MONTH),
    "-1-1" -> (-13, YM.YEAR, YM.MONTH),
    "1-1" -> (13, YM.MONTH, YM.MONTH),
    "-1-1" -> (-13, YM.MONTH, YM.MONTH))
  cases.foreach { case (expected, (monthCount, startField, endField)) =>
    val interval = (Period.ofMonths(monthCount), YearMonthIntervalType(startField, endField))
    val actual = toHiveString(interval, false, getTimeFormatters, getBinaryFormatter)
    assert(actual === expected)
  }
}

test("SPARK-34984, SPARK-35016: day-time interval formatting in hive result") {
val df = Seq(Duration.ofDays(5).plusMillis(10)).toDF("i")
val plan1 = df.queryExecution.executedPlan
Expand Down

0 comments on commit 7aa83ad

Please sign in to comment.