fix scalastyle
dusantism-db committed Nov 22, 2024
1 parent 44bb907 commit ffca616
Showing 2 changed files with 40 additions and 37 deletions.
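
Context for the diff below (an explanatory note, not part of the patch): the commit cleans up two recurring patterns in these test suites. First, the expected-result Seq literals ended their last element with a trailing comma, which is now dropped. Second, a few CREATE TABLE statements sat on a single source line longer than the 100-character limit that Spark's scalastyle configuration is assumed to enforce, and are now wrapped. A minimal Scala sketch of the corrected shape (the object name and values are placeholders, not code copied from the patch):

object ScalastyleFixSketch {
  // 1) Trailing comma: previously the final element read  "DropVariable",
  //    immediately before the closing parentheses; the comma is now dropped.
  val expected: Seq[String] = Seq(
    "statement1",
    "DropVariable" // final element, no trailing comma
  )

  // 2) Line length: a CREATE TABLE that used to sit on one long line is
  //    wrapped so that no source line exceeds 100 characters.
  val sqlScript: String =
    """
      |BEGIN
      |  CREATE TABLE t (int_column INT, map_column MAP<STRING, INT>,
      |    struct_column STRUCT<name: STRING, age: INT>, array_column ARRAY<STRING>);
      |END
      |""".stripMargin
}
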
@@ -744,7 +744,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
"statement1",
"statement2",
"DropVariable", // drop for query var intCol
"DropVariable", // drop for loop var x
"DropVariable" // drop for loop var x
))
}

@@ -791,7 +791,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
"DropVariable", // drop for query var intCol1
"DropVariable", // drop for loop var y
"DropVariable", // drop for query var intCol
"DropVariable", // drop for loop var x
"DropVariable" // drop for loop var x
))
}

@@ -808,7 +808,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
val statements = iter.map(extractStatementValue).toSeq
assert(statements === Seq(
"body",
"DropVariable", // drop for query var intCol
"DropVariable" // drop for query var intCol
))
}

@@ -827,7 +827,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
val statements = iter.map(extractStatementValue).toSeq
assert(statements === Seq(
"statement1", "statement2", "statement1", "statement2",
"DropVariable", // drop for query var intCol
"DropVariable" // drop for query var intCol
))
}

@@ -869,7 +869,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
"DropVariable", // drop for query var intCol1
"body", "body",
"DropVariable", // drop for query var intCol1
"DropVariable", // drop for query var intCol
"DropVariable" // drop for query var intCol
))
}

@@ -893,7 +893,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
"statement1",
"lbl1",
"DropVariable", // drop for query var intCol
"DropVariable", // drop for loop var x
"DropVariable" // drop for loop var x
))
}

@@ -945,7 +945,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
"body1",
"lbl1",
"DropVariable", // drop for query var intCol
"DropVariable", // drop for loop var x
"DropVariable" // drop for loop var x
))
}

@@ -990,7 +990,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
val statements = iter.map(extractStatementValue).toSeq
assert(statements === Seq(
"statement1", "lbl1", "statement1", "lbl1",
"DropVariable", // drop for query var intCol
"DropVariable" // drop for query var intCol
))
}

@@ -1036,7 +1036,7 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
val statements = iter.map(extractStatementValue).toSeq
assert(statements === Seq(
"outer_body", "body1", "lbl1", "outer_body", "body1", "lbl1",
"DropVariable", // drop for query var intCol
"DropVariable" // drop for query var intCol
))
}

@@ -1056,9 +1056,9 @@ class SqlScriptingExecutionNodeSuite extends SparkFunSuite with SharedSparkSession {
body = new CompoundBodyExec(Seq(
TestLeafStatement("body1"),
new LeaveStatementExec("lbl1"),
TestLeafStatement("body2"))),
TestLeafStatement("body2")))
)
)),
))
)
)).getTreeIterator
val statements = iter.map(extractStatementValue).toSeq
@@ -1568,7 +1568,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -1612,7 +1612,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -1623,7 +1623,8 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
val sqlScript =
"""
|BEGIN
| CREATE TABLE t (int_column INT, map_column MAP<STRING, INT>, struct_column STRUCT<name: STRING, age: INT>, array_column ARRAY<STRING>);
| CREATE TABLE t (int_column INT, map_column MAP<STRING, INT>,
| struct_column STRUCT<name: STRING, age: INT>, array_column ARRAY<STRING>);
| INSERT INTO t VALUES
| (1, MAP('a', 1), STRUCT('John', 25), ARRAY('apricot', 'quince')),
| (2, MAP('b', 2), STRUCT('Jane', 30), ARRAY('plum', 'pear'));
@@ -1657,7 +1658,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -1669,7 +1670,8 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
"""
|BEGIN
| CREATE TABLE t
| (int_column INT, struct_column STRUCT<num: INT, struct2: STRUCT<struct3: STRUCT<name: STRING>>>);
| (int_column INT,
| struct_column STRUCT<num: INT, struct2: STRUCT<struct3: STRUCT<name: STRING>>>);
| INSERT INTO t VALUES
| (1, STRUCT(1, STRUCT(STRUCT("one")))),
| (2, STRUCT(2, STRUCT(STRUCT("two"))));
@@ -1689,7 +1691,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(Row(2, Row(Row("two"))))), // select struct_column
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -1720,7 +1722,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(Map("b" -> Map(2 -> Map(true -> 20))))), // select map_column
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -1752,7 +1754,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(Array(Seq(Seq(7, 8), Seq(9, 10)), Seq(Seq(11, 12))))), // array_column
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -1771,7 +1773,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
|""".stripMargin

val expected = Seq(
Seq.empty[Row], // create table
Seq.empty[Row] // create table
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -1806,7 +1808,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row("fourth")), // select x.stringCol
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2032,7 +2034,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq.empty[Row], // insert
Seq.empty[Row], // insert
Seq(Row(3)), // select y.intCol2
Seq(Row(3)), // select intCol2
Seq(Row(3)) // select intCol2
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2068,7 +2070,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(3)), // select y.intCol2
Seq(Row(3)), // select intCol2
Seq.empty[Row], // drop outer var
Seq.empty[Row], // drop outer var
Seq.empty[Row] // drop outer var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2097,7 +2099,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(1.0)), // select doubleCol
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2131,7 +2133,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(2.0)), // select doubleCol
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2142,7 +2144,8 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
val sqlScript =
"""
|BEGIN
| CREATE TABLE t (int_column INT, map_column MAP<STRING, INT>, struct_column STRUCT<name: STRING, age: INT>, array_column ARRAY<STRING>);
| CREATE TABLE t (int_column INT, map_column MAP<STRING, INT>,
| struct_column STRUCT<name: STRING, age: INT>, array_column ARRAY<STRING>);
| INSERT INTO t VALUES
| (1, MAP('a', 1), STRUCT('John', 25), ARRAY('apricot', 'quince')),
| (2, MAP('b', 2), STRUCT('Jane', 30), ARRAY('plum', 'pear'));
@@ -2166,7 +2169,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2177,8 +2180,8 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
val sqlScript =
"""
|BEGIN
| CREATE TABLE t
| (int_column INT, struct_column STRUCT<num: INT, struct2: STRUCT<struct3: STRUCT<name: STRING>>>);
| CREATE TABLE t (int_column INT,
| struct_column STRUCT<num: INT, struct2: STRUCT<struct3: STRUCT<name: STRING>>>);
| INSERT INTO t VALUES
| (1, STRUCT(1, STRUCT(STRUCT("one")))),
| (2, STRUCT(2, STRUCT(STRUCT("two"))));
@@ -2194,7 +2197,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(Row(1, Row(Row("one"))))), // select struct_column
Seq(Row(Row(2, Row(Row("two"))))), // select struct_column
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2221,7 +2224,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(Map("a" -> Map(1 -> Map(false -> 10))))), // select map_column
Seq(Row(Map("b" -> Map(2 -> Map(true -> 20))))), // select map_column
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2249,7 +2252,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row(Seq(Seq(Seq(1, 2), Seq(3, 4)), Seq(Seq(5, 6))))), // array_column
Seq(Row(Array(Seq(Seq(7, 8), Seq(9, 10)), Seq(Seq(11, 12))))), // array_column
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2268,7 +2271,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
|""".stripMargin

val expected = Seq(
Seq.empty[Row], // create table
Seq.empty[Row] // create table
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2298,7 +2301,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq(Row("third")), // select stringCol
Seq(Row("fourth")), // select stringCol
Seq.empty[Row], // drop local var
Seq.empty[Row], // drop local var
Seq.empty[Row] // drop local var
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2325,7 +2328,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq.empty[Row], // create table
Seq.empty[Row], // insert
Seq(Row("first")), // select stringCol
Seq(Row("second")), // select stringCol
Seq(Row("second")) // select stringCol
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2498,7 +2501,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq.empty[Row], // create table
Seq.empty[Row], // insert
Seq.empty[Row], // insert
Seq(Row(3)), // select intCol2
Seq(Row(3)) // select intCol2
)
verifySqlScriptResult(sqlScript, expected)
}
@@ -2530,7 +2533,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
Seq.empty[Row], // insert
Seq(Row(3)), // select intCol2
Seq(Row(3)), // select intCol2
Seq.empty[Row], // drop outer var
Seq.empty[Row] // drop outer var
)
verifySqlScriptResult(sqlScript, expected)
}