diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 20d0bddcbfd0a..4520cbeaa1521 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -415,6 +415,7 @@ class SparkSession private(
   /**
    * Executes given script and return the result of the last statement.
+   * If the script contains no queries, an empty `DataFrame` is returned.
    *
    * @param script A SQL script to execute.
    * @param args A map of parameter names to SQL literal expressions.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala
index d124e3d484c71..9fa70156125c6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala
@@ -44,9 +44,7 @@ class SqlScriptingExecution(
   override def hasNext: Boolean = current.isDefined
 
   override def next(): DataFrame = {
-    if (!hasNext) {throw SparkException.internalError(
-      "No more elements to iterate through.")
-    }
+    if (!hasNext) throw SparkException.internalError("No more elements to iterate through.")
     val nextDataFrame = current.get
     current = getNextResult
     nextDataFrame
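
To illustrate the behavior documented in the first hunk, here is a minimal, self-contained sketch. It assumes SQL scripting is exposed through `spark.sql` with `BEGIN ... END` compound statements and gated by a `spark.sql.scripting.enabled` configuration key; the config name, the script syntax shown, and the expected outputs are illustrative assumptions, not verified against this exact change.

```scala
// Sketch of the documented script-result behavior (assumptions noted above).
import org.apache.spark.sql.SparkSession

object SqlScriptResultSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("sql-script-result-sketch")
      // Assumed config key enabling SQL scripting.
      .config("spark.sql.scripting.enabled", "true")
      .getOrCreate()

    // The result of the last statement in the script is returned as a DataFrame.
    val lastResult = spark.sql(
      """BEGIN
        |  DECLARE x INT DEFAULT 41;
        |  SELECT x + 1 AS answer;
        |END""".stripMargin)
    lastResult.show() // expected: one row with answer = 42

    // A script with no result-producing statements should come back as an
    // empty DataFrame rather than null, per the doc comment added above.
    val noResult = spark.sql(
      """BEGIN
        |  DECLARE y INT DEFAULT 0;
        |END""".stripMargin)
    println(s"empty result: ${noResult.isEmpty}") // expected: true

    spark.stop()
  }
}
```

The second hunk only condenses the guard in `SqlScriptingExecution.next()` into a single-line throw; the iterator semantics (throwing an internal error when `next()` is called past the last result) are unchanged.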