Skip to content

Commit

Permalink
[SPARK-49511][SQL] Apply formatting rules to sql/api
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?

This PR proposes to apply formatting rules to sql/api

### Why are the changes needed?

The package is new, so we can start autoformatting it now. Doing so has no downside such as increased backporting difficulty.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Manually, by running `dev/lint-scala`.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #47989 from hvanhovell/SPARK-49511.

Lead-authored-by: Herman van Hovell <[email protected]>
Co-authored-by: Hyukjin Kwon <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
  • Loading branch information
hvanhovell and HyukjinKwon committed Sep 6, 2024
1 parent 62344cd commit fdeb288
Show file tree
Hide file tree
Showing 112 changed files with 7,952 additions and 5,684 deletions.
3 changes: 2 additions & 1 deletion dev/lint-scala
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ ERRORS=$(./build/mvn \
-Dscalafmt.skip=false \
-Dscalafmt.validateOnly=true \
-Dscalafmt.changedOnly=false \
-pl sql/api \
-pl sql/connect/common \
-pl sql/connect/server \
-pl connector/connect/client/jvm \
Expand All @@ -38,7 +39,7 @@ ERRORS=$(./build/mvn \
if test ! -z "$ERRORS"; then
echo -e "The scalafmt check failed on sql/connect or connector/connect at following occurrences:\n\n$ERRORS\n"
echo "Before submitting your change, please make sure to format your code using the following command:"
echo "./build/mvn scalafmt:format -Dscalafmt.skip=false -Dscalafmt.validateOnly=false -Dscalafmt.changedOnly=false -pl sql/connect/common -pl sql/connect/server -pl connector/connect/client/jvm"
echo "./build/mvn scalafmt:format -Dscalafmt.skip=false -Dscalafmt.validateOnly=false -Dscalafmt.changedOnly=false -pl sql/api -pl sql/connect/common -pl sql/connect/server -pl connector/connect/client/jvm"
exit 1
else
echo -e "Scalafmt checks passed."
Expand Down
36 changes: 17 additions & 19 deletions sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala
Original file line number Diff line number Diff line change
Expand Up @@ -29,20 +29,20 @@ import org.apache.spark.sql.catalyst.trees.{Origin, WithOrigin}
* @since 1.3.0
*/
@Stable
class AnalysisException protected(
class AnalysisException protected (
val message: String,
val line: Option[Int] = None,
val startPosition: Option[Int] = None,
val cause: Option[Throwable] = None,
val errorClass: Option[String] = None,
val messageParameters: Map[String, String] = Map.empty,
val context: Array[QueryContext] = Array.empty)
extends Exception(message, cause.orNull) with SparkThrowable with Serializable with WithOrigin {
extends Exception(message, cause.orNull)
with SparkThrowable
with Serializable
with WithOrigin {

def this(
errorClass: String,
messageParameters: Map[String, String],
cause: Option[Throwable]) =
def this(errorClass: String, messageParameters: Map[String, String], cause: Option[Throwable]) =
this(
SparkThrowableHelper.getMessage(errorClass, messageParameters),
errorClass = Some(errorClass),
Expand Down Expand Up @@ -73,18 +73,10 @@ class AnalysisException protected(
cause = null,
context = context)

def this(
errorClass: String,
messageParameters: Map[String, String]) =
this(
errorClass = errorClass,
messageParameters = messageParameters,
cause = None)
def this(errorClass: String, messageParameters: Map[String, String]) =
this(errorClass = errorClass, messageParameters = messageParameters, cause = None)

def this(
errorClass: String,
messageParameters: Map[String, String],
origin: Origin) =
def this(errorClass: String, messageParameters: Map[String, String], origin: Origin) =
this(
SparkThrowableHelper.getMessage(errorClass, messageParameters),
line = origin.line,
Expand Down Expand Up @@ -115,8 +107,14 @@ class AnalysisException protected(
errorClass: Option[String] = this.errorClass,
messageParameters: Map[String, String] = this.messageParameters,
context: Array[QueryContext] = this.context): AnalysisException =
new AnalysisException(message, line, startPosition, cause, errorClass,
messageParameters, context)
new AnalysisException(
message,
line,
startPosition,
cause,
errorClass,
messageParameters,
context)

def withPosition(origin: Origin): AnalysisException = {
val newException = this.copy(
Expand Down
3 changes: 1 addition & 2 deletions sql/api/src/main/scala/org/apache/spark/sql/Artifact.scala
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,7 @@ import org.apache.spark.sql.util.ArtifactUtils
import org.apache.spark.util.ArrayImplicits._
import org.apache.spark.util.MavenUtils


private[sql] class Artifact private(val path: Path, val storage: LocalData) {
private[sql] class Artifact private (val path: Path, val storage: LocalData) {
require(!path.isAbsolute, s"Bad path: $path")

lazy val size: Long = storage match {
Expand Down
Loading

0 comments on commit fdeb288

Please sign in to comment.