From c1fc8fa27e2c4777025794804aa2a6e5001a683b Mon Sep 17 00:00:00 2001
From: Louis Chu
Date: Fri, 26 Apr 2024 21:52:37 -0700
Subject: [PATCH] sbt fmt

---
 .../main/scala/org/apache/spark/sql/FlintREPL.scala | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/spark-sql-application/src/main/scala/org/apache/spark/sql/FlintREPL.scala b/spark-sql-application/src/main/scala/org/apache/spark/sql/FlintREPL.scala
index 12d354082..ad955053b 100644
--- a/spark-sql-application/src/main/scala/org/apache/spark/sql/FlintREPL.scala
+++ b/spark-sql-application/src/main/scala/org/apache/spark/sql/FlintREPL.scala
@@ -29,9 +29,7 @@ import org.opensearch.search.sort.SortOrder
 import org.apache.spark.SparkConf
 import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
-import org.apache.spark.sql.FlintJob.createSparkSession
 import org.apache.spark.sql.flint.config.FlintSparkConf
-import org.apache.spark.sql.flint.config.FlintSparkConf.REPL_INACTIVITY_TIMEOUT_MILLIS
 import org.apache.spark.util.ThreadUtils
 
 /**
@@ -139,11 +137,12 @@ object FlintREPL extends Logging with FlintJobExecutor {
     val sessionTimerContext = getTimerContext(MetricConstants.REPL_PROCESSING_TIME_METRIC)
 
     /**
-     * Transition the session update logic from {@link org.apache.spark.util.ShutdownHookManager} to {@link SparkListenerApplicationEnd}.
-     * This change helps prevent interruptions to asynchronous SigV4A signing during REPL shutdown.
+     * Transition the session update logic from {@link
+     * org.apache.spark.util.ShutdownHookManager} to {@link SparkListenerApplicationEnd}. This
+     * change helps prevent interruptions to asynchronous SigV4A signing during REPL shutdown.
      *
-     * Cancelling an EMR job directly when SigV4a signer in use could otherwise lead to stale sessions. For
-     * tracking, see the GitHub issue:
+     * Cancelling an EMR job directly when SigV4a signer in use could otherwise lead to stale
+     * sessions. For tracking, see the GitHub issue:
      * https://github.com/opensearch-project/opensearch-spark/issues/320
      */
     spark.sparkContext.addSparkListener(
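
Note: the scaladoc in the second hunk describes moving the session-state update out of a
JVM shutdown hook (org.apache.spark.util.ShutdownHookManager) and into a SparkListener
callback, so the update runs inside Spark's normal stop sequence instead of racing the
asynchronous SigV4A signer at JVM teardown. A minimal sketch of that listener pattern
follows; markSessionAsDead is a hypothetical stand-in for FlintREPL's actual
session-index update, not the project's API.

    import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
    import org.apache.spark.sql.SparkSession

    object ListenerSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession
          .builder()
          .master("local[*]")
          .appName("listener-sketch")
          .getOrCreate()

        // Register cleanup on SparkListenerApplicationEnd rather than via a JVM
        // shutdown hook: the event is posted from SparkContext.stop(), while the
        // application's clients are still usable.
        spark.sparkContext.addSparkListener(new SparkListener {
          override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd): Unit = {
            // Hypothetical helper: would mark the REPL session "dead" in the session index.
            markSessionAsDead()
          }
        })

        spark.stop() // posts SparkListenerApplicationEnd, invoking the callback above
      }

      private def markSessionAsDead(): Unit =
        println("session marked dead")
    }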