diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index 63d981c5fde82..e63d72ebb40d2 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -24,7 +24,7 @@ import java.util.concurrent.{ScheduledFuture, TimeUnit}
 import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.util.Random
 
-import org.apache.spark.{SecurityManager, SparkConf, SparkException}
+import org.apache.spark.{SecurityManager, SparkConf}
 import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.DriverState.DriverState
@@ -119,9 +119,6 @@ private[deploy] class Master(
   // Default maxCores for applications that don't specify it (i.e. pass Int.MaxValue)
   private val defaultCores = conf.get(DEFAULT_CORES)
   val reverseProxy = conf.get(UI_REVERSE_PROXY)
-  if (defaultCores < 1) {
-    throw new SparkException(s"${DEFAULT_CORES.key} must be positive")
-  }
   val historyServerUrl = conf.get(MASTER_UI_HISTORY_SERVER_URL)
 
   // Alternative application submission gateway that is stable across Spark versions
diff --git a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
index 906ec0fc99737..7b35e92022ae0 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
@@ -73,6 +73,7 @@ private[spark] object Deploy {
   val DEFAULT_CORES = ConfigBuilder("spark.deploy.defaultCores")
     .version("0.9.0")
     .intConf
+    .checkValue(_ > 0, "spark.deploy.defaultCores must be positive.")
     .createWithDefault(Int.MaxValue)
 
   val MAX_DRIVERS = ConfigBuilder("spark.deploy.maxDrivers")