diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
index a493c5ff37..429048c77f 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
@@ -156,7 +156,7 @@ object SparkConfiguration extends Logging {
     CommonVars("wds.linkis.spark.engineconn.fatal.log", "error writing class;OutOfMemoryError")
 
   val PYSPARK_PYTHON3_PATH =
-    CommonVars[String]("pyspark.python3.path", "/appcom/Install/anaconda3/bin/python")
+    CommonVars[String]("pyspark.python3.path", "python3")
 
   val ENABLE_REPLACE_PACKAGE_NAME =
     CommonVars("wds.linkis.spark.engine.scala.replace_package_header.enable", true)
@@ -182,6 +182,9 @@ object SparkConfiguration extends Logging {
   val LINKIS_SPARK_ETL_SUPPORT_HUDI = CommonVars[Boolean]("linkis.spark.etl.support.hudi", false)
 
+  val LINKIS_PYSPARK_USE_SECURE_RANDOM =
+    CommonVars[Boolean]("linkis.pyspark.use.secure.random", false).getValue
+
   val SCALA_PARSE_APPEND_CODE =
     CommonVars("linkis.scala.parse.append.code", "val linkisVar=1").getValue
 
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
index 5d4305c67c..f947db9338 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
@@ -46,6 +46,7 @@ import org.apache.spark.sql.execution.datasources.csv.UDF
 
 import java.io._
 import java.net.InetAddress
+import java.security.SecureRandom
 import java.util
 
 import scala.collection.JavaConverters._
@@ -76,7 +77,12 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
   private val lineOutputStream = new RsOutputStream
   val sqlContext = sparkEngineSession.sqlContext
   val SUCCESS = "success"
-  private lazy val py4jToken: String = SecureRandomStringUtils.randomAlphanumeric(256)
+
+  private lazy val py4jToken: String = if (SparkConfiguration.LINKIS_PYSPARK_USE_SECURE_RANDOM) {
+    SecureRandomStringUtils.randomAlphanumeric(256)
+  } else {
+    SecureRandom.getInstance("SHA1PRNG").nextInt(100000).toString
+  }
 
   private lazy val gwBuilder: GatewayServerBuilder = {
     val builder = new GatewayServerBuilder()
@@ -152,7 +158,6 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
     )
     val userDefinePythonVersion = engineCreationContext.getOptions
       .getOrDefault("spark.python.version", "python")
-      .toString
       .toLowerCase()
     val sparkPythonVersion =
       if (
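Note: below is a minimal standalone sketch of the py4j token-selection toggle this diff introduces, for illustration only. The object name Py4jTokenSketch is hypothetical; reading the toggle from a system property stands in for SparkConfiguration.LINKIS_PYSPARK_USE_SECURE_RANDOM, and commons-lang3's RandomStringUtils stands in for Linkis' SecureRandomStringUtils. It is a sketch of the behavior, not the project's actual code.

import java.security.SecureRandom
import org.apache.commons.lang3.RandomStringUtils

object Py4jTokenSketch {
  // Hypothetical stand-in for SparkConfiguration.LINKIS_PYSPARK_USE_SECURE_RANDOM,
  // which defaults to false in the diff above.
  val useSecureRandom: Boolean =
    sys.props.get("linkis.pyspark.use.secure.random").contains("true")

  // Mirrors the diff's branching: a 256-character alphanumeric token when the
  // toggle is on, otherwise a short numeric token drawn from a SHA1PRNG-backed
  // SecureRandom instance.
  lazy val py4jToken: String =
    if (useSecureRandom) RandomStringUtils.randomAlphanumeric(256)
    else SecureRandom.getInstance("SHA1PRNG").nextInt(100000).toString

  def main(args: Array[String]): Unit = println(py4jToken)
}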