# [SPARK-45392][CORE][SQL][SS] Replace `Class.newInstance()` with `Class.getDeclaredConstructor().newInstance()`

### What changes were proposed in this pull request?
This PR replaces `Class.newInstance()` with `Class.getDeclaredConstructor().newInstance()` to clean up the use of a deprecated API; see the deprecation note in the JDK sources:

https://github.com/openjdk/jdk/blob/dfacda488bfbe2e11e8d607a6d08527710286982/src/java.base/share/classes/java/lang/Class.java#L557-L583
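
For context, the mechanical change applied at each call site looks like the sketch below (`org.example.MyProvider` is a placeholder, not a class from this patch). One behavioral difference worth keeping in mind: `Class.newInstance()` propagated any exception thrown by the no-arg constructor as-is, including checked exceptions, whereas the replacement wraps constructor failures in `java.lang.reflect.InvocationTargetException`.

```scala
// A minimal sketch of the substitution; the class name is hypothetical.
val cls: Class[_] = Class.forName("org.example.MyProvider")

// Before: deprecated since JDK 9.
val before = cls.newInstance()

// After: requires a no-arg constructor to exist; exceptions thrown by the
// constructor arrive wrapped in java.lang.reflect.InvocationTargetException.
val after = cls.getDeclaredConstructor().newInstance()
```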

Note: the new call path no longer benefits from the `cachedConstructor` field that `Class.newInstance()` maintains internally. I don't think any of the call sites fixed here are currently hotspots; if one is discovered in the future, it can be optimized by caching the resolved constructors in a loading cache.
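
For illustration, such a cache could look roughly like the sketch below, built on Guava (already a Spark dependency). The `CachedConstructors` object, its size bound, and its placement are assumptions made for the example, not part of this patch.

```scala
import java.lang.reflect.Constructor

import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}

// Illustrative only: memoize the no-arg Constructor per Class so repeated
// instantiations skip the reflective lookup, approximating the JDK-internal
// cachedConstructor that Class.newInstance() relied on.
object CachedConstructors {
  private val cache: LoadingCache[Class[_], Constructor[_]] =
    CacheBuilder.newBuilder()
      .maximumSize(1024) // arbitrary bound, chosen for the sketch
      .build(new CacheLoader[Class[_], Constructor[_]] {
        override def load(cls: Class[_]): Constructor[_] = cls.getDeclaredConstructor()
      })

  def newInstance(cls: Class[_]): Any = cache.get(cls).newInstance()
}
```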

### Why are the changes needed?
Clean up the use of a deprecated API.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
Pass GitHub Actions

### Was this patch authored or co-authored using generative AI tooling?
No

Closes #43193 from LuciferYang/class-newInstance.

Authored-by: yangjie01 <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
LuciferYang authored and dongjoon-hyun committed Oct 2, 2023
1 parent 643553a commit 8f1b028
Showing 9 changed files with 19 additions and 14 deletions.
```diff
@@ -335,7 +335,7 @@ class ExecutorClassLoaderSuite
     // scalastyle:off classforname
     val classB = Class.forName("TestClassB", true, classLoader)
     // scalastyle:on classforname
-    val instanceOfTestClassB = classB.newInstance()
+    val instanceOfTestClassB = classB.getDeclaredConstructor().newInstance()
     assert(instanceOfTestClassB.toString === "TestClassB")
     classB.getMethod("foo").invoke(instanceOfTestClassB).asInstanceOf[String]
   }
```
```diff
@@ -763,7 +763,8 @@ class SparkSession private(
     DataSource.lookupDataSource(runner, sessionState.conf) match {
       case source if classOf[ExternalCommandRunner].isAssignableFrom(source) =>
         Dataset.ofRows(self, ExternalCommandExecutor(
-          source.newInstance().asInstanceOf[ExternalCommandRunner], command, options))
+          source.getDeclaredConstructor().newInstance()
+            .asInstanceOf[ExternalCommandRunner], command, options))
 
       case _ =>
         throw QueryCompilationErrors.commandExecutionInRunnerUnsupportedError(runner)
```
```diff
@@ -1024,7 +1024,8 @@ object DDLUtils extends Logging {
     source match {
       case f: FileFormat => DataSourceUtils.checkFieldNames(f, schema)
       case f: FileDataSourceV2 =>
-        DataSourceUtils.checkFieldNames(f.fallbackFileFormat.newInstance(), schema)
+        DataSourceUtils.checkFieldNames(
+          f.fallbackFileFormat.getDeclaredConstructor().newInstance(), schema)
       case _ =>
     }
   }
```
```diff
@@ -105,7 +105,7 @@ case class DataSource(
       // [[FileDataSourceV2]] will still be used if we call the load()/save() method in
       // [[DataFrameReader]]/[[DataFrameWriter]], since they use method `lookupDataSource`
       // instead of `providingClass`.
-      cls.newInstance() match {
+      cls.getDeclaredConstructor().newInstance() match {
         case f: FileDataSourceV2 => f.fallbackFileFormat
         case _ => cls
       }
@@ -699,7 +699,7 @@ object DataSource extends Logging {
     val useV1Sources = conf.getConf(SQLConf.USE_V1_SOURCE_LIST).toLowerCase(Locale.ROOT)
       .split(",").map(_.trim)
     val cls = lookupDataSource(provider, conf)
-    cls.newInstance() match {
+    cls.getDeclaredConstructor().newInstance() match {
       case d: DataSourceRegister if useV1Sources.contains(d.shortName()) => None
       case t: TableProvider
         if !useV1Sources.contains(cls.getCanonicalName.toLowerCase(Locale.ROOT)) =>
```
```diff
@@ -36,7 +36,7 @@ class FallBackFileSourceV2(sparkSession: SparkSession) extends Rule[LogicalPlan]
   override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
     case i @ InsertIntoStatement(
         d @ DataSourceV2Relation(table: FileTable, _, _, _, _), _, _, _, _, _, _) =>
-      val v1FileFormat = table.fallbackFileFormat.newInstance()
+      val v1FileFormat = table.fallbackFileFormat.getDeclaredConstructor().newInstance()
       val relation = HadoopFsRelation(
         table.fileIndex,
         table.fileIndex.partitionSchema,
```
```diff
@@ -284,10 +284,11 @@ case class PreprocessTableCreation(catalog: SessionCatalog) extends Rule[Logical
     }
   }
 
-  private def fallBackV2ToV1(cls: Class[_]): Class[_] = cls.newInstance match {
-    case f: FileDataSourceV2 => f.fallbackFileFormat
-    case _ => cls
-  }
+  private def fallBackV2ToV1(cls: Class[_]): Class[_] =
+    cls.getDeclaredConstructor().newInstance() match {
+      case f: FileDataSourceV2 => f.fallbackFileFormat
+      case _ => cls
+    }
 
   private def normalizeCatalogTable(schema: StructType, table: CatalogTable): CatalogTable = {
     SchemaUtils.checkSchemaColumnNameDuplication(
```
```diff
@@ -29,7 +29,8 @@ class RatePerMicroBatchProviderSuite extends StreamTest {
   import testImplicits._
 
   test("RatePerMicroBatchProvider in registry") {
-    val ds = DataSource.lookupDataSource("rate-micro-batch", spark.sqlContext.conf).newInstance()
+    val ds = DataSource.lookupDataSource("rate-micro-batch", spark.sqlContext.conf)
+      .getConstructor().newInstance()
     assert(ds.isInstanceOf[RatePerMicroBatchProvider], "Could not find rate-micro-batch source")
   }
 
```
```diff
@@ -56,14 +56,15 @@ class RateStreamProviderSuite extends StreamTest {
   }
 
   test("RateStreamProvider in registry") {
-    val ds = DataSource.lookupDataSource("rate", spark.sqlContext.conf).newInstance()
+    val ds = DataSource.lookupDataSource("rate", spark.sqlContext.conf)
+      .getConstructor().newInstance()
     assert(ds.isInstanceOf[RateStreamProvider], "Could not find rate source")
   }
 
   test("compatible with old path in registry") {
     val ds = DataSource.lookupDataSource(
       "org.apache.spark.sql.execution.streaming.RateSourceProvider",
-      spark.sqlContext.conf).newInstance()
+      spark.sqlContext.conf).getConstructor().newInstance()
     assert(ds.isInstanceOf[RateStreamProvider], "Could not find rate source")
   }
 
```
```diff
@@ -87,7 +87,7 @@ class TextSocketStreamSuite extends StreamTest with SharedSparkSession {
   test("backward compatibility with old path") {
     val ds = DataSource.lookupDataSource(
       "org.apache.spark.sql.execution.streaming.TextSocketSourceProvider",
-      spark.sqlContext.conf).newInstance()
+      spark.sqlContext.conf).getConstructor().newInstance()
     assert(ds.isInstanceOf[TextSocketSourceProvider], "Could not find socket source")
   }
 
```
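Aside: the test-suite hunks above use `getConstructor()` rather than `getDeclaredConstructor()`. For the public no-arg constructors these provider classes expose, the two are interchangeable; they differ only when the constructor is non-public. A small illustrative sketch (class names are made up, not from this patch):

```scala
// Illustrative only: how the two lookups differ for non-public constructors.
class PublicCtor             // public no-arg primary constructor
class PrivateCtor private () // private no-arg primary constructor

classOf[PublicCtor].getConstructor()          // OK: finds public constructors only
classOf[PublicCtor].getDeclaredConstructor()  // OK: finds any declared constructor

classOf[PrivateCtor].getDeclaredConstructor() // OK, though setAccessible(true)
                                              // is needed before newInstance()
// classOf[PrivateCtor].getConstructor()      // would throw NoSuchMethodException
```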
