From a3625a98e78c43c64cbe4a21f7c70f46307df508 Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Wed, 12 Jun 2024 17:11:22 +0800 Subject: [PATCH] [SPARK-48595][CORE] Cleanup deprecated api usage related to `commons-compress` ### What changes were proposed in this pull request? This PR uses `org.apache.commons.io.output.CountingOutputStream` instead of `org.apache.commons.compress.utils.CountingOutputStream` to fix the following compilation warnings related to `commons-compress`: ``` [WARNING] [Warn] /Users/yangjie01/SourceCode/git/spark-mine-13/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileWriters.scala:308: class CountingOutputStream in package utils is deprecated Applicable -Wconf / @nowarn filters for this warning: msg=<part of the message>, cat=deprecation, site=org.apache.spark.deploy.history.RollingEventLogFilesWriter.countingOutputStream, origin=org.apache.commons.compress.utils.CountingOutputStream [WARNING] [Warn] /Users/yangjie01/SourceCode/git/spark-mine-13/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileWriters.scala:351: class CountingOutputStream in package utils is deprecated Applicable -Wconf / @nowarn filters for this warning: msg=<part of the message>, cat=deprecation, site=org.apache.spark.deploy.history.RollingEventLogFilesWriter.rollEventLogFile.$anonfun, origin=org.apache.commons.compress.utils.CountingOutputStream ``` The fix refers to: https://github.com/apache/commons-compress/blob/95727006cac0892c654951c4e7f1db142462f22a/src/main/java/org/apache/commons/compress/utils/CountingOutputStream.java#L25-L33 ``` /** * Stream that tracks the number of bytes read. * * @since 1.3 * @NotThreadSafe * @deprecated Use {@link org.apache.commons.io.output.CountingOutputStream}. */ @Deprecated public class CountingOutputStream extends FilterOutputStream { ``` ### Why are the changes needed? Cleanup deprecated api usage related to `commons-compress` ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? 
Pass GitHub Actions ### Was this patch authored or co-authored using generative AI tooling? No Closes #46950 from LuciferYang/SPARK-48595. Authored-by: yangjie01 Signed-off-by: Kent Yao --- .../org/apache/spark/deploy/history/EventLogFileWriters.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileWriters.scala b/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileWriters.scala index 963ed121547cb..f3bb6d5af3358 100644 --- a/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileWriters.scala +++ b/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileWriters.scala @@ -21,7 +21,7 @@ import java.io._ import java.net.URI import java.nio.charset.StandardCharsets -import org.apache.commons.compress.utils.CountingOutputStream +import org.apache.commons.io.output.CountingOutputStream import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileStatus, FileSystem, FSDataOutputStream, Path} import org.apache.hadoop.fs.permission.FsPermission @@ -330,7 +330,7 @@ class RollingEventLogFilesWriter( override def writeEvent(eventJson: String, flushLogger: Boolean = false): Unit = { writer.foreach { w => - val currentLen = countingOutputStream.get.getBytesWritten + val currentLen = countingOutputStream.get.getByteCount if (currentLen + eventJson.length > eventFileMaxLength) { rollEventLogFile() }