diff --git a/dev/deps/spark-deps-hadoop-2.7-hive-1.2 b/dev/deps/spark-deps-hadoop-2.7-hive-1.2
index 4936d808db85a..f1bf1160f9039 100644
--- a/dev/deps/spark-deps-hadoop-2.7-hive-1.2
+++ b/dev/deps/spark-deps-hadoop-2.7-hive-1.2
@@ -35,7 +35,7 @@ commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
commons-cli/1.2//commons-cli-1.2.jar
commons-codec/1.10//commons-codec-1.10.jar
commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-compiler/3.1.2//commons-compiler-3.1.2.jar
+commons-compiler/3.0.16//commons-compiler-3.0.16.jar
commons-compress/1.8.1//commons-compress-1.8.1.jar
commons-configuration/1.6//commons-configuration-1.6.jar
commons-crypto/1.0.0//commons-crypto-1.0.0.jar
@@ -106,7 +106,7 @@ jakarta.inject/2.6.1//jakarta.inject-2.6.1.jar
jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.1.2//janino-3.1.2.jar
+janino/3.0.16//janino-3.0.16.jar
javassist/3.25.0-GA//javassist-3.25.0-GA.jar
javax.inject/1//javax.inject-1.jar
javax.servlet-api/3.1.0//javax.servlet-api-3.1.0.jar
diff --git a/dev/deps/spark-deps-hadoop-2.7-hive-2.3 b/dev/deps/spark-deps-hadoop-2.7-hive-2.3
index 82cd20bf77191..758e32086024f 100644
--- a/dev/deps/spark-deps-hadoop-2.7-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-2.7-hive-2.3
@@ -33,7 +33,7 @@ commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
commons-cli/1.2//commons-cli-1.2.jar
commons-codec/1.10//commons-codec-1.10.jar
commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-compiler/3.1.2//commons-compiler-3.1.2.jar
+commons-compiler/3.0.16//commons-compiler-3.0.16.jar
commons-compress/1.8.1//commons-compress-1.8.1.jar
commons-configuration/1.6//commons-configuration-1.6.jar
commons-crypto/1.0.0//commons-crypto-1.0.0.jar
@@ -119,7 +119,7 @@ jakarta.inject/2.6.1//jakarta.inject-2.6.1.jar
jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.1.2//janino-3.1.2.jar
+janino/3.0.16//janino-3.0.16.jar
javassist/3.25.0-GA//javassist-3.25.0-GA.jar
javax.inject/1//javax.inject-1.jar
javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar
diff --git a/dev/deps/spark-deps-hadoop-3.2-hive-2.3 b/dev/deps/spark-deps-hadoop-3.2-hive-2.3
index 7257fb8722422..0b8a068bff626 100644
--- a/dev/deps/spark-deps-hadoop-3.2-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3.2-hive-2.3
@@ -30,7 +30,7 @@ commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
commons-cli/1.2//commons-cli-1.2.jar
commons-codec/1.10//commons-codec-1.10.jar
commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-compiler/3.1.2//commons-compiler-3.1.2.jar
+commons-compiler/3.0.16//commons-compiler-3.0.16.jar
commons-compress/1.8.1//commons-compress-1.8.1.jar
commons-configuration2/2.1.1//commons-configuration2-2.1.1.jar
commons-crypto/1.0.0//commons-crypto-1.0.0.jar
@@ -118,7 +118,7 @@ jakarta.inject/2.6.1//jakarta.inject-2.6.1.jar
jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.1.2//janino-3.1.2.jar
+janino/3.0.16//janino-3.0.16.jar
javassist/3.25.0-GA//javassist-3.25.0-GA.jar
javax.inject/1//javax.inject-1.jar
javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar
diff --git a/pom.xml b/pom.xml
index 23de5697f68e9..c5722fae7392a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -185,7 +185,7 @@
    <commons-pool2.version>2.6.2</commons-pool2.version>
    <datanucleus-core.version>4.1.17</datanucleus-core.version>
    <guava.version>14.0.1</guava.version>
-    <janino.version>3.1.2</janino.version>
+    <janino.version>3.0.16</janino.version>
    <jersey.version>2.30</jersey.version>
    <joda.version>2.10.5</joda.version>
    <jodd.version>3.5.2</jodd.version>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index 51aeee5c277f7..f4be223cfebb6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -27,9 +27,8 @@ import scala.util.control.NonFatal
import com.google.common.cache.{CacheBuilder, CacheLoader}
import com.google.common.util.concurrent.{ExecutionError, UncheckedExecutionException}
-import org.codehaus.commons.compiler.{CompileException, InternalCompilerException}
-import org.codehaus.commons.compiler.util.reflect.ByteArrayClassLoader
-import org.codehaus.janino.{ClassBodyEvaluator, SimpleCompiler}
+import org.codehaus.commons.compiler.CompileException
+import org.codehaus.janino.{ByteArrayClassLoader, ClassBodyEvaluator, InternalCompilerException, SimpleCompiler}
import org.codehaus.janino.util.ClassFile

import org.apache.spark.{TaskContext, TaskKilledException}
@@ -1421,10 +1420,9 @@ object CodeGenerator extends Logging {
  private def updateAndGetCompilationStats(evaluator: ClassBodyEvaluator): ByteCodeStats = {
    // First retrieve the generated classes.
    val classes = {
-      val scField = classOf[ClassBodyEvaluator].getDeclaredField("sc")
-      scField.setAccessible(true)
-      val compiler = scField.get(evaluator).asInstanceOf[SimpleCompiler]
-      val loader = compiler.getClassLoader.asInstanceOf[ByteArrayClassLoader]
+      val resultField = classOf[SimpleCompiler].getDeclaredField("result")
+      resultField.setAccessible(true)
+      val loader = resultField.get(evaluator).asInstanceOf[ByteArrayClassLoader]
      val classesField = loader.getClass.getDeclaredField("classes")
      classesField.setAccessible(true)
      classesField.get(loader).asInstanceOf[JavaMap[String, Array[Byte]]].asScala
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index bcfc77545bbd2..7acc7720dd7ab 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -2541,6 +2541,11 @@ class DataFrameSuite extends QueryTest
      assert(df.schema === new StructType().add(StructField("d", DecimalType(38, 0))))
    }
  }
+
+  test("SPARK-32640: ln(NaN) should return NaN") {
+    val df = Seq(Double.NaN).toDF("d")
+    checkAnswer(df.selectExpr("ln(d)"), Row(Double.NaN))
+  }
}

case class GroupByKey(a: Int, b: Int)