@@ -21,7 +21,7 @@ import java.io._
21
21
22
22
import scala .collection .mutable .ArrayBuffer
23
23
24
- import org .apache .spark .{ SparkConf , SparkContext , SparkEnv , SparkException , TestUtils }
24
+ import org .apache .spark ._
25
25
import org .apache .spark .deploy .SparkSubmit ._
26
26
import org .apache .spark .util .Utils
27
27
import org .scalatest .FunSuite
@@ -451,24 +451,24 @@ class SparkSubmitSuite extends FunSuite with Matchers {
451
451
}
452
452
}
453
453
454
object JarCreationTest extends Logging {
  /**
   * Entry point launched via spark-submit by the test suite: verifies that a class packaged
   * in the user-supplied jar ("SparkSubmitClassA") is loadable from executor task threads.
   *
   * Each of the 10 partitions attempts to load the user class through the thread's context
   * class loader. Any failure is captured as a string (exception + stack trace) rather than
   * rethrown, so the driver can collect the errors and fail with a readable message.
   */
  def main(args: Array[String]) {
    Utils.configTestLog4j("INFO")
    val conf = new SparkConf()
    val sc = new SparkContext(conf)
    val result = sc.makeRDD(1 to 100, 10).mapPartitions { x =>
      // Holds the failure description for this partition; null means success.
      var exception: String = null
      try {
        // Must go through the thread's context class loader: that is where the executor
        // registers classes from user jars; the defining loader of this test object
        // would not see them.
        // NOTE(review): the original issued this exact Class.forName call twice in a row;
        // the duplicate was redundant (same class name, same loader) and is removed here.
        // If loading a second jar class (e.g. "SparkSubmitClassB") was intended, add that
        // call explicitly.
        Class.forName("SparkSubmitClassA", true, Thread.currentThread().getContextClassLoader)
      } catch {
        // Deliberately catch Throwable rather than NonFatal: class-loading problems can
        // surface as Errors (e.g. NoClassDefFoundError) and must still be reported back
        // to the driver instead of killing the task silently.
        case t: Throwable =>
          exception = t + "\n" + t.getStackTraceString
      }
      // Emit nothing on success, exactly one error string on failure.
      Option(exception).toSeq.iterator
    }.collect()
    if (result.nonEmpty) {
      // Surface only the first captured failure; one is enough to fail the test run.
      throw new Exception("Could not load user class from jar:\n" + result(0))
    }
  }
}
0 commit comments