
Commit 05cd0de

added mock repository generator
use Ivy format; addressed comments v0.1; addressed comments v0.2; new fix: avoid using nio.Path
1 parent 9646018 commit 05cd0de

5 files changed: 403 additions, 99 deletions

core/src/main/scala/org/apache/spark/TestUtils.scala

Lines changed: 18 additions & 9 deletions
@@ -105,23 +105,18 @@ private[spark] object TestUtils {
     URI.create(s"string:///${name.replace(".", "/")}${SOURCE.extension}")
   }
 
-  private class JavaSourceFromString(val name: String, val code: String)
+  private[spark] class JavaSourceFromString(val name: String, val code: String)
     extends SimpleJavaFileObject(createURI(name), SOURCE) {
     override def getCharContent(ignoreEncodingErrors: Boolean): String = code
   }
 
-  /** Creates a compiled class with the given name. Class file will be placed in destDir. */
+  /** Creates a compiled class with the source file. Class file will be placed in destDir. */
   def createCompiledClass(
       className: String,
       destDir: File,
-      toStringValue: String = "",
-      baseClass: String = null,
-      classpathUrls: Seq[URL] = Seq()): File = {
+      sourceFile: JavaSourceFromString,
+      classpathUrls: Seq[URL]): File = {
     val compiler = ToolProvider.getSystemJavaCompiler
-    val extendsText = Option(baseClass).map { c => s" extends ${c}" }.getOrElse("")
-    val sourceFile = new JavaSourceFromString(className,
-      "public class " + className + extendsText + " implements java.io.Serializable {" +
-      " @Override public String toString() { return \"" + toStringValue + "\"; }}")
 
     // Calling this outputs a class file in pwd. It's easier to just rename the file than
     // build a custom FileManager that controls the output location.
@@ -144,4 +139,18 @@ private[spark] object TestUtils {
     assert(out.exists(), "Destination file not moved: " + out.getAbsolutePath())
     out
   }
+
+  /** Creates a compiled class with the given name. Class file will be placed in destDir. */
+  def createCompiledClass(
+      className: String,
+      destDir: File,
+      toStringValue: String = "",
+      baseClass: String = null,
+      classpathUrls: Seq[URL] = Seq()): File = {
+    val extendsText = Option(baseClass).map { c => s" extends ${c}" }.getOrElse("")
+    val sourceFile = new JavaSourceFromString(className,
+      "public class " + className + extendsText + " implements java.io.Serializable {" +
+      " @Override public String toString() { return \"" + toStringValue + "\"; }}")
+    createCompiledClass(className, destDir, sourceFile, classpathUrls)
+  }
 }
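
For readers following the refactor, here is a minimal usage sketch of the two createCompiledClass overloads above. It assumes the code is compiled inside the org.apache.spark package (both TestUtils and JavaSourceFromString are private[spark]); the class names, Java source, and output directory are illustrative only, not part of the patch.

package org.apache.spark

import java.io.File
import java.net.URL

object CompiledClassExample {
  def main(args: Array[String]): Unit = {
    val destDir = new File(System.getProperty("java.io.tmpdir"), "spark-test-classes")
    destDir.mkdirs()

    // Original behaviour: the Java source is generated from a class name and a
    // toString value, then delegated to the new overload.
    val simple = TestUtils.createCompiledClass("MySimpleClass", destDir, toStringValue = "hello")

    // New overload: the caller supplies the full Java source, which is what a
    // mock-repository generator needs in order to emit arbitrary classes.
    val source = new TestUtils.JavaSourceFromString("MyLib",
      "public class MyLib { public static int myFunc() { return 42; } }")
    val fromSource = TestUtils.createCompiledClass("MyLib", destDir, source, Seq.empty[URL])

    println(s"compiled ${simple.getName} and ${fromSource.getName} into $destDir")
  }
}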

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 68 additions & 60 deletions
@@ -712,7 +712,9 @@ private[spark] object SparkSubmitUtils {
    * @param artifactId the artifactId of the coordinate
    * @param version the version of the coordinate
    */
-  private[deploy] case class MavenCoordinate(groupId: String, artifactId: String, version: String)
+  private[deploy] case class MavenCoordinate(groupId: String, artifactId: String, version: String) {
+    override def toString: String = s"$groupId:$artifactId:$version"
+  }
 
   /**
    * Extracts maven coordinates from a comma-delimited string. Coordinates should be provided
@@ -735,6 +737,10 @@ private[spark] object SparkSubmitUtils {
     }
   }
 
+  /** Path of the local Maven cache. */
+  private[spark] def m2Path: File = new File(System.getProperty("user.home"),
+    ".m2" + File.separator + "repository" + File.separator)
+
   /**
    * Extracts maven coordinates from a comma-delimited string
    * @param remoteRepos Comma-delimited string of remote repositories
@@ -748,8 +754,7 @@ private[spark] object SparkSubmitUtils {
 
     val localM2 = new IBiblioResolver
     localM2.setM2compatible(true)
-    val m2Path = ".m2" + File.separator + "repository" + File.separator
-    localM2.setRoot(new File(System.getProperty("user.home"), m2Path).toURI.toString)
+    localM2.setRoot(m2Path.toURI.toString)
     localM2.setUsepoms(true)
     localM2.setName("local-m2-cache")
     cr.add(localM2)
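
A small sketch of what the two additions above provide, assuming it is compiled inside the org.apache.spark.deploy package (MavenCoordinate is private[deploy] and m2Path is private[spark]); the coordinate values and printed path are illustrative.

package org.apache.spark.deploy

import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate

object CoordinateExample {
  def main(args: Array[String]): Unit = {
    // toString now renders the canonical "group:artifact:version" form, which is
    // convenient for logging and for building repository paths in tests.
    val coordinate = MavenCoordinate("my.great.lib", "mylib", "0.1")
    println(coordinate)                    // my.great.lib:mylib:0.1

    // m2Path centralises the location of the local Maven cache, so the
    // local-m2-cache resolver above and any tests point at the same directory.
    println(SparkSubmitUtils.m2Path.toURI) // e.g. file:/home/someuser/.m2/repository/
  }
}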
@@ -874,69 +879,72 @@ private[spark] object SparkSubmitUtils {
       ""
     } else {
       val sysOut = System.out
-      // To prevent ivy from logging to system out
-      System.setOut(printStream)
-      val artifacts = extractMavenCoordinates(coordinates)
-      // Default configuration name for ivy
-      val ivyConfName = "default"
-      // set ivy settings for location of cache
-      val ivySettings: IvySettings = new IvySettings
-      // Directories for caching downloads through ivy and storing the jars when maven coordinates
-      // are supplied to spark-submit
-      val alternateIvyCache = ivyPath.getOrElse("")
-      val packagesDirectory: File =
-        if (alternateIvyCache.trim.isEmpty) {
-          new File(ivySettings.getDefaultIvyUserDir, "jars")
+      try {
+        // To prevent ivy from logging to system out
+        System.setOut(printStream)
+        val artifacts = extractMavenCoordinates(coordinates)
+        // Default configuration name for ivy
+        val ivyConfName = "default"
+        // set ivy settings for location of cache
+        val ivySettings: IvySettings = new IvySettings
+        // Directories for caching downloads through ivy and storing the jars when maven coordinates
+        // are supplied to spark-submit
+        val alternateIvyCache = ivyPath.getOrElse("")
+        val packagesDirectory: File =
+          if (alternateIvyCache.trim.isEmpty) {
+            new File(ivySettings.getDefaultIvyUserDir, "jars")
+          } else {
+            ivySettings.setDefaultIvyUserDir(new File(alternateIvyCache))
+            ivySettings.setDefaultCache(new File(alternateIvyCache, "cache"))
+            new File(alternateIvyCache, "jars")
+          }
+        printStream.println(
+          s"Ivy Default Cache set to: ${ivySettings.getDefaultCache.getAbsolutePath}")
+        printStream.println(s"The jars for the packages stored in: $packagesDirectory")
+        // create a pattern matcher
+        ivySettings.addMatcher(new GlobPatternMatcher)
+        // create the dependency resolvers
+        val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
+        ivySettings.addResolver(repoResolver)
+        ivySettings.setDefaultResolver(repoResolver.getName)
+
+        val ivy = Ivy.newInstance(ivySettings)
+        // Set resolve options to download transitive dependencies as well
+        val resolveOptions = new ResolveOptions
+        resolveOptions.setTransitive(true)
+        val retrieveOptions = new RetrieveOptions
+        // Turn downloading and logging off for testing
+        if (isTest) {
+          resolveOptions.setDownload(false)
+          resolveOptions.setLog(LogOptions.LOG_QUIET)
+          retrieveOptions.setLog(LogOptions.LOG_QUIET)
         } else {
-          ivySettings.setDefaultIvyUserDir(new File(alternateIvyCache))
-          ivySettings.setDefaultCache(new File(alternateIvyCache, "cache"))
-          new File(alternateIvyCache, "jars")
+          resolveOptions.setDownload(true)
         }
-      printStream.println(
-        s"Ivy Default Cache set to: ${ivySettings.getDefaultCache.getAbsolutePath}")
-      printStream.println(s"The jars for the packages stored in: $packagesDirectory")
-      // create a pattern matcher
-      ivySettings.addMatcher(new GlobPatternMatcher)
-      // create the dependency resolvers
-      val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
-      ivySettings.addResolver(repoResolver)
-      ivySettings.setDefaultResolver(repoResolver.getName)
-
-      val ivy = Ivy.newInstance(ivySettings)
-      // Set resolve options to download transitive dependencies as well
-      val resolveOptions = new ResolveOptions
-      resolveOptions.setTransitive(true)
-      val retrieveOptions = new RetrieveOptions
-      // Turn downloading and logging off for testing
-      if (isTest) {
-        resolveOptions.setDownload(false)
-        resolveOptions.setLog(LogOptions.LOG_QUIET)
-        retrieveOptions.setLog(LogOptions.LOG_QUIET)
-      } else {
-        resolveOptions.setDownload(true)
-      }
 
-      // A Module descriptor must be specified. Entries are dummy strings
-      val md = getModuleDescriptor
-      md.setDefaultConf(ivyConfName)
+        // A Module descriptor must be specified. Entries are dummy strings
+        val md = getModuleDescriptor
+        md.setDefaultConf(ivyConfName)
 
-      // Add exclusion rules for Spark and Scala Library
-      addExclusionRules(ivySettings, ivyConfName, md)
-      // add all supplied maven artifacts as dependencies
-      addDependenciesToIvy(md, artifacts, ivyConfName)
+        // Add exclusion rules for Spark and Scala Library
+        addExclusionRules(ivySettings, ivyConfName, md)
+        // add all supplied maven artifacts as dependencies
+        addDependenciesToIvy(md, artifacts, ivyConfName)
 
-      // resolve dependencies
-      val rr: ResolveReport = ivy.resolve(md, resolveOptions)
-      if (rr.hasError) {
-        throw new RuntimeException(rr.getAllProblemMessages.toString)
+        // resolve dependencies
+        val rr: ResolveReport = ivy.resolve(md, resolveOptions)
+        if (rr.hasError) {
+          throw new RuntimeException(rr.getAllProblemMessages.toString)
+        }
+        // retrieve all resolved dependencies
+        ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
+          packagesDirectory.getAbsolutePath + File.separator +
+          "[organization]_[artifact]-[revision].[ext]",
+          retrieveOptions.setConfs(Array(ivyConfName)))
+        resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
+      } finally {
+        System.setOut(sysOut)
       }
-      // retrieve all resolved dependencies
-      ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
-        packagesDirectory.getAbsolutePath + File.separator +
-        "[organization]_[artifact]-[revision].[ext]",
-        retrieveOptions.setConfs(Array(ivyConfName)))
-      System.setOut(sysOut)
-      resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
     }
   }
 }
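
The main change in this hunk is wrapping the Ivy resolution in try/finally so that System.out is restored even when resolution fails. Below is a generic, stand-alone sketch of that pattern; CaptureStdOut and withCapturedStdOut are illustrative helpers, not part of the patch.

import java.io.{ByteArrayOutputStream, PrintStream}

object CaptureStdOut {
  // Redirects System.out to `sink` while `body` runs and always restores it,
  // even if `body` throws -- the same guarantee the patch adds around Ivy.
  def withCapturedStdOut[T](sink: PrintStream)(body: => T): T = {
    val saved = System.out
    try {
      System.setOut(sink)
      body
    } finally {
      System.setOut(saved)
    }
  }

  def main(args: Array[String]): Unit = {
    val buffer = new ByteArrayOutputStream()
    val result = withCapturedStdOut(new PrintStream(buffer)) {
      // Java-style logging (as Ivy does) goes through System.out and is captured.
      System.out.println("noisy library output")
      21 * 2
    }
    println(s"result = $result, captured = '${buffer.toString.trim}'")
  }
}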
