@@ -87,7 +87,7 @@ object SparkBuild extends Build {
     case Some(v) => v.toBoolean
   }
   lazy val hadoopClient = if (hadoopVersion.startsWith("0.20.") || hadoopVersion == "1.0.0") "hadoop-core" else "hadoop-client"
-
+  val isAvroNeeded = hadoopVersion.startsWith("0.23.") && isYarnEnabled
   // Conditionally include the yarn sub-project
   lazy val yarnAlpha = Project("yarn-alpha", file("yarn/alpha"), settings = yarnAlphaSettings) dependsOn(core)
   lazy val yarn = Project("yarn", file("yarn/stable"), settings = yarnSettings) dependsOn(core)
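
Note: hadoopVersion and isYarnEnabled are defined earlier in SparkBuild.scala and are not shown in this hunk. A minimal, self-contained sketch of the same flag-derivation pattern follows; the environment-variable names and defaults in it are assumptions for illustration, not the project's actual values.

import scala.util.Properties

object BuildFlagsSketch {
  // Assumed inputs; SparkBuild.scala has its own definitions for these.
  val hadoopVersion = Properties.envOrElse("SPARK_HADOOP_VERSION", "1.0.4")
  val isYarnEnabled = Properties.envOrNone("SPARK_YARN") match {
    case None    => false          // no YARN unless explicitly requested
    case Some(v) => v.toBoolean    // "true" / "false" from the environment
  }
  // Avro only needs to be pulled in for YARN builds on the Hadoop 0.23.x line.
  val isAvroNeeded = hadoopVersion.startsWith("0.23.") && isYarnEnabled
}
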
@@ -130,6 +130,8 @@ object SparkBuild extends Build {
     javacOptions := Seq("-target", JAVAC_JVM_VERSION, "-source", JAVAC_JVM_VERSION),
     unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
     retrieveManaged := true,
+    // This is to add convenience of enabling sbt -Dsbt.offline=true for making the build offline.
+    offline := "true".equalsIgnoreCase(sys.props("sbt.offline")),
     retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
     transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
     testListeners <<= target.map(t => Seq(new eu.henkelmann.sbt.JUnitXmlTestsListener(t.getAbsolutePath))),
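
The property is passed on the command line, e.g. sbt -Dsbt.offline=true, and sbt's offline setting then tells the build to avoid network access during dependency resolution where possible. An equivalent, Option-based way to read the flag is sketched below; it is not part of this commit, only a clarification of what the one-liner does.

import sbt._
import Keys._

object OfflineFlagSketch {
  // Reads -Dsbt.offline=true via Option instead of relying on the null
  // that sys.props returns for an unset key, which the committed
  // "true".equalsIgnoreCase(...) form depends on.
  val settings = Seq(
    offline := sys.props.get("sbt.offline").exists(_.equalsIgnoreCase("true"))
  )
}
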
@@ -254,39 +256,38 @@ object SparkBuild extends Build {
     ),

     libraryDependencies ++= Seq(
-      "com.google.guava" % "guava" % "14.0.1",
-      "com.google.code.findbugs" % "jsr305" % "1.3.9",
-      "log4j" % "log4j" % "1.2.17",
-      "org.slf4j" % "slf4j-api" % slf4jVersion,
-      "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
-      "org.slf4j" % "jul-to-slf4j" % slf4jVersion,
-      "org.slf4j" % "jcl-over-slf4j" % slf4jVersion,
-      "commons-daemon" % "commons-daemon" % "1.0.10", // workaround for bug HADOOP-9407
-      "com.ning" % "compress-lzf" % "1.0.0",
-      "org.xerial.snappy" % "snappy-java" % "1.0.5",
-      "org.ow2.asm" % "asm" % "4.0",
-      "org.spark-project.akka" %% "akka-remote" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty),
-      "org.spark-project.akka" %% "akka-slf4j" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty),
-      "org.spark-project.akka" %% "akka-testkit" % "2.2.3-shaded-protobuf" % "test",
-      "org.json4s" %% "json4s-jackson" % "3.2.6",
-      "it.unimi.dsi" % "fastutil" % "6.4.4",
-      "colt" % "colt" % "1.2.0",
-      "org.apache.mesos" % "mesos" % "0.13.0",
-      "net.java.dev.jets3t" % "jets3t" % "0.7.1" excludeAll(excludeCommonsLogging),
-      "org.apache.derby" % "derby" % "10.4.2.0" % "test",
-      "org.apache.hadoop" % hadoopClient % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm, excludeCglib, excludeCommonsLogging, excludeSLF4J),
-      "org.apache.avro" % "avro" % "1.7.4",
-      "org.apache.avro" % "avro-ipc" % "1.7.4" excludeAll(excludeNetty),
-      "org.apache.curator" % "curator-recipes" % "2.4.0" excludeAll(excludeNetty),
-      "com.codahale.metrics" % "metrics-core" % "3.0.0",
-      "com.codahale.metrics" % "metrics-jvm" % "3.0.0",
-      "com.codahale.metrics" % "metrics-json" % "3.0.0",
-      "com.codahale.metrics" % "metrics-ganglia" % "3.0.0",
-      "com.codahale.metrics" % "metrics-graphite" % "3.0.0",
-      "com.twitter" %% "chill" % "0.3.1",
-      "com.twitter" % "chill-java" % "0.3.1",
-      "com.clearspring.analytics" % "stream" % "2.5.1"
-    )
+      "com.google.guava" % "guava" % "14.0.1",
+      "com.google.code.findbugs" % "jsr305" % "1.3.9",
+      "log4j" % "log4j" % "1.2.17",
+      "org.slf4j" % "slf4j-api" % slf4jVersion,
+      "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
+      "org.slf4j" % "jul-to-slf4j" % slf4jVersion,
+      "org.slf4j" % "jcl-over-slf4j" % slf4jVersion,
+      "commons-daemon" % "commons-daemon" % "1.0.10", // workaround for bug HADOOP-9407
+      "com.ning" % "compress-lzf" % "1.0.0",
+      "org.xerial.snappy" % "snappy-java" % "1.0.5",
+      "org.ow2.asm" % "asm" % "4.0",
+      "org.spark-project.akka" %% "akka-remote" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty),
+      "org.spark-project.akka" %% "akka-slf4j" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty),
+      "org.spark-project.akka" %% "akka-testkit" % "2.2.3-shaded-protobuf" % "test",
+      "org.json4s" %% "json4s-jackson" % "3.2.6",
+      "it.unimi.dsi" % "fastutil" % "6.4.4",
+      "colt" % "colt" % "1.2.0",
+      "org.apache.mesos" % "mesos" % "0.13.0",
+      "net.java.dev.jets3t" % "jets3t" % "0.7.1" excludeAll(excludeCommonsLogging),
+      "org.apache.derby" % "derby" % "10.4.2.0" % "test",
+      "org.apache.hadoop" % hadoopClient % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm, excludeCglib, excludeCommonsLogging, excludeSLF4J),
+      "org.apache.curator" % "curator-recipes" % "2.4.0" excludeAll(excludeNetty),
+      "com.codahale.metrics" % "metrics-core" % "3.0.0",
+      "com.codahale.metrics" % "metrics-jvm" % "3.0.0",
+      "com.codahale.metrics" % "metrics-json" % "3.0.0",
+      "com.codahale.metrics" % "metrics-ganglia" % "3.0.0",
+      "com.codahale.metrics" % "metrics-graphite" % "3.0.0",
+      "com.twitter" %% "chill" % "0.3.1",
+      "com.twitter" % "chill-java" % "0.3.1",
+      "com.clearspring.analytics" % "stream" % "2.5.1"
+    ) ++ (if (isAvroNeeded) Seq(
+      "org.apache.avro" % "avro" % "1.7.4") else Seq())
   )

   def rootSettings = sharedSettings ++ Seq(
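
In this hunk, avro and avro-ipc are removed from the shared dependency list and avro is re-added only when isAvroNeeded is true. The Seq(...) ++ (if (cond) Seq(...) else Seq()) shape is a general sbt idiom for conditional dependencies; a small standalone sketch with placeholder names and versions is shown below.

import sbt._
import Keys._

object ConditionalDepsSketch {
  // Placeholder flag; the real build computes isAvroNeeded from the
  // Hadoop version and the YARN switch, as in the first hunk above.
  val needsAvro = sys.env.get("SPARK_HADOOP_VERSION").exists(_.startsWith("0.23."))

  val settings = Seq(
    libraryDependencies ++= Seq(
      "org.slf4j" % "slf4j-api" % "1.7.5"      // always included
    ) ++ (if (needsAvro) Seq(
      "org.apache.avro" % "avro" % "1.7.4"     // appended only when the flag is set
    ) else Seq())
  )
}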