
Commit 6332e0f

Author: Jacky Li
Commit message: fix bug
Parent: fcbf0d9

197 files changed (+2598 / -1596 lines)


.gitignore
Lines changed: 6 additions & 2 deletions

@@ -8,16 +8,19 @@
 *.pyc
 .idea/
 .idea_modules/
-sbt/*.jar
+build/*.jar
 .settings
 .cache
+cache
 .generated-mima*
-/build/
 work/
 out/
 .DS_Store
 third_party/libmesos.so
 third_party/libmesos.dylib
+build/apache-maven*
+build/zinc*
+build/scala*
 conf/java-opts
 conf/*.sh
 conf/*.cmd
@@ -55,6 +58,7 @@ dev/create-release/*final
 spark-*-bin-*.tgz
 unit-tests.log
 /lib/
+ec2/lib/
 rat-results.txt
 scalastyle.txt
 scalastyle-output.xml

README.md
Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@ To build Spark and its example programs, run:
 
 (You do not need to do this if you downloaded a pre-built package.)
 More detailed documentation is available from the project site, at
-["Building Spark with Maven"](http://spark.apache.org/docs/latest/building-with-maven.html).
+["Building Spark with Maven"](http://spark.apache.org/docs/latest/building-spark.html).
 
 ## Interactive Scala Shell
 
core/pom.xml
Lines changed: 2 additions & 2 deletions

@@ -352,9 +352,9 @@
           </execution>
         </executions>
         <configuration>
-          <tasks>
+          <target>
             <unzip src="../python/lib/py4j-0.8.2.1-src.zip" dest="../python/build" />
-          </tasks>
+          </target>
         </configuration>
       </plugin>
       <plugin>

core/src/main/resources/org/apache/spark/ui/static/webui.css
Lines changed: 10 additions & 0 deletions

@@ -169,6 +169,16 @@ span.additional-metric-title {
   display: inline-block;
 }
 
+.version {
+  line-height: 30px;
+  vertical-align: bottom;
+  font-size: 12px;
+  padding: 0;
+  margin: 0;
+  font-weight: bold;
+  color: #777;
+}
+
 /* Hide all additional metrics by default. This is done here rather than using JavaScript to
  * avoid slow page loads for stage pages with large numbers (e.g., thousands) of tasks. */
 .scheduler_delay, .deserialization_time, .serialization_time, .getting_result_time {

core/src/main/scala/org/apache/spark/Dependency.scala
Lines changed: 3 additions & 0 deletions

@@ -60,6 +60,9 @@ abstract class NarrowDependency[T](_rdd: RDD[T]) extends Dependency[T] {
  * @param serializer [[org.apache.spark.serializer.Serializer Serializer]] to use. If set to None,
  *   the default serializer, as specified by `spark.serializer` config option, will
  *   be used.
+ * @param keyOrdering key ordering for RDD's shuffles
+ * @param aggregator map/reduce-side aggregator for RDD's shuffle
+ * @param mapSideCombine whether to perform partial aggregation (also known as map-side combine)
  */
 @DeveloperApi
 class ShuffleDependency[K, V, C](
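
For readers new to the shuffle: `mapSideCombine` enables partial aggregation, merging values for the same key within each map partition before any bytes are shuffled, so fewer records cross the network. A minimal sketch of the idea in plain Scala, independent of Spark's actual shuffle machinery (the `combine` helper and the sample data are illustrative only):

object MapSideCombineSketch {
  // Partially aggregate one map partition before "shuffling":
  // each key's values collapse to a single running total locally.
  def combine(partition: Seq[(String, Int)]): Map[String, Int] =
    partition.foldLeft(Map.empty[String, Int]) { case (acc, (k, v)) =>
      acc.updated(k, acc.getOrElse(k, 0) + v)
    }

  def main(args: Array[String]): Unit = {
    val partition = Seq(("a", 1), ("b", 2), ("a", 3))
    // Only one record per key leaves this partition: Map(a -> 4, b -> 2)
    println(combine(partition))
  }
}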

core/src/main/scala/org/apache/spark/MapOutputTracker.scala
Lines changed: 3 additions & 2 deletions

@@ -76,6 +76,8 @@ private[spark] class MapOutputTrackerMasterActor(tracker: MapOutputTrackerMaster
  */
 private[spark] abstract class MapOutputTracker(conf: SparkConf) extends Logging {
   private val timeout = AkkaUtils.askTimeout(conf)
+  private val retryAttempts = AkkaUtils.numRetries(conf)
+  private val retryIntervalMs = AkkaUtils.retryWaitMs(conf)
 
   /** Set to the MapOutputTrackerActor living on the driver. */
   var trackerActor: ActorRef = _
@@ -108,8 +110,7 @@ private[spark] abstract class MapOutputTracker(conf: SparkConf) extends Logging
    */
   protected def askTracker(message: Any): Any = {
     try {
-      val future = trackerActor.ask(message)(timeout)
-      Await.result(future, timeout)
+      AkkaUtils.askWithReply(message, trackerActor, retryAttempts, retryIntervalMs, timeout)
     } catch {
       case e: Exception =>
         logError("Error communicating with MapOutputTracker", e)

core/src/main/scala/org/apache/spark/SecurityManager.scala
Lines changed: 2 additions & 2 deletions

@@ -151,8 +151,8 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging with
 
   private val authOn = sparkConf.getBoolean("spark.authenticate", false)
   // keep spark.ui.acls.enable for backwards compatibility with 1.0
-  private var aclsOn = sparkConf.getOption("spark.acls.enable").getOrElse(
-    sparkConf.get("spark.ui.acls.enable", "false")).toBoolean
+  private var aclsOn =
+    sparkConf.getBoolean("spark.acls.enable", sparkConf.getBoolean("spark.ui.acls.enable", false))
 
   // admin acls should be set before view or modify acls
   private var adminAcls: Set[String] =
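
The rewrite replaces string plumbing with SparkConf's typed accessor: read `spark.acls.enable` as a boolean, and only when it is unset fall back to the legacy `spark.ui.acls.enable`, defaulting to false. A hedged sketch of that nested-fallback pattern over a plain Map standing in for SparkConf:

object ConfFallbackSketch {
  // Read `key` as a boolean; if absent, use `default` (which may itself
  // be the result of reading a legacy key, mirroring the diff's shape).
  def getBoolean(conf: Map[String, String], key: String, default: Boolean): Boolean =
    conf.get(key).map(_.toBoolean).getOrElse(default)

  def main(args: Array[String]): Unit = {
    val conf = Map("spark.ui.acls.enable" -> "true") // legacy key only
    val aclsOn = getBoolean(conf, "spark.acls.enable",
      getBoolean(conf, "spark.ui.acls.enable", default = false))
    println(aclsOn) // true: legacy key honored when the new key is unset
  }
}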

core/src/main/scala/org/apache/spark/SparkContext.scala
Lines changed: 10 additions & 7 deletions

@@ -172,6 +172,9 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   private[spark] def this(master: String, appName: String, sparkHome: String, jars: Seq[String]) =
     this(master, appName, sparkHome, jars, Map(), Map())
 
+  // log out Spark Version in Spark driver log
+  logInfo(s"Running Spark version $SPARK_VERSION")
+
   private[spark] val conf = config.clone()
   conf.validateSettings()
 
@@ -1705,19 +1708,19 @@ object SparkContext extends Logging {
 
   // Implicit conversions to common Writable types, for saveAsSequenceFile
 
-  implicit def intToIntWritable(i: Int) = new IntWritable(i)
+  implicit def intToIntWritable(i: Int): IntWritable = new IntWritable(i)
 
-  implicit def longToLongWritable(l: Long) = new LongWritable(l)
+  implicit def longToLongWritable(l: Long): LongWritable = new LongWritable(l)
 
-  implicit def floatToFloatWritable(f: Float) = new FloatWritable(f)
+  implicit def floatToFloatWritable(f: Float): FloatWritable = new FloatWritable(f)
 
-  implicit def doubleToDoubleWritable(d: Double) = new DoubleWritable(d)
+  implicit def doubleToDoubleWritable(d: Double): DoubleWritable = new DoubleWritable(d)
 
-  implicit def boolToBoolWritable (b: Boolean) = new BooleanWritable(b)
+  implicit def boolToBoolWritable (b: Boolean): BooleanWritable = new BooleanWritable(b)
 
-  implicit def bytesToBytesWritable (aob: Array[Byte]) = new BytesWritable(aob)
+  implicit def bytesToBytesWritable (aob: Array[Byte]): BytesWritable = new BytesWritable(aob)
 
-  implicit def stringToText(s: String) = new Text(s)
+  implicit def stringToText(s: String): Text = new Text(s)
 
   private implicit def arrayToArrayWritable[T <% Writable: ClassTag](arr: Traversable[T])
     : ArrayWritable = {
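
Why the type annotations matter: a Scala 2 implicit def without an explicit result type forces the compiler to infer the signature from the body, which makes implicit resolution more fragile and lets the visible signature drift silently when the body changes. A toy illustration of the before/after pattern (`IntBox` is a made-up stand-in for the Hadoop Writable wrappers the diff touches):

import scala.language.implicitConversions

object ImplicitReturnTypeSketch {
  final case class IntBox(value: Int)

  // Preferred (what the diff moves to): an explicit return type keeps
  // the implicit's signature stable and cheap to resolve.
  implicit def intToIntBox(i: Int): IntBox = IntBox(i)

  // Discouraged (what the diff removes): a result type inferred from
  // the body, e.g.  implicit def intToIntBox(i: Int) = IntBox(i)

  def main(args: Array[String]): Unit = {
    val box: IntBox = 42 // conversion applied implicitly
    println(box)         // IntBox(42)
  }
}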

core/src/main/scala/org/apache/spark/SparkEnv.scala
Lines changed: 1 addition & 1 deletion

@@ -395,7 +395,7 @@ object SparkEnv extends Logging {
     val sparkProperties = (conf.getAll ++ schedulerMode).sorted
 
     // System properties that are not java classpaths
-    val systemProperties = System.getProperties.iterator.toSeq
+    val systemProperties = Utils.getSystemProperties.toSeq
     val otherProperties = systemProperties.filter { case (k, _) =>
       k != "java.class.path" && !k.startsWith("spark.")
     }.sorted
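
Background on this one-liner: `System.getProperties` returns the JVM's live, mutable `Properties` table, and iterating it directly can throw `ConcurrentModificationException` if another thread sets a property mid-iteration. A hedged sketch of a snapshot helper in the spirit of `Utils.getSystemProperties` (an assumption about its behavior, not Spark's actual code):

import scala.collection.JavaConverters._

object SystemPropsSketch {
  // Snapshot the live java.util.Properties into an immutable Scala Map,
  // so later iteration cannot race with concurrent property updates.
  def getSystemProperties: Map[String, String] = {
    val props = System.getProperties
    props.stringPropertyNames().asScala.toSeq
      .flatMap(key => Option(props.getProperty(key)).map(key -> _))
      .toMap
  }

  def main(args: Array[String]): Unit = {
    val nonSpark = getSystemProperties.filter { case (k, _) => !k.startsWith("spark.") }
    println(nonSpark.getOrElse("java.version", "unknown"))
  }
}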

core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala
Lines changed: 1 addition & 1 deletion

@@ -80,7 +80,7 @@ private[spark] object JavaUtils {
         prev match {
           case Some(k) =>
             underlying match {
-              case mm: mutable.Map[a, _] =>
+              case mm: mutable.Map[A, _] =>
                 mm remove k
                 prev = None
               case _ =>
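
The single-character change is a classic Scala gotcha: in a type pattern, a lowercase identifier such as `a` binds a fresh type variable rather than referring to the enclosing type parameter `A`, so the original pattern type-checked but carried no connection to the map's declared key type. A small demonstration (the surrounding class context is simplified; erasure still leaves the type argument unchecked at runtime, hence `@unchecked`):

import scala.collection.mutable

object TypePatternSketch {
  def describe[A](x: Any): String = x match {
    // Lowercase `a` here would introduce a *new* type variable, not
    // reference this method's A; uppercase A refers to the parameter.
    case mm: mutable.Map[A @unchecked, _] => s"mutable Map with ${mm.size} entries"
    case _ => "something else"
  }

  def main(args: Array[String]): Unit = {
    println(describe[String](mutable.Map("k" -> 1))) // mutable Map with 1 entries
    println(describe[String](List(1, 2, 3)))         // something else
  }
}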
