
Commit 2f706f1

Don't use floats
1 parent 542a736 commit 2f706f1

23 files changed: 54 additions, 54 deletions
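Every hunk in this commit makes the same one-line change to the Scaladoc badge markup, dropping the inline float style (hence the commit title). A representative before/after, taken from the hunks below:

    - * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
    + * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>

The experimental classes receive the identical edit on their EXPERIMENTAL API badge.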

core/src/main/scala/org/apache/spark/Aggregator.scala

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ package org.apache.spark
 import org.apache.spark.util.collection.{AppendOnlyMap, ExternalAppendOnlyMap}
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * A set of functions used to aggregate data.
  *
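For context on the class being retagged here: a minimal sketch of the three functions an Aggregator bundles, following the combineByKey convention (the word-count combiners are illustrative, not part of this commit):

    // Aggregator[K, V, C] takes createCombiner: V => C, mergeValue: (C, V) => C,
    // and mergeCombiners: (C, C) => C
    val agg = new Aggregator[String, Int, Int](
      (v: Int) => v,                    // createCombiner: first value seen for a key
      (c: Int, v: Int) => c + v,        // mergeValue: fold a new value into the combiner
      (c1: Int, c2: Int) => c1 + c2)    // mergeCombiners: merge across partitions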

core/src/main/scala/org/apache/spark/Dependency.scala

Lines changed: 5 additions & 5 deletions
@@ -21,15 +21,15 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.serializer.Serializer
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Base class for dependencies.
  */
 abstract class Dependency[T](val rdd: RDD[T]) extends Serializable
 
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Base class for dependencies where each partition of the parent RDD is used by at most one
  * partition of the child RDD. Narrow dependencies allow for pipelined execution.
@@ -45,7 +45,7 @@ abstract class NarrowDependency[T](rdd: RDD[T]) extends Dependency(rdd) {
 
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Represents a dependency on the output of a shuffle stage.
  * @param rdd the parent RDD
@@ -65,7 +65,7 @@ class ShuffleDependency[K, V](
 
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Represents a one-to-one dependency between partitions of the parent and child RDDs.
  */
@@ -75,7 +75,7 @@ class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
 
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Represents a one-to-one dependency between ranges of partitions in the parent and child RDDs.
  * @param rdd the parent RDD
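To make the narrow-dependency contract above concrete: a sketch of the one-to-one case (the class signature appears in the hunk headers; the getParents body is recalled from the era's source and should be read as illustrative):

    // One-to-one: child partition i depends only on parent partition i,
    // so stages can pipeline without a shuffle.
    class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
      override def getParents(partitionId: Int): Seq[Int] = List(partitionId)
    }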

core/src/main/scala/org/apache/spark/FutureAction.scala

Lines changed: 2 additions & 2 deletions
@@ -25,7 +25,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}
 
 /**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
+ * <span class="badge badge-red">EXPERIMENTAL API</span>
  *
  * A future for the result of an action to support cancellation. This is an extension of the
  * Scala Future interface to support cancellation.
@@ -150,7 +150,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:
 
 
 /**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
+ * <span class="badge badge-red">EXPERIMENTAL API</span>
  *
  * A [[FutureAction]] for actions that could trigger multiple Spark jobs. Examples include take,
  * takeSample. Cancellation works by setting the cancelled flag to true and interrupting the

core/src/main/scala/org/apache/spark/TaskContext.scala

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.spark.executor.TaskMetrics
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Contextual information about a task which can be read or mutated during execution.
  */

core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala

Lines changed: 3 additions & 3 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.executor
 import org.apache.spark.storage.{BlockId, BlockStatus}
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Metrics tracked during the execution of a task.
  */
@@ -88,7 +88,7 @@ object TaskMetrics {
 
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Metrics pertaining to shuffle data read in a given task.
  */
@@ -127,7 +127,7 @@ class ShuffleReadMetrics extends Serializable {
 }
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Metrics pertaining to shuffle data written in a given task.
  */

core/src/main/scala/org/apache/spark/io/CompressionCodec.scala

Lines changed: 3 additions & 3 deletions
@@ -25,7 +25,7 @@ import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}
 import org.apache.spark.SparkConf
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * CompressionCodec allows the customization of choosing different compression implementations
  * to be used in block storage.
@@ -58,7 +58,7 @@ private[spark] object CompressionCodec {
 
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * LZF implementation of [[org.apache.spark.io.CompressionCodec]].
 *
@@ -77,7 +77,7 @@ class LZFCompressionCodec(conf: SparkConf) extends CompressionCodec {
 
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
  * Block size can be configured by spark.io.compression.snappy.block.size.
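A hedged sketch of selecting between these codecs via SparkConf; the spark.io.compression.codec key is assumed from the same-era configuration docs, and only the Snappy block-size key is named in this diff:

    val conf = new SparkConf()
      .set("spark.io.compression.codec", "org.apache.spark.io.SnappyCompressionCodec")
      .set("spark.io.compression.snappy.block.size", "32768")  // bytes per Snappy block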

core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@
 package org.apache.spark.partial
 
 /**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
+ * <span class="badge badge-red">EXPERIMENTAL API</span>
  *
  * A Double value with error bars and associated confidence.
  */

core/src/main/scala/org/apache/spark/partial/PartialResult.scala

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@
 package org.apache.spark.partial
 
 /**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
+ * <span class="badge badge-red">EXPERIMENTAL API</span>
  */
 class PartialResult[R](initialVal: R, isFinal: Boolean) {
   private var finalValue: Option[R] = if (isFinal) Some(initialVal) else None
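These two partial classes surface through the approximate actions; a sketch, with countApprox's signature assumed from the 1.0-era RDD API:

    // Returns immediately; the initial value is a BoundedDouble estimate
    // that tightens as more tasks finish.
    val approx: PartialResult[BoundedDouble] = rdd.countApprox(timeout = 1000L, confidence = 0.95)
    println(approx.initialValue)  // estimate with error bars at 95% confidence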

core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ import scala.reflect.ClassTag
 import org.apache.spark.{ComplexFutureAction, FutureAction, Logging}
 
 /**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
+ * <span class="badge badge-red">EXPERIMENTAL API</span>
  *
  * A set of asynchronous RDD actions available through an implicit conversion.
  * Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
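A minimal sketch of the implicit-conversion usage the doc comment describes (countAsync assumed from the same era's AsyncRDDActions):

    import org.apache.spark.SparkContext._        // brings the implicit conversion into scope
    val f: FutureAction[Long] = rdd.countAsync()  // kicks off the count job asynchronously
    f.cancel()                                    // FutureAction adds cancellation to Scala's Future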

core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala

Lines changed: 1 addition & 1 deletion
@@ -70,7 +70,7 @@ private[spark] class HadoopPartition(rddId: Int, idx: Int, @transient s: InputSp
 }
 
 /**
- * <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
+ * <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
  *
  * An RDD that provides core functionality for reading data stored in Hadoop (e.g., files in HDFS,
  * sources in HBase, or S3), using the older MapReduce API (`org.apache.hadoop.mapred`).
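For orientation, the usual SparkContext entry points that build a HadoopRDD internally (a sketch; the hadoopFile type parameters follow the old org.apache.hadoop.mapred API named in the doc comment):

    import org.apache.hadoop.io.{LongWritable, Text}
    import org.apache.hadoop.mapred.TextInputFormat
    val lines = sc.textFile("hdfs://namenode:8020/logs")  // TextInputFormat under the hood
    val kv = sc.hadoopFile[LongWritable, Text, TextInputFormat]("hdfs://namenode:8020/logs")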
