Skip to content

Commit 72fdeb6

Browse files
committed
remove @since tags from private vars
1 parent e94968a commit 72fdeb6

File tree

2 files changed

+6
-28
lines changed

2 files changed

+6
-28
lines changed

mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,6 @@ class KMeansModel (
9797
KMeansModel.SaveLoadV1_0.save(sc, this, path)
9898
}
9999

100-
/** @since 1.4.0 */
101100
override protected def formatVersion: String = "1.0"
102101
}
103102

mllib/src/main/scala/org/apache/spark/mllib/clustering/StreamingKMeans.scala

Lines changed: 6 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -77,32 +77,20 @@ class StreamingKMeansModel(
7777
*/
7878
def update(data: RDD[Vector], decayFactor: Double, timeUnit: String): StreamingKMeansModel = {
7979

80-
/**
81-
* find nearest cluster to each point
82-
* @since 1.2.0
83-
*/
80+
// find nearest cluster to each point
8481
val closest = data.map(point => (this.predict(point), (point, 1L)))
8582

86-
/**
87-
* get sums and counts for updating each cluster
88-
* @since 1.2.0
89-
*/
83+
// get sums and counts for updating each cluster
9084
val mergeContribs: ((Vector, Long), (Vector, Long)) => (Vector, Long) = (p1, p2) => {
9185
BLAS.axpy(1.0, p2._1, p1._1)
9286
(p1._1, p1._2 + p2._2)
9387
}
9488
val dim = clusterCenters(0).size
9589

96-
/**
97-
* @since 1.2.0
98-
*/
9990
val pointStats: Array[(Int, (Vector, Long))] = closest
10091
.aggregateByKey((Vectors.zeros(dim), 0L))(mergeContribs, mergeContribs)
10192
.collect()
10293

103-
/**
104-
* @since 1.2.0
105-
*/
10694
val discount = timeUnit match {
10795
case StreamingKMeans.BATCHES => decayFactor
10896
case StreamingKMeans.POINTS =>
@@ -112,16 +100,10 @@ class StreamingKMeansModel(
112100
math.pow(decayFactor, numNewPoints)
113101
}
114102

115-
/**
116-
* apply discount to weights
117-
* @since 1.2.0
118-
*/
103+
// apply discount to weights
119104
BLAS.scal(discount, Vectors.dense(clusterWeights))
120105

121-
/**
122-
* implement update rule
123-
* @since 1.2.0
124-
*/
106+
// implement update rule
125107
pointStats.foreach { case (label, (sum, count)) =>
126108
val centroid = clusterCenters(label)
127109

@@ -141,10 +123,7 @@ class StreamingKMeansModel(
141123
logInfo(s"Cluster $label updated with weight $updatedWeight and centroid: $display")
142124
}
143125

144-
/**
145-
* Check whether the smallest cluster is dying. If so, split the largest cluster.
146-
* @since 1.2.0
147-
*/
126+
// Check whether the smallest cluster is dying. If so, split the largest cluster.
148127
val weightsWithIndex = clusterWeights.view.zipWithIndex
149128
val (maxWeight, largest) = weightsWithIndex.maxBy(_._1)
150129
val (minWeight, smallest) = weightsWithIndex.minBy(_._1)
@@ -197,7 +176,7 @@ class StreamingKMeans(
197176

198177
/** @since 1.2.0 */
199178
def this() = this(2, 1.0, StreamingKMeans.BATCHES)
200-
/** @since 1.2.0 */
179+
201180
protected var model: StreamingKMeansModel = new StreamingKMeansModel(null, null)
202181

203182
/**

0 commit comments

Comments (0)