package org.apache.spark.mllib.regression

import org.apache.spark.mllib.linalg.Vector
- import org.apache.spark.mllib.regression.MonotonicityConstraint.MonotonicityConstraint.{Isotonic, MonotonicityConstraint}
+ import org.apache.spark.mllib.regression.MonotonicityConstraint.MonotonicityConstraint._
import org.apache.spark.rdd.RDD

/**
@@ -31,7 +31,9 @@ object MonotonicityConstraint {
object MonotonicityConstraint {

  sealed trait MonotonicityConstraint {
- private[regression] def holds(current: WeightedLabeledPoint, next: WeightedLabeledPoint): Boolean
+ private[regression] def holds(
+     current: WeightedLabeledPoint,
+     next: WeightedLabeledPoint): Boolean
  }
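The concrete constraints pulled in by the import at the top of this diff (Isotonic, and by the later doc wording an Antitonic counterpart) are defined elsewhere in the file and are not shown in this hunk. As a sketch only, assuming the constraint is simply an ordering on labels, an isotonic implementation of holds might look like:

  // Sketch, not part of this patch: isotonic means labels are non-decreasing.
  case object Isotonic extends MonotonicityConstraint {
    private[regression] override def holds(
        current: WeightedLabeledPoint,
        next: WeightedLabeledPoint): Boolean = {
      current.label <= next.label
    }
  }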

  /**
@@ -72,7 +74,7 @@ class IsotonicRegressionModel(
    testData.map(predict)

  override def predict(testData: Vector): Double = {
- // take the highest of data points smaller than our feature or data point with lowest feature
+ // Take the highest of data points smaller than our feature or data point with lowest feature
    (predictions.head +:
      predictions.filter(y => y.features.toArray.head <= testData.toArray.head)).last.label
  }
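Assuming predictions is sorted by feature, predict returns the label of the last prediction point whose feature does not exceed the query, and the prepended predictions.head is the fallback when the query lies below every prediction point. For example, with prediction points at features 1.0, 2.0 and 3.0, a query of 2.5 returns the label of the point at 2.0.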
@@ -87,7 +89,8 @@ trait IsotonicRegressionAlgorithm
  /**
   * Creates isotonic regression model with given parameters
   *
- * @param predictions labels estimated using isotonic regression algorithm. Used for predictions on new data points.
+ * @param predictions labels estimated using isotonic regression algorithm.
+ *                    Used for predictions on new data points.
   * @param monotonicityConstraint isotonic or antitonic
   * @return isotonic regression model
   */
@@ -142,7 +145,7 @@ class PoolAdjacentViolators private[mllib]
      in: Array[WeightedLabeledPoint],
      monotonicityConstraint: MonotonicityConstraint): Array[WeightedLabeledPoint] = {

- // Pools sub array within given bounds assigning weighted average value to all elements
+ // Pools sub array within given bounds assigning weighted average value to all elements
    def pool(in: Array[WeightedLabeledPoint], start: Int, end: Int): Unit = {
      val poolSubArray = in.slice(start, end + 1)
@@ -159,17 +162,17 @@ class PoolAdjacentViolators private[mllib]
    while (i < in.length) {
      var j = i

- // find monotonicity violating sequence, if any
+ // Find monotonicity violating sequence, if any
      while (j < in.length - 1 && !monotonicityConstraint.holds(in(j), in(j + 1))) {
        j = j + 1
      }

- // if monotonicity was not violated, move to next data point
+ // If monotonicity was not violated, move to next data point
      if (i == j) {
        i = i + 1
      } else {
- // otherwise pool the violating sequence
- // and check if pooling caused monotonicity violation in previously processed points
+ // Otherwise pool the violating sequence
+ // And check if pooling caused monotonicity violation in previously processed points
        while (i >= 0 && !monotonicityConstraint.holds(in(i), in(i + 1))) {
          pool(in, i, j)
          i = i - 1
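For context only (none of the following is part of the patch): the loop above, together with the pool helper in the previous hunk, is the core of the pool adjacent violators algorithm. A minimal standalone sketch of the same idea on plain (label, weight) pairs, assuming an isotonic (non-decreasing) constraint, could read:

  // Hypothetical standalone helper, not part of this patch.
  def poolAdjacentViolators(points: Array[(Double, Double)]): Array[(Double, Double)] = {
    val in = points.clone()

    // Assign the weighted average of in(start..end) to every element in that range.
    def pool(start: Int, end: Int): Unit = {
      val slice = in.slice(start, end + 1)
      val weightedSum = slice.map { case (label, weight) => label * weight }.sum
      val weightSum = slice.map(_._2).sum
      for (k <- start to end) {
        in(k) = (weightedSum / weightSum, in(k)._2)
      }
    }

    var i = 0
    while (i < in.length) {
      var j = i
      // Find a run of consecutive points violating the non-decreasing constraint.
      while (j < in.length - 1 && in(j)._1 > in(j + 1)._1) {
        j = j + 1
      }
      if (i == j) {
        // No violation starting at i, move to the next point.
        i = i + 1
      } else {
        // Pool the violating run, then keep pooling backwards while the new
        // average violates the constraint with earlier points.
        while (i >= 0 && in(i)._1 > in(i + 1)._1) {
          pool(i, j)
          i = i - 1
        }
        i = j
      }
    }
    in
  }

On labels (3, 1, 2) with unit weights this pools the first two points and returns (2, 2, 2).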
@@ -214,10 +217,11 @@ object IsotonicRegression {
   * Label is the dependent y value
   * Weight of the data point is the number of measurements. Default is 1
   *
- * @param input RDD of (label, array of features, weight). Each point describes a row of the data
- *              matrix A as well as the corresponding right hand side label y
- *              and weight as number of measurements
- * @param monotonicityConstraint
+ * @param input RDD of (label, array of features, weight).
+ *              Each point describes a row of the data
+ *              matrix A as well as the corresponding right hand side label y
+ *              and weight as number of measurements
+ * @param monotonicityConstraint Isotonic (increasing) or Antitonic (decreasing) sequence
   */
  def train(
      input: RDD[WeightedLabeledPoint],
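Based only on the docstring and parameter list above, a hypothetical call site could look like the following sketch; the WeightedLabeledPoint argument order (label, features, weight), the second train parameter, and the existing SparkContext sc are assumptions, not part of the patch:

  import org.apache.spark.mllib.linalg.Vectors

  // Assumed constructor order (label, features, weight), taken from the @param doc above.
  val data = sc.parallelize(Seq(
    WeightedLabeledPoint(1.0, Vectors.dense(1.0), 1.0),
    WeightedLabeledPoint(3.0, Vectors.dense(2.0), 1.0),
    WeightedLabeledPoint(2.0, Vectors.dense(3.0), 1.0)))

  val model = IsotonicRegression.train(data, Isotonic)
  val prediction = model.predict(Vectors.dense(2.5))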