Commit a0dae07

simplify code
1 parent d8616fd commit a0dae07

1 file changed (+7 −21 lines)

docs/ml-guide.md

Lines changed: 7 additions & 21 deletions
@@ -191,36 +191,22 @@ The following code illustrates how to load a sample dataset and use logistic regression
 
 {% highlight scala %}
 
-import scala.collection.mutable
-import scala.language.reflectiveCalls
-
-import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.ml.{Pipeline, PipelineStage}
-import org.apache.spark.ml.classification.{LogisticRegression, LogisticRegressionModel}
-import org.apache.spark.ml.feature.StringIndexer
+import org.apache.spark.ml.classification.LogisticRegression
 import org.apache.spark.mllib.util.MLUtils
-import org.apache.spark.sql.DataFrame
-
-val regParam = 0.3
-val elasticNetParam = 0.8
-val tol = 1E-6
-val dataPath = "data/mllib/sample_libsvm_data.txt"
-
-println(s"LogisticRegressionExample with regParam $regParam and elasticNetParam $elasticNetParam")
 
 // Load training and test data and cache it.
-val training = MLUtils.loadLibSVMFile(sc, dataPath).toDF()
+val training = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt").toDF()
 
 val lor = new LogisticRegression()
-  .setRegParam(regParam)
-  .setElasticNetParam(elasticNetParam)
-  .setTol(tol)
+  .setRegParam(0.3)
+  .setElasticNetParam(0.8)
+  .setTol(1e-6)
 
 // Fit the model
-val lirModel = lor.fit(training)
+val lorModel = lor.fit(training)
 
 // Print the weights and intercept for logistic regression.
-println(s"Weights: ${lirModel.weights} Intercept: ${lirModel.intercept}")
+println(s"Weights: ${lorModel.weights} Intercept: ${lorModel.intercept}")
 
 {% endhighlight %}
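For reference, this is how the example reads once the patch is applied, assembled directly from the context and "+" lines above. As elsewhere in ml-guide.md, it assumes a spark-shell style environment where `sc` and the DataFrame implicits (needed for `.toDF()`) are already in scope, and a Spark version whose `LogisticRegressionModel` still exposes `weights`.

{% highlight scala %}
import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.mllib.util.MLUtils

// Load training and test data and cache it.
val training = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt").toDF()

val lor = new LogisticRegression()
  .setRegParam(0.3)
  .setElasticNetParam(0.8)
  .setTol(1e-6)

// Fit the model
val lorModel = lor.fit(training)

// Print the weights and intercept for logistic regression.
println(s"Weights: ${lorModel.weights} Intercept: ${lorModel.intercept}")
{% endhighlight %}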
