Skip to content

Commit 4bd93eb

Browse files
rxin authored and jeanlyn committed
[SPARK-7940] Enforce whitespace checking for DO, TRY, CATCH, FINALLY, MATCH, LARROW, RARROW in style checker.
… Author: Reynold Xin <[email protected]> Closes apache#6491 from rxin/more-whitespace and squashes the following commits: f6e63dc [Reynold Xin] [SPARK-7940] Enforce whitespace checking for DO, TRY, CATCH, FINALLY, MATCH, LARROW, RARROW in style checker.
1 parent adf8bc2 commit 4bd93eb

File tree

9 files changed

+13
-15
lines changed

9 files changed

+13
-15
lines changed

core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -155,7 +155,7 @@ private[nio] class BlockMessage() {
155155

156156
override def toString: String = {
157157
"BlockMessage [type = " + typ + ", id = " + id + ", level = " + level +
158-
", data = " + (if (data != null) data.remaining.toString else "null") + "]"
158+
", data = " + (if (data != null) data.remaining.toString else "null") + "]"
159159
}
160160
}
161161

core/src/main/scala/org/apache/spark/network/nio/Connection.scala

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -326,15 +326,14 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
326326

327327
// MUST be called within the selector loop
328328
def connect() {
329-
try{
329+
try {
330330
channel.register(selector, SelectionKey.OP_CONNECT)
331331
channel.connect(address)
332332
logInfo("Initiating connection to [" + address + "]")
333333
} catch {
334-
case e: Exception => {
334+
case e: Exception =>
335335
logError("Error connecting to " + address, e)
336336
callOnExceptionCallbacks(e)
337-
}
338337
}
339338
}
340339

core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -635,12 +635,11 @@ private[nio] class ConnectionManager(
635635
val message = securityMsgResp.toBufferMessage
636636
if (message == null) throw new IOException("Error creating security message")
637637
sendSecurityMessage(waitingConn.getRemoteConnectionManagerId(), message)
638-
} catch {
639-
case e: Exception => {
638+
} catch {
639+
case e: Exception =>
640640
logError("Error handling sasl client authentication", e)
641641
waitingConn.close()
642642
throw new IOException("Error evaluating sasl response: ", e)
643-
}
644643
}
645644
}
646645
}

core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -86,7 +86,7 @@ class PartitionerAwareUnionRDD[T: ClassTag](
8686
}
8787
val location = if (locations.isEmpty) {
8888
None
89-
} else {
89+
} else {
9090
// Find the location that maximum number of parent partitions prefer
9191
Some(locations.groupBy(x => x).maxBy(_._2.length)._1)
9292
}

mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -83,7 +83,7 @@ class Node (
8383
def predict(features: Vector) : Double = {
8484
if (isLeaf) {
8585
predict.predict
86-
} else{
86+
} else {
8787
if (split.get.featureType == Continuous) {
8888
if (features(split.get.feature) <= split.get.threshold) {
8989
leftNode.get.predict(features)

mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -119,7 +119,7 @@ object LogisticRegressionSuite {
119119
}
120120
// Preventing the overflow when we compute the probability
121121
val maxMargin = margins.max
122-
if (maxMargin > 0) for (i <-0 until nClasses) margins(i) -= maxMargin
122+
if (maxMargin > 0) for (i <- 0 until nClasses) margins(i) -= maxMargin
123123

124124
// Computing the probabilities for each class from the margins.
125125
val norm = {
@@ -130,7 +130,7 @@ object LogisticRegressionSuite {
130130
}
131131
temp
132132
}
133-
for (i <-0 until nClasses) probs(i) /= norm
133+
for (i <- 0 until nClasses) probs(i) /= norm
134134

135135
// Compute the cumulative probability so we can generate a random number and assign a label.
136136
for (i <- 1 until nClasses) probs(i) += probs(i - 1)

scalastyle-config.xml

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -144,12 +144,12 @@
144144
<check level="error" class="org.scalastyle.scalariform.SpaceAfterCommentStartChecker" enabled="true"></check>
145145
<check level="error" class="org.scalastyle.scalariform.EnsureSingleSpaceBeforeTokenChecker" enabled="true">
146146
<parameters>
147-
<parameter name="tokens">ARROW, EQUALS</parameter>
147+
<parameter name="tokens">ARROW, EQUALS, ELSE, TRY, CATCH, FINALLY, LARROW, RARROW</parameter>
148148
</parameters>
149149
</check>
150150
<check level="error" class="org.scalastyle.scalariform.EnsureSingleSpaceAfterTokenChecker" enabled="true">
151151
<parameters>
152-
<parameter name="tokens">ARROW, EQUALS, COMMA, COLON, IF, WHILE, FOR</parameter>
152+
<parameter name="tokens">ARROW, EQUALS, COMMA, COLON, IF, ELSE, DO, WHILE, FOR, MATCH, TRY, CATCH, FINALLY, LARROW, RARROW</parameter>
153153
</parameters>
154154
</check>
155155
<check level="error" class="org.scalastyle.scalariform.NotImplementedErrorUsage" enabled="true"></check>

sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -203,7 +203,7 @@ object UTF8String {
203203
def apply(s: String): UTF8String = {
204204
if (s != null) {
205205
new UTF8String().set(s)
206-
} else{
206+
} else {
207207
null
208208
}
209209
}

sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -262,7 +262,7 @@ private[sql] class JDBCRDD(
262262
}
263263

264264
private def escapeSql(value: String): String =
265-
if (value == null) null else StringUtils.replace(value, "'", "''")
265+
if (value == null) null else StringUtils.replace(value, "'", "''")
266266

267267
/**
268268
* Turns a single Filter into a String representing a SQL expression.

0 commit comments

Comments (0)