Skip to content

Commit 1b2aab8

Browse files
committed
[SPARK-6765] Fix test code style for SQL
So we can turn style checker on for test code. Author: Reynold Xin <[email protected]> Closes apache#5412 from rxin/test-style-sql and squashes the following commits: 9098a31 [Reynold Xin] One more compilation error ... 8c7250a [Reynold Xin] Fix compilation. 82d0944 [Reynold Xin] Indentation. 0b03fbb [Reynold Xin] code review. f2f4348 [Reynold Xin] oops. ef4ec48 [Reynold Xin] Hive module. 7e0db5e [Reynold Xin] sql module 04ec7ac [Reynold Xin] catalyst module
1 parent 891ada5 commit 1b2aab8

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

45 files changed

+395
-234
lines changed

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/DistributionSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ class DistributionSuite extends FunSuite {
3030
inputPartitioning: Partitioning,
3131
requiredDistribution: Distribution,
3232
satisfied: Boolean) {
33-
if (inputPartitioning.satisfies(requiredDistribution) != satisfied)
33+
if (inputPartitioning.satisfies(requiredDistribution) != satisfied) {
3434
fail(
3535
s"""
3636
|== Input Partitioning ==
@@ -40,6 +40,7 @@ class DistributionSuite extends FunSuite {
4040
|== Does input partitioning satisfy required distribution? ==
4141
|Expected $satisfied got ${inputPartitioning.satisfies(requiredDistribution)}
4242
""".stripMargin)
43+
}
4344
}
4445

4546
test("HashPartitioning is the output partitioning") {

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@ import org.apache.spark.sql.types._
2727
import org.apache.spark.sql.catalyst.dsl.expressions._
2828
import org.apache.spark.sql.catalyst.dsl.plans._
2929

30+
import scala.collection.immutable
31+
3032
class AnalysisSuite extends FunSuite with BeforeAndAfter {
3133
val caseSensitiveCatalog = new SimpleCatalog(true)
3234
val caseInsensitiveCatalog = new SimpleCatalog(false)
@@ -41,10 +43,10 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
4143
}
4244

4345

44-
def caseSensitiveAnalyze(plan: LogicalPlan) =
46+
def caseSensitiveAnalyze(plan: LogicalPlan): Unit =
4547
caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer(plan))
4648

47-
def caseInsensitiveAnalyze(plan: LogicalPlan) =
49+
def caseInsensitiveAnalyze(plan: LogicalPlan): Unit =
4850
caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer(plan))
4951

5052
val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
@@ -147,7 +149,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
147149
name: String,
148150
plan: LogicalPlan,
149151
errorMessages: Seq[String],
150-
caseSensitive: Boolean = true) = {
152+
caseSensitive: Boolean = true): Unit = {
151153
test(name) {
152154
val error = intercept[AnalysisException] {
153155
if(caseSensitive) {
@@ -202,7 +204,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
202204

203205
case class UnresolvedTestPlan() extends LeafNode {
204206
override lazy val resolved = false
205-
override def output = Nil
207+
override def output: Seq[Attribute] = Nil
206208
}
207209

208210
errorTest(

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,9 @@ class HiveTypeCoercionSuite extends PlanTest {
9696
widenTest(StringType, TimestampType, None)
9797

9898
// ComplexType
99-
widenTest(NullType, MapType(IntegerType, StringType, false), Some(MapType(IntegerType, StringType, false)))
99+
widenTest(NullType,
100+
MapType(IntegerType, StringType, false),
101+
Some(MapType(IntegerType, StringType, false)))
100102
widenTest(NullType, StructType(Seq()), Some(StructType(Seq())))
101103
widenTest(StringType, MapType(IntegerType, StringType, true), None)
102104
widenTest(ArrayType(IntegerType), StructType(Seq()), None)
@@ -113,7 +115,9 @@ class HiveTypeCoercionSuite extends PlanTest {
113115
// Remove superflous boolean -> boolean casts.
114116
ruleTest(Cast(Literal(true), BooleanType), Literal(true))
115117
// Stringify boolean when casting to string.
116-
ruleTest(Cast(Literal(false), StringType), If(Literal(false), Literal("true"), Literal("false")))
118+
ruleTest(
119+
Cast(Literal(false), StringType),
120+
If(Literal(false), Literal("true"), Literal("false")))
117121
}
118122

119123
test("coalesce casts") {

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala

Lines changed: 93 additions & 41 deletions
Large diffs are not rendered by default.

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala

Lines changed: 25 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -176,40 +176,39 @@ class ConstantFoldingSuite extends PlanTest {
176176
}
177177

178178
test("Constant folding test: expressions have null literals") {
179-
val originalQuery =
180-
testRelation
181-
.select(
182-
IsNull(Literal(null)) as 'c1,
183-
IsNotNull(Literal(null)) as 'c2,
179+
val originalQuery = testRelation.select(
180+
IsNull(Literal(null)) as 'c1,
181+
IsNotNull(Literal(null)) as 'c2,
184182

185-
GetItem(Literal.create(null, ArrayType(IntegerType)), 1) as 'c3,
186-
GetItem(Literal.create(Seq(1), ArrayType(IntegerType)), Literal.create(null, IntegerType)) as 'c4,
187-
UnresolvedGetField(
188-
Literal.create(null, StructType(Seq(StructField("a", IntegerType, true)))),
189-
"a") as 'c5,
183+
GetItem(Literal.create(null, ArrayType(IntegerType)), 1) as 'c3,
184+
GetItem(
185+
Literal.create(Seq(1), ArrayType(IntegerType)), Literal.create(null, IntegerType)) as 'c4,
186+
UnresolvedGetField(
187+
Literal.create(null, StructType(Seq(StructField("a", IntegerType, true)))),
188+
"a") as 'c5,
190189

191-
UnaryMinus(Literal.create(null, IntegerType)) as 'c6,
192-
Cast(Literal(null), IntegerType) as 'c7,
193-
Not(Literal.create(null, BooleanType)) as 'c8,
190+
UnaryMinus(Literal.create(null, IntegerType)) as 'c6,
191+
Cast(Literal(null), IntegerType) as 'c7,
192+
Not(Literal.create(null, BooleanType)) as 'c8,
194193

195-
Add(Literal.create(null, IntegerType), 1) as 'c9,
196-
Add(1, Literal.create(null, IntegerType)) as 'c10,
194+
Add(Literal.create(null, IntegerType), 1) as 'c9,
195+
Add(1, Literal.create(null, IntegerType)) as 'c10,
197196

198-
EqualTo(Literal.create(null, IntegerType), 1) as 'c11,
199-
EqualTo(1, Literal.create(null, IntegerType)) as 'c12,
197+
EqualTo(Literal.create(null, IntegerType), 1) as 'c11,
198+
EqualTo(1, Literal.create(null, IntegerType)) as 'c12,
200199

201-
Like(Literal.create(null, StringType), "abc") as 'c13,
202-
Like("abc", Literal.create(null, StringType)) as 'c14,
200+
Like(Literal.create(null, StringType), "abc") as 'c13,
201+
Like("abc", Literal.create(null, StringType)) as 'c14,
203202

204-
Upper(Literal.create(null, StringType)) as 'c15,
203+
Upper(Literal.create(null, StringType)) as 'c15,
205204

206-
Substring(Literal.create(null, StringType), 0, 1) as 'c16,
207-
Substring("abc", Literal.create(null, IntegerType), 1) as 'c17,
208-
Substring("abc", 0, Literal.create(null, IntegerType)) as 'c18,
205+
Substring(Literal.create(null, StringType), 0, 1) as 'c16,
206+
Substring("abc", Literal.create(null, IntegerType), 1) as 'c17,
207+
Substring("abc", 0, Literal.create(null, IntegerType)) as 'c18,
209208

210-
Contains(Literal.create(null, StringType), "abc") as 'c19,
211-
Contains("abc", Literal.create(null, StringType)) as 'c20
212-
)
209+
Contains(Literal.create(null, StringType), "abc") as 'c19,
210+
Contains("abc", Literal.create(null, StringType)) as 'c20
211+
)
213212

214213
val optimized = Optimize(originalQuery.analyze)
215214

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -432,7 +432,8 @@ class FilterPushdownSuite extends PlanTest {
432432

433433
val originalQuery = {
434434
z.join(x.join(y))
435-
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) && ("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
435+
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) &&
436+
("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
436437
}
437438

438439
val optimized = Optimize(originalQuery.analyze)

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ class OptimizeInSuite extends PlanTest {
5252
val optimized = Optimize(originalQuery.analyze)
5353
val correctAnswer =
5454
testRelation
55-
.where(InSet(UnresolvedAttribute("a"), HashSet[Any]()+1+2))
55+
.where(InSet(UnresolvedAttribute("a"), HashSet[Any]() + 1 + 2))
5656
.analyze
5757

5858
comparePlans(optimized, correctAnswer)

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,12 +45,13 @@ class PlanTest extends FunSuite {
4545
protected def comparePlans(plan1: LogicalPlan, plan2: LogicalPlan) {
4646
val normalized1 = normalizeExprIds(plan1)
4747
val normalized2 = normalizeExprIds(plan2)
48-
if (normalized1 != normalized2)
48+
if (normalized1 != normalized2) {
4949
fail(
5050
s"""
5151
|== FAIL: Plans do not match ===
5252
|${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\n")}
53-
""".stripMargin)
53+
""".stripMargin)
54+
}
5455
}
5556

5657
/** Fails the test if the two expressions do not match */

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SameResultSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ class SameResultSuite extends FunSuite {
3232
val testRelation = LocalRelation('a.int, 'b.int, 'c.int)
3333
val testRelation2 = LocalRelation('a.int, 'b.int, 'c.int)
3434

35-
def assertSameResult(a: LogicalPlan, b: LogicalPlan, result: Boolean = true) = {
35+
def assertSameResult(a: LogicalPlan, b: LogicalPlan, result: Boolean = true): Unit = {
3636
val aAnalyzed = a.analyze
3737
val bAnalyzed = b.analyze
3838

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,12 +25,12 @@ import org.apache.spark.sql.catalyst.expressions._
2525
import org.apache.spark.sql.types.{StringType, NullType}
2626

2727
case class Dummy(optKey: Option[Expression]) extends Expression {
28-
def children = optKey.toSeq
29-
def nullable = true
30-
def dataType = NullType
28+
def children: Seq[Expression] = optKey.toSeq
29+
def nullable: Boolean = true
30+
def dataType: NullType = NullType
3131
override lazy val resolved = true
3232
type EvaluatedType = Any
33-
def eval(input: Row) = null.asInstanceOf[Any]
33+
def eval(input: Row): Any = null.asInstanceOf[Any]
3434
}
3535

3636
class TreeNodeSuite extends FunSuite {

0 commit comments

Comments
 (0)