Commit da56200

Commit message: Comments.
1 parent e38f264 commit da56200

2 files changed: +7, -7 lines

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ abstract class Expression extends TreeNode[Expression] {
   def foldable: Boolean = false
 
   /**
-   * Returns true when an expressions always return the same result for a specific set of
+   * Returns true when the current expression always return the same result for a specific set of
    * input values.
    */
   // TODO: Need to well define what are explicit input values and implicit input values.
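For context, the doc comment edited above describes a determinism flag on Expression: an expression that can yield different results for the same input (such as Rand) must not report itself as deterministic, and the test changes below rely on exactly that. A minimal standalone sketch of the idea in Scala follows; the trait and class names are illustrative only and are not Spark's actual implementation.

import scala.util.Random

// Illustrative toy expression hierarchy mirroring the flag documented above.
trait Expr {
  // True when the expression always returns the same result for a given set of input values.
  def deterministic: Boolean = true
  def eval(input: Map[String, Any]): Any
}

// A plain column reference: same input row, same result.
case class ColumnRef(name: String) extends Expr {
  def eval(input: Map[String, Any]): Any = input(name)
}

// A Rand-like expression returns a different value on every evaluation,
// so it must report itself as non-deterministic.
case class RandExpr(seed: Long) extends Expr {
  private val rng = new Random(seed)
  override def deterministic: Boolean = false
  def eval(input: Map[String, Any]): Any = rng.nextDouble()
}

An optimizer can consult such a flag before merging adjacent projections, which is the behavior the test changes below exercise.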

sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala

Lines changed: 6 additions & 6 deletions
@@ -447,7 +447,7 @@ class ColumnExpressionSuite extends QueryTest {
   }
 
   test("rand") {
-    val randCol = testData.select('key, rand(5L).as("rand"))
+    val randCol = testData.select($"key", rand(5L).as("rand"))
     randCol.columns.length should be (2)
     val rows = randCol.collect()
     rows.foreach { row =>
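Besides the rand(5L) adjustment, the diff replaces the Scala Symbol column syntax ('key) with the $ string interpolator ($"key"). Both forms resolve to a Column through the SQL implicits in scope. A small sketch follows, using the newer SparkSession entry point purely so the example is self-contained; the suite itself uses its own test context, and the names spark and df below are assumptions, not taken from the suite.

import org.apache.spark.sql.SparkSession

object ColumnSyntaxExample extends App {
  // Sketch only: the two column-reference syntaxes that the diff above swaps.
  val spark = SparkSession.builder().master("local[1]").appName("column-syntax").getOrCreate()
  import spark.implicits._

  val df = Seq((1, "a"), (2, "b")).toDF("key", "value")

  val bySymbol = df.select('key)    // Scala Symbol syntax (the old form)
  val byDollar = df.select($"key")  // $-interpolator syntax (the new form)
}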
@@ -464,15 +464,15 @@ class ColumnExpressionSuite extends QueryTest {
 
     // We first create a plan with two Projects.
     // Project [rand + 1 AS rand1, rand - 1 AS rand2]
-    //   Project [key, Rand 5 AS rand]
+    //   Project [key, (Rand 5 + 1) AS rand]
     //     LogicalRDD [key, value]
     // Because Rand function is not deterministic, the column rand is not deterministic.
     // So, in the optimizer, we will not collapse Project [rand + 1 AS rand1, rand - 1 AS rand2]
     // and Project [key, Rand 5 AS rand]. The final plan still has two Projects.
     val dfWithTwoProjects =
       testData
-        .select('key, rand(5L).as("rand"))
-        .select(('rand + 1).as("rand1"), ('rand - 1).as("rand2"))
+        .select($"key", (rand(5L) + 1).as("rand"))
+        .select(($"rand" + 1).as("rand1"), ($"rand" - 1).as("rand2"))
     checkNumProjects(dfWithTwoProjects, 2)
 
     // Now, we add one more project rand1 - rand2 on top of the query plan.
@@ -481,13 +481,13 @@ class ColumnExpressionSuite extends QueryTest {
     // So, the plan will be optimized from ...
     // Project [(rand1 - rand2) AS (rand1 - rand2)]
     //   Project [rand + 1 AS rand1, rand - 1 AS rand2]
-    //     Project [key, Rand 5 AS rand]
+    //     Project [key, (Rand 5 + 1) AS rand]
     //       LogicalRDD [key, value]
     // to ...
     // Project [((rand + 1 AS rand1) - (rand - 1 AS rand2)) AS (rand1 - rand2)]
     //   Project [key, Rand 5 AS rand]
     //     LogicalRDD [key, value]
-    val dfWithThreeProjects = dfWithTwoProjects.select('rand1 - 'rand2)
+    val dfWithThreeProjects = dfWithTwoProjects.select($"rand1" - $"rand2")
     checkNumProjects(dfWithThreeProjects, 2)
     dfWithThreeProjects.collect().foreach { row =>
       assert(row.getDouble(0) === 2.0 +- 0.0001)
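The expected Project counts above are asserted through a checkNumProjects helper that is not part of this diff. A hedged sketch of how such a check could be written against Spark's query-execution API follows; the real helper in ColumnExpressionSuite may differ, so treat this as an assumption rather than the suite's code.

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.catalyst.plans.logical.Project

// Sketch: count Project nodes in the optimized logical plan and compare
// against the expected number. Intended to live inside the test suite class.
private def checkNumProjects(df: DataFrame, expectedNumProjects: Int): Unit = {
  val projects = df.queryExecution.optimizedPlan.collect {
    case p: Project => p
  }
  assert(projects.length == expectedNumProjects)
}

Because Rand reports itself as non-deterministic, the optimizer will not merge a Project that depends on the rand column with the Project that produces it, which is why both DataFrames above are asserted to end up with exactly two Projects after optimization.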
