2 files changed: +17 −1 lines changed

main/scala/org/apache/spark/sql:
@@ -747,7 +747,19 @@ class DataFrame private[sql](
    * Returns a new [[DataFrame]] by adding a column.
    * @group dfops
    */
-  def withColumn(colName: String, col: Column): DataFrame = select(Column("*"), col.as(colName))
+  def withColumn(colName: String, col: Column): DataFrame = {
+    val resolver = sqlContext.analyzer.resolver
+    val replaced = schema.exists(f => resolver(f.name, colName))
+    if (replaced) {
+      val colNames = schema.map { field =>
+        val name = field.name
+        if (resolver(name, colName)) col.as(colName) else Column(name)
+      }
+      select(colNames : _*)
+    } else {
+      select(Column("*"), col.as(colName))
+    }
+  }

   /**
    * Returns a new [[DataFrame]] with a column renamed.
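
The change above keeps the schema's column order and swaps in the new expression wherever the analyzer's resolver matches the given name, falling back to the old append-after-"*" behaviour otherwise. A minimal sketch of that replacement logic, modelled purely over column names outside of Spark (the `Resolver` type, the case-sensitive `resolver`, and the `"(new)"` placeholder for the replacement expression are all assumptions of this sketch, not Spark code):

object WithColumnSketch {
  type Resolver = (String, String) => Boolean
  // Case-sensitive name resolution here; Spark's analyzer.resolver may be
  // case-insensitive depending on configuration (an assumption of this sketch).
  val resolver: Resolver = _ == _

  // Mirrors the patched withColumn over names only: replace in place when the
  // name already exists in the schema, otherwise append it at the end.
  def withColumn(schema: Seq[String], colName: String): Seq[String] = {
    val replaced = schema.exists(name => resolver(name, colName))
    if (replaced) {
      schema.map(name => if (resolver(name, colName)) colName + " (new)" else name)
    } else {
      schema :+ colName
    }
  }

  def main(args: Array[String]): Unit = {
    println(withColumn(Seq("key", "value"), "value"))  // List(key, value (new)) — replaced in place
    println(withColumn(Seq("key", "value"), "newCol")) // List(key, value, newCol) — appended
  }
}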
test/scala/org/apache/spark/sql:
@@ -457,6 +457,10 @@ class DataFrameSuite extends QueryTest {
       Row(key, value, key + 1)
     }.toSeq)
     assert(df.schema.map(_.name).toSeq === Seq("key", "value", "newCol"))
+
+    val df2 = TestSQLContext.sparkContext.parallelize(Array(1, 2, 3)).toDF("x")
+    val df3 = df2.withColumn("x", df2("x") + 1)
+    assert(df3.select("x").collect().toSeq === Seq(Row(2), Row(3), Row(4)))
   }

   test("withColumnRenamed") {