2 files changed, +21 −1 lines changed:
  main/scala/org/apache/spark/sql
  test/scala/org/apache/spark/sql

@@ -747,7 +747,19 @@ class DataFrame private[sql](
    * Returns a new [[DataFrame]] by adding a column.
    * @group dfops
    */
-  def withColumn(colName: String, col: Column): DataFrame = select(Column("*"), col.as(colName))
+  def withColumn(colName: String, col: Column): DataFrame = {
+    val resolver = sqlContext.analyzer.resolver
+    val replaced = schema.exists(f => resolver(f.name, colName))
+    if (replaced) {
+      val colNames = schema.map { field =>
+        val name = field.name
+        if (resolver(name, colName)) col.as(colName) else Column(name)
+      }
+      select(colNames : _*)
+    } else {
+      select(Column("*"), col.as(colName))
+    }
+  }

   /**
    * Returns a new [[DataFrame]] with a column renamed.
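For context, a minimal usage sketch of the new behavior (illustrative only, not part of the patch; assumes a SQLContext named sqlContext with its implicits imported, as in the Spark shell, and hypothetical column names):

// Previously, withColumn with an already-existing column name appended a duplicate
// column via select("*", col.as(colName)); with this patch the matching column is
// replaced in place and the column order is preserved.
import sqlContext.implicits._          // assumes a SQLContext named sqlContext

val df = Seq((1, "a"), (2, "b")).toDF("x", "label")
val updated = df.withColumn("x", df("x") * 10)

// updated.columns   => Array("x", "label")      -- still two columns, "x" replaced
// updated.collect() => Array(Row(10, "a"), Row(20, "b"))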
@@ -473,6 +473,14 @@ class DataFrameSuite extends QueryTest {
     assert(df.schema.map(_.name).toSeq === Seq("key", "value", "newCol"))
   }

+  test("replace column using withColumn") {
+    val df2 = TestSQLContext.sparkContext.parallelize(Array(1, 2, 3)).toDF("x")
+    val df3 = df2.withColumn("x", df2("x") + 1)
+    checkAnswer(
+      df3.select("x"),
+      Row(2) :: Row(3) :: Row(4) :: Nil)
+  }
+
   test("withColumnRenamed") {
     val df = testData.toDF().withColumn("newCol", col("key") + 1)
       .withColumnRenamed("value", "valueRenamed")
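For reference, the projection rewrite that the patched withColumn performs is roughly what users previously had to do by hand (sketch only; this version uses plain string equality, whereas the real code defers to the analyzer's resolver for name matching):

// Hypothetical manual equivalent of df2.withColumn("x", df2("x") + 1) before this change:
// rebuild the projection, swapping the new expression in for the matching column name.
val rewritten = df2.select(
  df2.schema.fieldNames.map { name =>
    if (name == "x") (df2("x") + 1).as("x") else df2(name)
  } : _*
)
// checkAnswer(rewritten.select("x"), Row(2) :: Row(3) :: Row(4) :: Nil) would pass as well.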