Skip to content

Commit 9b487bf

Browse files
committed
Fixes compilation errors introduced while rebasing
1 parent ea6c8dd commit 9b487bf

File tree

2 files changed

+15
-5
lines changed

(duplicate summary removed: 2 files changed, +15 −5 lines)

sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ import org.apache.spark.sql.catalyst.analysis.{ResolvedStar, UnresolvedAttribute
3737
import org.apache.spark.sql.catalyst.expressions._
3838
import org.apache.spark.sql.catalyst.plans.logical.{Filter, _}
3939
import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
40-
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, ScalaReflection, SqlParser}
40+
import org.apache.spark.sql.catalyst.{expressions, CatalystTypeConverters, ScalaReflection, SqlParser}
4141
import org.apache.spark.sql.execution.{EvaluatePython, ExplainCommand, LogicalRDD}
4242
import org.apache.spark.sql.jdbc.JDBCWriteDetails
4343
import org.apache.spark.sql.json.JacksonGenerator
@@ -400,7 +400,9 @@ class DataFrame private[sql](
400400
joined.left,
401401
joined.right,
402402
joinType = Inner,
403-
Some(EqualTo(joined.left.resolve(usingColumn), joined.right.resolve(usingColumn))))
403+
Some(expressions.EqualTo(
404+
joined.left.resolve(usingColumn),
405+
joined.right.resolve(usingColumn))))
404406
)
405407
}
406408

@@ -1343,7 +1345,7 @@ class DataFrame private[sql](
13431345
mode: SaveMode,
13441346
options: java.util.Map[String, String],
13451347
partitionColumns: java.util.List[String]): Unit = {
1346-
???
1348+
saveAsTable(tableName, source, mode, options.toMap, partitionColumns)
13471349
}
13481350

13491351
/**
@@ -1399,7 +1401,15 @@ class DataFrame private[sql](
13991401
mode: SaveMode,
14001402
options: Map[String, String],
14011403
partitionColumns: Seq[String]): Unit = {
1402-
???
1404+
sqlContext.executePlan(
1405+
CreateTableUsingAsSelect(
1406+
tableName,
1407+
source,
1408+
temporary = false,
1409+
partitionColumns.toArray,
1410+
mode,
1411+
options,
1412+
logicalPlan)).toRdd
14031413
}
14041414

14051415
/**

sql/core/src/main/scala/org/apache/spark/sql/sources/DataSourceStrategy.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ private[sql] object DataSourceStrategy extends Strategy {
9797
.reduceOption(expressions.And)
9898
.getOrElse(Literal(true))
9999

100-
val boundPredicate = InterpretedPredicate(predicate.transform {
100+
val boundPredicate = InterpretedPredicate.create(predicate.transform {
101101
case a: AttributeReference =>
102102
val index = partitionColumns.indexWhere(a.name == _.name)
103103
BoundReference(index, partitionColumns(index).dataType, nullable = true)

0 commit comments

Comments (0)