
Commit fa01bec

[Build] Enable MiMa checks for SQL

JoshRosen authored and rxin committed

Now that 1.3 has been released, we should enable MiMa checks for the `sql` subproject.

Author: Josh Rosen <[email protected]>

Closes apache#5727 from JoshRosen/enable-more-mima-checks and squashes the following commits:

3ad302b [Josh Rosen] Merge remote-tracking branch 'origin/master' into enable-more-mima-checks
0c48e4d [Josh Rosen] Merge remote-tracking branch 'origin/master' into enable-more-mima-checks
e276cee [Josh Rosen] Fix SQL MiMa checks via excludes and private[sql]
44d0d01 [Josh Rosen] Add back 'launcher' exclude
1aae027 [Josh Rosen] Enable MiMa checks for launcher and sql projects.

1 parent 77cc25f · commit fa01bec
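For background: MiMa (the Migration Manager) compares the compiled classes of the current build against the previously released artifact and fails the build on binary-incompatible changes. As a minimal sketch of what it guards against (the class and method names below are hypothetical, not Spark's), removing a public method breaks clients that were compiled against the old jar:

// Version "1.3" of a hypothetical class: `stats` is public, so it is
// part of the binary interface that downstream jars link against.
class TableScanner {
  def scan(): Unit = ()
  def stats(): Long = 0L
}

// A hypothetical "1.4" where `stats` was removed. A client jar compiled
// against 1.3 that calls stats() still loads, but fails at runtime with
// NoSuchMethodError. MiMa reports the removal as a MissingMethodProblem
// before release instead.
class TableScannerV2 {
  def scan(): Unit = ()
}

The excludes added in this commit acknowledge such removals and relocations as intentional, so the build can pass.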

5 files changed: +23 additions, −7 deletions

project/MimaExcludes.scala
Lines changed: 16 additions & 0 deletions

@@ -88,6 +88,22 @@ object MimaExcludes {
             "org.apache.spark.mllib.linalg.Vector.toSparse"),
           ProblemFilters.exclude[MissingMethodProblem](
             "org.apache.spark.mllib.linalg.Vector.numActives")
+        ) ++ Seq(
+          // This `protected[sql]` method was removed in 1.3.1
+          ProblemFilters.exclude[MissingMethodProblem](
+            "org.apache.spark.sql.SQLContext.checkAnalysis"),
+          // This `private[sql]` class was removed in 1.4.0:
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.execution.AddExchange"),
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.execution.AddExchange$"),
+          // These test support classes were moved out of src/main and into src/test:
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.parquet.ParquetTestData"),
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.parquet.ParquetTestData$"),
+          ProblemFilters.exclude[MissingClassProblem](
+            "org.apache.spark.sql.parquet.TestGroupWriteSupport")
         )

       case v if v.startsWith("1.3") =>
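A note on the paired entries above: the trailing `$` names are not typos. Scalac emits a companion object as a separate JVM class whose name ends in `$`, so deleting a case class removes two JVM classes, and each needs its own exclude. A small self-contained sketch, using hypothetical names:

package example

// A Scala case class compiles to two JVM classes: `AddThing` (the
// class itself) and `AddThing$` (its synthetic companion object,
// holding apply/unapply). Deleting the case class deletes both,
// hence the paired AddExchange / AddExchange$ excludes above.
case class AddThing(n: Int)

object CompanionDemo {
  def main(args: Array[String]): Unit = {
    println(classOf[AddThing].getName)  // prints: example.AddThing
    println(AddThing.getClass.getName)  // prints: example.AddThing$
  }
}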

project/SparkBuild.scala
Lines changed: 2 additions & 3 deletions

@@ -156,9 +156,8 @@ object SparkBuild extends PomBuild {
   /* Enable tests settings for all projects except examples, assembly and tools */
   (allProjects ++ optionallyEnabledProjects).foreach(enable(TestSettings.settings))

-  // TODO: Add Sql to mima checks
-  // TODO: remove launcher from this list after 1.3.
-  allProjects.filterNot(x => Seq(spark, sql, hive, hiveThriftServer, catalyst, repl,
+  // TODO: remove launcher from this list after 1.4.0
+  allProjects.filterNot(x => Seq(spark, hive, hiveThriftServer, catalyst, repl,
     networkCommon, networkShuffle, networkYarn, launcher, unsafe).contains(x)).foreach {
       x => enable(MimaBuild.mimaSettings(sparkHome, x))(x)
     }
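The enabling logic is a set difference: every project not in the exclusion list gets MiMa settings applied, so removing `sql` from the list is the entire change. A toy sketch of the same idiom, with hypothetical project names standing in for sbt `Project` objects:

object MimaSelectionDemo {
  def main(args: Array[String]): Unit = {
    // Hypothetical project names; the real build filters sbt Project
    // objects, but the selection idiom is the same set difference.
    val allProjects = Seq("core", "mllib", "sql", "hive", "launcher")
    val skipMima    = Set("hive", "launcher") // "sql" no longer excluded

    allProjects.filterNot(skipMima.contains).foreach { p =>
      println(s"MiMa enabled for: $p") // core, mllib, sql
    }
  }
}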

sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala
Lines changed: 3 additions & 2 deletions

@@ -84,7 +84,7 @@ object RDDConversions {
 }

 /** Logical plan node for scanning data from an RDD. */
-case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
+private[sql] case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
   extends LogicalPlan with MultiInstanceRelation {

   override def children: Seq[LogicalPlan] = Nil
@@ -105,11 +105,12 @@ case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
 }

 /** Physical plan node for scanning data from an RDD. */
-case class PhysicalRDD(output: Seq[Attribute], rdd: RDD[Row]) extends LeafNode {
+private[sql] case class PhysicalRDD(output: Seq[Attribute], rdd: RDD[Row]) extends LeafNode {
   override def execute(): RDD[Row] = rdd
 }

 /** Logical plan node for scanning data from a local collection. */
+private[sql]
 case class LogicalLocalTable(output: Seq[Attribute], rows: Seq[Row])(sqlContext: SQLContext)
   extends LogicalPlan with MultiInstanceRelation {
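The `private[sql]` annotations here are qualified access modifiers: a definition stays visible everywhere under `org.apache.spark.sql` but becomes inaccessible outside it, and the build's MiMa setup ignores such package-private definitions, which is what clears these checks. A self-contained sketch, using hypothetical packages that mirror the layout:

// Hypothetical packages: `private[sql]` makes PlanNode visible
// anywhere under org.example.sql but private to everyone else.
package org.example.sql.execution {
  private[sql] case class PlanNode(name: String)
}

package org.example.sql {
  object InsideSql {
    // Compiles: this object lives inside org.example.sql.
    val node = execution.PlanNode("scan")
  }
}

package org.example.other {
  object OutsideSql {
    // Would NOT compile if uncommented: PlanNode is private
    // to the org.example.sql package.
    // val node = org.example.sql.execution.PlanNode("scan")
  }
}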

sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScan.scala
Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.Attribute
 /**
  * Physical plan node for scanning data from a local collection.
  */
-case class LocalTableScan(output: Seq[Attribute], rows: Seq[Row]) extends LeafNode {
+private[sql] case class LocalTableScan(output: Seq[Attribute], rows: Seq[Row]) extends LeafNode {

   private lazy val rdd = sqlContext.sparkContext.parallelize(rows)

sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
Lines changed: 1 addition & 1 deletion

@@ -42,7 +42,7 @@ trait RunnableCommand extends logical.Command {
  * A physical operator that executes the run method of a `RunnableCommand` and
  * saves the result to prevent multiple executions.
  */
-case class ExecutedCommand(cmd: RunnableCommand) extends SparkPlan {
+private[sql] case class ExecutedCommand(cmd: RunnableCommand) extends SparkPlan {
   /**
    * A concrete command should override this lazy field to wrap up any side effects caused by the
    * command or any other computation that should be evaluated exactly once. The value of this field
0 commit comments