@@ -89,21 +89,34 @@ object SparkBuild extends PomBuild {
   allProjects ++ optionallyEnabledProjects ++ assemblyProjects foreach enable(sharedSettings)
 
   /* Enable tests settings for all projects except examples, assembly and tools */
-  allProjects ++ optionallyEnabledProjects foreach enable(TestSettings.s)
+  allProjects ++ optionallyEnabledProjects foreach enable(TestSettings.settings)
 
   /* Enable Mima for all projects except spark, sql, hive, catalyst and repl */
-  allProjects filterNot(y => Seq(spark, sql, hive, catalyst, repl).exists(x => x == y)) foreach (x => enable(MimaSettings.effectiveSetting(x))(x))
+  allProjects.filterNot(y => Seq(spark, sql, hive, catalyst, repl).exists(x => x == y)).
+    foreach (x => enable(MimaBuild.mimaSettings(sparkHome, x))(x))
 
   /* Enable Assembly for all assembly projects */
-  assemblyProjects foreach enable(AssemblySettings.s)
+  assemblyProjects foreach enable(Assembly.settings)
 
   /* Enable unidoc only for the root spark project */
-  Seq(spark) foreach enable(UnidocSettings.s)
+  enable(Unidoc.settings)(spark)
 
   /* Hive console settings */
-  Seq(hive) foreach enable(hiveSettings)
+  enable(Hive.settings)(hive)
 
-  lazy val hiveSettings = Seq(
+  // TODO: move this to its upstream project.
+  override def projectDefinitions(baseDirectory: File): Seq[Project] = {
+    super.projectDefinitions(baseDirectory).map { x =>
+      if (projectsMap.exists(_._1 == x.id)) x.settings(projectsMap(x.id): _*)
+      else x.settings(Seq[Setting[_]](): _*)
+    }
+  }
+
+}
+
+object Hive {
+
+  lazy val settings = Seq(
 
     javaOptions += "-XX:MaxPermSize=1g",
     // Multiple queries rely on the TestHive singleton. See comments there for more details.
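Note on the hunk above: the new `projectDefinitions` override only works if `enable` has been accumulating settings into `projectsMap`, keyed by project id. That helper is defined elsewhere in this file and not shown in the diff; a minimal sketch of the assumed shape (the map and method bodies here are illustrative, not copied from this change):

```scala
import scala.collection.mutable
import sbt._

// Assumed accumulator behind the enable(...) calls above: a mutable map from
// project id to the settings queued up for that project.
val projectsMap: mutable.Map[String, Seq[Setting[_]]] = mutable.Map.empty

// enable(settings) returns a function that appends those settings to a
// project's entry; projectDefinitions later applies the accumulated Seq.
def enable(settings: Seq[Setting[_]])(projectRef: ProjectRef): Unit = {
  val existing = projectsMap.getOrElse(projectRef.project, Seq[Setting[_]]())
  projectsMap += (projectRef.project -> (existing ++ settings))
}
```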
@@ -129,36 +142,13 @@ object SparkBuild extends PomBuild {
        |import org.apache.spark.sql.parquet.ParquetTestData""".stripMargin
   )
 
-  // TODO: move this to its upstream project.
-  override def projectDefinitions(baseDirectory: File): Seq[Project] = {
-    super.projectDefinitions(baseDirectory).map { x =>
-      if (projectsMap.exists(_._1 == x.id)) x.settings(projectsMap(x.id): _*)
-      else x.settings(Seq[Setting[_]](): _*)
-    }
-  }
-
 }
 
-object MimaSettings {
-
-  import BuildCommons._
-  import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
-
-  private lazy val s = MimaBuild.mimaSettings(sparkHome)
-
-  def effectiveSetting(projectRef: ProjectRef) = {
-    val organization = "org.apache.spark"
-    val version = "0.9.0-incubating"
-    val fullId = "spark-" + projectRef.project + "_2.10"
-    s ++ Seq(previousArtifact := Some(organization % fullId % version))
-  }
-}
-
-object AssemblySettings {
+object Assembly {
   import sbtassembly.Plugin._
   import AssemblyKeys._
 
-  lazy val s = assemblySettings ++ Seq(
+  lazy val settings = assemblySettings ++ Seq(
     test in assembly := {},
     jarName in assembly <<= (version, moduleName) map { (v, mName) => mName + "-" + v + "-hadoop" +
       Option(System.getProperty("hadoop.version")).getOrElse("1.0.4") + ".jar" }, // TODO: add proper default hadoop version.
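On the Mima change: the deleted `MimaSettings.effectiveSetting` computed `previousArtifact` locally, and the call site in the first hunk now delegates to a two-argument `MimaBuild.mimaSettings(sparkHome, x)`. A sketch of what the relocated helper in `project/MimaBuild.scala` presumably looks like — the signature is taken from the hunk, while the body mirrors the deleted code and is an assumption:

```scala
import sbt._
import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact

// Hypothetical relocated helper; the base Mima settings (exclusions etc.,
// the old private `s` value) would presumably be merged in here as well.
def mimaSettings(sparkHome: File, projectRef: ProjectRef): Seq[Setting[_]] = {
  val organization = "org.apache.spark"
  val previousVersion = "0.9.0-incubating"
  val fullId = "spark-" + projectRef.project + "_2.10"
  Seq(previousArtifact := Some(organization % fullId % previousVersion))
}
```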
@@ -175,7 +165,7 @@ object AssemblySettings {
 
 }
 
-object UnidocSettings {
+object Unidoc {
 
   import BuildCommons._
   import sbtunidoc.Plugin._
@@ -186,7 +176,7 @@ object UnidocSettings {
     names.map(s => "org.apache.spark." + s).mkString(":")
   }
 
-  lazy val s = scalaJavaUnidocSettings ++ Seq(
+  lazy val settings = scalaJavaUnidocSettings ++ Seq(
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
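The hunk cuts off mid-assignment; for readers unfamiliar with sbt-unidoc, the filter on the last line is typically completed with a project-filter expression along these lines (the excluded projects below are placeholders, not what this diff actually sets):

```scala
import sbtunidoc.Plugin._
import UnidocKeys._

// Illustrative only: exclude some projects from the aggregated Scala unidoc.
unidocProjectFilter in (ScalaUnidoc, unidoc) :=
  inAnyProject -- inProjects(repl, examples, tools)
```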
@@ -240,10 +230,9 @@ object UnidocSettings {
 }
 
 object TestSettings {
-
   import BuildCommons._
 
-  lazy val s = Seq(
+  lazy val settings = Seq(
     // Fork new JVMs for tests and set Java options for those
     fork := true,
     javaOptions in Test += "-Dspark.home=" + sparkHome,
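After this rename, every settings holder (`Hive`, `Assembly`, `Unidoc`, `TestSettings`) exposes a uniform `lazy val settings`, so call sites read the same everywhere. As a usage sketch, any sbt project could also mix these in directly, outside the `enable` helper (`core` is a placeholder module name, not part of this change):

```scala
import sbt._

// Illustrative only: composing the renamed settings objects on a project.
lazy val core = Project("core", file("core"))
  .settings(TestSettings.settings ++ Assembly.settings: _*)
```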