@@ -142,19 +142,25 @@ CURRENT_BLOCK=$BLOCK_BUILD
   # We always build with Hive because the PySpark Spark SQL tests need it.
   BUILD_MVN_PROFILE_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive -Phive-0.12.0"
 
-
   # NOTE: echo "q" is needed because sbt on encountering a build file with failure
   # + (either resolution or compilation) prompts the user for input either q, r, etc
   # + to quit or retry. This echo is there to make it not block.
   # NOTE: Do not quote $BUILD_MVN_PROFILE_ARGS or else it will be interpreted as a
   # + single argument!
   # QUESTION: Why doesn't 'yes "q"' work?
   # QUESTION: Why doesn't 'grep -v -e "^\[info\] Resolving"' work?
-  # First build with 0.12 to ensure patches do not break the hive 12 build
-  echo "[info] Compile with hive 0.12"
+  # First build with 0.12 to ensure patches do not break the hive 12 build.
+  echo "[info] Compile with hive 0.12 (base support)"
   echo -e "q\n" \
-    | sbt/sbt $BUILD_MVN_PROFILE_ARGS clean hive/compile hive-thriftserver/compile \
+    | sbt/sbt $BUILD_MVN_PROFILE_ARGS clean hive/compile \
     | grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
+  if [[ $BUILD_MVN_PROFILE_ARGS != *scala-2.11* ]]; then
+    # JDBC server only supported for Scala 2.10
+    echo "[info] Compile with hive 0.12 (JDBC server)"
+    echo -e "q\n" \
+      | sbt/sbt $BUILD_MVN_PROFILE_ARGS hive-thriftserver/compile \
+      | grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
+  fi
 
   # Then build with default version(0.13.1) because tests are based on this version
   echo "[info] Building Spark with these arguments: $SBT_MAVEN_PROFILES_ARGS -Phive"
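
Note: the added guard relies on bash's [[ ... != *glob* ]] pattern test, which is true only when the glob does not match anywhere in the variable's value, so the hive-thriftserver compile step runs only when the profile arguments do not mention scala-2.11. A minimal standalone sketch of the same check (PROFILES and its value are hypothetical stand-ins for $BUILD_MVN_PROFILE_ARGS, not taken from the script):

    #!/usr/bin/env bash
    # Hypothetical profile string; any value containing "scala-2.11" skips the thrift server step.
    PROFILES="-Phive -Dscala-2.11"
    if [[ $PROFILES != *scala-2.11* ]]; then
      echo "compile hive-thriftserver"    # runs only when scala-2.11 is absent
    else
      echo "skip hive-thriftserver (JDBC server is Scala 2.10 only)"
    fi

With the value above the else branch runs; removing scala-2.11 from PROFILES makes the guard pass and the thrift server compile execute.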