Skip to content

Commit 4d8bf02

Browse files
committed
Remove hive 12 compilation
1 parent 8843a25 commit 4d8bf02

File tree

3 files changed

+3
-28
lines changed

3 files changed

+3
-28
lines changed

dev/run-tests

Lines changed: 0 additions & 23 deletions
Original file line number | Diff line number | Diff line change
@@ -142,29 +142,6 @@ CURRENT_BLOCK=$BLOCK_BUILD
142142

143143
{
144144
HIVE_BUILD_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive -Phive-thriftserver"
145-
HIVE_12_BUILD_ARGS="$HIVE_BUILD_ARGS -Phive-0.12.0"
146-
147-
# First build with Hive 0.12.0 to ensure patches do not break the Hive 0.12.0 build
148-
echo "[info] Compile with Hive 0.12.0"
149-
[ -d "lib_managed" ] && rm -rf lib_managed
150-
echo "[info] Building Spark with these arguments: $HIVE_12_BUILD_ARGS"
151-
152-
if [ "${AMPLAB_JENKINS_BUILD_TOOL}" == "maven" ]; then
153-
build/mvn $HIVE_12_BUILD_ARGS clean package -DskipTests
154-
else
155-
# NOTE: echo "q" is needed because sbt on encountering a build file with failure
156-
# (either resolution or compilation) prompts the user for input either q, r, etc
157-
# to quit or retry. This echo is there to make it not block.
158-
# NOTE: Do not quote $BUILD_MVN_PROFILE_ARGS or else it will be interpreted as a
159-
# single argument!
160-
# QUESTION: Why doesn't 'yes "q"' work?
161-
# QUESTION: Why doesn't 'grep -v -e "^\[info\] Resolving"' work?
162-
echo -e "q\n" \
163-
| build/sbt $HIVE_12_BUILD_ARGS clean hive/compile hive-thriftserver/compile \
164-
| grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
165-
fi
166-
167-
# Then build with default Hive version (0.13.1) because tests are based on this version
168145
echo "[info] Compile with Hive 0.13.1"
169146
[ -d "lib_managed" ] && rm -rf lib_managed
170147
echo "[info] Building Spark with these arguments: $HIVE_BUILD_ARGS"

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -150,8 +150,9 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
150150
// into the isolated client loader
151151
val metadataConf = new HiveConf()
152152
val allConfig = metadataConf.iterator.map(e => e.getKey -> e.getValue).toMap
153-
153+
154154
// Config goes second to override other settings.
155+
// TODO: Support for loading the jars from an already downloaded location.
155156
IsolatedClientLoader.forVersion(hiveVersion, allConfig ++ configure).client
156157
}
157158

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -200,10 +200,7 @@ case class InsertIntoHiveTable(
200200
orderedPartitionSpec.put(entry.getName,partitionSpec.get(entry.getName).getOrElse(""))
201201
}
202202
val partVals = MetaStoreUtils.getPvals(table.hiveQlTable.getPartCols, partitionSpec)
203-
catalog.synchronized {
204-
// TODO
205-
// catalog.client.validatePartitionNameCharacters(partVals)
206-
}
203+
207204
// inheritTableSpecs is set to true. It should be set to false for a IMPORT query
208205
// which is currently considered as a Hive native command.
209206
val inheritTableSpecs = true

0 commit comments

Comments (0)