
Commit 78ceb00

Matching the Hive 2.3.x prefix
1 parent 2e7f31c commit 78ceb00

File tree: 4 files changed, +7 -7 lines


sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala

Lines changed: 2 additions & 2 deletions
@@ -158,7 +158,7 @@ private[hive] object HiveShim {
   }
 
   def deserializePlan[UDFType](is: java.io.InputStream, clazz: Class[_]): UDFType = {
-    if (HiveUtils.isSupportedHive2) {
+    if (HiveUtils.isHive23) {
       val borrowKryo = serUtilClass.getMethod("borrowKryo")
       val kryo = borrowKryo.invoke(serUtilClass)
       val deserializeObjectByKryo = findMethod(serUtilClass, deserializeMethodName,
@@ -180,7 +180,7 @@ private[hive] object HiveShim {
   }
 
   def serializePlan(function: AnyRef, out: java.io.OutputStream): Unit = {
-    if (HiveUtils.isSupportedHive2) {
+    if (HiveUtils.isHive23) {
       val borrowKryo = serUtilClass.getMethod("borrowKryo")
       val kryo = borrowKryo.invoke(serUtilClass)
       val serializeObjectByKryo = findMethod(serUtilClass, serializeMethodName,
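
The Hive-2.3 branch above resolves its serialization helpers by name at runtime rather than linking against them directly. As a rough, self-contained illustration of that look-up-by-name pattern, here is a Scala sketch with placeholder names (FakeSerUtil, borrowKryo, releaseKryo stand in for whatever the real shim reflects on; this is not Hive's API):

// Sketch only: mirrors the look-up-by-name reflection pattern with placeholder names.
object ReflectionPatternSketch {
  class FakeSerUtil {
    def borrowKryo(): String = "kryo-instance"   // stands in for borrowing a Kryo instance
    def releaseKryo(kryo: String): Unit = ()     // stands in for returning it
  }

  def main(args: Array[String]): Unit = {
    val serUtil = new FakeSerUtil
    val serUtilClass = serUtil.getClass
    // Resolve the zero-argument "borrow" method purely by name, then invoke it.
    val borrowKryo = serUtilClass.getMethod("borrowKryo")
    val kryo = borrowKryo.invoke(serUtil)
    println(s"borrowed: $kryo")
    // Resolve the release method by name and parameter type, and hand the instance back.
    serUtilClass.getMethod("releaseKryo", classOf[String]).invoke(serUtil, kryo)
  }
}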

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala

Lines changed: 3 additions & 3 deletions
@@ -55,11 +55,11 @@ private[spark] object HiveUtils extends Logging {
     sc
   }
 
-  private val supportedHive2ShortVersions = Set("2.3.0")
-  val isSupportedHive2 = supportedHive2ShortVersions.contains(HiveVersionInfo.getShortVersion)
+  private val hiveVersion = HiveVersionInfo.getVersion
+  val isHive23: Boolean = hiveVersion.startsWith("2.3")
 
   /** The version of hive used internally by Spark SQL. */
-  val builtinHiveVersion: String = if (isSupportedHive2) HiveVersionInfo.getVersion else "1.2.1"
+  val builtinHiveVersion: String = if (isHive23) hiveVersion else "1.2.1"
 
   val HIVE_METASTORE_VERSION = buildConf("spark.sql.hive.metastore.version")
     .doc("Version of the Hive metastore. Available options are " +

sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala

Lines changed: 1 addition & 1 deletion
@@ -338,7 +338,7 @@ private[hive] case class HiveUDAFFunction(
     }
 
     val clazz = Utils.classForName(classOf[SimpleGenericUDAFParameterInfo].getName)
-    if (HiveUtils.isSupportedHive2) {
+    if (HiveUtils.isHive23) {
       val ctor = clazz.getDeclaredConstructor(
         classOf[Array[ObjectInspector]], JBoolean.TYPE, JBoolean.TYPE, JBoolean.TYPE)
       val args = Array[AnyRef](inputInspectors, JBoolean.FALSE, JBoolean.FALSE, JBoolean.FALSE)
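
For context, the Hive-2.3 branch above picks a constructor by its exact parameter types and invokes it reflectively. A minimal sketch of that pattern follows; SimpleParamInfo and its field names are placeholders invented for the illustration, standing in for Hive's SimpleGenericUDAFParameterInfo:

// Sketch only: placeholder class, same reflective constructor-selection pattern.
object CtorSelectionSketch {
  class SimpleParamInfo(
      val inspectors: Array[String],   // stands in for Array[ObjectInspector]
      val windowing: Boolean,
      val distinct: Boolean,
      val allColumns: Boolean) {
    override def toString: String =
      s"SimpleParamInfo(${inspectors.mkString(",")}, $windowing, $distinct, $allColumns)"
  }

  def main(args: Array[String]): Unit = {
    val JBoolean = java.lang.Boolean.TYPE
    val clazz = classOf[SimpleParamInfo]
    // Resolve the four-argument constructor by its exact parameter types.
    val ctor = clazz.getDeclaredConstructor(classOf[Array[String]], JBoolean, JBoolean, JBoolean)
    val ctorArgs = Array[AnyRef](Array("col_a"), java.lang.Boolean.FALSE,
      java.lang.Boolean.FALSE, java.lang.Boolean.FALSE)
    println(ctor.newInstance(ctorArgs: _*))
  }
}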

sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFilters.scala

Lines changed: 1 addition & 1 deletion
@@ -70,7 +70,7 @@ private[orc] object OrcFilters extends Logging {
   }
 
   def createFilter(schema: StructType, filters: Array[Filter]): Option[SearchArgument] = {
-    if (HiveUtils.isSupportedHive2) {
+    if (HiveUtils.isHive23) {
       DatasourceOrcFilters.createFilter(schema, filters).asInstanceOf[Option[SearchArgument]]
     } else {
       val dataTypeMap = schema.map(f => f.name -> f.dataType).toMap
