From 787c1df87bde6f914ff5ca37ff5954503ff29dec Mon Sep 17 00:00:00 2001
From: zhangzc
Date: Sat, 23 Jan 2021 12:37:11 +0800
Subject: [PATCH] Fix Hive on HBase table access exception after upgrading to 3.0.1

---
 .../main/scala/org/apache/spark/sql/hive/TableReader.scala | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
index eb9ce877fc8d..74e8a3247925 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
@@ -299,10 +299,10 @@ class HadoopTableReader(
    */
   private def createHadoopRDD(localTableDesc: TableDesc, inputPathStr: String): RDD[Writable] = {
     val inputFormatClazz = localTableDesc.getInputFileFormatClass
-    if (classOf[newInputClass[_, _]].isAssignableFrom(inputFormatClazz)) {
-      createNewHadoopRDD(localTableDesc, inputPathStr)
-    } else {
+    if (classOf[oldInputClass[_, _]].isAssignableFrom(inputFormatClazz)) {
       createOldHadoopRDD(localTableDesc, inputPathStr)
+    } else {
+      createNewHadoopRDD(localTableDesc, inputPathStr)
     }
   }
 
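
Note (not part of the patch itself): below is a minimal, standalone Scala sketch of the dispatch rule the patch establishes: prefer the old org.apache.hadoop.mapred API whenever the table's input format implements it, and fall back to the new org.apache.hadoop.mapreduce API otherwise. The object and method names (InputFormatDispatch, choose) are illustrative, not Spark source; oldInputClass in the hunk is Spark's import alias for org.apache.hadoop.mapred.InputFormat. The sketch assumes only Hadoop client classes on the classpath; the motivation implied by the subject is that HBase-backed Hive tables fail when routed through the new-API path, so the old-API check now takes precedence.

    import org.apache.hadoop.mapred.{InputFormat => OldInputFormat}

    // Illustrative sketch of the patched dispatch logic; not part of TableReader.scala.
    object InputFormatDispatch {
      sealed trait RddKind
      case object OldHadoopRDD extends RddKind
      case object NewHadoopRDD extends RddKind

      // Mirrors the patched createHadoopRDD: formats implementing the old
      // mapred InputFormat take the old HadoopRDD path; everything else
      // falls back to the new mapreduce-based path.
      def choose(inputFormatClazz: Class[_]): RddKind =
        if (classOf[OldInputFormat[_, _]].isAssignableFrom(inputFormatClazz)) OldHadoopRDD
        else NewHadoopRDD

      def main(args: Array[String]): Unit = {
        // mapred.TextInputFormat implements the old interface -> old path.
        println(choose(classOf[org.apache.hadoop.mapred.TextInputFormat]))
        // mapreduce.lib.input.TextInputFormat implements only the new interface -> new path.
        println(choose(classOf[org.apache.hadoop.mapreduce.lib.input.TextInputFormat]))
      }
    }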