From 56341296a1917f89c0a9003481a373d21d87f2b8 Mon Sep 17 00:00:00 2001 From: yangBottle <37429758+yangBottle@users.noreply.github.com> Date: Tue, 12 Jan 2021 10:53:35 +0800 Subject: [PATCH] Update TableReader.scala Fixes a problem where Spark 3.0 could not access a Hive table whose data is stored in HBase. --- .../src/main/scala/org/apache/spark/sql/hive/TableReader.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala index eb9ce877fc8d..8e22058bc4e5 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala @@ -299,7 +299,7 @@ class HadoopTableReader( */ private def createHadoopRDD(localTableDesc: TableDesc, inputPathStr: String): RDD[Writable] = { val inputFormatClazz = localTableDesc.getInputFileFormatClass - if (classOf[newInputClass[_, _]].isAssignableFrom(inputFormatClazz)) { + if (classOf[oldInputClass[_, _]].isAssignableFrom(inputFormatClazz)) { createNewHadoopRDD(localTableDesc, inputPathStr) } else { createOldHadoopRDD(localTableDesc, inputPathStr)