From 56341296a1917f89c0a9003481a373d21d87f2b8 Mon Sep 17 00:00:00 2001
From: yangBottle <37429758+yangBottle@users.noreply.github.com>
Date: Tue, 12 Jan 2021 10:53:35 +0800
Subject: [PATCH 1/2] Update TableReader.scala

Fixed Spark 3.0 failing to access a Hive table whose data is stored in
HBase.
---
 .../src/main/scala/org/apache/spark/sql/hive/TableReader.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
index eb9ce877fc8d..8e22058bc4e5 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
@@ -299,7 +299,7 @@ class HadoopTableReader(
    */
   private def createHadoopRDD(localTableDesc: TableDesc, inputPathStr: String): RDD[Writable] = {
     val inputFormatClazz = localTableDesc.getInputFileFormatClass
-    if (classOf[newInputClass[_, _]].isAssignableFrom(inputFormatClazz)) {
+    if (classOf[oldInputClass[_, _]].isAssignableFrom(inputFormatClazz)) {
       createNewHadoopRDD(localTableDesc, inputPathStr)
     } else {
       createOldHadoopRDD(localTableDesc, inputPathStr)

From 4c78d2e9e341c3c4367bd3d4d46c742dd2fd6fae Mon Sep 17 00:00:00 2001
From: yangBottle <37429758+yangBottle@users.noreply.github.com>
Date: Tue, 12 Jan 2021 13:59:06 +0800
Subject: [PATCH 2/2] Update TableReader.scala

Fix: Spark SQL cannot access a Hive table whose data is stored in HBase.
---
 .../main/scala/org/apache/spark/sql/hive/TableReader.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
index 8e22058bc4e5..74e8a3247925 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
@@ -300,9 +300,9 @@ class HadoopTableReader(
   private def createHadoopRDD(localTableDesc: TableDesc, inputPathStr: String): RDD[Writable] = {
     val inputFormatClazz = localTableDesc.getInputFileFormatClass
     if (classOf[oldInputClass[_, _]].isAssignableFrom(inputFormatClazz)) {
-      createNewHadoopRDD(localTableDesc, inputPathStr)
-    } else {
       createOldHadoopRDD(localTableDesc, inputPathStr)
+    } else {
+      createNewHadoopRDD(localTableDesc, inputPathStr)
     }
   }
 