@@ -111,13 +111,14 @@ class HadoopTableReader(

     // Create local references to member variables, so that the entire `this` object won't be
     // serialized in the closure below.
+    val localTableDesc = tableDesc
     val broadcastedHadoopConf = _broadcastedHadoopConf

     val tablePath = hiveTable.getPath
     val inputPathStr = applyFilterIfNeeded(tablePath, filterOpt)

     val locationPath = new Path(inputPathStr)
-    val fs = locationPath.getFileSystem(_broadcastedHadoopConf.value.value)
+    val fs = locationPath.getFileSystem(broadcastedHadoopConf.value.value)

     // if the location of the table which is not created by 'stored by' does not exist,
     // return an empty RDD
@@ -130,15 +131,15 @@ class HadoopTableReader(
     // logDebug("Table input: %s".format(tablePath))
     val ifc = hiveTable.getInputFormatClass
       .asInstanceOf[java.lang.Class[InputFormat[Writable, Writable]]]
-    val hadoopRDD = createHadoopRdd(tableDesc, inputPathStr, ifc)
+    val hadoopRDD = createHadoopRdd(localTableDesc, inputPathStr, ifc)

     val attrsWithIndex = attributes.zipWithIndex
     val mutableRow = new SpecificInternalRow(attributes.map(_.dataType))

     val deserializedHadoopRDD = hadoopRDD.mapPartitions { iter =>
       val hconf = broadcastedHadoopConf.value.value
       val deserializer = deserializerClass.newInstance()
-      deserializer.initialize(hconf, tableDesc.getProperties)
+      deserializer.initialize(hconf, localTableDesc.getProperties)
       HadoopTableReader.fillObject(iter, deserializer, attrsWithIndex, mutableRow, deserializer)
     }

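For context on why the patch introduces `localTableDesc` (alongside the existing `broadcastedHadoopConf` copy): in Scala, referring to a member variable inside an RDD closure desugars to `this.member`, so Spark must serialize the entire enclosing object to ship the closure to executors. Copying the field into a local `val` first means the closure captures only that value. Below is a minimal sketch of the pattern, using a hypothetical `Scaler` class rather than Spark's actual `HadoopTableReader`:

    import org.apache.spark.rdd.RDD

    // Hypothetical class standing in for HadoopTableReader; assume it holds
    // state that is expensive or impossible to serialize.
    class Scaler(factor: Int) {

      // Captures `this`: `factor` desugars to `this.factor`, so Spark tries
      // to serialize the whole Scaler instance along with the closure.
      def scaleCapturingThis(rdd: RDD[Int]): RDD[Int] =
        rdd.map(_ * factor)

      // Copies the field into a local val first; the closure now captures
      // only an Int, mirroring `localTableDesc` in the patch.
      def scaleWithLocalCopy(rdd: RDD[Int]): RDD[Int] = {
        val localFactor = factor
        rdd.map(_ * localFactor)
      }
    }

The same reasoning applies in the hunk above: inside the `mapPartitions` closure, `tableDesc.getProperties` would otherwise drag the whole `HadoopTableReader` into the serialized task, which is exactly what the file's own comment warns against.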