@@ -113,7 +113,7 @@ private[sql] object StatFunctions extends Logging {
113 113       if (element == null) "null" else element.toString
114 114     }
115 115     // get the distinct values of column 2, so that we can make them the column names
116     -   val distinctCol2: Map[Any, Int] =
    116 +   val distinctCol2: Map[String, Int] =
117 117       counts.map(e => cleanElement(e.get(1))).distinct.zipWithIndex.toMap
118 118     val columnSize = distinctCol2.size
119 119     require(columnSize < 1e4, s"The number of distinct values for $col2, can't " +
@@ -128,7 +128,7 @@ private[sql] object StatFunctions extends Logging {
128 128         countsRow.setLong(columnIndex + 1, row.getLong(2))
129 129       }
130 130       // the value of col1 is the first value, the rest are the counts
131     -     countsRow.setString(0, cleanElement(col1Item.toString))
    131 +     countsRow.setString(0, cleanElement(col1Item))
132 132       countsRow
133 133     }.toSeq
134 134     // Back ticks can't exist in DataFrame column names, therefore drop them. To be able to accept
@@ -139,7 +139,7 @@ private[sql] object StatFunctions extends Logging {
139 139     // In the map, the column names (._1) are not ordered by the index (._2). This was the bug in
140 140     // SPARK-8681. We need to explicitly sort by the column index and assign the column names.
141 141     val headerNames = distinctCol2.toSeq.sortBy(_._2).map { r =>
142     -     StructField(cleanColumnName(r._1.toString), LongType)
    142 +     StructField(cleanColumnName(r._1), LongType)
143 143     }
144 144     val schema = StructType(StructField(tableName, StringType) +: headerNames)
145 145
0 commit comments