core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala (1 file changed, +5 −0)

@@ -510,6 +510,10 @@ private[serializer] object KryoSerializer {
   // SQL / ML / MLlib classes once and then re-use that filtered list in newInstance() calls.
   private lazy val loadableSparkClasses: Seq[Class[_]] = {
     Seq(
+      "org.apache.spark.util.HadoopFSUtils$SerializableBlockLocation",
+      "[Lorg.apache.spark.util.HadoopFSUtils$SerializableBlockLocation;",
+      "org.apache.spark.util.HadoopFSUtils$SerializableFileStatus",
+
       "org.apache.spark.sql.catalyst.expressions.BoundReference",
       "org.apache.spark.sql.catalyst.expressions.SortOrder",
       "[Lorg.apache.spark.sql.catalyst.expressions.SortOrder;",
@@ -536,6 +540,7 @@ private[serializer] object KryoSerializer {
       "org.apache.spark.sql.types.DecimalType",
       "org.apache.spark.sql.types.Decimal$DecimalAsIfIntegral$",
       "org.apache.spark.sql.types.Decimal$DecimalIsFractional$",
+      "org.apache.spark.sql.execution.command.PartitionStatistics",
       "org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTaskResult",
       "org.apache.spark.sql.execution.joins.EmptyHashedRelation$",
       "org.apache.spark.sql.execution.joins.LongHashedRelation",