Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -482,8 +482,10 @@ class CodegenContext {
*/
def genEqual(dataType: DataType, c1: String, c2: String): String = dataType match {
case BinaryType => s"java.util.Arrays.equals($c1, $c2)"
case FloatType => s"(java.lang.Float.isNaN($c1) && java.lang.Float.isNaN($c2)) || $c1 == $c2"
case DoubleType => s"(java.lang.Double.isNaN($c1) && java.lang.Double.isNaN($c2)) || $c1 == $c2"
case FloatType =>
s"((java.lang.Float.isNaN($c1) && java.lang.Float.isNaN($c2)) || $c1 == $c2)"
case DoubleType =>
s"((java.lang.Double.isNaN($c1) && java.lang.Double.isNaN($c2)) || $c1 == $c2)"
case dt: DataType if isPrimitiveType(dt) => s"$c1 == $c2"
case dt: DataType if dt.isInstanceOf[AtomicType] => s"$c1.equals($c2)"
case array: ArrayType => genComp(array, c1, c2) + " == 0"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -340,4 +340,11 @@ class PredicateSuite extends SparkFunSuite with ExpressionEvalHelper {
val infinity = Literal(Double.PositiveInfinity)
checkEvaluation(EqualTo(infinity, infinity), true)
}

test("SPARK-24007: EqualNullSafe for FloatType and DoubleType might generate a wrong result") {
  // EqualNullSafe must evaluate to false (never null) when exactly one side is null.
  // Regression check for the codegen bug where the NaN-aware float/double equality
  // expression was not parenthesized, so the null-safe wrapper bound incorrectly.
  val nullFloat = Literal(null, FloatType)
  val nullDouble = Literal(null, DoubleType)
  checkEvaluation(EqualNullSafe(nullFloat, Literal(-1.0f)), false)
  checkEvaluation(EqualNullSafe(Literal(-1.0f), nullFloat), false)
  checkEvaluation(EqualNullSafe(nullDouble, Literal(-1.0d)), false)
  checkEvaluation(EqualNullSafe(Literal(-1.0d), nullDouble), false)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ object SQLExecution {
/**
* Wrap an action with a known executionId. When running a different action in a different
* thread from the original one, this method can be used to connect the Spark jobs in this action
* with the known executionId, e.g., `BroadcastHashJoin.broadcastFuture`.
* with the known executionId, e.g., `BroadcastExchangeExec.relationFuture`.
*/
def withExecutionId[T](sc: SparkContext, executionId: String)(body: => T): T = {
val oldExecutionId = sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -397,7 +397,7 @@ private[hive] object HadoopTableReader extends HiveInspectors with Logging {

val (fieldRefs, fieldOrdinals) = nonPartitionKeyAttrs.map { case (attr, ordinal) =>
soi.getStructFieldRef(attr.name) -> ordinal
}.unzip
}.toArray.unzip

/**
* Builds specific unwrappers ahead of time according to object inspector
Expand Down