@@ -184,7 +184,7 @@ object ScalaReflection extends ScalaReflection {
val TypeRef(_, _, Seq(optType)) = t
val className = getClassNameFromType(optType)
val newTypePath = s"""- option value class: "$className"""" +: walkedTypePath
- WrapOption(constructorFor(optType, path, newTypePath))
+ WrapOption(constructorFor(optType, path, newTypePath), dataTypeFor(optType))

case t if t <:< localTypeOf[java.lang.Integer] =>
val boxedType = classOf[java.lang.Integer]
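The hunk above is the call-site change: when ScalaReflection builds the deserializer for an Option[T] field, it now also passes the element type (dataTypeFor(optType)) into WrapOption so the wrapped value can be type-checked during analysis. As a hedged usage sketch (not part of the PR; MaybeAge, OptionRoundTrip, and the 1.6-era SQLContext setup are assumptions for illustration), this is the public-API path that exercises the generated WrapOption code:

import org.apache.spark.sql.SQLContext

// Illustrative case class; any Product type with an Option-typed field takes this path.
case class MaybeAge(name: String, age: Option[Int])

object OptionRoundTrip {
  def run(sqlContext: SQLContext): Unit = {
    import sqlContext.implicits._
    // toDS() builds an ExpressionEncoder; collect() runs the generated
    // constructor expressions, including WrapOption for the `age` field.
    val ds = Seq(MaybeAge("a", Some(1)), MaybeAge("b", None)).toDS()
    assert(ds.collect().toSet == Set(MaybeAge("a", Some(1)), MaybeAge("b", None)))
  }
}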
@@ -251,6 +251,7 @@ case class ExpressionEncoder[T](

val plan = Project(Alias(unbound, "")() :: Nil, LocalRelation(schema))
val analyzedPlan = SimpleAnalyzer.execute(plan)
+ SimpleAnalyzer.checkAnalysis(analyzedPlan)
val optimizedPlan = SimplifyCasts(analyzedPlan)

// In order to construct instances of inner classes (for example those declared in a REPL cell),
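The added checkAnalysis call makes the encoder fail during resolution, with a readable AnalysisException, instead of carrying an unresolved plan forward and only blowing up later in code generation. A minimal, self-contained sketch of that fail-fast pattern (toy types, not Spark's Catalyst classes) looks like this:

// Toy plan/expression model: just enough to show "check after analysis, throw early".
sealed trait ToyExpr { def name: String; def resolved: Boolean }
case class Resolved(name: String) extends ToyExpr { val resolved = true }
case class Unresolved(name: String) extends ToyExpr { val resolved = false }

case class ToyPlan(output: Seq[ToyExpr], inputColumns: Seq[String])

class ToyAnalysisException(msg: String) extends Exception(msg)

object ToyAnalyzer {
  // After resolution has been attempted, reject any output that is still unresolved,
  // naming the missing attribute and the columns that were available.
  def checkAnalysis(plan: ToyPlan): Unit =
    plan.output.find(!_.resolved).foreach { e =>
      throw new ToyAnalysisException(
        s"cannot resolve '${e.name}' given input columns ${plan.inputColumns.mkString(", ")}")
    }
}

object ToyAnalyzerDemo extends App {
  ToyAnalyzer.checkAnalysis(ToyPlan(Seq(Resolved("a")), Seq("a", "b")))  // passes silently
  try ToyAnalyzer.checkAnalysis(ToyPlan(Seq(Resolved("a"), Unresolved("c")), Seq("a", "b")))
  catch { case e: ToyAnalysisException => println(e.getMessage) }
  // prints: cannot resolve 'c' given input columns a, b
}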
@@ -23,11 +23,9 @@ import scala.reflect.ClassTag
import org.apache.spark.SparkConf
import org.apache.spark.serializer._
import org.apache.spark.sql.Row
- import org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer
- import org.apache.spark.sql.catalyst.plans.logical.{Project, LocalRelation}
- import org.apache.spark.sql.catalyst.util.GenericArrayData
import org.apache.spark.sql.catalyst.InternalRow
- import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, CodeGenContext}
+ import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenContext, GeneratedExpressionCode}
+ import org.apache.spark.sql.catalyst.util.GenericArrayData
import org.apache.spark.sql.types._

/**
@@ -295,13 +293,17 @@ case class UnwrapOption(
* Converts the result of evaluating `child` into an option, checking both the isNull bit and
* (in the case of reference types) equality with null.
* @param child The expression to evaluate and wrap.
+ * @param optType The type of this option.
*/
- case class WrapOption(child: Expression) extends UnaryExpression {
+ case class WrapOption(child: Expression, optType: DataType)
+ extends UnaryExpression with ExpectsInputTypes {

override def dataType: DataType = ObjectType(classOf[Option[_]])

override def nullable: Boolean = true

+ override def inputTypes: Seq[AbstractDataType] = optType :: Nil

override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")

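What the new optType parameter and the ExpectsInputTypes mix-in buy is an analysis-time check that the child expression actually produces the declared element type; in Spark that check comes from the trait's checkInputDataTypes(). A self-contained sketch of the pattern (toy classes named Toy*, not Spark's, with checkInputTypes() standing in for the real check) follows:

// Toy data types and expressions; stand-ins for Catalyst's DataType/Expression.
sealed trait ToyDataType
case object ToyIntegerType extends ToyDataType
case class ToyObjectType(cls: Class[_]) extends ToyDataType

trait ToyExpression { def dataType: ToyDataType }
case class ToyLiteral(value: Any, dataType: ToyDataType) extends ToyExpression

// Analogue of ExpectsInputTypes: declare expected input types, check them generically.
trait ToyExpectsInputTypes extends ToyExpression {
  def children: Seq[ToyExpression]
  def inputTypes: Seq[ToyDataType]
  def checkInputTypes(): Either[String, Unit] =
    children.zip(inputTypes).find { case (c, t) => c.dataType != t } match {
      case Some((c, t)) => Left(s"expected $t but child produces ${c.dataType}")
      case None         => Right(())
    }
}

// Analogue of the revised WrapOption: it remembers the option's element type and
// declares it as the single expected input type.
case class ToyWrapOption(child: ToyExpression, optType: ToyDataType)
  extends ToyExpectsInputTypes {
  val dataType: ToyDataType = ToyObjectType(classOf[Option[_]])
  val children: Seq[ToyExpression] = child :: Nil
  val inputTypes: Seq[ToyDataType] = optType :: Nil
}

object ToyWrapOptionDemo extends App {
  // Child type matches the declared element type: the check passes.
  println(ToyWrapOption(ToyLiteral(1, ToyIntegerType), ToyIntegerType).checkInputTypes())
  // Child produces a String object where an Int was declared: the mismatch is reported.
  println(ToyWrapOption(ToyLiteral("x", ToyObjectType(classOf[String])), ToyIntegerType).checkInputTypes())
}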
sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala (9 additions, 1 deletion)
@@ -481,10 +481,18 @@ class DatasetSuite extends QueryTest with SharedSQLContext {
val ds = Seq(2 -> 2.toByte, 3 -> 3.toByte).toDF("a", "b").as[ClassData]
assert(ds.collect().toSeq == Seq(ClassData("2", 2), ClassData("3", 3)))
}
- }

test("verify mismatching field names fail with a good error") {
val ds = Seq(ClassData("a", 1)).toDS()
val e = intercept[AnalysisException] {
ds.as[ClassData2].collect()
}
assert(e.getMessage.contains("cannot resolve 'c' given input columns a, b"), e.getMessage)
}
}

case class ClassData(a: String, b: Int)
+ case class ClassData2(c: String, d: Int)
case class ClassNullableData(a: String, b: Integer)

/**