From b27c4c1054fa563497bc1afe3266154b95d34e8f Mon Sep 17 00:00:00 2001 From: Hyukjin Kwon Date: Tue, 8 Oct 2024 08:51:35 +0900 Subject: [PATCH 1/3] Parse unresolved identifier to keep the behavior the same --- .../scala/org/apache/spark/sql/Dataset.scala | 7 ++++++- .../sql/internal/columnNodeSupport.scala | 4 ++-- .../apache/spark/sql/ColumnTestSuite.scala | 4 ++-- .../ColumnNodeToProtoConverterSuite.scala | 2 +- .../scala/org/apache/spark/sql/Column.scala | 3 ++- .../spark/sql/internal/columnNodes.scala | 13 +++++++++--- .../sql/internal/columnNodeSupport.scala | 20 ++++++++++++++----- .../spark/sql/internal/ColumnNodeSuite.scala | 2 +- ...ColumnNodeToExpressionConverterSuite.scala | 2 +- 9 files changed, 40 insertions(+), 17 deletions(-) diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala index a368da2aaee60..47ec08a2d2e6d 100644 --- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala +++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala @@ -32,6 +32,7 @@ import org.apache.spark.sql.catalyst.ScalaReflection import org.apache.spark.sql.catalyst.encoders.AgnosticEncoder import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders._ import org.apache.spark.sql.catalyst.expressions.OrderUtils +import org.apache.spark.sql.catalyst.util.AttributeNameParser import org.apache.spark.sql.connect.ConnectConversions._ import org.apache.spark.sql.connect.client.SparkResult import org.apache.spark.sql.connect.common.{DataTypeProtoConverter, StorageLevelProtoConverter} @@ -413,7 +414,11 @@ class Dataset[T] private[sql] ( /** @inheritdoc */ def metadataColumn(colName: String): Column = { - Column(UnresolvedAttribute(colName, getPlanId, isMetadataColumn = true)) + Column( + UnresolvedAttribute( + AttributeNameParser.parseAttributeName(colName), + getPlanId, + isMetadataColumn = true)) } /** @inheritdoc 
*/ diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala index 45fa449b58ed7..3f75d717ff3f4 100644 --- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala +++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala @@ -52,9 +52,9 @@ object ColumnNodeToProtoConverter extends (ColumnNode => proto.Expression) { case Literal(value, Some(dataType), _) => builder.setLiteral(toLiteralProtoBuilder(value, dataType)) - case UnresolvedAttribute(unparsedIdentifier, planId, isMetadataColumn, _) => + case u @ UnresolvedAttribute(unparsedIdentifier, planId, isMetadataColumn, _) => val b = builder.getUnresolvedAttributeBuilder - .setUnparsedIdentifier(unparsedIdentifier) + .setUnparsedIdentifier(u.sql) if (isMetadataColumn) { // We only set this field when it is needed. If we would always set it, // too many of the verbatims we use for testing would have to be regenerated. 
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ColumnTestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ColumnTestSuite.scala index c37100b729029..86c7a20136851 100644 --- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ColumnTestSuite.scala +++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ColumnTestSuite.scala @@ -173,8 +173,8 @@ class ColumnTestSuite extends ConnectFunSuite { assert(explain1 != explain2) assert(explain1.strip() == "+(a, b)") assert(explain2.contains("UnresolvedFunction(+")) - assert(explain2.contains("UnresolvedAttribute(a")) - assert(explain2.contains("UnresolvedAttribute(b")) + assert(explain2.contains("UnresolvedAttribute(List(a")) + assert(explain2.contains("UnresolvedAttribute(List(b")) } private def testColName(dataType: DataType, f: ColumnName => StructField): Unit = { diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala index 2efd396735191..cace6994b8feb 100644 --- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala +++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala @@ -93,7 +93,7 @@ class ColumnNodeToProtoConverterSuite extends ConnectFunSuite { test("attribute") { testConversion(UnresolvedAttribute("x"), attribute("x")) testConversion( - UnresolvedAttribute("y", Option(44L), isMetadataColumn = true), + UnresolvedAttribute("y" :: Nil, Option(44L), isMetadataColumn = true), expr( _.getUnresolvedAttributeBuilder .setUnparsedIdentifier("y") diff --git a/sql/api/src/main/scala/org/apache/spark/sql/Column.scala b/sql/api/src/main/scala/org/apache/spark/sql/Column.scala index 31ce44eca1684..4eb3304616f81 100644 --- 
a/sql/api/src/main/scala/org/apache/spark/sql/Column.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/Column.scala @@ -24,6 +24,7 @@ import org.apache.spark.internal.{Logging, MDC} import org.apache.spark.internal.LogKeys.{LEFT_EXPR, RIGHT_EXPR} import org.apache.spark.sql.catalyst.parser.DataTypeParser import org.apache.spark.sql.catalyst.trees.CurrentOrigin.withOrigin +import org.apache.spark.sql.catalyst.util.AttributeNameParser import org.apache.spark.sql.expressions.Window import org.apache.spark.sql.functions.{lit, map} import org.apache.spark.sql.internal.ColumnNode @@ -142,7 +143,7 @@ class Column(val node: ColumnNode) extends Logging { name match { case "*" => internal.UnresolvedStar(None, planId) case _ if name.endsWith(".*") => internal.UnresolvedStar(Option(name), planId) - case _ => internal.UnresolvedAttribute(name, planId) + case _ => internal.UnresolvedAttribute(AttributeNameParser.parseAttributeName(name), planId) } }) diff --git a/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala b/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala index 51b26a1fa2435..83185af81b08a 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala @@ -122,7 +122,7 @@ private[sql] case class Literal( /** * Reference to an attribute produced by one of the underlying DataFrames. * - * @param unparsedIdentifier + * @param nameParts * name of the attribute. * @param planId * id of the plan (Dataframe) that produces the attribute. @@ -130,14 +130,21 @@ private[sql] case class Literal( * whether this is a metadata column. 
*/ private[sql] case class UnresolvedAttribute( - unparsedIdentifier: String, + nameParts: Seq[String], planId: Option[Long] = None, isMetadataColumn: Boolean = false, override val origin: Origin = CurrentOrigin.get) extends ColumnNode { + override private[internal] def normalize(): UnresolvedAttribute = copy(planId = None, origin = NO_ORIGIN) - override def sql: String = unparsedIdentifier + + override def sql: String = nameParts.map(n => if (n.contains(".")) s"`$n`" else n).mkString(".") +} + +private[sql] object UnresolvedAttribute { + // For testing + def apply(singlePart: String): UnresolvedAttribute = UnresolvedAttribute(singlePart :: Nil) } /** diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala index 920c0371292c9..476956e58e8e6 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala @@ -54,8 +54,8 @@ private[sql] trait ColumnNodeToExpressionConverter extends (ColumnNode => Expres case Literal(value, None, _) => expressions.Literal(value) - case UnresolvedAttribute(unparsedIdentifier, planId, isMetadataColumn, _) => - convertUnresolvedAttribute(unparsedIdentifier, planId, isMetadataColumn) + case UnresolvedAttribute(nameParts, planId, isMetadataColumn, _) => + convertUnresolvedAttribute(nameParts, planId, isMetadataColumn) case UnresolvedStar(unparsedTarget, None, _) => val target = unparsedTarget.map { t => @@ -74,7 +74,7 @@ private[sql] trait ColumnNodeToExpressionConverter extends (ColumnNode => Expres analysis.UnresolvedRegex(columnNameRegex, Some(nameParts), conf.caseSensitiveAnalysis) case UnresolvedRegex(unparsedIdentifier, planId, _) => - convertUnresolvedAttribute(unparsedIdentifier, planId, isMetadataColumn = false) + convertUnresolvedRegex(unparsedIdentifier, planId) case UnresolvedFunction(functionName, 
arguments, isDistinct, isUDF, isInternal, _) => val nameParts = if (isUDF) { @@ -223,10 +223,10 @@ private[sql] trait ColumnNodeToExpressionConverter extends (ColumnNode => Expres } private def convertUnresolvedAttribute( - unparsedIdentifier: String, + nameParts: Seq[String], planId: Option[Long], isMetadataColumn: Boolean): analysis.UnresolvedAttribute = { - val attribute = analysis.UnresolvedAttribute.quotedString(unparsedIdentifier) + val attribute = analysis.UnresolvedAttribute(nameParts) if (planId.isDefined) { attribute.setTagValue(LogicalPlan.PLAN_ID_TAG, planId.get) } @@ -235,6 +235,16 @@ private[sql] trait ColumnNodeToExpressionConverter extends (ColumnNode => Expres } attribute } + + private def convertUnresolvedRegex( + unparsedIdentifier: String, + planId: Option[Long]): analysis.UnresolvedAttribute = { + val attribute = analysis.UnresolvedAttribute.quotedString(unparsedIdentifier) + if (planId.isDefined) { + attribute.setTagValue(LogicalPlan.PLAN_ID_TAG, planId.get) + } + attribute + } } private[sql] object ColumnNodeToExpressionConverter extends ColumnNodeToExpressionConverter { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeSuite.scala index 7bf70695a9854..837db7a8fc6f8 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeSuite.scala @@ -151,7 +151,7 @@ class ColumnNodeSuite extends SparkFunSuite { test("normalization") { testNormalization(Literal(1)) testNormalization(UnresolvedStar(Option("a.b"), planId = planId())) - testNormalization(UnresolvedAttribute("x", planId = planId())) + testNormalization(UnresolvedAttribute("x" :: Nil, planId = planId())) testNormalization(UnresolvedRegex(".*", planId = planId())) testNormalization(SqlExpression("1 + 1")) testNormalization(attribute("a")) diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToExpressionConverterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToExpressionConverterSuite.scala index 76fcdfc380950..f519c4a2afd21 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToExpressionConverterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToExpressionConverterSuite.scala @@ -86,7 +86,7 @@ class ColumnNodeToExpressionConverterSuite extends SparkFunSuite { assert(expression1.getTagValue(LogicalPlan.IS_METADATA_COL).isEmpty) val expression2 = testConversion( - UnresolvedAttribute("y", Option(44L), isMetadataColumn = true), + UnresolvedAttribute("y" :: Nil, Option(44L), isMetadataColumn = true), analysis.UnresolvedAttribute("y")) assert(expression2.getTagValue(LogicalPlan.PLAN_ID_TAG).contains(44L)) assert(expression2.getTagValue(LogicalPlan.IS_METADATA_COL).isDefined) From 8e586a50cf7dd2692fea0fd23dc18067776b67eb Mon Sep 17 00:00:00 2001 From: Hyukjin Kwon Date: Tue, 8 Oct 2024 12:07:55 +0900 Subject: [PATCH 2/3] fixup --- .../main/scala/org/apache/spark/sql/Dataset.scala | 7 +------ .../spark/sql/internal/columnNodeSupport.scala | 3 ++- .../internal/ColumnNodeToProtoConverterSuite.scala | 2 +- .../src/main/scala/org/apache/spark/sql/Column.scala | 3 +-- .../org/apache/spark/sql/internal/columnNodes.scala | 12 ++++++++++-- .../apache/spark/sql/internal/ColumnNodeSuite.scala | 2 +- .../ColumnNodeToExpressionConverterSuite.scala | 2 +- 7 files changed, 17 insertions(+), 14 deletions(-) diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala index 47ec08a2d2e6d..a368da2aaee60 100644 --- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala +++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala @@ -32,7 +32,6 @@ import 
org.apache.spark.sql.catalyst.ScalaReflection import org.apache.spark.sql.catalyst.encoders.AgnosticEncoder import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders._ import org.apache.spark.sql.catalyst.expressions.OrderUtils -import org.apache.spark.sql.catalyst.util.AttributeNameParser import org.apache.spark.sql.connect.ConnectConversions._ import org.apache.spark.sql.connect.client.SparkResult import org.apache.spark.sql.connect.common.{DataTypeProtoConverter, StorageLevelProtoConverter} @@ -414,11 +413,7 @@ class Dataset[T] private[sql] ( /** @inheritdoc */ def metadataColumn(colName: String): Column = { - Column( - UnresolvedAttribute( - AttributeNameParser.parseAttributeName(colName), - getPlanId, - isMetadataColumn = true)) + Column(UnresolvedAttribute(colName, getPlanId, isMetadataColumn = true)) } /** @inheritdoc */ diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala index 3f75d717ff3f4..34a8a91a0ddf8 100644 --- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala +++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala @@ -53,8 +53,9 @@ object ColumnNodeToProtoConverter extends (ColumnNode => proto.Expression) { builder.setLiteral(toLiteralProtoBuilder(value, dataType)) case u @ UnresolvedAttribute(unparsedIdentifier, planId, isMetadataColumn, _) => + val escapedName = u.sql val b = builder.getUnresolvedAttributeBuilder - .setUnparsedIdentifier(u.sql) + .setUnparsedIdentifier(escapedName) if (isMetadataColumn) { // We only set this field when it is needed. If we would always set it, // too many of the verbatims we use for testing would have to be regenerated. 
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala index cace6994b8feb..2efd396735191 100644 --- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala +++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala @@ -93,7 +93,7 @@ class ColumnNodeToProtoConverterSuite extends ConnectFunSuite { test("attribute") { testConversion(UnresolvedAttribute("x"), attribute("x")) testConversion( - UnresolvedAttribute("y" :: Nil, Option(44L), isMetadataColumn = true), + UnresolvedAttribute("y", Option(44L), isMetadataColumn = true), expr( _.getUnresolvedAttributeBuilder .setUnparsedIdentifier("y") diff --git a/sql/api/src/main/scala/org/apache/spark/sql/Column.scala b/sql/api/src/main/scala/org/apache/spark/sql/Column.scala index 4eb3304616f81..31ce44eca1684 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/Column.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/Column.scala @@ -24,7 +24,6 @@ import org.apache.spark.internal.{Logging, MDC} import org.apache.spark.internal.LogKeys.{LEFT_EXPR, RIGHT_EXPR} import org.apache.spark.sql.catalyst.parser.DataTypeParser import org.apache.spark.sql.catalyst.trees.CurrentOrigin.withOrigin -import org.apache.spark.sql.catalyst.util.AttributeNameParser import org.apache.spark.sql.expressions.Window import org.apache.spark.sql.functions.{lit, map} import org.apache.spark.sql.internal.ColumnNode @@ -143,7 +142,7 @@ class Column(val node: ColumnNode) extends Logging { name match { case "*" => internal.UnresolvedStar(None, planId) case _ if name.endsWith(".*") => internal.UnresolvedStar(Option(name), planId) - case _ => internal.UnresolvedAttribute(AttributeNameParser.parseAttributeName(name), planId) + case _ => 
internal.UnresolvedAttribute(name, planId) } }) diff --git a/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala b/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala index 83185af81b08a..d829371f66752 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala @@ -21,6 +21,7 @@ import java.util.concurrent.atomic.AtomicLong import ColumnNode._ import org.apache.spark.sql.catalyst.trees.{CurrentOrigin, Origin} +import org.apache.spark.sql.catalyst.util.AttributeNameParser import org.apache.spark.sql.errors.DataTypeErrorsBase import org.apache.spark.sql.types.{DataType, IntegerType, LongType, Metadata} import org.apache.spark.util.SparkClassUtils @@ -143,8 +144,15 @@ private[sql] case class UnresolvedAttribute( } private[sql] object UnresolvedAttribute { - // For testing - def apply(singlePart: String): UnresolvedAttribute = UnresolvedAttribute(singlePart :: Nil) + def apply( + unparsedIdentifier: String, + planId: Option[Long] = None, + isMetadataColumn: Boolean = false, + origin: Origin = CurrentOrigin.get): UnresolvedAttribute = UnresolvedAttribute( + AttributeNameParser.parseAttributeName(unparsedIdentifier), + planId = planId, + isMetadataColumn = isMetadataColumn, + origin = origin) } /** diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeSuite.scala index 837db7a8fc6f8..7bf70695a9854 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeSuite.scala @@ -151,7 +151,7 @@ class ColumnNodeSuite extends SparkFunSuite { test("normalization") { testNormalization(Literal(1)) testNormalization(UnresolvedStar(Option("a.b"), planId = planId())) - testNormalization(UnresolvedAttribute("x" :: Nil, planId = planId())) + 
testNormalization(UnresolvedAttribute("x", planId = planId())) testNormalization(UnresolvedRegex(".*", planId = planId())) testNormalization(SqlExpression("1 + 1")) testNormalization(attribute("a")) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToExpressionConverterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToExpressionConverterSuite.scala index f519c4a2afd21..76fcdfc380950 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToExpressionConverterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToExpressionConverterSuite.scala @@ -86,7 +86,7 @@ class ColumnNodeToExpressionConverterSuite extends SparkFunSuite { assert(expression1.getTagValue(LogicalPlan.IS_METADATA_COL).isEmpty) val expression2 = testConversion( - UnresolvedAttribute("y" :: Nil, Option(44L), isMetadataColumn = true), + UnresolvedAttribute("y", Option(44L), isMetadataColumn = true), analysis.UnresolvedAttribute("y")) assert(expression2.getTagValue(LogicalPlan.PLAN_ID_TAG).contains(44L)) assert(expression2.getTagValue(LogicalPlan.IS_METADATA_COL).isDefined) From 2ad70f4bf6a8890831706e28bf86bdbd89d5fe7a Mon Sep 17 00:00:00 2001 From: Hyukjin Kwon Date: Tue, 8 Oct 2024 14:01:07 +0900 Subject: [PATCH 3/3] fixup --- .../spark/sql/internal/columnNodes.scala | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala b/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala index d829371f66752..979baf12be614 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/internal/columnNodes.scala @@ -146,13 +146,25 @@ private[sql] case class UnresolvedAttribute( private[sql] object UnresolvedAttribute { def apply( unparsedIdentifier: String, - planId: Option[Long] = None, - isMetadataColumn: Boolean = 
false, - origin: Origin = CurrentOrigin.get): UnresolvedAttribute = UnresolvedAttribute( + planId: Option[Long], + isMetadataColumn: Boolean, + origin: Origin): UnresolvedAttribute = UnresolvedAttribute( AttributeNameParser.parseAttributeName(unparsedIdentifier), planId = planId, isMetadataColumn = isMetadataColumn, origin = origin) + + def apply( + unparsedIdentifier: String, + planId: Option[Long], + isMetadataColumn: Boolean): UnresolvedAttribute = + apply(unparsedIdentifier, planId, isMetadataColumn, CurrentOrigin.get) + + def apply(unparsedIdentifier: String, planId: Option[Long]): UnresolvedAttribute = + apply(unparsedIdentifier, planId, false, CurrentOrigin.get) + + def apply(unparsedIdentifier: String): UnresolvedAttribute = + apply(unparsedIdentifier, None, false, CurrentOrigin.get) } /**