diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index f0017abb5f26..7e6aaa86f25c 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -497,10 +497,6 @@ object Trees { extends TermTree[T] { type ThisTree[-T >: Untyped] = If[T] } - class InlineIf[T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T]) - extends If(cond, thenp, elsep) { - override def toString = s"InlineIf($cond, $thenp, $elsep)" - } /** A closure with an environment and a reference to a method. * @param env The captured parameters of the closure @@ -521,10 +517,6 @@ object Trees { extends TermTree[T] { type ThisTree[-T >: Untyped] = Match[T] } - class InlineMatch[T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]]) - extends Match(selector, cases) { - override def toString = s"InlineMatch($selector, $cases)" - } /** case pat if guard => body; only appears as child of a Match */ case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T]) @@ -910,10 +902,8 @@ object Trees { type Assign = Trees.Assign[T] type Block = Trees.Block[T] type If = Trees.If[T] - type InlineIf = Trees.InlineIf[T] type Closure = Trees.Closure[T] type Match = Trees.Match[T] - type InlineMatch = Trees.InlineMatch[T] type CaseDef = Trees.CaseDef[T] type Labeled = Trees.Labeled[T] type Return = Trees.Return[T] @@ -959,11 +949,6 @@ object Trees { case ys => Thicket(ys) } - /** Extractor for the synthetic scrutinee tree of an implicit match */ - object ImplicitScrutinee { - def apply() = Ident(nme.IMPLICITkw) - def unapply(id: Ident): Boolean = id.name == nme.IMPLICITkw && !id.isInstanceOf[BackquotedIdent] - } // ----- Helper classes for copying, transforming, accumulating ----------------- val cpy: TreeCopier @@ -1045,9 +1030,6 @@ object Trees { case _ => finalize(tree, untpd.Block(stats, expr)) } def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(implicit ctx: Context): If = tree match { - case tree: InlineIf => - if ((cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep)) tree - else finalize(tree, untpd.InlineIf(cond, thenp, elsep)) case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree case _ => finalize(tree, untpd.If(cond, thenp, elsep)) } @@ -1056,9 +1038,6 @@ object Trees { case _ => finalize(tree, untpd.Closure(env, meth, tpt)) } def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(implicit ctx: Context): Match = tree match { - case tree: InlineMatch => - if ((selector eq tree.selector) && (cases eq tree.cases)) tree - else finalize(tree, untpd.InlineMatch(selector, cases)) case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree case _ => finalize(tree, untpd.Match(selector, cases)) } @@ -1169,10 +1148,6 @@ object Trees { case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree case _ => finalize(tree, untpd.Annotated(arg, annot)) } - def UntypedSplice(tree: Tree)(splice: untpd.Tree) = tree match { - case tree: tpd.UntypedSplice if tree.splice `eq` splice => tree - case _ => finalize(tree, tpd.UntypedSplice(splice)) - } def Thicket(tree: Tree)(trees: List[Tree]): Thicket = tree match { case tree: Thicket if trees eq tree.trees => tree case _ => finalize(tree, untpd.Thicket(trees)) @@ -1329,21 +1304,9 @@ object Trees { def transformSub[Tr <: Tree](trees: List[Tr])(implicit ctx: Context): List[Tr] = transform(trees).asInstanceOf[List[Tr]] - protected def 
transformMoreCases(tree: Tree)(implicit ctx: Context): Tree = tree match { - case tpd.UntypedSplice(usplice) => - // For a typed tree map: homomorphism on the untyped part with - // recursive mapping of typed splices. - // The case is overridden in UntypedTreeMap.## - val untpdMap = new untpd.UntypedTreeMap { - override def transform(tree: untpd.Tree)(implicit ctx: Context): untpd.Tree = tree match { - case untpd.TypedSplice(tsplice) => - untpd.cpy.TypedSplice(tree)(self.transform(tsplice).asInstanceOf[tpd.Tree]) - // the cast is safe, since the UntypedSplice case is overridden in UntypedTreeMap. - case _ => super.transform(tree) - } - } - cpy.UntypedSplice(tree)(untpdMap.transform(usplice)) - case _ if ctx.reporter.errorsReported => tree + protected def transformMoreCases(tree: Tree)(implicit ctx: Context): Tree = { + assert(ctx.reporter.errorsReported) + tree } } @@ -1453,23 +1416,13 @@ object Trees { } } - def foldMoreCases(x: X, tree: Tree)(implicit ctx: Context): X = tree match { - case tpd.UntypedSplice(usplice) => - // For a typed tree accumulator: skip the untyped part and fold all typed splices. - // The case is overridden in UntypedTreeAccumulator. - val untpdAcc = new untpd.UntypedTreeAccumulator[X] { - override def apply(x: X, tree: untpd.Tree)(implicit ctx: Context): X = tree match { - case untpd.TypedSplice(tsplice) => self(x, tsplice) - case _ => foldOver(x, tree) - } - } - untpdAcc(x, usplice) - case _ if ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive) => + def foldMoreCases(x: X, tree: Tree)(implicit ctx: Context): X = { + assert(ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive)) // In interactive mode, errors might come from previous runs. // In case of errors it may be that typed trees point to untyped ones. // The IDE can still traverse inside such trees, either in the run where errors // are reported, or in subsequent ones. 
- x + x } } diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 1c69062a1430..ffa501ce7181 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -21,8 +21,6 @@ import scala.io.Codec /** Some creators for typed trees */ object tpd extends Trees.Instance[Type] with TypedTreeInfo { - case class UntypedSplice(splice: untpd.Tree) extends Tree - private def ta(implicit ctx: Context) = ctx.typeAssigner def Ident(tp: NamedType)(implicit ctx: Context): Ident = @@ -722,7 +720,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { new TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree) } } - loop(from, Nil, to :: Nil) + if (from == to) tree else loop(from, Nil, to :: Nil) } /** After phase `trans`, set the owner of every definition in this tree that was formerly @@ -916,6 +914,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def outerSelect(levels: Int, tp: Type)(implicit ctx: Context): Tree = untpd.Select(tree, OuterSelectName(EmptyTermName, levels)).withType(SkolemType(tp)) + def underlyingArgument(implicit ctx: Context): Tree = mapToUnderlying.transform(tree) + // --- Higher order traversal methods ------------------------------- /** Apply `f` to each subtree of this tree */ @@ -942,26 +942,38 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } } + /** Map Inlined nodes and InlineProxy references to underlying arguments */ + object mapToUnderlying extends TreeMap { + override def transform(tree: Tree)(implicit ctx: Context): Tree = tree match { + case tree: Ident if tree.symbol.is(InlineProxy) => + tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs.underlyingArgument + case Inlined(_, _, arg) => + arg.underlyingArgument + case tree => + super.transform(tree) + } + } + implicit class ListOfTreeDecorator(val xs: List[tpd.Tree]) extends AnyVal { def tpes: List[Type] = xs map (_.tpe) } /** A trait for loaders that compute trees. Currently implemented just by DottyUnpickler. */ trait TreeProvider { - protected def computeTrees(implicit ctx: Context): List[Tree] + protected def computeRootTrees(implicit ctx: Context): List[Tree] private[this] var myTrees: List[Tree] = null /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ - def trees(implicit ctx: Context): List[Tree] = + def rootTrees(implicit ctx: Context): List[Tree] = if (ctx.settings.YretainTrees.value) { - if (myTrees == null) myTrees = computeTrees + if (myTrees == null) myTrees = computeRootTrees myTrees - } else computeTrees + } else computeRootTrees /** Get first tree defined by this provider, or EmptyTree if none exists */ def tree(implicit ctx: Context): Tree = - trees.headOption.getOrElse(EmptyTree) + rootTrees.headOption.getOrElse(EmptyTree) /** Is it possible that the tree to load contains a definition of or reference to `id`? 
*/ def mightContain(id: String)(implicit ctx: Context) = true diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 53e637760438..e63580b4fd9f 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -272,10 +272,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Assign(lhs: Tree, rhs: Tree): Assign = new Assign(lhs, rhs) def Block(stats: List[Tree], expr: Tree): Block = new Block(stats, expr) def If(cond: Tree, thenp: Tree, elsep: Tree): If = new If(cond, thenp, elsep) - def InlineIf(cond: Tree, thenp: Tree, elsep: Tree): If = new InlineIf(cond, thenp, elsep) def Closure(env: List[Tree], meth: Tree, tpt: Tree): Closure = new Closure(env, meth, tpt) def Match(selector: Tree, cases: List[CaseDef]): Match = new Match(selector, cases) - def InlineMatch(selector: Tree, cases: List[CaseDef]): Match = new InlineMatch(selector, cases) def CaseDef(pat: Tree, guard: Tree, body: Tree): CaseDef = new CaseDef(pat, guard, body) def Labeled(bind: Bind, expr: Tree): Labeled = new Labeled(bind, expr) def Return(expr: Tree, from: Tree): Return = new Return(expr, from) @@ -544,8 +542,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.ContextBounds(tree)(transformSub(bounds), transform(cxBounds)) case PatDef(mods, pats, tpt, rhs) => cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) - case tpd.UntypedSplice(splice) => - cpy.UntypedSplice(tree)(transform(splice)) case TypedSplice(_) => tree case _ => @@ -595,8 +591,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case TypedSplice(splice) => this(x, splice) - case tpd.UntypedSplice(splice) => - this(x, splice) case _ => super.foldMoreCases(x, tree) } diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 682eeb552507..87a168f26580 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -363,6 +363,13 @@ trait ConstraintHandling { final def canConstrain(param: TypeParamRef): Boolean = (!frozenConstraint || (caseLambda `eq` param.binder)) && constraint.contains(param) + /** Is `param` assumed to be a sub- and super-type of any other type? + * This holds if `TypeVarsMissContext` is set unless `param` is a part + * of a MatchType that is currently normalized. + */ + final def assumedTrue(param: TypeParamRef): Boolean = + ctx.mode.is(Mode.TypevarsMissContext) && (caseLambda `ne` param.binder) + /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. * `bound` is assumed to be in normalized form, as specified in `firstTry` and * `secondTry` of `TypeComparer`. 
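
For illustration only: a minimal standalone sketch (a toy tree ADT, not the compiler's API) of what the new `mapToUnderlying` / `underlyingArgument` machinery in tpd.scala above does — references to `InlineProxy` bindings and `Inlined` wrappers are peeled off recursively until the original argument tree is reached. In the real code the proxy's right-hand side is obtained via `tree.symbol.defTree`, which is why `defTree` is added to `Symbol` later in this patch; every name below is an assumption made for the sketch.

object UnderlyingArgumentSketch {
  sealed trait Tree
  case class Literal(value: Any) extends Tree
  case class Apply(fun: Tree, args: List[Tree]) extends Tree
  // Stands for an Ident whose symbol has the InlineProxy flag; `rhs` is the proxy's bound value.
  case class InlineProxyRef(rhs: Tree) extends Tree
  // Stands for an Inlined(call, bindings, expansion) node; only the expansion matters here.
  case class Inlined(expansion: Tree) extends Tree

  // Peel off Inlined wrappers and follow inline proxies to the underlying argument.
  def underlyingArgument(tree: Tree): Tree = tree match {
    case InlineProxyRef(rhs) => underlyingArgument(rhs)
    case Inlined(expansion)  => underlyingArgument(expansion)
    case _                   => tree
  }

  def main(args: Array[String]): Unit = {
    val arg = Inlined(InlineProxyRef(Literal(42)))
    assert(underlyingArgument(arg) == Literal(42))
  }
}
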
In particular, it should not be an alias type, diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index d25649bf0c12..d43611c53f52 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -342,10 +342,6 @@ class Definitions { def NullType = NullClass.typeRef lazy val RuntimeNullModuleRef = ctx.requiredModuleRef("scala.runtime.Null") - lazy val ImplicitScrutineeTypeSym = - newSymbol(ScalaPackageClass, tpnme.IMPLICITkw, EmptyFlags, TypeBounds.empty).entered - def ImplicitScrutineeTypeRef: TypeRef = ImplicitScrutineeTypeSym.typeRef - lazy val ScalaPredefModuleRef = ctx.requiredModuleRef("scala.Predef") def ScalaPredefModule(implicit ctx: Context) = ScalaPredefModuleRef.symbol @@ -939,7 +935,8 @@ class Definitions { def scalaClassName(ref: Type)(implicit ctx: Context): TypeName = scalaClassName(ref.classSymbol) private def isVarArityClass(cls: Symbol, prefix: String) = - scalaClassName(cls).testSimple(name => + cls.isClass && cls.owner.eq(ScalaPackageClass) && + cls.name.testSimple(name => name.startsWith(prefix) && name.length > prefix.length && name.drop(prefix.length).forall(_.isDigit)) @@ -1159,6 +1156,14 @@ class Definitions { def isAssuredNoInits(sym: Symbol) = (sym `eq` SomeClass) || isTupleClass(sym) + /** If `cls` is Tuple1..Tuple22, add the corresponding *: type as last parent to `parents` */ + def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = { + def syntheticParent(tparams: List[TypeSymbol]): Type = + if (tparams.isEmpty) TupleTypeRef + else (tparams :\ (UnitType: Type)) ((tparam, tail) => PairType.appliedTo(tparam.typeRef, tail)) + if (isTupleClass(cls) || cls == UnitClass) parents :+ syntheticParent(tparams) else parents + } + // ----- primitive value class machinery ------------------------------------------ /** This class would also be obviated by the implicit function type design */ @@ -1254,33 +1259,12 @@ class Definitions { /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ lazy val syntheticCoreMethods = - AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod, ImplicitScrutineeTypeSym) + AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod) lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet private[this] var isInitialized = false - /** Add a `Tuple` as a parent to `Unit`. 
- * Add the right `*:` instance as a parent to Tuple1..Tuple22 - */ - def fixTupleCompleter(cls: ClassSymbol): Unit = cls.infoOrCompleter match { - case completer: LazyType => - cls.info = new LazyType { - def syntheticParent(tparams: List[TypeSymbol]): Type = - if (tparams.isEmpty) TupleTypeRef - else (tparams :\ (UnitType: Type)) ((tparam, tail) => PairType.appliedTo(tparam.typeRef, tail)) - override def complete(denot: SymDenotation)(implicit ctx: Context) = { - completer.complete(denot) - denot.info match { - case info: ClassInfo => - denot.info = info.derivedClassInfo( - classParents = info.classParents :+ syntheticParent(cls.typeParams)) - } - } - } - case _ => - } - def init()(implicit ctx: Context) = { this.ctx = ctx if (!isInitialized) { @@ -1298,10 +1282,6 @@ class Definitions { // force initialization of every symbol that is synthesized or hijacked by the compiler val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() - fixTupleCompleter(UnitClass) - for (i <- 1 to MaxTupleArity) - fixTupleCompleter(TupleType(i).symbol.asClass) - isInitialized = true } } diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index a3379a9c06d6..2cdceeec4e01 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -351,6 +351,9 @@ object Flags { /** A bridge method. Set by Erasure */ final val Bridge = termFlag(34, "") + /** A proxy for an argument to an inline method */ + final val InlineProxy = termFlag(35, "") + /** Symbol is a method which should be marked ACC_SYNCHRONIZED */ final val Synchronized = termFlag(36, "") @@ -545,6 +548,9 @@ object Flags { /** Either method or lazy or deferred */ final val MethodOrLazyOrDeferred = Method | Lazy | Deferred + /** An inline method or inline argument proxy */ + final val InlineOrProxy = Inline | InlineProxy + /** Assumed to be pure */ final val StableOrErased = Stable | Erased diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index 465e0d251203..9b366d22b804 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -12,7 +12,7 @@ trait Substituters { this: Context => case tp: BoundType => if (tp.binder eq from) tp.copyBoundType(to.asInstanceOf[tp.BT]) else tp case tp: NamedType => - if (tp.currentSymbol.isStatic || (tp.prefix `eq` NoPrefix)) tp + if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(subst(tp.prefix, from, to, theMap)) case _: ThisType => tp @@ -26,7 +26,7 @@ trait Substituters { this: Context => case tp: NamedType => val sym = tp.symbol if (sym eq from) return to - if (sym.isStatic && !from.isStatic || (tp.prefix `eq` NoPrefix)) tp + if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(subst1(tp.prefix, from, to, theMap)) case _: ThisType | _: BoundType => tp @@ -42,7 +42,7 @@ trait Substituters { this: Context => val sym = tp.symbol if (sym eq from1) return to1 if (sym eq from2) return to2 - if (sym.isStatic && !from1.isStatic && !from2.isStatic || (tp.prefix `eq` NoPrefix)) tp + if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(subst2(tp.prefix, from1, to1, from2, to2, theMap)) case _: ThisType | _: BoundType => tp @@ -63,7 +63,7 @@ trait Substituters { this: Context => fs = fs.tail ts = ts.tail } - if (sym.isStatic && !existsStatic(from) || (tp.prefix `eq` NoPrefix)) tp + if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(subst(tp.prefix, from, 
to, theMap)) case _: ThisType | _: BoundType => tp @@ -85,7 +85,7 @@ trait Substituters { this: Context => fs = fs.tail ts = ts.tail } - if (sym.isStatic && !existsStatic(from) || (tp.prefix `eq` NoPrefix)) tp + if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(substSym(tp.prefix, from, to, theMap)) case tp: ThisType => val sym = tp.cls @@ -123,7 +123,7 @@ trait Substituters { this: Context => case tp @ RecThis(binder) => if (binder eq from) to else tp case tp: NamedType => - if (tp.currentSymbol.isStatic || (tp.prefix `eq` NoPrefix)) tp + if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(substRecThis(tp.prefix, from, to, theMap)) case _: ThisType | _: BoundType => tp @@ -137,7 +137,7 @@ trait Substituters { this: Context => case tp: BoundType => if (tp == from) to else tp case tp: NamedType => - if (tp.currentSymbol.isStatic || (tp.prefix `eq` NoPrefix)) tp + if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(substParam(tp.prefix, from, to, theMap)) case _: ThisType => tp @@ -151,7 +151,7 @@ trait Substituters { this: Context => case tp: ParamRef => if (tp.binder == from) to(tp.paramNum) else tp case tp: NamedType => - if (tp.currentSymbol.isStatic || (tp.prefix `eq` NoPrefix)) tp + if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(substParams(tp.prefix, from, to, theMap)) case _: ThisType => tp @@ -160,11 +160,6 @@ .mapOver(tp) } - private def existsStatic(syms: List[Symbol]): Boolean = syms match { - case sym :: syms1 => sym.isStatic || existsStatic(syms1) - case nil => false - } - final class SubstBindingMap(from: BindingType, to: BindingType) extends DeepTypeMap { def apply(tp: Type) = subst(tp, from, to, this) } diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 75e53a0c4c43..f0aed548d984 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -382,8 +382,8 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader { if (mayLoadTreesFromTasty) { result match { case Some(unpickler: tasty.DottyUnpickler) => - classRoot.classSymbol.treeOrProvider = unpickler - moduleRoot.classSymbol.treeOrProvider = unpickler + classRoot.classSymbol.rootTreeOrProvider = unpickler + moduleRoot.classSymbol.rootTreeOrProvider = unpickler case _ => } } diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 6a9a2515188b..9bdfa4e5236d 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -431,6 +431,25 @@ object Symbols { myCoord = c } + private[this] var myDefTree: Tree = null + + /** The tree defining the symbol at pickler time, EmptyTree if none was retained */ + def defTree: Tree = + if (myDefTree == null) tpd.EmptyTree else myDefTree + + /** Set defining tree if this symbol retains its definition tree */ + def defTree_=(tree: Tree)(implicit ctx: Context) = + if (retainsDefTree) myDefTree = tree + + /** Does this symbol retain its definition tree? * A good policy for this needs to balance costs and benefits, where * costs are mainly memory leaks, in particular across runs.
+ */ + def retainsDefTree(implicit ctx: Context): Boolean = + ctx.settings.YretainTrees.value || + denot.owner.isTerm || // no risk of leaking memory after a run for these + denot.is(InlineOrProxy) // need to keep inline info + /** The last denotation of this symbol */ private[this] var lastDenot: SymDenotation = _ private[this] var checkedPeriod: Period = Nowhere @@ -624,7 +643,7 @@ object Symbols { * the implicit conversion `sourcePos` will return the wrong result, careful! * TODO: Consider changing this method return type to `SourcePosition`. */ - def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition + final def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition // ParamInfo types and methods def isTypeParam(implicit ctx: Context) = denot.is(TypeParam) @@ -672,13 +691,13 @@ object Symbols { * Returns the TypeDef tree (possibly wrapped inside PackageDefs) for this class, otherwise EmptyTree. * This will force the info of the class. */ - def tree(implicit ctx: Context): Tree = treeContaining("") + def rootTree(implicit ctx: Context): Tree = rootTreeContaining("") /** Same as `tree` but load tree only if `id == ""` or the tree might contain `id`. * For Tasty trees this means consulting whether the name table defines `id`. * For already loaded trees, we maintain the referenced ids in an attachment. */ - def treeContaining(id: String)(implicit ctx: Context): Tree = { + def rootTreeContaining(id: String)(implicit ctx: Context): Tree = { denot.infoOrCompleter match { case _: NoCompleter => case _ => denot.ensureCompleted() @@ -696,9 +715,9 @@ object Symbols { } } - def treeOrProvider: TreeOrProvider = myTree + def rootTreeOrProvider: TreeOrProvider = myTree - private[dotc] def treeOrProvider_=(t: TreeOrProvider)(implicit ctx: Context): Unit = + private[dotc] def rootTreeOrProvider_=(t: TreeOrProvider)(implicit ctx: Context): Unit = myTree = t private def mightContain(tree: Tree, id: String)(implicit ctx: Context): Boolean = { diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index d3c73ef77530..38d164301cd2 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -108,6 +108,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { protected def gadtBounds(sym: Symbol)(implicit ctx: Context) = ctx.gadt.bounds(sym) protected def gadtSetBounds(sym: Symbol, b: TypeBounds) = ctx.gadt.setBounds(sym, b) + protected def typeVarInstance(tvar: TypeVar)(implicit ctx: Context) = tvar.underlying + // Subtype testing `<:<` def topLevelSubType(tp1: Type, tp2: Type): Boolean = { @@ -233,7 +235,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case tp2: BoundType => tp2 == tp1 || secondTry case tp2: TypeVar => - recur(tp1, tp2.underlying) + recur(tp1, typeVarInstance(tp2)) case tp2: WildcardType => def compareWild = tp2.optBounds match { case TypeBounds(_, hi) => recur(tp1, hi) @@ -325,7 +327,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { true } def compareTypeParamRef = - ctx.mode.is(Mode.TypevarsMissContext) || + assumedTrue(tp1) || isSubTypeWhenFrozen(bounds(tp1).hi, tp2) || { if (canConstrain(tp1) && !approx.high) addConstraint(tp1, tp2, fromBelow = false) && flagNothingBound @@ -348,7 +350,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case _ => thirdTry } case tp1: TypeVar => - recur(tp1.underlying, tp2) + recur(typeVarInstance(tp1), tp2) 
case tp1: WildcardType => def compareWild = tp1.optBounds match { case bounds: TypeBounds => recur(bounds.lo, tp2) @@ -428,7 +430,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { thirdTryNamed(tp2) case tp2: TypeParamRef => def compareTypeParamRef = - (ctx.mode is Mode.TypevarsMissContext) || { + assumedTrue(tp2) || { val alwaysTrue = // The following condition is carefully formulated to catch all cases // where the subtype relation is true without needing to add a constraint @@ -787,9 +789,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { tl => tparams1.map(tparam => tl.integrate(tparams, tparam.paramInfo).bounds), tl => tp1base.tycon.appliedTo(args1.take(lengthDiff) ++ tparams1.indices.toList.map(tl.paramRefs(_)))) - (ctx.mode.is(Mode.TypevarsMissContext) || - tryInstantiate(tycon2, tycon1.ensureLambdaSub)) && - recur(tp1, tycon1.appliedTo(args2)) + (assumedTrue(tycon2) || tryInstantiate(tycon2, tycon1.ensureLambdaSub)) && + recur(tp1, tycon1.appliedTo(args2)) } } case _ => false @@ -1829,6 +1830,11 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { super.gadtSetBounds(sym, b) } + override def typeVarInstance(tvar: TypeVar)(implicit ctx: Context) = { + footprint += tvar + super.typeVarInstance(tvar) + } + def matchCase(scrut: Type, cas: Type, instantiate: Boolean)(implicit ctx: Context): Type = { def paramInstances = new TypeAccumulator[Array[Type]] { diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index d1d1f6df7314..e810b940e2e5 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -301,8 +301,12 @@ object Types { } /** Does this type occur as a part of type `that`? */ - final def occursIn(that: Type)(implicit ctx: Context): Boolean = - that existsPart (this == _) + def occursIn(that: Type)(implicit ctx: Context): Boolean = + that.existsPart(this == _) + + /** Does this type not refer to TypeParamRefs or uninstantiated TypeVars? */ + final def isGround(implicit ctx: Context): Boolean = + (new isGroundAccumulator).apply(true, this) /** Is this a type of a repeated parameter? 
*/ def isRepeatedParam(implicit ctx: Context): Boolean = @@ -551,8 +555,12 @@ object Types { } case tp: AppliedType => tp.tycon match { - case tc: TypeRef if tc.symbol.isClass => - go(tc) + case tc: TypeRef => + if (tc.symbol.isClass) go(tc) + else { + val normed = tp.tryNormalize + go(if (normed.exists) normed else tp.superType) + } case tc: HKTypeLambda => goApplied(tp, tc) case _ => @@ -568,6 +576,9 @@ object Types { goParam(tp) case tp: SuperType => goSuper(tp) + case tp: MatchType => + val normed = tp.tryNormalize + go(if (normed.exists) normed else tp.underlying) case tp: TypeProxy => go(tp.underlying) case tp: ClassInfo => @@ -3264,6 +3275,17 @@ object Types { private[this] var cachedSuper: Type = _ private[this] var myStableHash: Byte = 0 + private[this] var isGroundKnown: Boolean = false + private[this] var isGroundCache: Boolean = _ + + def isGround(acc: TypeAccumulator[Boolean])(implicit ctx: Context): Boolean = { + if (!isGroundKnown) { + isGroundCache = acc.foldOver(true, this) + isGroundKnown = true + } + isGroundCache + } + override def underlying(implicit ctx: Context): Type = tycon override def superType(implicit ctx: Context): Type = { @@ -3283,14 +3305,14 @@ object Types { case tycon: TypeRef => def tryMatchAlias = tycon.info match { case MatchAlias(alias) => - trace("normalize $this", typr, show = true) { + trace(i"normalize $this", typr, show = true) { alias.applyIfParameterized(args).tryNormalize } case _ => NoType } if (defn.isTypelevel_S(tycon.symbol) && args.length == 1) { - trace("normalize S $this", typr, show = true) { + trace(i"normalize S $this", typr, show = true) { args.head.normalized match { case ConstantType(Constant(n: Int)) => ConstantType(Constant(n + 1)) case none => tryMatchAlias @@ -3407,6 +3429,11 @@ object Types { def kindString = "Type" def copyBoundType(bt: BT) = bt.paramRefs(paramNum) + /** Optimized version of occursIn, avoid quadratic blowup when solving + * constraints over large ground types. + */ + override def occursIn(that: Type)(implicit ctx: Context) = !that.isGround && super.occursIn(that) + /** Looking only at the structure of `bound`, is one of the following true? 
* - fromBelow and param <:< bound * - !fromBelow and param >:> bound @@ -3591,7 +3618,7 @@ object Types { def underlying(implicit ctx: Context): Type = bound private[this] var myReduced: Type = null - private[this] var reductionContext: mutable.Map[Type, TypeBounds] = null + private[this] var reductionContext: mutable.Map[Type, Type] = null override def tryNormalize(implicit ctx: Context): Type = reduced.normalized @@ -3627,30 +3654,33 @@ object Types { } } - def isRelevant(tp: Type) = tp match { - case tp: TypeParamRef => ctx.typerState.constraint.entry(tp).exists + def isBounded(tp: Type) = tp match { + case tp: TypeParamRef => case tp: TypeRef => ctx.gadt.bounds.contains(tp.symbol) } - def contextBounds(tp: Type): TypeBounds = tp match { - case tp: TypeParamRef => ctx.typerState.constraint.fullBounds(tp) - case tp: TypeRef => ctx.gadt.bounds(tp.symbol) + def contextInfo(tp: Type): Type = tp match { + case tp: TypeParamRef => + val constraint = ctx.typerState.constraint + if (constraint.entry(tp).exists) constraint.fullBounds(tp) + else NoType + case tp: TypeRef => + val bounds = ctx.gadt.bounds(tp.symbol) + if (bounds == null) NoType else bounds + case tp: TypeVar => + tp.underlying } def updateReductionContext() = { reductionContext = new mutable.HashMap - for (tp <- cmp.footprint if isRelevant(tp)) - reductionContext(tp) = contextBounds(tp) + for (tp <- cmp.footprint) + reductionContext(tp) = contextInfo(tp) + typr.println(i"footprint for $this $hashCode: ${cmp.footprint.toList.map(x => (x, contextInfo(x)))}%, %") } def upToDate = - cmp.footprint.forall { tp => - !isRelevant(tp) || { - reductionContext.get(tp) match { - case Some(bounds) => bounds `eq` contextBounds(tp) - case None => false - } - } + reductionContext.keysIterator.forall { tp => + reductionContext(tp) `eq` contextInfo(tp) } record("MatchType.reduce called") @@ -3658,7 +3688,7 @@ object Types { record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") myReduced = - trace(i"reduce match type $this", typr, show = true) { + trace(i"reduce match type $this $hashCode", typr, show = true) { try if (defn.isBottomType(scrutinee)) defn.NothingType else if (reduceInParallel) reduceParallel(trackingCtx) @@ -4767,6 +4797,17 @@ object Types { } } + class isGroundAccumulator(implicit ctx: Context) extends TypeAccumulator[Boolean] { + def apply(x: Boolean, tp: Type) = x && { + tp match { + case _: TypeParamRef => false + case tp: TypeVar => apply(x, tp.underlying) + case tp: AppliedType => tp.isGround(this) + case _ => foldOver(x, tp) + } + } + } + // ----- Name Filters -------------------------------------------------- /** A name filter selects or discards a member name of a type `pre`. 
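
The `isGround` / `occursIn` changes in Types.scala above are a caching optimization: an `AppliedType` remembers whether it contains any `TypeParamRef`, so the occurs-check for a parameter can skip ground types instead of re-traversing them for every constraint. Below is a minimal standalone sketch of that idea with a toy type ADT; the names and structure are assumptions made purely for illustration.

object GroundnessSketch {
  sealed trait Type
  case class ParamRef(name: String) extends Type
  case class TypeRef(name: String) extends Type
  final case class Applied(tycon: Type, args: List[Type]) extends Type {
    // Cache the groundness answer, as AppliedType does with isGroundKnown / isGroundCache.
    private var groundKnown = false
    private var groundCache = false
    def isGroundCached(compute: => Boolean): Boolean = {
      if (!groundKnown) { groundCache = compute; groundKnown = true }
      groundCache
    }
  }

  // A type is ground if it refers to no ParamRef.
  def isGround(tp: Type): Boolean = tp match {
    case _: ParamRef  => false
    case _: TypeRef   => true
    case app: Applied => app.isGroundCached(isGround(app.tycon) && app.args.forall(isGround))
  }

  // Occurs-check that short-circuits on ground types, mirroring TypeParamRef.occursIn.
  def occursIn(param: ParamRef, tp: Type): Boolean =
    !isGround(tp) && (tp match {
      case p: ParamRef  => p == param
      case _: TypeRef   => false
      case app: Applied => occursIn(param, app.tycon) || app.args.exists(occursIn(param, _))
    })

  def main(args: Array[String]): Unit = {
    val ground = Applied(TypeRef("Map"), List(TypeRef("Int"), Applied(TypeRef("List"), List(TypeRef("String")))))
    assert(!occursIn(ParamRef("A"), ground))                                       // answered via the cached groundness check
    assert(occursIn(ParamRef("A"), Applied(TypeRef("List"), List(ParamRef("A")))))
  }
}
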
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index 7f726f4fb067..1c9acee3ba39 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -61,7 +61,7 @@ class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLe new TreeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt) } - protected def computeTrees(implicit ctx: Context) = treeUnpickler.unpickle(mode) + protected def computeRootTrees(implicit ctx: Context) = treeUnpickler.unpickle(mode) private[this] var ids: Array[String] = null diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index 5d45473608f8..9094a416de87 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -55,12 +55,13 @@ Standard-Section: "ASTs" TopLevelStat* Stat Stat = Term - VALDEF Length NameRef type_Term rhs_Term? Modifier* - DEFDEF Length NameRef TypeParam* Params* returnType_Term rhs_Term? - Modifier* + ValOrDefDef TYPEDEF Length NameRef (type_Term | Template) Modifier* OBJECTDEF Length NameRef Template Modifier* IMPORT Length qual_Term Selector* + ValOrDefDef = VALDEF Length NameRef type_Term rhs_Term? Modifier* + DEFDEF Length NameRef TypeParam* Params* returnType_Term rhs_Term? + Modifier* Selector = IMPORTED name_NameRef RENAMED to_NameRef @@ -85,7 +86,7 @@ Standard-Section: "ASTs" TopLevelStat* TYPED Length expr_Term ascriptionType_Tern ASSIGN Length lhs_Term rhs_Term BLOCK Length expr_Term Stat* - INLINED Length call_Term expr_Term Stat* + INLINED Length expr_Term call_Term? ValOrDefDef* LAMBDA Length meth_Term target_Type? IF Length cond_Term then_Term else_Term MATCH Length sel_Term CaseDef* @@ -109,10 +110,8 @@ Standard-Section: "ASTs" TopLevelStat* ORtpt Length left_Term right_Term MATCHtpt Length bound_Term? sel_Term CaseDef* BYNAMEtpt underlying_Term - EMPTYTREE SHAREDterm term_ASTRef HOLE Length idx_Nat arg_Tree* - UNTYPEDSPLICE Length splice_TermUntyped splice_Type CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree? ImplicitArg = IMPLICITARG arg_Term @@ -186,6 +185,7 @@ Standard-Section: "ASTs" TopLevelStat* OVERRIDE INLINE MACRO // inline method containing toplevel splices + INLINEPROXY // symbol of binding representing an inline parameter STATIC // mapped to static Java member OBJECT // an object or its class TRAIT // a trait @@ -207,16 +207,6 @@ Standard-Section: "ASTs" TopLevelStat* Annotation = ANNOTATION Length tycon_Type fullAnnotation_Term -// --------------- untyped additions ------------------------------------------ - - TermUntyped = Term - TYPEDSPLICE Length splice_Term - FUNCTION Length body_Term arg_Term* - INFIXOP Length op_NameRef left_Term right_Term - TUPLE Length elem_Term* - PATDEF Length type_Term rhs_Term pattern_Term* Modifier* - EMPTYTYPETREE - Note: Tree tags are grouped into 5 categories that determine what follows, and thus allow to compute the size of the tagged tree in a generic way. 
Category 1 (tags 1-49) : tag @@ -299,7 +289,7 @@ object TastyFormat { final val IMPLICIT = 13 final val LAZY = 14 final val OVERRIDE = 15 - + final val INLINEPROXY = 16 final val INLINE = 17 final val STATIC = 18 final val OBJECT = 19 @@ -320,8 +310,6 @@ object TastyFormat { final val MACRO = 34 final val ERASED = 35 final val PARAMsetter = 36 - final val EMPTYTREE = 37 - final val EMPTYTYPETREE = 38 // Cat. 2: tag Nat @@ -437,15 +425,6 @@ object TastyFormat { final val MATCHtype = 190 final val MATCHtpt = 191 - final val UNTYPEDSPLICE = 199 - - // Tags for untyped trees only: - final val TYPEDSPLICE = 200 - final val FUNCTION = 201 - final val INFIXOP = 202 - final val PATDEF = 203 - final val TUPLE = 204 - def methodType(isImplicit: Boolean = false, isErased: Boolean = false) = { val implicitOffset = if (isImplicit) 1 else 0 val erasedOffset = if (isErased) 2 else 0 @@ -461,7 +440,7 @@ object TastyFormat { /** Useful for debugging */ def isLegalTag(tag: Int) = - firstSimpleTreeTag <= tag && tag <= EMPTYTYPETREE || + firstSimpleTreeTag <= tag && tag <= PARAMsetter || firstNatTreeTag <= tag && tag <= SYMBOLconst || firstASTTreeTag <= tag && tag <= SINGLETONtpt || firstNatASTTreeTag <= tag && tag <= NAMEDARG || @@ -483,6 +462,7 @@ object TastyFormat { | LAZY | OVERRIDE | INLINE + | INLINEPROXY | MACRO | STATIC | OBJECT @@ -540,6 +520,7 @@ object TastyFormat { case LAZY => "LAZY" case OVERRIDE => "OVERRIDE" case INLINE => "INLINE" + case INLINEPROXY => "INLINEPROXY" case MACRO => "MACRO" case STATIC => "STATIC" case OBJECT => "OBJECT" @@ -557,8 +538,6 @@ object TastyFormat { case DEFAULTparameterized => "DEFAULTparameterized" case STABLE => "STABLE" case PARAMsetter => "PARAMsetter" - case EMPTYTREE => "EMPTYTREE" - case EMPTYTYPETREE => "EMPTYTYPETREE" case SHAREDterm => "SHAREDterm" case SHAREDtype => "SHAREDtype" @@ -661,13 +640,6 @@ object TastyFormat { case PRIVATEqualified => "PRIVATEqualified" case PROTECTEDqualified => "PROTECTEDqualified" case HOLE => "HOLE" - - case UNTYPEDSPLICE => "UNTYPEDSPLICE" - case TYPEDSPLICE => "TYPEDSPLICE" - case FUNCTION => "FUNCTION" - case INFIXOP => "INFIXOP" - case TUPLE => "TUPLE" - case PATDEF => "PATDEF" } /** @return If non-negative, the number of leading references (represented as nats) of a length/trees entry. 
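
The grammar change above replaces `INLINED Length call_Term expr_Term Stat*` with `INLINED Length expr_Term call_Term? ValOrDefDef*`: the pickler below writes the expansion first, then the optional call, then the inline-proxy bindings, and the unpickler recognizes the optional call by peeking at the next tag, since a binding always starts with VALDEF or DEFDEF. A minimal standalone sketch of that disambiguation follows; the tag values are placeholders, not the real TastyFormat constants.

object InlinedLayoutSketch {
  // Placeholder tag values for the sketch only.
  val VALDEF = 1; val DEFDEF = 2; val APPLY = 3; val SELECT = 4

  case class Term(tag: Int)
  case class Binding(tag: Int)

  // Decode `expr_Term call_Term? ValOrDefDef*` from a flat list of tags
  // (a stand-in for the TASTy tree stream).
  def readInlined(tags: List[Int]): (Term, Option[Term], List[Binding]) = {
    require(tags.nonEmpty, "INLINED must contain at least expr_Term")
    val expr = Term(tags.head)
    val (call, bindingTags) = tags.tail match {
      case t :: rest if t != VALDEF && t != DEFDEF => (Some(Term(t)), rest) // optional call_Term present
      case rest                                    => (None, rest)          // goes straight to the bindings
    }
    (expr, call, bindingTags.map(Binding(_)))
  }

  def main(args: Array[String]): Unit = {
    assert(readInlined(List(APPLY, SELECT, VALDEF, VALDEF))._2.isDefined) // call pickled
    assert(readInlined(List(APPLY, VALDEF, DEFDEF))._2.isEmpty)           // call omitted
  }
}

Note that the actual unpickler first skips the expansion with a forked reader and reads it only after the bindings, so that the bindings are in scope when the expansion is read; the sketch only shows the tag-based test for the optional call.
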
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index edd943c2f71e..42cf1c022112 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -448,7 +448,14 @@ class TreePickler(pickler: TastyPickler) { case Inlined(call, bindings, expansion) => writeByte(INLINED) bindings.foreach(preRegister) - withLength { pickleTree(call); pickleTree(expansion); bindings.foreach(pickleTree) } + withLength { + pickleTree(expansion) + if (!call.isEmpty) pickleTree(call) + bindings.foreach { b => + assert(b.isInstanceOf[DefDef] || b.isInstanceOf[ValDef]) + pickleTree(b) + } + } case Bind(name, body) => registerDef(tree.symbol) writeByte(BIND) @@ -563,11 +570,6 @@ class TreePickler(pickler: TastyPickler) { pickleTree(lo); if (hi ne lo) pickleTree(hi) } - case EmptyTree => - writeByte(EMPTYTREE) - case tpd.UntypedSplice(splice) => - writeByte(UNTYPEDSPLICE) - withLength { pickleUntyped(splice); pickleType(tree.tpe) } case Hole(idx, args) => writeByte(HOLE) withLength { @@ -620,6 +622,7 @@ class TreePickler(pickler: TastyPickler) { if (flags is Case) writeByte(CASE) if (flags is Override) writeByte(OVERRIDE) if (flags is Inline) writeByte(INLINE) + if (flags is InlineProxy) writeByte(INLINEPROXY) if (flags is Macro) writeByte(MACRO) if (flags is JavaStatic) writeByte(STATIC) if (flags is Module) writeByte(OBJECT) @@ -655,7 +658,9 @@ class TreePickler(pickler: TastyPickler) { // a different toplevel class, it is impossible to pickle a reference to it. // Such annotations will be reconstituted when unpickling the child class. // See tests/pickling/i3149.scala - case _ => false + case _ => + if (Inliner.typedInline) ann.symbol == defn.BodyAnnot // inline bodies are reconstituted automatically when unpickling + else false } def pickleAnnotation(owner: Symbol, ann: Annotation)(implicit ctx: Context) = @@ -664,250 +669,6 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) } } -// ---- pickling untyped trees ---------------------------------- - - def pickleUntyped(tree: untpd.Tree)(implicit ctx: Context): Unit = { - - def pickleDummyRef(): Unit = writeNat(0) - - def pickleDummyType(): Unit = writeByte(EMPTYTYPETREE) - - def pickleUnlessEmpty(tree: untpd.Tree): Unit = - if (!tree.isEmpty) pickleUntyped(tree) - - def pickleTpt(tree: untpd.Tree) = pickleUntyped(tree)(ctx.addMode(Mode.Type)) - def pickleTerm(tree: untpd.Tree) = pickleUntyped(tree)(ctx.retractMode(Mode.Type)) - - def pickleAllParams(tree: untpd.DefDef): Unit = { - pickleParams(tree.tparams) - for (vparams <- tree.vparamss) { - writeByte(PARAMS) - withLength { pickleParams(vparams) } - } - } - - def pickleParams(trees: List[untpd.Tree]): Unit = - trees.foreach(pickleParam) - - def pickleParam(tree: untpd.Tree): Unit = tree match { - case tree: untpd.ValDef => pickleDef(PARAM, tree, tree.tpt) - case tree: untpd.DefDef => pickleDef(PARAM, tree, tree.tpt, tree.rhs) - case tree: untpd.TypeDef => pickleDef(TYPEPARAM, tree, tree.rhs) - } - - def pickleParent(tree: untpd.Tree): Unit = tree match { - case _: untpd.Apply | _: untpd.TypeApply => pickleUntyped(tree) - case _ => pickleTpt(tree) - } - - def pickleDef(tag: Int, tree: untpd.MemberDef, tpt: untpd.Tree, rhs: untpd.Tree = untpd.EmptyTree, pickleParams: => Unit = ()) = { - import untpd.modsDeco - writeByte(tag) - withLength { - pickleName(tree.name) - pickleParams - pickleTpt(tpt) - 
pickleUnlessEmpty(rhs) - pickleModifiers(tree.mods, tree.name.isTermName) - } - } - - def pickleModifiers(mods: untpd.Modifiers, isTerm: Boolean): Unit = { - import Flags._ - var flags = mods.flags - val privateWithin = mods.privateWithin - if (!privateWithin.isEmpty) { - writeByte(if (flags is Protected) PROTECTEDqualified else PRIVATEqualified) - pickleUntyped(untpd.Ident(privateWithin)) - flags = flags &~ Protected - } - pickleFlags(flags, isTerm) - mods.annotations.foreach(pickleAnnotation) - } - - def pickleAnnotation(annotTree: untpd.Tree) = { - writeByte(ANNOTATION) - withLength { pickleDummyType(); pickleUntyped(annotTree) } - } - - try tree match { - case Ident(name) => - writeByte(if (name.isTypeName) TYPEREF else TERMREF) - pickleName(name) - pickleDummyType() - case This(qual) => - writeByte(QUALTHIS) - pickleUntyped(qual) - case Select(qual, name) => - writeByte(if (name.isTypeName) SELECTtpt else SELECT) - pickleName(name) - if (qual.isType) pickleTpt(qual) else pickleTerm(qual) - case Apply(fun, args) => - writeByte(APPLY) - withLength { - pickleUntyped(fun) - args.foreach(pickleUntyped) - } - case untpd.Throw(exc) => - writeByte(THROW) - pickleUntyped(exc) - case TypeApply(fun, args) => - writeByte(TYPEAPPLY) - withLength { - pickleUntyped(fun) - args.foreach(pickleTpt) - } - case Literal(const) => - pickleConstant(const) - case Super(qual, mix) => - writeByte(SUPER) - withLength { - pickleUntyped(qual); - if (!mix.isEmpty) pickleUntyped(mix) - } - case New(tpt) => - writeByte(NEW) - pickleTpt(tpt) - case Typed(expr, tpt) => - writeByte(TYPED) - withLength { pickleUntyped(expr); pickleTpt(tpt) } - case NamedArg(name, arg) => - writeByte(NAMEDARG) - pickleName(name) - pickleUntyped(arg) - case Assign(lhs, rhs) => - writeByte(ASSIGN) - withLength { pickleUntyped(lhs); pickleUntyped(rhs) } - case Block(stats, expr) => - writeByte(BLOCK) - withLength { pickleUntyped(expr); stats.foreach(pickleUntyped) } - case If(cond, thenp, elsep) => - writeByte(IF) - withLength { - if (tree.isInstanceOf[untpd.InlineIf]) writeByte(INLINE) - pickleUntyped(cond); pickleUntyped(thenp); pickleUntyped(elsep) - } - case Match(selector, cases) => - writeByte(MATCH) - withLength { - if (tree.isInstanceOf[untpd.InlineMatch]) writeByte(INLINE) - pickleUntyped(selector); cases.foreach(pickleUntyped) - } - case CaseDef(pat, guard, rhs) => - writeByte(CASEDEF) - withLength { pickleUntyped(pat); pickleUntyped(rhs); pickleUnlessEmpty(guard) } - case Return(expr, from) => - writeByte(RETURN) - withLength { pickleDummyRef(); pickleUnlessEmpty(expr) } - case WhileDo(cond, body) => - writeByte(WHILE) - withLength { pickleUntyped(cond); pickleUntyped(body) } - case Try(block, cases, finalizer) => - writeByte(TRY) - withLength { pickleUntyped(block); cases.foreach(pickleUntyped); pickleUnlessEmpty(finalizer) } - case Bind(name, body) => - writeByte(BIND) - withLength { - pickleName(name); pickleDummyType(); pickleUntyped(body) - } - case Alternative(alts) => - writeByte(ALTERNATIVE) - withLength { alts.foreach(pickleUntyped) } - case tree: untpd.ValDef => - pickleDef(VALDEF, tree, tree.tpt, tree.rhs) - case tree: untpd.DefDef => - pickleDef(DEFDEF, tree, tree.tpt, tree.rhs, pickleAllParams(tree)) - case tree: untpd.TypeDef => - pickleDef(TYPEDEF, tree, tree.rhs) - case tree: untpd.ModuleDef => - pickleDef(OBJECTDEF, tree, tree.impl) - case tree: untpd.Template => - writeByte(TEMPLATE) - withLength { - tree.parents.foreach(pickleParent) - if (!tree.self.isEmpty) { - writeByte(SELFDEF); pickleName(tree.self.name); 
pickleTpt(tree.self.tpt) - } - pickleUntyped(tree.constr) - tree.body.foreach(pickleUntyped) - } - case Import(expr, selectors) => - writeByte(IMPORT) - withLength { pickleUntyped(expr); pickleSelectors(selectors) } - case tree: untpd.TypeTree => - pickleDummyType() - case SingletonTypeTree(ref) => - writeByte(SINGLETONtpt) - pickleTerm(ref) - case RefinedTypeTree(parent, refinements) => - writeByte(REFINEDtpt) - withLength { pickleTpt(parent); refinements.foreach(pickleTerm) } - case AppliedTypeTree(tycon, args) => - writeByte(APPLIEDtpt) - withLength { pickleTpt(tycon); args.foreach(pickleTpt) } - case AndTypeTree(tp1, tp2) => - writeByte(ANDtpt) - withLength { pickleTpt(tp1); pickleTpt(tp2) } - case OrTypeTree(tp1, tp2) => - writeByte(ORtpt) - withLength { pickleTpt(tp1); pickleTpt(tp2) } - case ByNameTypeTree(tp) => - writeByte(BYNAMEtpt) - pickleTpt(tp) - case Annotated(tree, annot) => - writeByte(ANNOTATEDtpt) - withLength { pickleTpt(tree); pickleTerm(annot) } - case MatchTypeTree(bound, selector, cases) => - writeByte(MATCHtpt) - withLength { - if (!bound.isEmpty) pickleTpt(bound) - pickleTpt(selector) - cases.foreach(pickleUntyped) - } - case LambdaTypeTree(tparams, body) => - writeByte(LAMBDAtpt) - withLength { pickleParams(tparams); pickleTpt(body) } - case TypeBoundsTree(lo, hi) => - writeByte(TYPEBOUNDStpt) - withLength { - pickleTpt(lo); - if (hi ne lo) pickleTpt(hi) - } - case untpd.Function(args, body) => - writeByte(FUNCTION) - withLength { pickleUntyped(body); args.foreach(pickleUntyped) } - case untpd.InfixOp(l, op, r) => - writeByte(INFIXOP) - withLength { pickleUntyped(l); pickleUntyped(op); pickleUntyped(r) } - case untpd.Tuple(elems) => - writeByte(TUPLE) - withLength { elems.foreach(pickleUntyped) } - case untpd.PatDef(mods, pats, tpt, rhs) => - writeByte(PATDEF) - withLength { - pickleTpt(tpt) - pickleUntyped(rhs) - pats.foreach(pickleUntyped) - pickleModifiers(mods, isTerm = true) - } - case untpd.TypedSplice(splice) => - writeByte(TYPEDSPLICE) - withLength { pickleTree(splice) } - case Thicket(trees) => - if (trees.isEmpty) writeByte(EMPTYTREE) - else trees.foreach(pickleUntyped) - case _ => - val tree1 = desugar(tree) - assert(tree1 `ne` tree, s"Cannot pickle untyped tree $tree") - pickleUntyped(tree1) - } - catch { - case ex: AssertionError => - println(i"error when pickling tree $tree") - throw ex - } - } - // ---- main entry points --------------------------------------- def pickle(trees: List[Tree])(implicit ctx: Context) = { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 7012c26ed5e4..3058fdcf929c 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -24,6 +24,7 @@ import core.quoted.PickledQuotes import scala.quoted import scala.quoted.Types.TreeType import scala.quoted.Exprs.TastyTreeExpr +import typer.Inliner.typedInline import scala.annotation.internal.sharable @@ -556,6 +557,12 @@ class TreeUnpickler(reader: TastyReader, sym.completer.withDecls(newScope) forkAt(templateStart).indexTemplateParams()(localContext(sym)) } + else if (typedInline && sym.isInlineMethod) + sym.addAnnotation(LazyBodyAnnotation { ctx0 => + implicit val ctx: Context = localContext(sym)(ctx0).addMode(Mode.ReadPositions) + // avoids space leaks by not capturing the current context + forkAt(rhsStart).readTerm() + }) goto(start) sym } @@ -592,6 +599,7 @@ class TreeUnpickler(reader: TastyReader, case 
LAZY => addFlag(Lazy) case OVERRIDE => addFlag(Override) case INLINE => addFlag(Inline) + case INLINEPROXY => addFlag(InlineProxy) case MACRO => addFlag(Macro) case STATIC => addFlag(JavaStatic) case OBJECT => addFlag(Module) @@ -637,8 +645,10 @@ class TreeUnpickler(reader: TastyReader, val lazyAnnotTree = readLaterWithOwner(end, rdr => ctx => rdr.readTerm()(ctx)) owner => - if (tp.isRef(defn.BodyAnnot)) + if (tp.isRef(defn.BodyAnnot)) { + assert(!typedInline) LazyBodyAnnotation(implicit ctx => lazyAnnotTree(owner).complete) + } else Annotation.deferredSymAndTree( implicit ctx => tp.typeSymbol, @@ -741,6 +751,11 @@ class TreeUnpickler(reader: TastyReader, def readRhs(implicit ctx: Context) = if (nothingButMods(end)) EmptyTree + else if (sym.isInlineMethod && typedInline) + // The body of an inline method is stored in an annotation, so no need to unpickle it again + new Trees.Lazy[Tree] { + def complete(implicit ctx: Context) = typer.Inliner.bodyToInline(sym) + } else readLater(end, rdr => ctx => rdr.readTerm()(ctx.retractMode(Mode.InSuperCall))) @@ -832,6 +847,7 @@ class TreeUnpickler(reader: TastyReader, // Child annotations for local classes and enum values are not pickled, so // need to be re-established here. sym.registerIfChild(late = true) + sym.defTree = tree if (ctx.mode.is(Mode.ReadComments)) { assert(ctx.docCtx.isDefined, "Mode is `ReadComments`, but no `docCtx` is set.") @@ -864,7 +880,7 @@ class TreeUnpickler(reader: TastyReader, case _ => readTpt()(parentCtx) } } - val parentTypes = parents.map(_.tpe.dealias) + val parentTypes = defn.adjustForTuple(cls, cls.typeParams, parents.map(_.tpe.dealias)) val self = if (nextByte == SELFDEF) { readByte() @@ -1029,23 +1045,12 @@ class TreeUnpickler(reader: TastyReader, ByNameTypeTree(readTpt()) case NAMEDARG => NamedArg(readName(), readTerm()) - case EMPTYTREE => - EmptyTree case _ => readPathTerm() } def readLengthTerm(): Tree = { val end = readEnd() - - def readBlock(mkTree: (List[Tree], Tree) => Tree): Tree = { - val exprReader = fork - skipTree() - val stats = readStats(ctx.owner, end) - val expr = exprReader.readTerm() - mkTree(stats, expr) - } - val result = (tag: @switch) match { case SUPER => @@ -1064,10 +1069,22 @@ class TreeUnpickler(reader: TastyReader, case ASSIGN => Assign(readTerm(), readTerm()) case BLOCK => - readBlock(Block) + val exprReader = fork + skipTree() + val stats = readStats(ctx.owner, end) + val expr = exprReader.readTerm() + Block(stats, expr) case INLINED => - val call = readTerm() - readBlock((defs, expr) => Inlined(call, defs.asInstanceOf[List[MemberDef]], expr)) + val exprReader = fork + skipTree() + def maybeCall = nextUnsharedTag match { + case VALDEF | DEFDEF => EmptyTree + case _ => readTerm() + } + val call = ifBefore(end)(maybeCall, EmptyTree) + val bindings = readStats(ctx.owner, end).asInstanceOf[List[ValOrDefDef]] + val expansion = exprReader.readTerm() // need bindings in scope, so needs to be read before + Inlined(call, bindings, expansion) case IF => If(readTerm(), readTerm(), readTerm()) case LAMBDA => @@ -1144,8 +1161,6 @@ class TreeUnpickler(reader: TastyReader, TypeBoundsTree(lo, hi) case HOLE => readHole(end, isType = false) - case UNTYPEDSPLICE => - tpd.UntypedSplice(readUntyped()).withType(readType()) case _ => readPathTerm() } @@ -1215,215 +1230,6 @@ class TreeUnpickler(reader: TastyReader, PickledQuotes.quotedExprToTree(quotedExpr) } } -// ------ Reading untyped trees -------------------------------------------- - - def readUntyped()(implicit ctx: Context): untpd.Tree = { - val start 
= currentAddr - val tag = readByte() - pickling.println(s"reading term ${astTagToString(tag)} at $start") - - def readDummyType(): Unit = - assert(readByte() == EMPTYTYPETREE) - - def readIdent(): untpd.Ident = readUntyped().asInstanceOf[untpd.Ident] - - def readParams[T <: untpd.MemberDef](tag: Int): List[T] = - collectWhile(nextByte == tag) { - import untpd.modsDeco - val m: T = readUntyped().asInstanceOf[T] - m.withMods(m.mods | Param).asInstanceOf[T] - } - - def readParamss(): List[List[untpd.ValDef]] = - collectWhile(nextByte == PARAMS) { - readByte() - readEnd() - readParams[untpd.ValDef](PARAM) - } - - def readCases(end: Addr): List[untpd.CaseDef] = - collectWhile((nextUnsharedTag == CASEDEF) && currentAddr != end) { - readUntyped().asInstanceOf[untpd.CaseDef] - } - - def readSimpleTerm(): untpd.Tree = (tag: @switch) match { - case TERMREF => - val name = readName() - readDummyType() - untpd.Ident(name) - case TYPEREF => - val name = readName().toTypeName - readDummyType() - untpd.Ident(name) - case SELECT => - val name = readName() - val qual = readUntyped() - untpd.Select(qual, name) - case SELECTtpt => - val name = readName().toTypeName - val qual = readUntyped() - untpd.Select(qual, name) - case QUALTHIS => - untpd.This(readIdent()) - case NEW => - untpd.New(readUntyped()) - case THROW => - untpd.Throw(readUntyped()) - case SINGLETONtpt => - untpd.SingletonTypeTree(readUntyped()) - case BYNAMEtpt => - untpd.ByNameTypeTree(readUntyped()) - case NAMEDARG => - untpd.NamedArg(readName(), readUntyped()) - case EMPTYTREE => - untpd.EmptyTree - case EMPTYTYPETREE => - untpd.TypeTree() - case _ => - untpd.Literal(readConstant(tag)) - } - - def readLengthTerm(): untpd.Tree = { - val end = readEnd() - - def readMods(): untpd.Modifiers = { - val (flags, annots, privateWithin) = - readModifiers(end, readUntypedAnnot, readUntypedWithin, EmptyTypeName) - untpd.Modifiers(flags, privateWithin, annots.map(_(NoSymbol))) - } - - def readRhs(): untpd.Tree = - if (nothingButMods(end)) untpd.EmptyTree else readUntyped() - - val result = (tag: @switch) match { - case SUPER => - val qual = readUntyped() - val mixId = ifBefore(end)(readIdent(), untpd.EmptyTypeIdent) - untpd.Super(qual, mixId) - case APPLY => - val fn = readUntyped() - untpd.Apply(fn, until(end)(readUntyped())) - case TYPEAPPLY => - untpd.TypeApply(readUntyped(), until(end)(readUntyped())) - case TYPED => - val expr = readUntyped() - val tpt = readUntyped() - untpd.Typed(expr, tpt) - case ASSIGN => - untpd.Assign(readUntyped(), readUntyped()) - case BLOCK => - val expr = readUntyped() - val stats = until(end)(readUntyped()) - untpd.Block(stats, expr) - case IF => - val mkIf = if (nextByte == INLINE) { readByte(); untpd.InlineIf(_, _, _) } - else untpd.If(_, _, _) - mkIf(readUntyped(), readUntyped(), readUntyped()) - case MATCH => - val mkMatch = - if (nextByte == INLINE) { readByte(); untpd.InlineMatch(_, _) } - else untpd.Match(_, _) - mkMatch(readUntyped(), readCases(end)) - case CASEDEF => - val pat = readUntyped() - val rhs = readUntyped() - val guard = ifBefore(end)(readUntyped(), untpd.EmptyTree) - untpd.CaseDef(pat, guard, rhs) - case RETURN => - readNat() - val expr = ifBefore(end)(readUntyped(), untpd.EmptyTree) - untpd.Return(expr, untpd.EmptyTree) - case WHILE => - untpd.WhileDo(readUntyped(), readUntyped()) - case TRY => - untpd.Try(readUntyped(), readCases(end), ifBefore(end)(readUntyped(), untpd.EmptyTree)) - case BIND => - val name = readName() - readDummyType() - untpd.Bind(name, readUntyped()) - case ALTERNATIVE => - 
untpd.Alternative(until(end)(readUntyped())) - case DEFDEF => - untpd.DefDef(readName(), readParams[TypeDef](TYPEPARAM), readParamss(), readUntyped(), readRhs()) - .withMods(readMods()) - case VALDEF | PARAM => - untpd.ValDef(readName(), readUntyped(), readRhs()) - .withMods(readMods()) - case TYPEDEF | TYPEPARAM => - untpd.TypeDef(readName().toTypeName, readUntyped()) - .withMods(readMods()) - case OBJECTDEF => - untpd.ModuleDef(readName(), readUntyped().asInstanceOf[untpd.Template]) - .withMods(readMods()) - case TEMPLATE => - val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF)(readUntyped()) - val self = - if (nextByte == SELFDEF) { - readByte() - untpd.ValDef(readName(), readUntyped(), untpd.EmptyTree) - } - else untpd.EmptyValDef - val constr = readUntyped().asInstanceOf[untpd.DefDef] - val body = until(end)(readUntyped()) - untpd.Template(constr, parents, self, body) - case IMPORT => - untpd.Import(readUntyped(), readSelectors()) - case REFINEDtpt => - untpd.RefinedTypeTree(readUntyped(), until(end)(readUntyped())) - case APPLIEDtpt => - untpd.AppliedTypeTree(readUntyped(), until(end)(readUntyped())) - case ANDtpt => - untpd.AndTypeTree(readUntyped(), readUntyped()) - case ORtpt => - untpd.OrTypeTree(readUntyped(), readUntyped()) - case ANNOTATEDtpt => - untpd.Annotated(readUntyped(), readUntyped()) - case LAMBDAtpt => - val tparams = readParams[TypeDef](TYPEPARAM) - val body = readUntyped() - untpd.LambdaTypeTree(tparams, body) - case MATCHtpt => - val fst = readUntyped() - val (bound, scrut) = - if (nextUnsharedTag == CASEDEF) (EmptyTree, fst) else (fst, readUntyped()) - untpd.MatchTypeTree(bound, scrut, readCases(end)) - case TYPEBOUNDStpt => - val lo = readUntyped() - val hi = ifBefore(end)(readUntyped(), lo) - untpd.TypeBoundsTree(lo, hi) - case TYPEDSPLICE => - untpd.TypedSplice(readTerm()) - case FUNCTION => - val body = readUntyped() - import untpd.modsDeco - val params = until(end)(readUntyped()).map { - case param: untpd.ValDef => param.withMods(param.mods | Param) - case param => param - } - untpd.Function(params, body) - case INFIXOP => - untpd.InfixOp(readUntyped(), readIdent(), readUntyped()) - case TUPLE => - untpd.Tuple(until(end)(readUntyped())) - case PATDEF => - val tpt = readUntyped() - val rhs = readUntyped() - val pats = collectWhile(!nothingButMods(end))(readUntyped()) - untpd.PatDef(readMods(), pats, tpt, rhs) - } - assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") - result - } - - val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm() - setPos(start, tree) - } - - private val readUntypedWithin: Context => TypeName = - implicit ctx => readName().toTypeName - - private val readUntypedAnnot: Context => Symbol => untpd.Tree = - implicit ctx => _ => readUntyped() // ------ Setting positions ------------------------------------------------ @@ -1493,8 +1299,10 @@ class TreeUnpickler(reader: TastyReader, def search(cs: List[OwnerTree], current: Symbol): Symbol = try cs match { case ot :: cs1 => - if (ot.addr.index == addr.index) + if (ot.addr.index == addr.index) { + assert(current.exists, i"no symbol at $addr") current + } else if (ot.addr.index < addr.index && addr.index < ot.end.index) search(ot.children, reader.symbolAt(ot.addr)) else diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 29a311a16f1f..24d7606c2b9b 100644 --- 
a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -108,8 +108,8 @@ object Scala2Unpickler { val tempInfo = new TempClassInfo(denot.owner.thisType, cls, decls, ost) denot.info = tempInfo // first rough info to avoid CyclicReferences val normalizedParents = - if (parents.isEmpty) defn.ObjectType :: Nil - else parents.map(_.dealias) + defn.adjustForTuple(cls, tparams, + if (parents.isEmpty) defn.ObjectType :: Nil else parents.map(_.dealias)) for (tparam <- tparams) { val tsym = decls.lookup(tparam.name) if (tsym.exists) tsym.setFlag(TypeParam) diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTastyTreesFromClasses.scala b/compiler/src/dotty/tools/dotc/fromtasty/ReadTastyTreesFromClasses.scala index d22383d2f9b7..01197e8c1698 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTastyTreesFromClasses.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTastyTreesFromClasses.scala @@ -39,11 +39,11 @@ class ReadTastyTreesFromClasses extends FrontEnd { def compilationUnit(cls: Symbol): Option[CompilationUnit] = cls match { case cls: ClassSymbol => - (cls.treeOrProvider: @unchecked) match { + (cls.rootTreeOrProvider: @unchecked) match { case unpickler: tasty.DottyUnpickler => - if (cls.tree.isEmpty) None + if (cls.rootTree.isEmpty) None else { - val unit = mkCompilationUnit(cls, cls.tree, forceTrees = true) + val unit = mkCompilationUnit(cls, cls.rootTree, forceTrees = true) unit.pickled += (cls -> unpickler.unpickler.bytes) Some(unit) } @@ -64,7 +64,7 @@ class ReadTastyTreesFromClasses extends FrontEnd { case clsd: ClassDenotation => clsd.infoOrCompleter match { case info: ClassfileLoader => - info.load(clsd) // sets cls.treeOrProvider and cls.moduleClass.treeProvider as a side-effect + info.load(clsd) // sets cls.rootTreeOrProvider and cls.moduleClass.treeProvider as a side-effect case _ => } def moduleClass = clsd.owner.info.member(className.moduleClassName).symbol diff --git a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala index 2ef05c7393ed..37c033aa68e9 100644 --- a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala +++ b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala @@ -53,7 +53,7 @@ object SourceTree { Some(SourceTree(tree, sourceFile)) case _ => None } - sourceTreeOfClass(sym.treeContaining(id)) + sourceTreeOfClass(sym.rootTreeContaining(id)) } } } diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 476e7268ec2d..f31f7a2ada88 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1133,8 +1133,7 @@ object Parsers { val start = in.offset if (in.token == IMPLICIT || in.token == ERASED) { val imods = modifiers(funArgMods) - if (in.token == MATCH) implicitMatch(start, imods) - else implicitClosure(start, location, imods) + implicitClosure(start, location, imods) } else { val saved = placeholderParams placeholderParams = Nil @@ -1214,21 +1213,7 @@ object Parsers { case FOR => forExpr() case _ => - if (isIdent(nme.INLINEkw)) { - val start = in.skipToken() - in.token match { - case IF => - ifExpr(start, InlineIf) - case _ => - val t = postfixExpr() - if (in.token == MATCH) matchExpr(t, start, InlineMatch) - else { - syntaxErrorOrIncomplete("`match` or `if` expected but ${in.token} found") - t - } - } - } - else expr1Rest(postfixExpr(), location) 
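For reference, the surface forms that the parser branches removed here accepted looked roughly as follows (a sketch of the dropped syntax, not code added by this patch; `setFor` and `choose` are made-up names):

  import scala.collection.immutable.{HashSet, TreeSet}

  // `implicit match`, previously routed through implicitMatch(start, imods):
  inline def setFor[T]: Set[T] = implicit match {
    case ord: Ordering[T] => TreeSet.empty[T](ord)   // selected if an Ordering[T] implicit is found
    case _                => HashSet.empty[T]        // fallback case
  }

  // `inline if`, previously routed through the INLINEkw branch:
  inline def choose(inline b: Boolean): Int =
    inline if (b) 1 else 2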
+ expr1Rest(postfixExpr(), location) } def expr1Rest(t: Tree, location: Location.Value) = in.token match { @@ -1242,7 +1227,7 @@ object Parsers { case COLON => ascription(t, location) case MATCH => - matchExpr(t, startOffset(t), Match) + matchExpr(t, startOffset(t)) case _ => t } @@ -1289,35 +1274,11 @@ object Parsers { /** `match' { CaseClauses } * `match' { ImplicitCaseClauses } */ - def matchExpr(t: Tree, start: Offset, mkMatch: (Tree, List[CaseDef]) => Match) = + def matchExpr(t: Tree, start: Offset) = atPos(start, in.skipToken()) { - inBraces(mkMatch(t, caseClauses(caseClause))) + inBraces(Match(t, caseClauses(caseClause))) } - /** `match' { ImplicitCaseClauses } - */ - def implicitMatch(start: Int, imods: Modifiers) = { - def markFirstIllegal(mods: List[Mod]) = mods match { - case mod :: _ => syntaxError(em"illegal modifier for implicit match", mod.pos) - case _ => - } - imods.mods match { - case Mod.Implicit() :: mods => markFirstIllegal(mods) - case mods => markFirstIllegal(mods) - } - val result @ Match(t, cases) = - matchExpr(ImplicitScrutinee().withPos(implicitKwPos(start)), start, InlineMatch) - for (CaseDef(pat, _, _) <- cases) { - def isImplicitPattern(pat: Tree) = pat match { - case Typed(pat1, _) => isVarPattern(pat1) - case pat => isVarPattern(pat) - } - if (!isImplicitPattern(pat)) - syntaxError(em"not a legal pattern for an implicit match", pat.pos) - } - result - } - /** `match' { TypeCaseClauses } */ def matchType(bound: Tree, t: Tree) = @@ -2650,8 +2611,6 @@ object Parsers { var imods = modifiers(funArgMods) if (isBindingIntro && !isIdent(nme.INLINEkw)) stats += implicitClosure(start, Location.InBlock, imods) - else if (in.token == MATCH) - stats += implicitMatch(start, imods) else stats +++= localDef(start, imods) } else { diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 5227fca1f632..b0c3a42abcc0 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -481,9 +481,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case TypedSplice(t) => if (ctx.settings.YprintDebug.value) "[" ~ toText(t) ~ "]#TS#" else toText(t) - case tpd.UntypedSplice(t) => - if (ctx.settings.YprintDebug.value) "[" ~ toText(t) ~ ":" ~ toText(tree.typeOpt) ~ "]#US#" - else toText(t) case tree @ ModuleDef(name, impl) => withEnclosingDef(tree) { modText(tree.mods, NoSymbol, keywordStr("object")) ~~ nameIdText(tree) ~ toTextTemplate(impl) diff --git a/compiler/src/dotty/tools/dotc/tastyreflect/TreeOpsImpl.scala b/compiler/src/dotty/tools/dotc/tastyreflect/TreeOpsImpl.scala index 4dbe00b76415..bc9d57d50544 100644 --- a/compiler/src/dotty/tools/dotc/tastyreflect/TreeOpsImpl.scala +++ b/compiler/src/dotty/tools/dotc/tastyreflect/TreeOpsImpl.scala @@ -184,6 +184,10 @@ trait TreeOpsImpl extends scala.tasty.reflect.TreeOps with TastyCoreImpl with He def TermDeco(term: Term): TermAPI = new TermAPI { def pos(implicit ctx: Context): Position = term.pos def tpe(implicit ctx: Context): Types.Type = term.tpe + def underlyingArgument(implicit ctx: Context): Term = { + import tpd._ + term.underlyingArgument + } } object IsTerm extends IsTermExtractor { diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 8d561f5c916a..5c3dea9cb79e 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -59,8 +59,6 @@ abstract class MacroTransform extends Phase { transform(parents)(ctx.superCallContext), transformSelf(self), transformStats(impl.body, tree.symbol)) - case UntypedSplice(_) => - tree case _ => super.transform(tree) } diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 9a98ae24921b..72b6f9e799f7 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -103,7 +103,7 @@ class Pickler extends Phase { } pickling.println("************* entered toplevel ***********") for ((cls, unpickler) <- unpicklers) { - val unpickled = unpickler.trees + val unpickled = unpickler.rootTrees testSame(i"$unpickled%\n%", beforePickling(cls), cls) } } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index ba7397d8926f..b9b06f50691b 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -101,10 +101,12 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase private def transformAnnot(annot: Annotation)(implicit ctx: Context): Annotation = annot.derivedAnnotation(transformAnnot(annot.tree)) - private def transformMemberDef(tree: MemberDef)(implicit ctx: Context): Unit = { + private def processMemberDef(tree: Tree)(implicit ctx: Context): tree.type = { val sym = tree.symbol sym.registerIfChild() sym.transformAnnotations(transformAnnot) + sym.defTree = tree + tree } private def transformSelect(tree: Select, targs: List[Tree])(implicit ctx: Context): Tree = { @@ -256,14 +258,11 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase } case tree: ValDef => val tree1 = cpy.ValDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) - transformMemberDef(tree1) - super.transform(tree1) + processMemberDef(super.transform(tree1)) case tree: DefDef => val tree1 = cpy.DefDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) - transformMemberDef(tree1) - superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef]) + processMemberDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef])) case tree: TypeDef => - transformMemberDef(tree) val sym = tree.symbol if (sym.isClass) { // Add SourceFile annotation to top-level classes @@ -273,7 +272,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase sym.addAnnotation(Annotation.makeSourceFile(ctx.compilationUnit.source.file.path)) tree } - super.transform(tree) + processMemberDef(super.transform(tree)) case tree: New if isCheckable(tree) => Checking.checkInstantiable(tree.tpe, tree.pos) super.transform(tree) diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 738d9ad1049e..51ac332351d0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -34,14 +34,7 @@ import ast.TreeInfo object Inliner { import tpd._ - /** A key to be used in a context property that provides a map from enclosing implicit - * value bindings to their right hand sides. 
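The map this removed key provided is superseded by recording definition trees on their symbols directly (`sym.defTree = tree` in PostTyper above, and on the proxy bindings created further below). A minimal sketch of how a later consumer can recover a proxy's right-hand side under the new scheme (compiler-internal pseudocode; `sym` is an assumed symbol of an inline proxy):

  val rhs = sym.defTree match {
    case vdef: ValDef => vdef.rhs   // by-value proxy: the bound argument
    case ddef: DefDef => ddef.rhs   // by-name proxy: the argument expression
    case _            => EmptyTree  // nothing recorded for this symbol
  }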
- */ - private val InlineBindings = new Property.Key[MutableSymbolMap[Tree]] - - /** A map from the symbols of all enclosing inline value bindings to their right hand sides */ - def inlineBindings(implicit ctx: Context): MutableSymbolMap[Tree] = - ctx.property(InlineBindings).get + val typedInline = true /** `sym` is an inline method with a known body to inline (note: definitions coming * from Scala2x class files might be `@forceInline`, but still lack that body). @@ -118,12 +111,7 @@ object Inliner { else if (enclosingInlineds.length < ctx.settings.XmaxInlines.value) { val body = bodyToInline(tree.symbol) // can typecheck the tree and thereby produce errors if (ctx.reporter.hasErrors) tree - else { - val inlinerCtx = - if (ctx.property(InlineBindings).isDefined) ctx - else ctx.fresh.setProperty(InlineBindings, newMutableSymbolMap[Tree]) - new Inliner(tree, body)(inlinerCtx).inlined(pt) - } + else new Inliner(tree, body).inlined(pt) } else errorTree( @@ -216,7 +204,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { private val thisProxy = new mutable.HashMap[ClassSymbol, TermRef] /** A buffer for bindings that define proxies for actual arguments */ - private val bindingsBuf = new mutable.ListBuffer[MemberDef] + private val bindingsBuf = new mutable.ListBuffer[ValOrDefDef] private def newSym(name: Name, flags: FlagSet, info: Type): Symbol = ctx.newSymbol(ctx.owner, name, flags, info, coord = call.pos) @@ -231,17 +219,20 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { * @param bindingsBuf the buffer to which the definition should be appended */ private def paramBindingDef(name: Name, paramtp: Type, arg: Tree, - bindingsBuf: mutable.ListBuffer[MemberDef]): MemberDef = { + bindingsBuf: mutable.ListBuffer[ValOrDefDef]): ValOrDefDef = { val argtpe = arg.tpe.dealiasKeepAnnots val isByName = paramtp.dealias.isInstanceOf[ExprType] - val inlineFlag = if (paramtp.hasAnnotation(defn.InlineParamAnnot)) Inline else EmptyFlags + var inlineFlag = InlineProxy + if (paramtp.hasAnnotation(defn.InlineParamAnnot)) inlineFlag |= Inline val (bindingFlags, bindingType) = - if (isByName) (Method, ExprType(argtpe.widen)) + if (isByName) (Method | InlineProxy, ExprType(argtpe.widen)) else (inlineFlag, argtpe.widen) val boundSym = newSym(name, bindingFlags, bindingType).asTerm - val binding = + val binding = { if (isByName) DefDef(boundSym, arg.changeOwner(ctx.owner, boundSym)) else ValDef(boundSym, arg) + }.withPos(boundSym.pos) + boundSym.defTree = binding bindingsBuf += binding binding } @@ -293,7 +284,9 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { ref(rhsClsSym.sourceModule) else inlineCallPrefix - bindingsBuf += ValDef(selfSym.asTerm, rhs) + val binding = ValDef(selfSym.asTerm, rhs).withPos(selfSym.pos) + bindingsBuf += binding + selfSym.defTree = binding inlining.println(i"proxy at $level: $selfSym = ${bindingsBuf.last}") lastSelf = selfSym lastLevel = level @@ -321,7 +314,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { case tpe: ThisType if !canElideThis(tpe) && !thisProxy.contains(tpe.cls) => val proxyName = s"${tpe.cls.name}_this".toTermName val proxyType = tpe.asSeenFrom(inlineCallPrefix.tpe, inlinedMethod.owner) - thisProxy(tpe.cls) = newSym(proxyName, Synthetic, proxyType).termRef + thisProxy(tpe.cls) = newSym(proxyName, InlineProxy, proxyType).termRef if (!tpe.cls.isStaticOwner) registerType(inlinedMethod.owner.thisType) // make sure we have a base from which to 
outer-select case tpe: NamedType @@ -383,6 +376,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { computeThisBindings() val inlineTyper = new InlineTyper + val inlineCtx = inlineContext(call).fresh.setTyper(inlineTyper).setNewScope // A tree type map to prepare the inlined body for typechecked. @@ -423,26 +417,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { )(inlineCtx) // Apply inliner to `rhsToInline`, split off any implicit bindings from result, and - // make them part of `bindingsBuf`. The expansion is then the untyped tree that remains. - val expansion = inliner.transform(rhsToInline.withPos(call.pos)) match { - case Block(implicits, tpd.UntypedSplice(expansion)) => - val prevOwners = implicits.map(_.symbol.owner).distinct - val localizer = new TreeTypeMap(oldOwners = prevOwners, newOwners = prevOwners.map(_ => ctx.owner)) - val (_, implicits1) = localizer.transformDefs(implicits) - for (idef <- implicits1) { - bindingsBuf += idef.withType(idef.symbol.typeRef).asInstanceOf[ValOrDefDef] - // Note: Substituting new symbols does not automatically lead to good prefixes - // if the previous symbol was owned by a class. That's why we need to set the type - // of `idef` explicitly. It would be nice if substituters were smarter, but - // it seems non-trivial to come up with rules that work in all cases. - inlineCtx.enter(idef.symbol) - } - expansion - case tpd.UntypedSplice(expansion) => - expansion - case expansion => - expansion - } + // make them part of `bindingsBuf`. The expansion is then the tree that remains. + val expansion = inliner.transform(rhsToInline.withPos(call.pos)) def issueError() = callValueArgss match { case (msgArg :: rest) :: Nil => @@ -474,9 +450,6 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { trace(i"inlining $call", inlining, show = true) { - // The normalized bindings collected in `bindingsBuf` - bindingsBuf.transform(reducer.normalizeBinding(_)(inlineCtx)) - // Run a typing pass over the inlined tree. See InlineTyper for details. val expansion1 = inlineTyper.typed(expansion, pt)(inlineCtx) @@ -487,137 +460,19 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } // Drop unused bindings - val matchBindings = reducer.matchBindingsBuf.toList - val (finalBindings, finalExpansion) = dropUnusedDefs(bindingsBuf.toList ++ matchBindings, expansion1) - val (finalMatchBindings, finalArgBindings) = finalBindings.partition(matchBindings.contains(_)) + val (finalBindings, finalExpansion) = dropUnusedDefs(bindingsBuf.toList, expansion1) if (inlinedMethod == defn.Typelevel_error) issueError() // Take care that only argument bindings go into `bindings`, since positions are // different for bindings from arguments and bindings from body. - tpd.Inlined(call, finalArgBindings, seq(finalMatchBindings, finalExpansion)) + tpd.Inlined(call, finalBindings, finalExpansion) } } /** A utility object offering methods for rewriting inlined code */ object reducer { - /** Additional bindings established by reducing match expressions */ - val matchBindingsBuf = new mutable.ListBuffer[MemberDef] - - /** An extractor for terms equivalent to `new C(args)`, returning the class `C`, - * a list of bindings, and the arguments `args`. Can see inside blocks and Inlined nodes and can - * follow a reference to an inline value binding to its right hand side. 
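Concretely, the removed extractor treated all of the following shapes as an instance creation of a hypothetical case class `Pair` (illustration only; none of these names appear in the patch):

  case class Pair(a: Int, b: Int)

  new Pair(1, 2)                         // direct constructor call
  Pair(1, 2)                             // synthetic case-class apply
  { val x = 1; new Pair(x, 2) }          // creation wrapped in a pure block
  // and, via the inlineBindings map, a reference `p` bound as `val p = new Pair(1, 2)`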
- * @return optionally, a triple consisting of - * - the class `C` - * - the arguments `args` - * - any bindings that wrap the instance creation - * - whether the instance creation is precomputed or by-name - */ - private object NewInstance { - def unapply(tree: Tree)(implicit ctx: Context): Option[(Symbol, List[Tree], List[Tree], Boolean)] = { - def unapplyLet(bindings: List[Tree], expr: Tree) = - unapply(expr) map { - case (cls, reduced, prefix, precomputed) => (cls, reduced, bindings ::: prefix, precomputed) - } - tree match { - case Apply(fn, args) => - fn match { - case Select(New(tpt), nme.CONSTRUCTOR) => - Some((tpt.tpe.classSymbol, args, Nil, false)) - case TypeApply(Select(New(tpt), nme.CONSTRUCTOR), _) => - Some((tpt.tpe.classSymbol, args, Nil, false)) - case _ => - val meth = fn.symbol - if (meth.name == nme.apply && - meth.flags.is(Synthetic) && - meth.owner.linkedClass.is(Case)) - Some(meth.owner.linkedClass, args, Nil, false) - else None - } - case Ident(_) => - for { - binding <- inlineBindings.get(tree.symbol) - (cls, reduced, prefix, precomputed) <- unapply(binding) - } - yield (cls, reduced, prefix, precomputed || binding.isInstanceOf[ValDef]) - case Inlined(_, bindings, expansion) => - unapplyLet(bindings, expansion) - case Block(stats, expr) if isPureExpr(tree) => - unapplyLet(stats, expr) - case _ => - None - } - } - } - - /** If `tree` is equivalent to `new C(args).x` where class `C` does not have - * initialization code and `x` is a parameter corresponding to one of the - * arguments `args`, the corresponding argument, otherwise `tree` itself. - * Side effects of original arguments need to be preserved. - */ - def reduceProjection(tree: Tree)(implicit ctx: Context): Tree = { - if (ctx.debug) inlining.println(i"try reduce projection $tree") - tree match { - case Select(NewInstance(cls, args, prefix, precomputed), field) if cls.isNoInitsClass => - def matches(param: Symbol, selection: Symbol): Boolean = - param == selection || { - selection.name match { - case InlineAccessorName(underlying) => - param.name == underlying && selection.info.isInstanceOf[ExprType] - case _ => - false - } - } - val idx = cls.asClass.paramAccessors.indexWhere(matches(_, tree.symbol)) - if (idx >= 0 && idx < args.length) { - def finish(arg: Tree) = - new TreeTypeMap().transform(arg) // make sure local bindings in argument have fresh symbols - .reporting(res => i"projecting $tree -> $res", inlining) - val arg = args(idx) - if (precomputed) - if (isPureExpr(arg)) finish(arg) - else tree // nothing we can do here, projection would duplicate side effect - else { - // newInstance is evaluated in place, need to reflect side effects of - // arguments in the order they were written originally - def collectImpure(from: Int, end: Int) = - (from until end).filterNot(i => isPureExpr(args(i))).toList.map(args) - val leading = collectImpure(0, idx) - val trailing = collectImpure(idx + 1, args.length) - val argInPlace = - if (trailing.isEmpty) arg - else letBindUnless(TreeInfo.Pure, arg)(seq(trailing, _)) - finish(seq(prefix, seq(leading, argInPlace))) - } - } - else tree - case Block(stats, expr) if stats.forall(isPureBinding) => - cpy.Block(tree)(stats, reduceProjection(expr)) - case _ => tree - } - } - - /** If this is a value binding: - * - reduce its rhs if it is a projection and adjust its type accordingly, - * - record symbol -> rhs in the InlineBindings context propery. 
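For the projection reduction defined above, a minimal illustration (hypothetical `Pair` as before):

  // `new Pair(1, 2).b` reduces to the literal `2`; through an inline value binding
  //   val p = new Pair(1, 2)
  //   p.b
  // the projection likewise reduces to `2`. If another argument has a side effect,
  // it is preserved in evaluation order:
  //   new Pair({ println("hi"); 1 }, 2).b
  // reduces to something like { println("hi"); 1; 2 } rather than to plain `2`.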
- */ - def normalizeBinding(binding: MemberDef)(implicit ctx: Context) = { - val binding1 = binding match { - case binding: ValDef => - val rhs1 = reduceProjection(binding.rhs) - inlineBindings(ctx).put(binding.symbol, rhs1) - if (rhs1 `eq` binding.rhs) binding - else { - binding.symbol.info = rhs1.tpe - cpy.ValDef(binding)(tpt = TypeTree(rhs1.tpe), rhs = rhs1) - } - case _ => - binding - } - binding1.withPos(call.pos) - } - /** An extractor for references to inlineable arguments. These are : * - by-value arguments marked with `inline` * - all by-name arguments @@ -625,9 +480,9 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { private object InlineableArg { lazy val paramProxies = paramProxy.values.toSet def unapply(tree: Trees.Ident[_])(implicit ctx: Context): Option[Tree] = { - def search(buf: mutable.ListBuffer[MemberDef]) = buf.find(_.name == tree.name) + def search(buf: mutable.ListBuffer[ValOrDefDef]) = buf.find(_.name == tree.name) if (paramProxies.contains(tree.typeOpt)) - search(bindingsBuf).orElse(search(matchBindingsBuf)) match { + search(bindingsBuf) match { case Some(vdef: ValDef) if vdef.symbol.is(Inline) => Some(integrate(vdef.rhs, vdef.symbol)) case Some(ddef: DefDef) => @@ -668,7 +523,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { case Apply(Select(cl @ closureDef(ddef), nme.apply), args) if defn.isFunctionType(cl.tpe) => ddef.tpe.widen match { case mt: MethodType if ddef.vparamss.head.length == args.length => - val bindingsBuf = new mutable.ListBuffer[MemberDef] + val bindingsBuf = new mutable.ListBuffer[ValOrDefDef] val argSyms = (mt.paramNames, mt.paramInfos, args).zipped.map { (name, paramtp, arg) => arg.tpe.dealias match { case ref @ TermRef(NoPrefix, _) => ref.symbol @@ -685,167 +540,6 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } case _ => tree } - - /** The result type of reducing a match. It consists, optionally of a list of bindings - * for the pattern-bound variables and the RHS of the selected case. - * Returns `None` if not case was selected. - */ - type MatchRedux = Option[(List[MemberDef], untpd.Tree)] - - /** Reduce an inline match - * @param scrutinee the scrutinee expression, assumed to be pure - * @param scrutType its fully defined type - * @param cases All cases of the match - * @param typer The current inline typer - * @return optionally, if match can be reduced to a matching case: A pair of - * bindings for all pattern-bound variables and the untyped RHS of the case. - */ - def reduceInlineMatch(scrutinee: Tree, scrutType: Type, cases: List[untpd.CaseDef], typer: Typer)(implicit ctx: Context): MatchRedux = { - - val gadtSyms = typer.gadtSyms(scrutType) - - /** Try to match pattern `pat` against scrutinee reference `scrut`. If successful add - * bindings for variables bound in this pattern to `bindingsBuf`. 
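The style of match this (now removed) reducer selected a case from at compile time, sketched with an assumed `Nat` encoding (none of these definitions are part of the patch):

  sealed trait Nat
  case object Zero extends Nat
  case class Succ[N <: Nat](n: N) extends Nat

  inline def toInt(n: Nat): Int = inline n match {
    case Zero    => 0
    case Succ(m) => toInt(m) + 1
  }
  // toInt(Succ(Succ(Zero))) was reduced to the literal 2 during inlining.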
- */ - def reducePattern(bindingsBuf: mutable.ListBuffer[MemberDef], scrut: TermRef, pat: Tree)(implicit ctx: Context): Boolean = { - val isImplicit = scrut.info == defn.ImplicitScrutineeTypeRef - - def newBinding(name: TermName, flags: FlagSet, rhs: Tree): Symbol = { - val info = if (flags `is` Implicit) rhs.tpe.widen else rhs.tpe.widenTermRefExpr - val sym = newSym(name, flags, info).asTerm - bindingsBuf += ValDef(sym, constToLiteral(rhs)) - sym - } - - def searchImplicit(name: TermName, tpt: Tree) = { - val evidence = typer.inferImplicitArg(tpt.tpe, tpt.pos) - evidence.tpe match { - case fail: Implicits.AmbiguousImplicits => - ctx.error(typer.missingArgMsg(evidence, tpt.tpe, ""), tpt.pos) - true // hard error: return true to stop implicit search here - case fail: Implicits.SearchFailureType => - false - case _ => - if (name != nme.WILDCARD) newBinding(name, Implicit, evidence) - true - } - } - - pat match { - case Typed(pat1, tpt) => - val getBoundVars = new TreeAccumulator[List[TypeSymbol]] { - def apply(syms: List[TypeSymbol], t: Tree)(implicit ctx: Context) = { - val syms1 = t match { - case t: Bind if t.symbol.isType && t.name != tpnme.WILDCARD => - t.symbol.asType :: syms - case _ => - syms - } - foldOver(syms1, t) - } - } - val boundVars = getBoundVars(Nil, tpt) - for (bv <- boundVars) ctx.gadt.setBounds(bv, bv.info.bounds) - if (isImplicit) searchImplicit(nme.WILDCARD, tpt) - else scrut <:< tpt.tpe && { - for (bv <- boundVars) { - bv.info = TypeAlias(ctx.gadt.bounds(bv).lo) - // FIXME: This is very crude. We should approximate with lower or higher bound depending - // on variance, and we should also take care of recursive bounds. Basically what - // ConstraintHandler#approximation does. However, this only works for constrained paramrefs - // not GADT-bound variables. Hopefully we will get some way to improve this when we - // re-implement GADTs in terms of constraints. 
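A small illustration of the case this FIXME concerns (sketch only; `headOf` is a made-up name):

  inline def headOf(inline xs: Any): Any = inline xs match {
    case ys: List[t] => ys.head   // `t` is a pattern-bound type variable; after the
                                  // subtype test it is aliased to its inferred lower bound
    case _           => xs
  }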
- bindingsBuf += TypeDef(bv) - } - reducePattern(bindingsBuf, scrut, pat1) - } - case pat @ Bind(name: TermName, Typed(_, tpt)) if isImplicit => - searchImplicit(name, tpt) - case pat @ Bind(name: TermName, body) => - reducePattern(bindingsBuf, scrut, body) && { - if (name != nme.WILDCARD) newBinding(name, EmptyFlags, ref(scrut)) - true - } - case Ident(nme.WILDCARD) => - true - case pat: Literal => - scrut.widenTermRefExpr =:= pat.tpe - case pat: RefTree => - scrut =:= pat.tpe || - scrut.widen.classSymbol.is(Module) && scrut.widen =:= pat.tpe.widen && { - scrut.prefix match { - case _: SingletonType | NoPrefix => true - case _ => false - } - } - case UnApply(unapp, _, pats) => - unapp.tpe.widen match { - case mt: MethodType if mt.paramInfos.length == 1 => - - def reduceSubPatterns(pats: List[Tree], selectors: List[Tree]): Boolean = (pats, selectors) match { - case (Nil, Nil) => true - case (pat :: pats1, selector :: selectors1) => - val elem = newBinding(InlineBinderName.fresh(), Synthetic, selector) - reducePattern(bindingsBuf, elem.termRef, pat) && - reduceSubPatterns(pats1, selectors1) - case _ => false - } - - val paramType = mt.paramInfos.head - val paramCls = paramType.classSymbol - if (paramCls.is(Case) && unapp.symbol.is(Synthetic) && scrut <:< paramType) { - val caseAccessors = - if (paramCls.is(Scala2x)) paramCls.caseAccessors.filter(_.is(Method)) - else paramCls.asClass.paramAccessors - val selectors = - for (accessor <- caseAccessors) - yield constToLiteral(reduceProjection(ref(scrut).select(accessor).ensureApplied)) - caseAccessors.length == pats.length && reduceSubPatterns(pats, selectors) - } - else if (unapp.symbol.isInlineMethod) { - val app = untpd.Apply(untpd.TypedSplice(unapp), untpd.ref(scrut)) - val app1 = typer.typedExpr(app) - val args = tupleArgs(app1) - args.nonEmpty && reduceSubPatterns(pats, args) - } - else false - case _ => - false - } - case _ => false - } - } - - /** The initial scrutinee binding: `val $scrutineeN = ` */ - val scrutineeSym = newSym(InlineScrutineeName.fresh(), Synthetic, scrutType).asTerm - val scrutineeBinding = normalizeBinding(ValDef(scrutineeSym, scrutinee)) - - def reduceCase(cdef: untpd.CaseDef): MatchRedux = { - val caseBindingsBuf = new mutable.ListBuffer[MemberDef]() - def guardOK(implicit ctx: Context) = cdef.guard.isEmpty || { - val guardCtx = ctx.fresh.setNewScope - caseBindingsBuf.foreach(binding => guardCtx.enter(binding.symbol)) - typer.typed(cdef.guard, defn.BooleanType)(guardCtx) match { - case ConstantValue(true) => true - case _ => false - } - } - if (scrutType != defn.ImplicitScrutineeTypeRef) caseBindingsBuf += scrutineeBinding - val gadtCtx = typer.gadtContext(gadtSyms).addMode(Mode.GADTflexible) - val pat1 = typer.typedPattern(cdef.pat, scrutType)(gadtCtx) - if (reducePattern(caseBindingsBuf, scrutineeSym.termRef, pat1)(gadtCtx) && guardOK) - Some((caseBindingsBuf.toList, cdef.body)) - else - None - } - - def recur(cases: List[untpd.CaseDef]): MatchRedux = cases match { - case Nil => None - case cdef :: cases1 => reduceCase(cdef) `orElse` recur(cases1) - } - - recur(cases) - } } /** A typer for inlined bodies. Beyond standard typing, an inline typer performs @@ -857,7 +551,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { * 4. Make sure inlined code is type-correct. * 5. 
Make sure that the tree's typing is idempotent (so that future -Ycheck passes succeed) */ - class InlineTyper extends Typer { + class InlineTyper extends ReTyper { import reducer._ override def ensureAccessible(tpe: Type, superAccess: Boolean, pos: Position)(implicit ctx: Context): Type = { @@ -873,11 +567,17 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { super.ensureAccessible(tpe, superAccess, pos) } - override def typedTypedSplice(tree: untpd.TypedSplice)(implicit ctx: Context): Tree = - reduceProjection(tryInline(tree.splice) `orElse` super.typedTypedSplice(tree)) - override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context) = - constToLiteral(reduceProjection(super.typedSelect(tree, pt))) + override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context) = + tryInline(tree.asInstanceOf[tpd.Tree]) `orElse` super.typedIdent(tree, pt) + + override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = { + assert(tree.hasType, tree) + val qual1 = typed(tree.qualifier, selectionProto(tree.name, pt, this)) + val res = untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt) + ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.pos) + res + } override def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context) = typed(tree.cond, defn.BooleanType) match { @@ -887,35 +587,10 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { if (isIdempotentExpr(cond1)) selected else Block(cond1 :: Nil, selected) case cond1 => - if (tree.isInstanceOf[untpd.InlineIf]) - errorTree(tree, em"""cannot reduce inline if - | its condition ${tree.cond} - | is not a constant value.""") val if1 = untpd.cpy.If(tree)(cond = untpd.TypedSplice(cond1)) super.typedIf(if1, pt) } - override def typedMatchFinish(tree: untpd.Match, sel: Tree, selType: Type, pt: Type)(implicit ctx: Context) = tree match { - case _: untpd.InlineMatch if !ctx.owner.isInlineMethod => // don't reduce match of nested inline method yet - reduceInlineMatch(sel, sel.tpe, tree.cases, this) match { - case Some((caseBindings, rhs)) => - var rhsCtx = ctx.fresh.setNewScope - for (binding <- caseBindings) { - matchBindingsBuf += binding - rhsCtx.enter(binding.symbol) - } - typedExpr(rhs, pt)(rhsCtx) - case None => - def guardStr(guard: untpd.Tree) = if (guard.isEmpty) "" else i" if $guard" - def patStr(cdef: untpd.CaseDef) = i"case ${cdef.pat}${guardStr(cdef.guard)}" - errorTree(tree, em"""cannot reduce inline match with - | scrutinee: $sel : ${sel.tpe} - | patterns : ${tree.cases.map(patStr).mkString("\n ")}.""") - } - case _ => - super.typedMatchFinish(tree, sel, selType, pt) - } - override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context) = constToLiteral(betaReduce(super.typedApply(tree, pt))) @@ -925,12 +600,12 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { /** Drop any side-effect-free bindings that are unused in expansion or other reachable bindings. * Inline def bindings that are used only once. 
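Together with `betaReduce` and `constToLiteral` in `typedApply` above, this cleanup is what collapses simple expansions; schematically (illustration only):

  // a literal closure applied during inlining ...
  ((x: Int) => x + 1)(3)
  // ... is beta-reduced by typedApply to roughly { val x = 3; x + 1 };
  // since the binding for `x` is pure and used only once, it is inlined here,
  // leaving essentially 3 + 1, which constant folding can reduce further.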
*/ - def dropUnusedDefs(bindings: List[MemberDef], tree: Tree)(implicit ctx: Context): (List[MemberDef], Tree) = { + def dropUnusedDefs(bindings: List[ValOrDefDef], tree: Tree)(implicit ctx: Context): (List[ValOrDefDef], Tree) = { val refCount = newMutableSymbolMap[Int] - val bindingOfSym = newMutableSymbolMap[MemberDef] + val bindingOfSym = newMutableSymbolMap[ValOrDefDef] val dealiased = new java.util.IdentityHashMap[Type, Type]() - def isInlineable(binding: MemberDef) = binding match { + def isInlineable(binding: ValOrDefDef) = binding match { case DefDef(_, Nil, Nil, _, _) => true case vdef @ ValDef(_, _, _) => isPureExpr(vdef.rhs) case _ => false @@ -1028,7 +703,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } val dealiasedTermBindings = - termBindings.mapconserve(dealiasTypeBindings.transform).asInstanceOf[List[MemberDef]] + termBindings.mapconserve(dealiasTypeBindings.transform).asInstanceOf[List[ValOrDefDef]] val dealiasedTree = dealiasTypeBindings.transform(tree) val retained = dealiasedTermBindings.filterConserve(binding => retain(binding.symbol)) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 01f17d3b292b..762cf2e6da16 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -939,7 +939,8 @@ class Namer { typer: Typer => index(rest)(ctx.inClassContext(selfInfo)) symbolOfTree(constr).ensureCompleted() - val parentTypes = ensureFirstIsClass(parents.map(checkedParentType(_)), cls.pos) + val parentTypes = defn.adjustForTuple(cls, cls.typeParams, + ensureFirstIsClass(parents.map(checkedParentType(_)), cls.pos)) typr.println(i"completing $denot, parents = $parents%, %, parentTypes = $parentTypes%, %") tempInfo.finalize(denot, parentTypes) diff --git a/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala index 5eada4788615..847466a7552b 100644 --- a/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala @@ -243,9 +243,8 @@ object PrepareInlineable { val typedBody = if (ctx.reporter.hasErrors) rawBody else ctx.compilationUnit.inlineAccessors.makeInlineable(rawBody) - if (inlined.isInlineMethod) - checkInlineMethod(inlined, typedBody) - val inlineableBody = addReferences(inlined, originalBody, typedBody) + checkInlineMethod(inlined, typedBody) + val inlineableBody = typedBody inlining.println(i"Body to inline for $inlined: $inlineableBody") inlineableBody }) @@ -261,182 +260,4 @@ object PrepareInlineable { em"inline unapply method can be rewritten only if its right hand side is a tuple (e1, ..., eN)", body.pos) } - - /** Tweak untyped tree `original` so that all external references are typed - * and it reflects the changes in the corresponding typed tree `typed` that - * make `typed` inlineable. Concretely: - * - * - all external references via identifiers or this-references are converted - * to typed splices, - * - if X gets an inline accessor in `typed`, references to X in `original` - * are converted to the inline accessor name. - */ - private def addReferences(inlineMethod: Symbol, - original: untpd.Tree, typed: tpd.Tree)(implicit ctx: Context): tpd.Tree = { - - // Maps from positions to external reference types and inline selector names. 
- object referenced extends TreeTraverser { - val typeAtPos = mutable.Map[Position, Type]() - val accessorAtPos = mutable.Map[Position, Symbol]() - val implicitRefTypes = mutable.Set[Type]() - val implicitRefs = new mutable.ListBuffer[Tree] - - def registerIfContextualImplicit(tree: Tree) = tree match { - case tree: RefTree - if tree.removeAttachment(ContextualImplicit).isDefined && - tree.symbol.exists && - !isLocalOrParam(tree.symbol, inlineMethod) && - !implicitRefTypes.contains(tree.tpe) => - if (tree.existsSubTree(t => isLocal(tree.symbol, inlineMethod))) - ctx.warning(i"implicit reference $tree is dropped at inline site because it refers to local symbol(s)", tree.pos) - else { - implicitRefTypes += tree.tpe - implicitRefs += tree - } - case _ => - } - - def registerAccessor(tree: Tree) = { - inlining.println(i"accessor: $tree at ${tree.pos}") - accessorAtPos(tree.pos.toSynthetic) = tree.symbol - // Note: It's possible that during traversals several accessors are stored under the same - // position. This could happen for instance for implicit conersions added around a tree. - // or for a setter containing a getter in an op-assignment node. - // In general, it's always the innermost tree that holds the relevant symbol. The traversal - // order guarantees that the innermost tree's symbol is stored last, and thereby replaces all previously - // stored symbols. - } - - def traverse(tree: Tree)(implicit ctx: Context): Unit = { - val sym = tree.symbol - tree match { - case Ident(nme.WILDCARD) => - case _: Ident | _: This => - //println(i"leaf: $tree at ${tree.pos}") - if (sym.exists && !isLocal(sym, inlineMethod)) { - if (ctx.debug) inlining.println(i"type at $tree @ ${tree.pos.toSynthetic} = ${tree.tpe}") - tree.tpe match { - case tp: NamedType if tp.prefix.member(sym.name).isOverloaded => - // refer to prefix instead of to ident directly, so that overloading can be resolved - // again at expansion site - typeAtPos(tree.pos.startPos) = tp.prefix - case _ => - typeAtPos(tree.pos.toSynthetic) = tree.tpe - } - // Note: It's possible that during traversals several types are stored under the same - // position. This could happen for instance for implicit conersions added around a tree. - // In general, it's always the innermost tree that holds the relevant type. The traversal - // order guarantees that the innermost tree's type is stored last, and thereby replaces all previously - // stored types. 
- } - case _: Select => - sym.name match { - case InlineAccessorName(UniqueInlineName(_, _)) => return // was already recorded in Apply - case InlineAccessorName(_) => registerAccessor(tree) - case _ => - } - case Apply(_: RefTree | _: TypeApply, receiver :: Nil) => - sym.name match { - case InlineAccessorName(UniqueInlineName(_, _)) => registerAccessor(tree) - case _ => - } - case _ => - } - registerIfContextualImplicit(tree) - traverseChildren(tree) - } - } - referenced.traverse(typed) - - // The untyped tree transform that applies the tweaks - object addRefs extends untpd.UntypedTreeMap { - override def transform(tree: untpd.Tree)(implicit ctx: Context): untpd.Tree = { - - def adjustLeaf(tree: untpd.Tree): untpd.Tree = referenced.typeAtPos.get(tree.pos.toSynthetic) match { - case Some(tpe) => untpd.TypedSplice(tree.withType(tpe)) - case none => tree - } - - def adjustForAccessor(ref: untpd.RefTree) = - referenced.accessorAtPos.get(ref.pos.toSynthetic) match { - case Some(acc) => - def accessorRef = untpd.TypedSplice(tpd.ref(acc)) - acc.name match { - case InlineAccessorName(UniqueInlineName(_, _)) => - // In this case we are seeing a pair like this: - // untyped typed - // t.x inline$x(t) - // Drop the selection, since it is part of the accessor - val Select(qual, _) = ref - untpd.Apply(accessorRef, qual :: Nil) - case _ => - accessorRef - } - case none => ref - } - - def adjustQualifier(tree: untpd.Tree): untpd.Tree = tree match { - case tree @ Ident(name1) => - referenced.typeAtPos.get(tree.pos.startPos) match { - case Some(tp: ThisType) => - val qual = untpd.TypedSplice(This(tp.cls).withPos(tree.pos.startPos)) - cpy.Select(tree)(qual, name1) - case none => - tree - } - case tree => tree - } - - def isAccessorLHS(lhs: untpd.Tree): Boolean = lhs match { - case lhs: untpd.Apply => isAccessorLHS(lhs.fun) - case lhs: untpd.TypeApply => isAccessorLHS(lhs.fun) - case lhs: untpd.RefTree => lhs.name.is(InlineAccessorName) - case untpd.TypedSplice(lhs1) => lhs1.symbol.name.is(InlineAccessorName) - case _ => false - } - - val tree1 = super.transform(tree) - tree1 match { - case This(_) => - adjustLeaf(tree1) - case tree1: untpd.Ident => - adjustQualifier(adjustLeaf(adjustForAccessor(tree1))) - case tree1: untpd.Select => - adjustForAccessor(tree1) - case Assign(lhs, rhs) if isAccessorLHS(lhs) => - cpy.Apply(tree1)(lhs, rhs :: Nil) - case tree: untpd.DerivedTypeTree => - inlining.println(i"inlining derived $tree --> ${ctx.typer.typed(tree)}") - untpd.TypedSplice(ctx.typer.typed(tree)) - case _ => - tree1 - } - } - } - val implicitBindings = - for (iref <- referenced.implicitRefs.toList) yield { - val localImplicit = iref.symbol.asTerm.copy( - owner = inlineMethod, - name = UniqueInlineName.fresh(iref.symbol.name.asTermName), - flags = Implicit | Method | Stable | iref.symbol.flags & (Inline | Erased), - info = iref.tpe.widen.ensureMethodic, - coord = inlineMethod.pos).asTerm - val idef = polyDefDef(localImplicit, tps => vrefss => - iref.appliedToTypes(tps).appliedToArgss(vrefss)) - if (localImplicit.is(Inline)) { - // produce a Body annotation for inlining - def untype(tree: Tree): untpd.Tree = tree match { - case Apply(fn, args) => untpd.cpy.Apply(tree)(untype(fn), args) - case TypeApply(fn, args) => untpd.cpy.TypeApply(tree)(untype(fn), args) - case _ => untpd.TypedSplice(tree) - } - val inlineBody = tpd.UntypedSplice(untype(idef.rhs)).withType(idef.rhs.tpe) - inlining.println(i"body annot for $idef: $inlineBody") - localImplicit.addAnnotation(ConcreteBodyAnnotation(inlineBody)) - } - idef - } 
- val untpdSplice = tpd.UntypedSplice(addRefs.transform(original)).withType(typed.tpe) - seq(implicitBindings, untpdSplice) - } } \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 9dcf519b157f..52882c91d63a 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -81,7 +81,7 @@ object ProtoTypes { * achieved by replacing expected type parameters with wildcards. */ def constrainResult(meth: Symbol, mt: Type, pt: Type)(implicit ctx: Context): Boolean = - if (Inliner.isInlineable(meth)) { + if (Inliner.isInlineable(meth) && !Inliner.typedInline) { constrainResult(mt, wildApprox(pt)) true } diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index a8346503888b..7818eff87d34 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -117,6 +117,8 @@ class ReTyper extends Typer with ReChecking { throw ex } + override def inlineExpansion(mdef: DefDef)(implicit ctx: Context): Tree = mdef + override def checkVariance(tree: Tree)(implicit ctx: Context) = () override def inferView(from: Tree, to: Type)(implicit ctx: Context): Implicits.SearchResult = Implicits.NoMatchingImplicitsFailure diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 6e5807bbd1f3..8a83f3814582 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -569,8 +569,7 @@ class Typer extends Namer def typedTpt = checkSimpleKinded(typedType(tree.tpt)) def handlePattern: Tree = { val tpt1 = typedTpt - if (!ctx.isAfterTyper && pt != defn.ImplicitScrutineeTypeRef) - constrainPatternType(tpt1.tpe, pt)(ctx.addMode(Mode.GADTflexible)) + if (!ctx.isAfterTyper) constrainPatternType(tpt1.tpe, pt)(ctx.addMode(Mode.GADTflexible)) // special case for an abstract type that comes with a class tag tryWithClassTag(ascription(tpt1, isWildcard = true), pt) } @@ -711,7 +710,6 @@ class Typer extends Namer } def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context): Tree = track("typedIf") { - if (tree.isInstanceOf[untpd.InlineIf]) checkInInlineContext("inline if", tree.pos) val cond1 = typed(tree.cond, defn.BooleanType) val thenp2 :: elsep2 :: Nil = harmonic(harmonize) { val thenp1 = typed(tree.thenp, pt.notApplied) @@ -976,12 +974,7 @@ class Typer extends Namer val (protoFormals, _) = decomposeProtoFunction(pt, 1) val unchecked = pt.isRef(defn.PartialFunctionClass) typed(desugar.makeCaseLambda(tree.cases, protoFormals.length, unchecked) withPos tree.pos, pt) - case id @ untpd.ImplicitScrutinee() => - checkInInlineContext("implicit match", tree.pos) - val sel1 = id.withType(defn.ImplicitScrutineeTypeRef) - typedMatchFinish(tree, sel1, sel1.tpe, pt) case _ => - if (tree.isInstanceOf[untpd.InlineMatch]) checkInInlineContext("inline match", tree.pos) val sel1 = typedExpr(tree.selector) val selType = fullyDefinedType(sel1.tpe, "pattern selector", tree.pos).widen typedMatchFinish(tree, sel1, selType, pt) @@ -1382,7 +1375,6 @@ class Typer extends Namer if (body1.tpe.isInstanceOf[TermRef]) pt1 else body1.tpe.underlyingIfRepeated(isJava = false) val sym = ctx.newPatternBoundSymbol(tree.name, symTp, tree.pos) - if (pt == defn.ImplicitScrutineeTypeRef) sym.setFlag(Implicit) if (ctx.mode.is(Mode.InPatternAlternative)) ctx.error(i"Illegal 
variable ${sym.name} in pattern alternative", tree.pos) assignType(cpy.Bind(tree)(tree.name, body1), sym) @@ -1622,7 +1614,7 @@ class Typer extends Namer // check value class constraints checkDerivedValueClass(cls, body1) - if (ctx.settings.YretainTrees.value) cls.treeOrProvider = cdef1 + if (ctx.settings.YretainTrees.value) cls.rootTreeOrProvider = cdef1 cdef1 @@ -1999,10 +1991,17 @@ class Typer extends Namer case none => typed(mdef) match { case mdef1: DefDef if Inliner.hasBodyToInline(mdef1.symbol) => - assert(mdef1.symbol.isInlineMethod, mdef.symbol) - Inliner.bodyToInline(mdef1.symbol) // just make sure accessors are computed, - buf += mdef1 // but keep original definition, since inline-expanded code - // is pickled in this case. + if (Inliner.typedInline) { + buf += inlineExpansion(mdef1) + // replace body with expansion, because it will be used as inlined body + // from separately compiled files - the original BodyAnnotation is not kept. + } + else { + assert(mdef1.symbol.isInlineMethod, mdef.symbol) + Inliner.bodyToInline(mdef1.symbol) // just make sure accessors are computed, + buf += mdef1 // but keep original definition, since inline-expanded code + // is pickled in this case. + } case mdef1 => import untpd.modsDeco mdef match { @@ -2034,6 +2033,13 @@ class Typer extends Namer checkEnumCompanions(traverse(stats)(localCtx), enumContexts) } + /** Given an inline method `mdef`, the method rewritten so that its body + * uses accessors to access non-public members. + * Overwritten in Retyper to return `mdef` unchanged. + */ + protected def inlineExpansion(mdef: DefDef)(implicit ctx: Context): Tree = + tpd.cpy.DefDef(mdef)(rhs = Inliner.bodyToInline(mdef.symbol)) + def typedExpr(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree = typed(tree, pt)(ctx retractMode Mode.PatternOrTypeBits) def typedType(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree = // todo: retract mode between Type and Pattern? 
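To illustrate what `inlineExpansion` changes for separate compilation, consider a hypothetical class (not part of the patch):

  class C {
    private val secret = 42
    inline def get: Int = secret
  }

  // With typedInline, the DefDef stored (and pickled) for `get` has its right-hand side
  // replaced by Inliner.bodyToInline(get), i.e. a body that goes through the generated
  // inline accessor (roughly C.this.inline$secret) instead of the private field, so the
  // method can still be inlined from other compilation units.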
@@ -2445,8 +2451,9 @@ class Typer extends Namer else if (Inliner.isInlineable(tree) && !ctx.settings.YnoInline.value && !ctx.isAfterTyper && - !ctx.reporter.hasErrors) { - tree.tpe <:< wildApprox(pt) + !ctx.reporter.hasErrors && + (!Inliner.typedInline || tree.tpe <:< pt)) { + if (!Inliner.typedInline) tree.tpe <:< wildApprox(pt) readaptSimplified(Inliner.inlineCall(tree, pt)) } else if (tree.tpe <:< pt) { diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index e3b97557b42a..645ddc6c66ad 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -38,6 +38,7 @@ implicit-dep.scala inline-access-levels macro-with-array macro-with-type +matchtype.scala phantom-Eq2 power-macro quote-lift-inline-params diff --git a/compiler/test/dotc/run-test-pickling.blacklist b/compiler/test/dotc/run-test-pickling.blacklist index cec951285d9b..fb9a0ef0f2d2 100644 --- a/compiler/test/dotc/run-test-pickling.blacklist +++ b/compiler/test/dotc/run-test-pickling.blacklist @@ -20,7 +20,6 @@ i4803d i4803e i4803f i4947b -implicitMatch.scala implicitShortcut lazy-implicit-lists.scala lazy-implicit-nums.scala diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala index 4cafc1673027..5b9500d9cdf4 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala @@ -112,7 +112,7 @@ class CommentPicklingTest { val trees = files.flatMap { f => val unpickler = new DottyUnpickler(f.bytes().toArray) unpickler.enter(roots = Set.empty) - unpickler.trees(ctx) + unpickler.rootTrees(ctx) } fn(trees, ctx) } diff --git a/docs/docs/internals/syntax.md b/docs/docs/internals/syntax.md index 2ec2cc770708..4145825cd73e 100644 --- a/docs/docs/internals/syntax.md +++ b/docs/docs/internals/syntax.md @@ -163,9 +163,9 @@ BlockResult ::= [FunArgMods] FunParams ‘=>’ Block FunParams ::= Bindings | id | ‘_’ -Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} +Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] If(Parens(cond), thenp, elsep?) - | [‘inline’] ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) + | ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) 
| ‘while’ ‘(’ Expr ‘)’ {nl} Expr WhileDo(Parens(cond), body) | ‘while’ Expr ‘do’ Expr WhileDo(cond, body) | ‘do’ Expr [semi] ‘while’ Expr DoWhile(expr, cond) @@ -177,8 +177,7 @@ Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} | [SimpleExpr ‘.’] id ‘=’ Expr Assign(expr, expr) | SimpleExpr1 ArgumentExprs ‘=’ Expr Assign(expr, expr) | PostfixExpr [Ascription] - | [‘inline’] PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’ Match(expr, cases) -- point on match - | ‘implicit’ ‘match’ ‘{’ ImplicitCaseClauses ‘}’ + | PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’ Match(expr, cases) -- point on match Ascription ::= ‘:’ InfixType Typed(expr, tp) | ‘:’ Annotation {Annotation} Typed(expr, Annotated(EmptyTree, annot)*) Catches ::= ‘catch’ Expr @@ -227,8 +226,6 @@ Guard ::= ‘if’ PostfixExpr CaseClauses ::= CaseClause { CaseClause } Match(EmptyTree, cases) CaseClause ::= ‘case’ (Pattern [Guard] ‘=>’ Block | INT) CaseDef(pat, guard?, block) // block starts at => -ImplicitCaseClauses ::= ImplicitCaseClause { ImplicitCaseClause } -ImplicitCaseClause ::= ‘case’ PatVar [‘:’ RefinedType] [Guard] ‘=>’ Block TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [nl] diff --git a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala index e1a9f41d6e58..8194af9da24a 100644 --- a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala +++ b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala @@ -262,7 +262,7 @@ class DottyLanguageServer extends LanguageServer && funSym.owner.is(CaseClass)) { funSym.owner.info.member(name).symbol } else { - val classTree = funSym.topLevelClass.asClass.tree + val classTree = funSym.topLevelClass.asClass.rootTree tpd.defPath(funSym, classTree).lastOption.flatMap { case DefDef(_, _, paramss, _, _) => paramss.flatten.find(_.name == name).map(_.symbol) diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala index 3662ba000eb9..7350fccaa8c9 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -5,107 +5,118 @@ import typelevel._ sealed trait Tuple extends Any { import Tuple._ - inline def toArray: Array[Object] = inline constValueOpt[BoundedSize[this.type]] match { - case Some(0) => - $emptyArray - case Some(1) => - val t = asInstanceOf[Tuple1[Object]] - Array(t._1) - case Some(2) => - val t = asInstanceOf[Tuple2[Object, Object]] - Array(t._1, t._2) - case Some(3) => - val t = asInstanceOf[Tuple3[Object, Object, Object]] - Array(t._1, t._2, t._3) - case Some(4) => - val t = asInstanceOf[Tuple4[Object, Object, Object, Object]] - Array(t._1, t._2, t._3, t._4) - case Some(n) if n <= $MaxSpecialized => - $toArray(this, n) - case Some(n) => - asInstanceOf[TupleXXL].elems - case None => - dynamicToArray(this) - } - - inline def *: [H] (x: H): H *: this.type = { - type Result = H *: this.type - inline constValueOpt[BoundedSize[this.type]] match { + inline def toArray: Array[Object] = + /*if (specialize) + inline constValueOpt[BoundedSize[this.type]] match { case Some(0) => - Tuple1(x).asInstanceOf[Result] + $emptyArray case Some(1) => - Tuple2(x, asInstanceOf[Tuple1[_]]._1).asInstanceOf[Result] + val t = asInstanceOf[Tuple1[Object]] + Array(t._1) case Some(2) => - val t = asInstanceOf[Tuple2[_, _]] - Tuple3(x, t._1, t._2).asInstanceOf[Result] + val t = asInstanceOf[Tuple2[Object, Object]] + Array(t._1, t._2) case Some(3) => - val t = asInstanceOf[Tuple3[_, _, _]] - 
Tuple4(x, t._1, t._2, t._3).asInstanceOf[Result] + val t = asInstanceOf[Tuple3[Object, Object, Object]] + Array(t._1, t._2, t._3) case Some(4) => - val t = asInstanceOf[Tuple4[_, _, _, _]] - Tuple5(x, t._1, t._2, t._3, t._4).asInstanceOf[Result] + val t = asInstanceOf[Tuple4[Object, Object, Object, Object]] + Array(t._1, t._2, t._3, t._4) + case Some(n) if n <= $MaxSpecialized => + $toArray(this, n) case Some(n) => - fromArray[Result]($consArray(x, toArray)) - case _ => - dynamic_*:[this.type, H](this, x) - } - } - - inline def ++(that: Tuple): Concat[this.type, that.type] = { - type Result = Concat[this.type, that.type] - inline constValueOpt[BoundedSize[this.type]] match { - case Some(0) => - that.asInstanceOf[Result] - case Some(1) => - if (constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] - else (asInstanceOf[Tuple1[_]]._1 *: that).asInstanceOf[Result] - case Some(2) => - val t = asInstanceOf[Tuple2[_, _]] - inline constValue[BoundedSize[that.type]] match { - case 0 => this.asInstanceOf[Result] - case 1 => - val u = that.asInstanceOf[Tuple1[_]] - Tuple3(t._1, t._2, u._1).asInstanceOf[Result] - case 2 => - val u = that.asInstanceOf[Tuple2[_, _]] - Tuple4(t._1, t._2, u._1, u._2).asInstanceOf[Result] - case _ => - genericConcat[Result](this, that).asInstanceOf[Result] - } - case Some(3) => - val t = asInstanceOf[Tuple3[_, _, _]] - inline constValue[BoundedSize[that.type]] match { - case 0 => this.asInstanceOf[Result] - case 1 => - val u = that.asInstanceOf[Tuple1[_]] - Tuple4(t._1, t._2, t._3, u._1).asInstanceOf[Result] - case _ => - genericConcat[Result](this, that).asInstanceOf[Result] - } - case Some(_) => - if (constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] - else genericConcat[Result](this, that).asInstanceOf[Result] + asInstanceOf[TupleXXL].elems case None => - dynamic_++[this.type, that.type](this, that) + dynamicToArray(this) } - } + else*/ dynamicToArray(this) + + inline def *: [H] (x: H): H *: this.type = + /*if (specialize) { + type Result = H *: this.type + inline constValueOpt[BoundedSize[this.type]] match { + case Some(0) => + Tuple1(x).asInstanceOf[Result] + case Some(1) => + Tuple2(x, asInstanceOf[Tuple1[_]]._1).asInstanceOf[Result] + case Some(2) => + val t = asInstanceOf[Tuple2[_, _]] + Tuple3(x, t._1, t._2).asInstanceOf[Result] + case Some(3) => + val t = asInstanceOf[Tuple3[_, _, _]] + Tuple4(x, t._1, t._2, t._3).asInstanceOf[Result] + case Some(4) => + val t = asInstanceOf[Tuple4[_, _, _, _]] + Tuple5(x, t._1, t._2, t._3, t._4).asInstanceOf[Result] + case Some(n) => + fromArray[Result]($consArray(x, toArray)) + case _ => + dynamic_*:[this.type, H](this, x) + } + } + else*/ dynamic_*:[this.type, H](this, x) + + inline def ++(that: Tuple): Concat[this.type, that.type] = + /*if (specialize) { + type Result = Concat[this.type, that.type] + inline constValueOpt[BoundedSize[this.type]] match { + case Some(0) => + that.asInstanceOf[Result] + case Some(1) => + if (constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] + else (asInstanceOf[Tuple1[_]]._1 *: that).asInstanceOf[Result] + case Some(2) => + val t = asInstanceOf[Tuple2[_, _]] + inline constValue[BoundedSize[that.type]] match { + case 0 => this.asInstanceOf[Result] + case 1 => + val u = that.asInstanceOf[Tuple1[_]] + Tuple3(t._1, t._2, u._1).asInstanceOf[Result] + case 2 => + val u = that.asInstanceOf[Tuple2[_, _]] + Tuple4(t._1, t._2, u._1, u._2).asInstanceOf[Result] + case _ => + genericConcat[Result](this, that).asInstanceOf[Result] + } + case Some(3) => + val t = 
asInstanceOf[Tuple3[_, _, _]] + inline constValue[BoundedSize[that.type]] match { + case 0 => this.asInstanceOf[Result] + case 1 => + val u = that.asInstanceOf[Tuple1[_]] + Tuple4(t._1, t._2, t._3, u._1).asInstanceOf[Result] + case _ => + genericConcat[Result](this, that).asInstanceOf[Result] + } + case Some(_) => + if (constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] + else genericConcat[Result](this, that).asInstanceOf[Result] + case None => + dynamic_++[this.type, that.type](this, that) + } + } + else*/ dynamic_++[this.type, that.type](this, that) inline def genericConcat[T <: Tuple](xs: Tuple, ys: Tuple): Tuple = fromArray[T](xs.toArray ++ ys.toArray) - inline def size: Size[this.type] = { - type Result = Size[this.type] - inline constValueOpt[BoundedSize[this.type]] match { - case Some(n) => n.asInstanceOf[Result] - case _ => dynamicSize(this).asInstanceOf[Result] + inline def size: Size[this.type] = + /*if (specialize) { + type Result = Size[this.type] + inline constValueOpt[BoundedSize[this.type]] match { + case Some(n) => n.asInstanceOf[Result] + case _ => dynamicSize(this) + } } - } + else*/ dynamicSize(this) } object Tuple { inline val $MaxSpecialized = 22 inline private val XXL = $MaxSpecialized + 1 + final val specialize = false + type Head[+X <: NonEmptyTuple] = X match { case x *: _ => x } @@ -164,32 +175,34 @@ object Tuple { } inline def fromArray[T <: Tuple](xs: Array[Object]): T = - inline constValue[BoundedSize[T]] match { - case 0 => ().asInstanceOf[T] - case 1 => Tuple1(xs(0)).asInstanceOf[T] - case 2 => Tuple2(xs(0), xs(1)).asInstanceOf[T] - case 3 => Tuple3(xs(0), xs(1), xs(2)).asInstanceOf[T] - case 4 => Tuple4(xs(0), xs(1), xs(2), xs(3)).asInstanceOf[T] - case 5 => Tuple5(xs(0), xs(1), xs(2), xs(3), xs(4)).asInstanceOf[T] - case 6 => Tuple6(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5)).asInstanceOf[T] - case 7 => Tuple7(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6)).asInstanceOf[T] - case 8 => Tuple8(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7)).asInstanceOf[T] - case 9 => Tuple9(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8)).asInstanceOf[T] - case 10 => Tuple10(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9)).asInstanceOf[T] - case 11 => Tuple11(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10)).asInstanceOf[T] - case 12 => Tuple12(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11)).asInstanceOf[T] - case 13 => Tuple13(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12)).asInstanceOf[T] - case 14 => Tuple14(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13)).asInstanceOf[T] - case 15 => Tuple15(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14)).asInstanceOf[T] - case 16 => Tuple16(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15)).asInstanceOf[T] - case 17 => Tuple17(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16)).asInstanceOf[T] - case 18 => Tuple18(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17)).asInstanceOf[T] - case 19 => Tuple19(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), 
xs(18)).asInstanceOf[T] - case 20 => Tuple20(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19)).asInstanceOf[T] - case 21 => Tuple21(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20)).asInstanceOf[T] - case 22 => Tuple22(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20), xs(21)).asInstanceOf[T] - case _ => TupleXXL(xs).asInstanceOf[T] - } + /*if (specialize) + inline constValue[BoundedSize[T]] match { + case 0 => ().asInstanceOf[T] + case 1 => Tuple1(xs(0)).asInstanceOf[T] + case 2 => Tuple2(xs(0), xs(1)).asInstanceOf[T] + case 3 => Tuple3(xs(0), xs(1), xs(2)).asInstanceOf[T] + case 4 => Tuple4(xs(0), xs(1), xs(2), xs(3)).asInstanceOf[T] + case 5 => Tuple5(xs(0), xs(1), xs(2), xs(3), xs(4)).asInstanceOf[T] + case 6 => Tuple6(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5)).asInstanceOf[T] + case 7 => Tuple7(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6)).asInstanceOf[T] + case 8 => Tuple8(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7)).asInstanceOf[T] + case 9 => Tuple9(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8)).asInstanceOf[T] + case 10 => Tuple10(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9)).asInstanceOf[T] + case 11 => Tuple11(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10)).asInstanceOf[T] + case 12 => Tuple12(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11)).asInstanceOf[T] + case 13 => Tuple13(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12)).asInstanceOf[T] + case 14 => Tuple14(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13)).asInstanceOf[T] + case 15 => Tuple15(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14)).asInstanceOf[T] + case 16 => Tuple16(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15)).asInstanceOf[T] + case 17 => Tuple17(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16)).asInstanceOf[T] + case 18 => Tuple18(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17)).asInstanceOf[T] + case 19 => Tuple19(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18)).asInstanceOf[T] + case 20 => Tuple20(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19)).asInstanceOf[T] + case 21 => Tuple21(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20)).asInstanceOf[T] + case 22 => Tuple22(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20), xs(21)).asInstanceOf[T] + case _ => TupleXXL(xs).asInstanceOf[T] + } + else */dynamicFromArray[T](xs) def dynamicFromArray[T <: Tuple](xs: Array[Object]): T = xs.length match { case 0 
=> ().asInstanceOf[T] @@ -272,10 +285,10 @@ object Tuple { dynamicFromArray[Result](dynamicToArray(self) ++ dynamicToArray(that)) } - def dynamicSize[This <: Tuple](self: This) = (self: Any) match { - case self: Unit => 0 - case self: TupleXXL => self.elems.length - case self: Product => self.productArity + def dynamicSize[This <: Tuple](self: This): Size[This] = (self: Any) match { + case self: Unit => 0.asInstanceOf[Size[This]] + case self: TupleXXL => self.elems.length.asInstanceOf[Size[This]] + case self: Product => self.productArity.asInstanceOf[Size[This]] } } @@ -283,110 +296,118 @@ abstract sealed class NonEmptyTuple extends Tuple { import Tuple._ import NonEmptyTuple._ - inline def head: Head[this.type] = { - type Result = Head[this.type] - val resVal = inline constValueOpt[BoundedSize[this.type]] match { - case Some(1) => - val t = asInstanceOf[Tuple1[_]] - t._1 - case Some(2) => - val t = asInstanceOf[Tuple2[_, _]] - t._1 - case Some(3) => - val t = asInstanceOf[Tuple3[_, _, _]] - t._1 - case Some(4) => - val t = asInstanceOf[Tuple4[_, _, _, _]] - t._1 - case Some(n) if n > 4 && n <= $MaxSpecialized => - asInstanceOf[Product].productElement(0) - case Some(n) if n > $MaxSpecialized => - val t = asInstanceOf[TupleXXL] - t.elems(0) - case None => - dynamicHead[this.type](this) + inline def head: Head[this.type] = + /*if (specialize) { + type Result = Head[this.type] + val resVal = inline constValueOpt[BoundedSize[this.type]] match { + case Some(1) => + val t = asInstanceOf[Tuple1[_]] + t._1 + case Some(2) => + val t = asInstanceOf[Tuple2[_, _]] + t._1 + case Some(3) => + val t = asInstanceOf[Tuple3[_, _, _]] + t._1 + case Some(4) => + val t = asInstanceOf[Tuple4[_, _, _, _]] + t._1 + case Some(n) if n > 4 && n <= $MaxSpecialized => + asInstanceOf[Product].productElement(0) + case Some(n) if n > $MaxSpecialized => + val t = asInstanceOf[TupleXXL] + t.elems(0) + case None => + dynamicHead[this.type](this) + } + resVal.asInstanceOf[Result] } - resVal.asInstanceOf[Result] - } - - inline def tail: Tail[this.type] = { - type Result = Tail[this.type] - inline constValueOpt[BoundedSize[this.type]] match { - case Some(1) => - ().asInstanceOf[Result] - case Some(2) => - val t = asInstanceOf[Tuple2[_, _]] - Tuple1(t._2).asInstanceOf[Result] - case Some(3) => - val t = asInstanceOf[Tuple3[_, _, _]] - Tuple2(t._2, t._3).asInstanceOf[Result] - case Some(4) => - val t = asInstanceOf[Tuple4[_, _, _, _]] - Tuple3(t._2, t._3, t._4).asInstanceOf[Result] - case Some(5) => - val t = asInstanceOf[Tuple5[_, _, _, _, _]] - Tuple4(t._2, t._3, t._4, t._5).asInstanceOf[Result] - case Some(n) if n > 5 => - fromArray[Result](toArray.tail) - case None => - dynamicTail[this.type](this) + else*/ dynamicHead[this.type](this) + + inline def tail: Tail[this.type] = + /*if (specialize) { + type Result = Tail[this.type] + inline constValueOpt[BoundedSize[this.type]] match { + case Some(1) => + ().asInstanceOf[Result] + case Some(2) => + val t = asInstanceOf[Tuple2[_, _]] + Tuple1(t._2).asInstanceOf[Result] + case Some(3) => + val t = asInstanceOf[Tuple3[_, _, _]] + Tuple2(t._2, t._3).asInstanceOf[Result] + case Some(4) => + val t = asInstanceOf[Tuple4[_, _, _, _]] + Tuple3(t._2, t._3, t._4).asInstanceOf[Result] + case Some(5) => + val t = asInstanceOf[Tuple5[_, _, _, _, _]] + Tuple4(t._2, t._3, t._4, t._5).asInstanceOf[Result] + case Some(n) if n > 5 => + fromArray[Result](toArray.tail) + case None => + dynamicTail[this.type](this) + } } - } + else*/ dynamicTail[this.type](this) + /* inline def fallbackApply(n: Int) = 
inline constValueOpt[n.type] match { - case Some(n: Int) => error("index out of bounds", n) + case Some(n: Int) => error("index out of bounds: ", n) case None => dynamicApply[this.type](this, n) } - - inline def apply(n: Int): Elem[this.type, n.type] = { - type Result = Elem[this.type, n.type] - inline constValueOpt[Size[this.type]] match { - case Some(1) => - val t = asInstanceOf[Tuple1[_]] - inline constValueOpt[n.type] match { - case Some(0) => t._1.asInstanceOf[Result] - case _ => fallbackApply(n).asInstanceOf[Result] - } - case Some(2) => - val t = asInstanceOf[Tuple2[_, _]] - inline constValueOpt[n.type] match { - case Some(0) => t._1.asInstanceOf[Result] - case Some(1) => t._2.asInstanceOf[Result] - case _ => fallbackApply(n).asInstanceOf[Result] - } - case Some(3) => - val t = asInstanceOf[Tuple3[_, _, _]] - inline constValueOpt[n.type] match { - case Some(0) => t._1.asInstanceOf[Result] - case Some(1) => t._2.asInstanceOf[Result] - case Some(2) => t._3.asInstanceOf[Result] - case _ => fallbackApply(n).asInstanceOf[Result] - } - case Some(4) => - val t = asInstanceOf[Tuple4[_, _, _, _]] - inline constValueOpt[n.type] match { - case Some(0) => t._1.asInstanceOf[Result] - case Some(1) => t._2.asInstanceOf[Result] - case Some(2) => t._3.asInstanceOf[Result] - case Some(3) => t._4.asInstanceOf[Result] - case _ => fallbackApply(n).asInstanceOf[Result] - } - case Some(s) if s > 4 && s <= $MaxSpecialized => - val t = asInstanceOf[Product] - inline constValueOpt[n.type] match { - case Some(n) if n >= 0 && n < s => t.productElement(n).asInstanceOf[Result] - case _ => fallbackApply(n).asInstanceOf[Result] - } - case Some(s) if s > $MaxSpecialized => - val t = asInstanceOf[TupleXXL] - inline constValueOpt[n.type] match { - case Some(n) if n >= 0 && n < s => t.elems(n).asInstanceOf[Result] - case _ => fallbackApply(n).asInstanceOf[Result] - } - case _ => fallbackApply(n).asInstanceOf[Result] + */ + + inline def apply(n: Int): Elem[this.type, n.type] = + /*if (specialize) { + type Result = Elem[this.type, n.type] + inline constValueOpt[Size[this.type]] match { + case Some(1) => + val t = asInstanceOf[Tuple1[_]] + inline constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case Some(2) => + val t = asInstanceOf[Tuple2[_, _]] + inline constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case Some(3) => + val t = asInstanceOf[Tuple3[_, _, _]] + inline constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case Some(2) => t._3.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case Some(4) => + val t = asInstanceOf[Tuple4[_, _, _, _]] + inline constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case Some(2) => t._3.asInstanceOf[Result] + case Some(3) => t._4.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case Some(s) if s > 4 && s <= $MaxSpecialized => + val t = asInstanceOf[Product] + inline constValueOpt[n.type] match { + case Some(n) if n >= 0 && n < s => t.productElement(n).asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case Some(s) if s > $MaxSpecialized => + val t = asInstanceOf[TupleXXL] + inline constValueOpt[n.type] match { + case Some(n) if n >= 0 && n < s => 
t.elems(n).asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case _ => fallbackApply(n).asInstanceOf[Result] + } } - } + else*/ dynamicApply[this.type](this, n) } object NonEmptyTuple { @@ -408,7 +429,7 @@ object NonEmptyTuple { def dynamicTail[This <: NonEmptyTuple] (self: This): Tail[This] = { type Result = Tail[This] val res = (self: Any) match { - case self: Tuple1[_] => self._1 + case self: Tuple1[_] => () case self: Tuple2[_, _] => Tuple1(self._2) case self: Tuple3[_, _, _] => Tuple2(self._2, self._3) case self: Tuple4[_, _, _, _] => Tuple3(self._2, self._3, self._4) @@ -431,5 +452,5 @@ object NonEmptyTuple { sealed class *:[+H, +T <: Tuple] extends NonEmptyTuple object *: { - inline def unapply[H, T <: Tuple](x: H *: T) = (x.head, x.tail) + def unapply[H, T <: Tuple](x: H *: T) = (x.head, x.tail) } diff --git a/library/src/dotty/DottyPredef.scala b/library/src/dotty/DottyPredef.scala index b831f1c3f6dc..9cc046127317 100644 --- a/library/src/dotty/DottyPredef.scala +++ b/library/src/dotty/DottyPredef.scala @@ -24,12 +24,12 @@ object DottyPredef { */ abstract class ImplicitConverter[-T, +U] extends Function1[T, U] - @forceInline final def assert(assertion: Boolean, message: => Any): Unit = { + @forceInline final def assert(assertion: => Boolean, message: => Any): Unit = { if (!assertion) assertFail(message) } - @forceInline final def assert(assertion: Boolean): Unit = { + @forceInline final def assert(assertion: => Boolean): Unit = { if (!assertion) assertFail() } diff --git a/library/src/scala/tasty/reflect/TreeOps.scala b/library/src/scala/tasty/reflect/TreeOps.scala index dec65a71f800..f956e97f6369 100644 --- a/library/src/scala/tasty/reflect/TreeOps.scala +++ b/library/src/scala/tasty/reflect/TreeOps.scala @@ -147,6 +147,7 @@ trait TreeOps extends TastyCore { trait TermAPI { def tpe(implicit ctx: Context): Type def pos(implicit ctx: Context): Position + def underlyingArgument(implicit ctx: Context): Term } implicit def TermDeco(term: Term): TermAPI diff --git a/tests/neg/tuple-oob1.scala b/tests/disabled/neg/tuple-oob1.scala similarity index 100% rename from tests/neg/tuple-oob1.scala rename to tests/disabled/neg/tuple-oob1.scala diff --git a/tests/neg/implicitMatch-ambiguous.scala b/tests/invalid/neg/implicitMatch-ambiguous.scala similarity index 100% rename from tests/neg/implicitMatch-ambiguous.scala rename to tests/invalid/neg/implicitMatch-ambiguous.scala diff --git a/tests/neg/typelevel-erased-leak.scala b/tests/invalid/neg/typelevel-erased-leak.scala similarity index 100% rename from tests/neg/typelevel-erased-leak.scala rename to tests/invalid/neg/typelevel-erased-leak.scala diff --git a/tests/neg/typelevel-nomatch.scala b/tests/invalid/neg/typelevel-nomatch.scala similarity index 100% rename from tests/neg/typelevel-nomatch.scala rename to tests/invalid/neg/typelevel-nomatch.scala diff --git a/tests/neg/typelevel.scala b/tests/invalid/neg/typelevel.scala similarity index 100% rename from tests/neg/typelevel.scala rename to tests/invalid/neg/typelevel.scala diff --git a/tests/pos/transparent-overload.scala b/tests/invalid/pos/transparent-overload.scala similarity index 100% rename from tests/pos/transparent-overload.scala rename to tests/invalid/pos/transparent-overload.scala diff --git a/tests/pos/typelevel-vector1.scala b/tests/invalid/pos/typelevel-vector1.scala similarity index 100% rename from tests/pos/typelevel-vector1.scala rename to tests/invalid/pos/typelevel-vector1.scala diff --git a/tests/run/Tuple.scala 
b/tests/invalid/run/Tuple.scala similarity index 100% rename from tests/run/Tuple.scala rename to tests/invalid/run/Tuple.scala diff --git a/tests/run/implicitMatch.scala b/tests/invalid/run/implicitMatch.scala similarity index 100% rename from tests/run/implicitMatch.scala rename to tests/invalid/run/implicitMatch.scala diff --git a/tests/run/typelevel-defaultValue.scala b/tests/invalid/run/typelevel-defaultValue.scala similarity index 100% rename from tests/run/typelevel-defaultValue.scala rename to tests/invalid/run/typelevel-defaultValue.scala diff --git a/tests/run/typelevel-patmat.scala b/tests/invalid/run/typelevel-patmat.scala similarity index 100% rename from tests/run/typelevel-patmat.scala rename to tests/invalid/run/typelevel-patmat.scala diff --git a/tests/run/typelevel.scala b/tests/invalid/run/typelevel.scala similarity index 100% rename from tests/run/typelevel.scala rename to tests/invalid/run/typelevel.scala diff --git a/tests/run/typelevel1.scala b/tests/invalid/run/typelevel1.scala similarity index 100% rename from tests/run/typelevel1.scala rename to tests/invalid/run/typelevel1.scala diff --git a/tests/run/typelevel3.scala b/tests/invalid/run/typelevel3.scala similarity index 100% rename from tests/run/typelevel3.scala rename to tests/invalid/run/typelevel3.scala diff --git a/tests/neg/implicitMatch-syntax.scala b/tests/neg/implicitMatch-syntax.scala deleted file mode 100644 index 2df0357a01da..000000000000 --- a/tests/neg/implicitMatch-syntax.scala +++ /dev/null @@ -1,33 +0,0 @@ -object Test { - import collection.immutable.TreeSet - import collection.immutable.HashSet - - inline def f1[T] = implicit implicit match { // error: repeated modifier // error: illegal modifier - case ord: Ordered[T] => new TreeSet[T] // error: no implicit - case _ => new HashSet[T] - - } - - inline def f2[T] = implicit erased match { // error: illegal modifier - case ord: Ordered[T] => new TreeSet[T] // error: no implicit - case _ => new HashSet[T] - } - - inline def f3[T] = erased implicit match { // error: illegal modifier - case ord: Ordered[T] => new TreeSet[T] // error: no implicit - case _ => new HashSet[T] - } - - inline def f4() = implicit match { - case Nil => ??? // error: not a legal pattern - case x :: xs => ??? 
// error: not a legal pattern - } - - inline def f5[T] = locally { implicit match { // Ok - case _ => new HashSet[T] - }} - - def f6[T] = implicit match { // error: implicit match cannot be used here - case _ => new HashSet[T] - } -} \ No newline at end of file diff --git a/tests/neg/nested-rewrites.scala b/tests/neg/nested-rewrites.scala index d8d3e0e53f28..3dcce36182dc 100644 --- a/tests/neg/nested-rewrites.scala +++ b/tests/neg/nested-rewrites.scala @@ -15,14 +15,14 @@ object Test { object Test0 { def f(x: Int) = { - inline def g(x: Int) = inline x match { + inline def g(x: Int) = x match { case 0 => 0 } g(0) inline val Y = 0 g(Y) - inline def h(x: Int) = inline x match { + inline def h(x: Int) = x match { case Y => 0 } h(0) @@ -35,14 +35,14 @@ object Test0 { object Test1 { erased inline def f(x: Int) = { - erased inline def g(x: Int) = inline x match { // error: implementation restriction: nested inline methods are not supported + erased inline def g(x: Int) = x match { // error: implementation restriction: nested inline methods are not supported case 0 => 0 } g(0) inline val Y = 0 g(Y) - inline def h(x: Int) = inline x match { // error: implementation restriction: nested inline methods are not supported + inline def h(x: Int) = x match { // error: implementation restriction: nested inline methods are not supported case Y => 0 } h(0) diff --git a/tests/neg/transparent-override/B_2.scala b/tests/neg/transparent-override/B_2.scala index 5c83c7f5e75b..b24bf7d5345d 100644 --- a/tests/neg/transparent-override/B_2.scala +++ b/tests/neg/transparent-override/B_2.scala @@ -1,5 +1,5 @@ class B extends A { - inline def f(x: Int): Int = inline x match { // error + inline def f(x: Int): Int = x match { // error case 0 => 1 case _ => x } diff --git a/tests/neg/tuple-nonconstant3.scala b/tests/neg/tuple-nonconstant3.scala deleted file mode 100644 index c6f5da1f024d..000000000000 --- a/tests/neg/tuple-nonconstant3.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Test { - def elem[Xs <: NonEmptyTuple](xs: Xs) = xs(1) // error: selection (...) cannot be applied to tuple of unknown size -} \ No newline at end of file diff --git a/tests/neg/tuple-patterns2.scala b/tests/neg/tuple-patterns2.scala deleted file mode 100644 index bba0dd0e999f..000000000000 --- a/tests/neg/tuple-patterns2.scala +++ /dev/null @@ -1,5 +0,0 @@ -object Test { - (1, 2) match { - case x *: xs => // error: call to inline unapply - } -} diff --git a/tests/neg/typelevel-noeta.scala b/tests/neg/typelevel-noeta.scala index 547a4ed0e6c1..1641678310de 100644 --- a/tests/neg/typelevel-noeta.scala +++ b/tests/neg/typelevel-noeta.scala @@ -2,12 +2,12 @@ object Test { def anyValue[T]: T = ??? 
- inline def test(x: Int) = inline x match { + inline def test(x: Int) = x match { case _: Byte => case _: Char => } - inline def test2() = inline 1 match { + inline def test2() = 1 match { case _: Byte => case _: Char => } diff --git a/tests/pos-deep-subtype/tuples23.scala b/tests/pos-deep-subtype/tuples23.scala index ffbcbf0f5e2c..5e7825fc5ac6 100644 --- a/tests/pos-deep-subtype/tuples23.scala +++ b/tests/pos-deep-subtype/tuples23.scala @@ -14,7 +14,7 @@ object Test extends App { case (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => println(x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + x12 + x13 + x14 + x15 + x16 + x17 + x18 + x19 + x20 + x21 + x22 + x23) } - inline def decompose3 = inline x23 match { case x *: y *: xs => (x, y, xs) } + inline def decompose3 = x23 match { case x *: y *: xs => (x, y, xs) } { val (x, y, xs) = decompose3 val xc: Int = x diff --git a/tests/pos/i2166.scala b/tests/pos/i2166.scala index 28cc05573d29..f796031d009d 100644 --- a/tests/pos/i2166.scala +++ b/tests/pos/i2166.scala @@ -1,5 +1,5 @@ object Test { - inline def f = inline "" match { case _ => false } + inline def f = "" match { case _ => false } def main(args: Array[String]): Unit = f } \ No newline at end of file diff --git a/tests/pos/matchtype.scala b/tests/pos/matchtype.scala index bbb3916d9c8e..58f452c2ad11 100644 --- a/tests/pos/matchtype.scala +++ b/tests/pos/matchtype.scala @@ -17,24 +17,15 @@ object Test { erased val x2: 1 = erasedValue[T1] - inline def checkSub[T1, T2] = - inline typelevel.erasedValue[T1] match { - case _: T2 => // OK - case _ => error("not a subtype T1/T2") - } - - inline def checkSame[T1, T2] = { - checkSub[T1, T2] - checkSub[T2, T1] - } - - checkSame[T2, S[S[S[0]]]] + erased val y0: S[S[S[0]]] = erasedValue[T2] + erased val z0: T2 = erasedValue[S[S[S[0]]]] type Head[X <: Tuple] = X match { case (x1, _) => x1 } - checkSame[Head[(Int, String)], Int] + erased val y1: Int = erasedValue[Head[(Int, String)]] + erased val z1: Head[(Int, String)] = 22 type Concat[X <: Tuple, Y <: Tuple] <: Tuple = X match { case Unit => Y @@ -57,15 +48,21 @@ object Test { erased val x3: String = erasedValue[Elem[(String, Int), 0]] erased val x4: Int = erasedValue[Elem1[(String, Int), 1]] - checkSame[Elem[(String, Int, Boolean), 0], String] - checkSame[Elem1[(String, Int, Boolean), 1], Int] - checkSame[Elem[(String, Int, Boolean), 2], Boolean] - - checkSame[Concat[Unit, (String, Int)], (String, Int)] - checkSame[Concat[(Boolean, Boolean), (String, Int)], Boolean *: Boolean *: (String, Int)] - checkSub[(Boolean, Boolean, String, Int), Concat[(Boolean, Boolean), String *: Int *: Unit]] - - inline def index[Xs <: NonEmptyTuple](xs: Xs, n: Int): Elem[Xs, n.type] = xs(n).asInstanceOf + erased val y2: Elem[(String, Int, Boolean), 0] = erasedValue[String] + erased val z2: String = erasedValue[Elem[(String, Int, Boolean), 0]] + erased val y3: Elem1[(String, Int, Boolean), 1] = erasedValue[Int] + erased val z3: Int = erasedValue[Elem1[(String, Int, Boolean), 1]] + erased val y4: Elem[(String, Int, Boolean), 2] = erasedValue[Boolean] + erased val z4: Boolean = erasedValue[Elem[(String, Int, Boolean), 2]] + + erased val y5: Concat[Unit, (String, Int)] = erasedValue[(String, Int)] + erased val z5: (String, Int) = erasedValue[Concat[Unit, (String, Int)]] + erased val y6: Concat[(Boolean, Boolean), (String, Int)] = erasedValue[Boolean *: Boolean *: (String, Int)] + erased val z6: Boolean *: Boolean *: (String, Int) = 
erasedValue[Concat[(Boolean, Boolean), (String, Int)]] + erased val y7: (Boolean, Boolean, String, Int) = erasedValue[Concat[(Boolean, Boolean), String *: Int *: Unit]] + erased val z7: Concat[(Boolean, Boolean), String *: Int *: Unit] = erasedValue[(Boolean, Boolean, String, Int)] + + def index[Xs <: NonEmptyTuple](xs: Xs, n: Int): Elem[Xs, n.type] = xs(n).asInstanceOf val test = (1, "hi", true, 2.0) index(test, 0): Int diff --git a/tests/run/TupleAbstract.scala b/tests/run/TupleAbstract.scala deleted file mode 100644 index b3e2ffeab980..000000000000 --- a/tests/run/TupleAbstract.scala +++ /dev/null @@ -1,300 +0,0 @@ -package test { - -import annotation.showAsInfix - -class TypeLevel { - type Tuple - - type Empty <: Tuple - // in the actual implementation, pick scala.Unit, and have it extend `Tuple`. - - @showAsInfix type *:[+H, +T <: Tuple] <: Tuple - - erased def erasedValue[T]: T = ??? - case class Typed[T](val value: T) { type Type = T } -} - -class TupleXXL private (es: Array[Object]) { - override def toString = elems.mkString("(", ",", ")") - override def hashCode = getClass.hashCode * 41 + elems.deep.hashCode - override def equals(that: Any) = that match { - case that: TupleXXL => this.elems.deep.equals(that.elems.deep) - case _ => false - } - def elems: Array[Object] = es -} -object TupleXXL { - def apply(elems: Array[Object]) = new TupleXXL(elems.clone) -} - -object Tuples { - val typelevel = new TypeLevel - import typelevel._ - - def unit = ().asInstanceOf[Empty] - - private final val MaxSpecialized = 7 // 22 in real life - - private inline def _empty: Tuple = erasedValue[Empty] - private inline def _pair[H, T <: Tuple] (x: H, xs: T): Tuple = erasedValue[H *: T] - - private inline def _size(xs: Tuple): Int = inline xs match { - case _: Empty => 0 - case _: (_ *: xs1) => _size(erasedValue[xs1]) + 1 - } - - private inline def _index(xs: Tuple, n: Int): Any = inline xs match { - case _: (x *: _) if n == 0 => erasedValue[x] - case _: (_ *: xs1) if n > 0 => _index(erasedValue[xs1], n - 1) - } - - private inline def _head(xs: Tuple): Any = inline xs match { - case _: (x *: _) => erasedValue[x] - } - - private inline def _tail(xs: Tuple): Tuple = inline xs match { - case _: (_ *: xs1) => erasedValue[xs1] - } - - private inline def _concat(xs: Tuple, ys: Tuple): Tuple = inline xs match { - case _: Empty => ys - case _: (x1 *: xs1) => _pair(erasedValue[x1], _concat(erasedValue[xs1], ys)) - } - - inline def fromArray[T <: Tuple](xs: Array[Object]): T = - inline _size(erasedValue[T]) match { - case 0 => ().asInstanceOf[T] - case 1 => Tuple1(xs(0)).asInstanceOf[T] - case 2 => Tuple2(xs(0), xs(1)).asInstanceOf[T] - case 3 => Tuple3(xs(0), xs(1), xs(2)).asInstanceOf[T] - case 4 => Tuple4(xs(0), xs(1), xs(2), xs(3)).asInstanceOf[T] - case 5 => Tuple5(xs(0), xs(1), xs(2), xs(3), xs(4)).asInstanceOf[T] - case 6 => Tuple6(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5)).asInstanceOf[T] - case 7 => Tuple7(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6)).asInstanceOf[T] - case _ => TupleXXL(xs).asInstanceOf[T] - } - - val emptyArray = Array[Object]() - - inline implicit def tupleDeco(xs: Tuple): TupleOps = new TupleOps(xs) - - class TupleOps(val xs: Tuple) extends AnyVal { - - inline def toArray: Array[Object] = inline _size(xs) match { - case 0 => - emptyArray - case 1 => - val t = xs.asInstanceOf[Tuple1[Object]] - Array(t._1) - case 2 => - val t = xs.asInstanceOf[Tuple2[Object, Object]] - Array(t._1, t._2) - case 3 => - val t = xs.asInstanceOf[Tuple3[Object, Object, Object]] - Array(t._1, t._2, t._3) - 
case 4 => - val t = xs.asInstanceOf[Tuple4[Object, Object, Object, Object]] - Array(t._1, t._2, t._3, t._4) - case 5 => - val t = xs.asInstanceOf[Tuple5[Object, Object, Object, Object, Object]] - Array(t._1, t._2, t._3, t._4, t._5) - case 6 => - val t = xs.asInstanceOf[Tuple6[Object, Object, Object, Object, Object, Object]] - Array(t._1, t._2, t._3, t._4, t._5, t._6) - case 7 => - val t = xs.asInstanceOf[Tuple7[Object, Object, Object, Object, Object, Object, Object]] - Array(t._1, t._2, t._3, t._4, t._5, t._6, t._7) - case _ => - xs.asInstanceOf[TupleXXL].elems - } - - inline def *: [H] (x: H): Tuple = { - erased val resTpe = Typed(_pair(x, xs)) - inline _size(xs) match { - case 0 => - Tuple1(x).asInstanceOf[resTpe.Type] - case 1 => - Tuple2(x, xs.asInstanceOf[Tuple1[_]]._1).asInstanceOf[resTpe.Type] - case 2 => - val t = xs.asInstanceOf[Tuple2[_, _]] - Tuple3(x, t._1, t._2).asInstanceOf[resTpe.Type] - case 3 => - val t = xs.asInstanceOf[Tuple3[_, _, _]] - Tuple4(x, t._1, t._2, t._3).asInstanceOf[resTpe.Type] - case 4 => - val t = xs.asInstanceOf[Tuple4[_, _, _, _]] - Tuple5(x, t._1, t._2, t._3, t._4).asInstanceOf[resTpe.Type] - case n => - fromArray[resTpe.Type](prepend(x, toArray)) - } - } - - private def prepend[H](x: H, elems: Array[Object]): Array[Object] = { - val elems1 = new Array[Object](elems.length + 1) - elems1(0) = x.asInstanceOf[Object] - Array.copy(elems, 0, elems1, 1, elems.length) - elems1 - } - - inline def head: Any = { - erased val resTpe = Typed(_head(xs)) - val resVal = inline _size(xs) match { - case 1 => - val t = xs.asInstanceOf[Tuple1[_]] - t._1 - case 2 => - val t = xs.asInstanceOf[Tuple2[_, _]] - t._1 - case 3 => - val t = xs.asInstanceOf[Tuple3[_, _, _]] - t._1 - case 4 => - val t = xs.asInstanceOf[Tuple4[_, _, _, _]] - t._1 - case n if n > 4 && n <= MaxSpecialized => - xs.asInstanceOf[Product].productElement(0) - case n if n > MaxSpecialized => - val t = xs.asInstanceOf[TupleXXL] - t.elems(0) - } - resVal.asInstanceOf[resTpe.Type] - } - - inline def tail: Any = { - erased val resTpe = Typed(_tail(xs)) - inline _size(xs) match { - case 1 => - unit - case 2 => - val t = xs.asInstanceOf[Tuple2[_, _]] - Tuple1(t._2).asInstanceOf[resTpe.Type] - case 3 => - val t = xs.asInstanceOf[Tuple3[_, _, _]] - Tuple2(t._2, t._3).asInstanceOf[resTpe.Type] - case 4 => - val t = xs.asInstanceOf[Tuple4[_, _, _, _]] - Tuple3(t._2, t._3, t._4).asInstanceOf[resTpe.Type] - case 5 => - val t = xs.asInstanceOf[Tuple5[_, _, _, _, _]] - Tuple4(t._2, t._3, t._4, t._5).asInstanceOf[resTpe.Type] - case n if n > 5 => - fromArray[resTpe.Type](toArray.tail) - } - } - - inline def apply(n: Int): Any = { - erased val resTpe = Typed(_index(xs, n)) - inline _size(xs) match { - case 1 => - val t = xs.asInstanceOf[Tuple1[_]] - inline n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - } - case 2 => - val t = xs.asInstanceOf[Tuple2[_, _]] - inline n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] - } - case 3 => - val t = xs.asInstanceOf[Tuple3[_, _, _]] - inline n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] - case 2 => t._3.asInstanceOf[resTpe.Type] - } - case 4 => - val t = xs.asInstanceOf[Tuple4[_, _, _, _]] - inline n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] - case 2 => t._3.asInstanceOf[resTpe.Type] - case 3 => t._4.asInstanceOf[resTpe.Type] - } - case s if s > 4 && s <= MaxSpecialized && n >= 0 && n < s => - 
xs.asInstanceOf[Product].productElement(n).asInstanceOf[resTpe.Type] - case s if s > MaxSpecialized && n >= 0 && n < s => - xs.asInstanceOf[TupleXXL].elems(n).asInstanceOf[resTpe.Type] - } - } - - inline def ++(ys: Tuple): Tuple = { - erased val resTpe = Typed(_concat(xs, ys)) - inline _size(xs) match { - case 0 => ys - case 1 => - if (_size(ys) == 0) xs - else xs.head *: ys - case 2 => - val t = xs.asInstanceOf[Tuple2[_, _]] - inline _size(ys) match { - case 0 => xs - case 1 => - val u = ys.asInstanceOf[Tuple1[_]] - Tuple3(t._1, t._2, u._1).asInstanceOf[resTpe.Type] - case 2 => - val u = ys.asInstanceOf[Tuple2[_, _]] - Tuple4(t._1, t._2, u._1, u._2).asInstanceOf[resTpe.Type] - case _ => - genericConcat[resTpe.Type](xs, ys) - } - case 3 => - val t = xs.asInstanceOf[Tuple3[_, _, _]] - inline _size(ys) match { - case 0 => xs - case 1 => - val u = ys.asInstanceOf[Tuple1[_]] - Tuple4(t._1, t._2, t._3, u._1).asInstanceOf[resTpe.Type] - case _ => - genericConcat[resTpe.Type](xs, ys) - } - case _ => - if (_size(ys) == 0) xs - else genericConcat[resTpe.Type](xs, ys) - } - } - - inline def genericConcat[T <: Tuple](xs: Tuple, ys: Tuple): Tuple = - fromArray[T](xs.toArray ++ ys.toArray) - } -} -} -object Test extends App { - import test._ - import Tuples._ - import typelevel._ - val x0 = unit; println(x0) - val x1 = 1 *: x0; println(x1) - val x2 = "A" *: x1; println(x2) - val x3 = 2 *: x2; println(x3) - val x4 = "B" *: x3; println(x4) - val x5 = 3 *: x4; println(x5) - val x6 = "C" *: x5; println(x6) - val x7 = 4 *: x6; println(x7) - val x8 = "D" *: x7; println(x8) - val h1 = x1.head; val h1c: Int = h1; println(s"h1 = $h1") - val h2 = x2.head; val h2c: String = h2; println(s"h2 = $h2") - val h7 = x7.head; val h7c: Int = h7; println(s"h7 = $h7") - val h8 = x8.head; val h8c: String = h8; println(s"h8 = $h8") - val t1 = x1.tail; val t1c: Empty = t1; println(s"t1 = $t1") - val t2 = x2.tail; val t2c: Int *: Empty = t2; println(s"t2 = $t2") - val t7 = x7.tail; val t7c: String *: Int *: Empty = t7.tail.tail.tail.tail; println(s"t7 = $t7") - val t8 = x8.tail; val t8c: Int = t8(6); println(s"t8 = $t8") - val a1_0 = x1(0); val a1_0c: Int = a1_0; println(s"a1_0 = $a1_0") - val a2_0 = x2(0); val a2_0c: String = a2_0; println(s"a2_0 = $a2_0") - val a3_1 = x3(1); val a3_1c: String = a3_1; println(s"a3_1 = $a3_1") - val a4_3 = x4(3); val a4_3c: Int = a4_3; println(s"a4_3 = $a4_3") - val a6_4 = x6(4); val a6_4c: String = a6_4; println(s"a6_4 = $a6_4") - val a8_0 = x8(0); val a8_0c: String = a8_0; println(s"a8_0 = $a8_0") - val c0_0 = x0 ++ x0; val c0_0c: Empty = c0_0; println(s"c0_0 = $c0_0") - val c0_1 = x0 ++ x1; val c0_1c: Int *: Empty = c0_1c; println(s"c0_1 = $c0_1") - val c1_0 = x1 ++ x0; val c1_0c: Int *: Empty = c1_0c; println(s"c1_0 = $c1_0") - val c0_4 = x0 ++ x4; val c0_4c: String *: Int *: String *: Int *: Empty = c0_4; println(s"c0_4 = $c0_4") - val c4_0 = x4 ++ x0; val c4_0c: String *: Int *: String *: Int *: Empty = c4_0; println(s"c4_0 = $c4_0") - val c1_1 = x1 ++ x1; val c1_1c: Int *: Int *: Empty = c1_1; println(s"c1_1 = $c1_1") - val c1_8 = x1 ++ x8; val c1_8c: Int *: String *: Int *: String *: Int *: String *: Int *: String *: Int *: Empty = c1_8; println(s"c1_8 = $c1_8") - val c2_1 = x2 ++ x1; val c2_1c: String *: Int *: Int *: Empty = c2_1; println(s"c2_1 = $c2_1") - val c2_2 = x2 ++ x2; val c2_2c: String *: Int *: String *: Int *: Empty = c2_2; println(s"c2_2 = $c2_2") - val c2_3 = x2 ++ x3; val c2_3c: String *: Int *: Int *: String *: Int *: Empty = c2_3; println(s"c2_3 = $c2_3") - val c3_3 = 
x3 ++ x3; val c3_3c: Int *: String *: Int *: Int *: String *: Int *: Empty = c3_3; println(s"c3_3 = $c3_3") -} diff --git a/tests/run/tuples1.scala b/tests/run/tuples1.scala index 9d5ee962439b..1edb3af2c98f 100644 --- a/tests/run/tuples1.scala +++ b/tests/run/tuples1.scala @@ -34,8 +34,8 @@ object Test extends App { val c2_3 = x2 ++ x3; val c2_3c: (String, Int, Int, String, Int) = c2_3; println(s"c2_3 = $c2_3") val c3_3 = x3 ++ x3; val c3_3c: (Int, String, Int, Int, String, Int) = c3_3; println(s"c3_3 = $c3_3") - inline def decompose1 = inline x2 match { case x *: xs => (x, xs) } - inline def decompose2 = inline x2 match { case x *: y *: xs => (x, y, xs) } + inline def decompose1 = x2 match { case x *: xs => (x, xs) } + inline def decompose2 = x2 match { case x *: y *: xs => (x, y, xs) } { val (x, xs) = decompose1 val xc: String = x diff --git a/tests/run/typelevel-overrides.scala b/tests/run/typelevel-overrides.scala index c8f5f3b5f9df..2c0fa2631eb9 100644 --- a/tests/run/typelevel-overrides.scala +++ b/tests/run/typelevel-overrides.scala @@ -8,13 +8,13 @@ class A extends T { def f(x: Int) = x } class B extends A { - override inline def f(x: Int) = inline x match { + override inline def f(x: Int) = x match { case 0 => 0 case x => x } } class C extends A with U { - override inline def f(x: Int) = inline x match { + override inline def f(x: Int) = x match { case 0 => 0 case x => x } diff --git a/tests/run/xml-interpolation/Test_2.scala b/tests/run/xml-interpolation/Test_2.scala index f0e6e795cca2..6dda8d1a00d7 100644 --- a/tests/run/xml-interpolation/Test_2.scala +++ b/tests/run/xml-interpolation/Test_2.scala @@ -3,7 +3,7 @@ import XmlQuote._ object Test { def main(args: Array[String]): Unit = { // TODO: enable once #5119 is fixed - // assert(xml"Hello Allan!" == Xml("Hello Allan!", Nil) + // assert(xml"Hello Allan!" == Xml("Hello Allan!", Nil)) val name = new Object{} assert(xml"Hello $name!" 
== Xml("Hello ??!", List(name))) diff --git a/tests/run/xml-interpolation/XmlQuote_1.scala b/tests/run/xml-interpolation/XmlQuote_1.scala index 54a2287debfa..a011d6e6c41b 100644 --- a/tests/run/xml-interpolation/XmlQuote_1.scala +++ b/tests/run/xml-interpolation/XmlQuote_1.scala @@ -5,16 +5,13 @@ import scala.language.implicitConversions case class Xml(parts: String, args: List[Any]) -// Ideally should be an implicit class but the implicit conversion -// has to be a inline method -class XmlQuote(ctx: => StringContext) { - inline def xml(args: => Any*): Xml = ~XmlQuote.impl('(ctx), '(args)) -} - object XmlQuote { - implicit inline def XmlQuote(ctx: => StringContext): XmlQuote = new XmlQuote(ctx) - def impl(ctx: Expr[StringContext], args: Expr[Seq[Any]]) + implicit class SCOps(ctx: StringContext) { + inline def xml(args: => Any*): Xml = ~XmlQuote.impl('(this), '(args)) + } + + def impl(receiver: Expr[SCOps], args: Expr[Seq[Any]]) (implicit tasty: Tasty): Expr[Xml] = { import tasty._ import Term._ @@ -41,19 +38,25 @@ object XmlQuote { case _ => false } - // _root_.scala.StringContext.apply([p0, ...]: String*) - val parts = ctx.toTasty match { - case Inlined(_, _, - Apply( - Select(Select(Select(Ident("_root_"), "scala", _), "StringContext", _), "apply", _), - List(Typed(Repeated(values), _)))) if values.forall(isStringConstant) => + def isSCOpsConversion(tree: Term) = + tree.symbol.fullName == "XmlQuote$.SCOps" + + def isStringContextApply(tree: Term) = + tree.symbol.fullName == "scala.StringContext$.apply" + + // XmlQuote.SCOps(StringContext.apply([p0, ...]: String*) + val parts = receiver.toTasty.underlyingArgument match { + case Apply(conv, List(Apply(fun, List(Typed(Repeated(values), _))))) + if isSCOpsConversion(conv) && + isStringContextApply(fun) && + values.forall(isStringConstant) => values.collect { case Literal(Constant.String(value)) => value } case tree => - abort("String literal expected") + abort(s"String literal expected, but ${tree.show} found") } // [a0, ...]: Any* - val Inlined(_, _, Typed(Repeated(args0), _)) = args.toTasty + val Typed(Repeated(args0), _) = args.toTasty.underlyingArgument val string = parts.mkString("??") '(new Xml(~string.toExpr, ~liftListOfAny(args0)))