@@ -98,7 +98,7 @@ object Inferencing {
       inst
     }

-    private var toMaximize: Boolean = false
+    private var toMaximize: List[TypeVar] = Nil

     def apply(x: Boolean, tp: Type): Boolean = tp.dealias match {
       case _: WildcardType | _: ProtoType =>
@@ -113,29 +113,24 @@ object Inferencing {
             || variance >= 0 && (force.allowBottom || tvar.hasLowerBound)
           if (direction != 0) instantiate(tvar, direction < 0)
           else if (preferMin) instantiate(tvar, fromBelow = true)
-          else toMaximize = true
+          else toMaximize = tvar :: toMaximize
           foldOver(x, tvar)
         }
       case tp =>
         foldOver(x, tp)
     }

-    private class UpperInstantiator(implicit ctx: Context) extends TypeAccumulator[Unit] {
-      def apply(x: Unit, tp: Type): Unit = {
-        tp match {
-          case tvar: TypeVar if !tvar.isInstantiated =>
+    def process(tp: Type): Boolean =
+      // Maximize type vars in the order they were visited before */
+      def maximize(tvars: List[TypeVar]): Unit = tvars match
+        case tvar :: tvars1 =>
+          maximize(tvars1)
+          if !tvar.isInstantiated then
             instantiate(tvar, fromBelow = false)
-          case _ =>
-        }
-        foldOver(x, tp)
-      }
-    }
-
-    def process(tp: Type): Boolean = {
+        case nil =>
       val res = apply(true, tp)
-      if (res && toMaximize) new UpperInstantiator().apply((), tp)
+      if res then maximize(toMaximize)
       res
-    }
   }

   /** For all type parameters occurring in `tp`:
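For readers skimming the diff: `toMaximize` now accumulates the deferred type variables by prepending each one as the accumulator visits it, so the list ends up in reverse visit order. The recursive `maximize` walks to the end of the list before instantiating, which replays the variables in the order they were first seen and avoids the second full type traversal that `UpperInstantiator` used to perform. The sketch below only illustrates that recursion pattern; it is not compiler code, and the names (`VisitOrderReplay`, `visited`, `replay`) are made up for the example.

```scala
// Standalone illustration (not compiler code) of the "prepend while visiting,
// recurse before acting" pattern used by `maximize` in the patch above.
object VisitOrderReplay:
  def main(args: Array[String]): Unit =
    // Plays the role of `toMaximize`: items are prepended as they are visited,
    // so the list ends up in reverse visit order.
    var visited: List[Int] = Nil
    for i <- 1 to 4 do visited = i :: visited   // visited == List(4, 3, 2, 1)

    // Recurse to the tail first, then act: the deepest call handles the
    // element that was visited earliest.
    def replay(xs: List[Int]): Unit = xs match
      case x :: rest =>
        replay(rest)
        println(s"processing $x")   // prints 1, 2, 3, 4: original visit order
      case Nil => ()

    replay(visited)
```

Reversing the list up front (e.g. `visited.reverse.foreach(...)`) would give the same order; the recursion in the patch gets there without allocating a second list.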