diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 5587be82333d..1f4ddc45da1a 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -306,6 +306,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] case mdef: TypeDef => def isBounds(rhs: Tree): Boolean = rhs match { case _: TypeBoundsTree => true + case _: MatchTypeTree => true // Typedefs with Match rhs classify as abstract case LambdaTypeTree(_, body) => isBounds(body) case _ => false } @@ -392,11 +393,12 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case Ident(_) => refPurity(tree) case Select(qual, _) => - refPurity(tree).min(exprPurity(qual)) + if (tree.symbol.is(Erased)) Pure + else refPurity(tree).min(exprPurity(qual)) case New(_) => SimplyPure case TypeApply(fn, _) => - exprPurity(fn) + if (fn.symbol.is(Erased)) Pure else exprPurity(fn) case Apply(fn, args) => def isKnownPureOp(sym: Symbol) = sym.owner.isPrimitiveValueClass || sym.owner == defn.StringClass @@ -404,8 +406,8 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => // A constant expression with pure arguments is pure. || fn.symbol.isStable) minOf(exprPurity(fn), args.map(exprPurity)) `min` Pure - else - Impure + else if (fn.symbol.is(Erased)) Pure + else Impure case Typed(expr, _) => exprPurity(expr) case Block(stats, expr) => diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index e7fea191eeb7..e512e8520359 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -661,6 +661,12 @@ object Trees { type ThisTree[-T >: Untyped] = LambdaTypeTree[T] } + /** [bound] selector match { cases } */ + case class MatchTypeTree[-T >: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]]) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = MatchTypeTree[T] + } + /** => T */ case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T]) extends TypTree[T] { @@ -916,6 +922,7 @@ object Trees { type RefinedTypeTree = Trees.RefinedTypeTree[T] type AppliedTypeTree = Trees.AppliedTypeTree[T] type LambdaTypeTree = Trees.LambdaTypeTree[T] + type MatchTypeTree = Trees.MatchTypeTree[T] type ByNameTypeTree = Trees.ByNameTypeTree[T] type TypeBoundsTree = Trees.TypeBoundsTree[T] type Bind = Trees.Bind[T] @@ -1099,6 +1106,10 @@ object Trees { case tree: LambdaTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree case _ => finalize(tree, untpd.LambdaTypeTree(tparams, body)) } + def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef]): MatchTypeTree = tree match { + case tree: MatchTypeTree if (bound eq tree.bound) && (selector eq tree.selector) && (cases eq tree.cases) => tree + case _ => finalize(tree, untpd.MatchTypeTree(bound, selector, cases)) + } def ByNameTypeTree(tree: Tree)(result: Tree): ByNameTypeTree = tree match { case tree: ByNameTypeTree if result eq tree.result => tree case _ => finalize(tree, untpd.ByNameTypeTree(result)) @@ -1255,6 +1266,8 @@ object Trees { case LambdaTypeTree(tparams, body) => implicit val ctx = localCtx cpy.LambdaTypeTree(tree)(transformSub(tparams), transform(body)) + case MatchTypeTree(bound, selector, cases) => + cpy.MatchTypeTree(tree)(transform(bound), transform(selector), transformSub(cases)) case ByNameTypeTree(result) => 
cpy.ByNameTypeTree(tree)(transform(result)) case TypeBoundsTree(lo, hi) => @@ -1389,6 +1402,8 @@ object Trees { case LambdaTypeTree(tparams, body) => implicit val ctx = localCtx this(this(x, tparams), body) + case MatchTypeTree(bound, selector, cases) => + this(this(this(x, bound), selector), cases) case ByNameTypeTree(result) => this(x, result) case TypeBoundsTree(lo, hi) => diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index ec0884e6ee99..e22537d8e88a 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -115,10 +115,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit ctx: Context): CaseDef = - ta.assignType(untpd.CaseDef(pat, guard, body), body) + ta.assignType(untpd.CaseDef(pat, guard, body), pat, body) def Match(selector: Tree, cases: List[CaseDef])(implicit ctx: Context): Match = - ta.assignType(untpd.Match(selector, cases), cases) + ta.assignType(untpd.Match(selector, cases), selector, cases) def Labeled(bind: Bind, expr: Tree)(implicit ctx: Context): Labeled = ta.assignType(untpd.Labeled(bind, expr)) @@ -165,6 +165,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(implicit ctx: Context): LambdaTypeTree = ta.assignType(untpd.LambdaTypeTree(tparams, body), tparams, body) + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(implicit ctx: Context): MatchTypeTree = + ta.assignType(untpd.MatchTypeTree(bound, selector, cases), bound, selector, cases) + def TypeBoundsTree(lo: Tree, hi: Tree)(implicit ctx: Context): TypeBoundsTree = ta.assignType(untpd.TypeBoundsTree(lo, hi), lo, hi) @@ -575,7 +578,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } } - override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(implicit ctx: Context): Closure = { val tree1 = untpd.cpy.Closure(tree)(env, meth, tpt) tree match { @@ -584,11 +586,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => ta.assignType(tree1, meth, tpt) } } + override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(implicit ctx: Context): Match = { val tree1 = untpd.cpy.Match(tree)(selector, cases) tree match { case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, cases) + case _ => ta.assignType(tree1, selector, cases) } } @@ -596,7 +599,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { val tree1 = untpd.cpy.CaseDef(tree)(pat, guard, body) tree match { case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, body) + case _ => ta.assignType(tree1, pat, body) } } @@ -821,7 +824,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** `tree == that` */ def equal(that: Tree)(implicit ctx: Context) = - applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) + if (that.tpe.widen.isRef(defn.NothingClass)) + Literal(Constant(false)) + else + applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */ def isInstance(tp: Type)(implicit ctx: Context): Tree = tp.dealias match { diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index b2278088a6c8..e0821f0325e0 100644 --- 
a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -293,6 +293,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def RefinedTypeTree(tpt: Tree, refinements: List[Tree]): RefinedTypeTree = new RefinedTypeTree(tpt, refinements) def AppliedTypeTree(tpt: Tree, args: List[Tree]): AppliedTypeTree = new AppliedTypeTree(tpt, args) def LambdaTypeTree(tparams: List[TypeDef], body: Tree): LambdaTypeTree = new LambdaTypeTree(tparams, body) + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef]): MatchTypeTree = new MatchTypeTree(bound, selector, cases) def ByNameTypeTree(result: Tree): ByNameTypeTree = new ByNameTypeTree(result) def TypeBoundsTree(lo: Tree, hi: Tree): TypeBoundsTree = new TypeBoundsTree(lo, hi) def Bind(name: Name, body: Tree): Bind = new Bind(name, body) diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index a8d09ba7b6d8..4366ac45170f 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -6,6 +6,7 @@ object Config { final val cacheAsSeenFrom = true final val cacheMemberNames = true final val cacheImplicitScopes = true + final val cacheMatchReduced = true final val checkCacheMembersNamed = false diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 2a9843394c9d..682eeb552507 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -35,6 +35,11 @@ trait ConstraintHandling { /** If the constraint is frozen we cannot add new bounds to the constraint. */ protected var frozenConstraint = false + /** Potentially a type lambda that is still instantiatable, even though the constraint + * is generally frozen. + */ + protected var caseLambda: Type = NoType + /** If set, align arguments `S1`, `S2`when taking the glb * `T1 { X = S1 } & T2 { X = S2 }` of a constraint upper bound for some type parameter. * Aligning means computing `S1 =:= S2` which may change the current constraint. 
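// Illustration (not part of the patch): the surface form represented by the new
// MatchTypeTree(bound, selector, cases) node added in Trees.scala/untpd above.
// The parser changes later in this diff build it from a type-level `match`,
// with an EmptyTree bound unless the enclosing `type` definition supplies one.
type Elem[X] = X match {
  case String   => Char
  case Array[t] => t
}
// The lower-case `t` in `Array[t]` is a type variable bound by its case; during
// reduction such variables become parameters of the `caseLambda` introduced here,
// which stays instantiatable even while the rest of the constraint is frozen.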
@@ -47,7 +52,7 @@ trait ConstraintHandling { */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty - private def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean): Boolean = + protected def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean): Boolean = !constraint.contains(param) || { def occursIn(bound: Type): Boolean = { val b = bound.dealias @@ -167,19 +172,20 @@ trait ConstraintHandling { isSubType(tp1, tp2) } - final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = { - val saved = frozenConstraint + @forceInline final def inFrozenConstraint[T](op: => T): T = { + val savedFrozen = frozenConstraint + val savedLambda = caseLambda frozenConstraint = true - try isSubType(tp1, tp2) - finally frozenConstraint = saved + caseLambda = NoType + try op + finally { + frozenConstraint = savedFrozen + caseLambda = savedLambda + } } - final def isSameTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = { - val saved = frozenConstraint - frozenConstraint = true - try isSameType(tp1, tp2) - finally frozenConstraint = saved - } + final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = inFrozenConstraint(isSubType(tp1, tp2)) + final def isSameTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = inFrozenConstraint(isSameType(tp1, tp2)) /** Test whether the lower bounds of all parameters in this * constraint are a solution to the constraint. @@ -319,7 +325,7 @@ trait ConstraintHandling { } /** The current bounds of type parameter `param` */ - final def bounds(param: TypeParamRef): TypeBounds = { + def bounds(param: TypeParamRef): TypeBounds = { val e = constraint.entry(param) if (e.exists) e.bounds else { @@ -355,7 +361,7 @@ trait ConstraintHandling { /** Can `param` be constrained with new bounds? */ final def canConstrain(param: TypeParamRef): Boolean = - !frozenConstraint && (constraint contains param) + (!frozenConstraint || (caseLambda `eq` param.binder)) && constraint.contains(param) /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. * `bound` is assumed to be in normalized form, as specified in `firstTry` and @@ -492,19 +498,18 @@ trait ConstraintHandling { /** Check that constraint is fully propagated. 
See comment in Config.checkConstraintsPropagated */ def checkPropagated(msg: => String)(result: Boolean): Boolean = { if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) { - val saved = frozenConstraint - frozenConstraint = true - for (p <- constraint.domainParams) { - def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = - assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") - for (u <- constraint.upper(p)) - check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") - for (l <- constraint.lower(p)) { - check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") - check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") + inFrozenConstraint { + for (p <- constraint.domainParams) { + def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = + assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") + for (u <- constraint.upper(p)) + check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") + for (l <- constraint.lower(p)) { + check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") + check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") + } } } - frozenConstraint = saved } result } diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala index 55ce0292f115..73924d0cf19d 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc package core import Contexts._ -import config.Printers.typr +import config.Printers.{default, typr} trait ConstraintRunInfo { self: Run => private[this] var maxSize = 0 @@ -12,8 +12,9 @@ trait ConstraintRunInfo { self: Run => maxSize = size maxConstraint = c } - def printMaxConstraint()(implicit ctx: Context) = - if (maxSize > 0) typr.println(s"max constraint = ${maxConstraint.show}") - + def printMaxConstraint()(implicit ctx: Context) = { + val printer = if (ctx.settings.YdetailedStats.value) default else typr + if (maxSize > 0) printer.println(s"max constraint = ${maxConstraint.show}") + } protected def reset() = maxConstraint = null } diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 0483f699fc3d..3851c0239b1b 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -218,6 +218,15 @@ class Definitions { lazy val Sys_errorR = SysPackage.moduleClass.requiredMethodRef(nme.error) def Sys_error(implicit ctx: Context) = Sys_errorR.symbol + lazy val TypelevelPackageObjectRef = ctx.requiredModuleRef("scala.typelevel.package") + lazy val TypelevelPackageObject = TypelevelPackageObjectRef.symbol.moduleClass + lazy val Typelevel_errorR = TypelevelPackageObjectRef.symbol.requiredMethodRef(nme.error) + def Typelevel_error(implicit ctx: Context) = Typelevel_errorR.symbol + lazy val Typelevel_constValueR = TypelevelPackageObjectRef.symbol.requiredMethodRef("constValue") + def Typelevel_constValue(implicit ctx: Context) = Typelevel_constValueR.symbol + lazy val Typelevel_constValueOptR = TypelevelPackageObjectRef.symbol.requiredMethodRef("constValueOpt") + def Typelevel_constValueOpt(implicit ctx: Context) = Typelevel_constValueOptR.symbol + /** The `scalaShadowing` package is used to safely 
modify classes and * objects in scala so that they can be used from dotty. They will * be visible as members of the `scala` package, replacing any objects @@ -708,6 +717,8 @@ class Definitions { lazy val TupleTypeRef = ctx.requiredClassRef("scala.Tuple") def TupleClass(implicit ctx: Context) = TupleTypeRef.symbol.asClass + lazy val NonEmptyTupleTypeRef = ctx.requiredClassRef("scala.NonEmptyTuple") + def NonEmptyTupleClass(implicit ctx: Context) = NonEmptyTupleTypeRef.symbol.asClass lazy val PairType = ctx.requiredClassRef("scala.*:") def PairClass(implicit ctx: Context) = PairType.symbol.asClass @@ -884,6 +895,9 @@ class Definitions { } } + final def isTypelevel_S(sym: Symbol)(implicit ctx: Context) = + sym.name == tpnme.S && sym.owner == TypelevelPackageObject + // ----- Symbol sets --------------------------------------------------- lazy val AbstractFunctionType = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0) @@ -1213,7 +1227,7 @@ class Definitions { def isValueSubClass(sym1: Symbol, sym2: Symbol) = valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 - lazy val erasedToObject = Set[Symbol](AnyClass, AnyValClass, TupleClass, SingletonClass) + lazy val erasedToObject = Set[Symbol](AnyClass, AnyValClass, TupleClass, NonEmptyTupleClass, SingletonClass) // ----- Initialization --------------------------------------------------- diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 1a58805f389c..03c3f49ff94a 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -204,6 +204,7 @@ object StdNames { final val Object: N = "Object" final val PartialFunction: N = "PartialFunction" final val PrefixType: N = "PrefixType" + final val S: N = "S" final val Serializable: N = "Serializable" final val Singleton: N = "Singleton" final val Throwable: N = "Throwable" diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index da1554fb9d67..e4bcea5ce191 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -12,6 +12,7 @@ import util.common._ import Names._ import NameOps._ import NameKinds._ +import Constants.Constant import Flags._ import StdNames.tpnme import util.Positions.Position @@ -337,8 +338,8 @@ class TypeApplications(val self: Type) extends AnyVal { tl => arg.paramInfos.map(_.subst(arg, tl).bounds), tl => arg.resultType.subst(arg, tl) ) - case arg @ TypeAlias(alias) => - arg.derivedTypeAlias(adaptArg(alias)) + case arg: AliasingBounds => + arg.derivedAlias(adaptArg(arg.alias)) case arg @ TypeBounds(lo, hi) => arg.derivedTypeBounds(adaptArg(lo), adaptArg(hi)) case _ => @@ -401,8 +402,8 @@ class TypeApplications(val self: Type) extends AnyVal { dealiased.derivedAndType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args)) case dealiased: OrType => dealiased.derivedOrType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args)) - case dealiased: TypeAlias => - dealiased.derivedTypeAlias(dealiased.alias.appliedTo(args)) + case dealiased: AliasingBounds => + dealiased.derivedAlias(dealiased.alias.appliedTo(args)) case dealiased: TypeBounds => dealiased.derivedTypeBounds(dealiased.lo.appliedTo(args), dealiased.hi.appliedTo(args)) case dealiased: LazyRef => @@ -434,10 +435,13 @@ class TypeApplications(val self: Type) extends AnyVal { appliedTo(args) } - 
/** Turns non-bounds types to type aliases */ + /** Turns non-bounds types to type bounds. + * A (possible lambda abstracted) match type is turned into an abstract type. + * Every other type is turned into a type alias + */ final def toBounds(implicit ctx: Context): TypeBounds = self match { case self: TypeBounds => self // this can happen for wildcard args - case _ => TypeAlias(self) + case _ => if (self.isMatch) MatchAlias(self) else TypeAlias(self) } /** Translate a type of the form From[T] to To[T], keep other types as they are. diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 0f54da0573a2..d3c73ef77530 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -11,7 +11,10 @@ import config.Config import config.Printers.{typr, constr, subtyping, gadts, noPrinter} import TypeErasure.{erasedLub, erasedGlb} import TypeApplications._ +import Constants.Constant +import transform.TypeUtils._ import scala.util.control.NonFatal +import typer.ProtoTypes.constrained import reporting.trace /** Provides methods to compare types. @@ -102,6 +105,9 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { true } + protected def gadtBounds(sym: Symbol)(implicit ctx: Context) = ctx.gadt.bounds(sym) + protected def gadtSetBounds(sym: Symbol, b: TypeBounds) = ctx.gadt.setBounds(sym, b) + // Subtype testing `<:<` def topLevelSubType(tp1: Type, tp2: Type): Boolean = { @@ -284,6 +290,15 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case ConstantType(v1) => v1.value == v2.value case _ => secondTry } + case tp2: AnyConstantType => + if (tp2.tpe.exists) recur(tp1, tp2.tpe) + else tp1 match { + case tp1: ConstantType => + tp2.tpe = tp1 + true + case _ => + secondTry + } case _: FlexType => true case _ => @@ -336,7 +351,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { recur(tp1.underlying, tp2) case tp1: WildcardType => def compareWild = tp1.optBounds match { - case TypeBounds(lo, _) => recur(lo, tp2) + case bounds: TypeBounds => recur(bounds.lo, tp2) case _ => true } compareWild @@ -362,6 +377,9 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { false } joinOK || recur(tp11, tp2) && recur(tp12, tp2) + case tp1: MatchType => + val reduced = tp1.reduced + if (reduced.exists) recur(reduced, tp2) else thirdTry case _: FlexType => true case _ => @@ -369,15 +387,15 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } def thirdTryNamed(tp2: NamedType): Boolean = tp2.info match { - case TypeBounds(lo2, _) => + case info2: TypeBounds => def compareGADT: Boolean = { - val gbounds2 = ctx.gadt.bounds(tp2.symbol) + val gbounds2 = gadtBounds(tp2.symbol) (gbounds2 != null) && (isSubTypeWhenFrozen(tp1, gbounds2.lo) || narrowGADTBounds(tp2, tp1, approx, isUpper = false)) && GADTusage(tp2.symbol) } - isSubApproxHi(tp1, lo2) || compareGADT || fourthTry + isSubApproxHi(tp1, info2.lo) || compareGADT || fourthTry case _ => val cls2 = tp2.symbol @@ -389,10 +407,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { // Note: We would like to replace this by `if (tp1.hasHigherKind)` // but right now we cannot since some parts of the standard library rely on the // idiom that e.g. `List <: Any`. We have to bootstrap without scalac first. 
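// Illustration (not part of the patch): what the `toBounds` change above means for
// a match-type definition such as
//   type Elem[X] = X match { case String => Char; case Array[t] => t }
// Its right-hand side is now recorded as MatchAlias(...) rather than TypeAlias(...),
// so `Elem` is not a transparent alias: `Elem[String]` is related to other types
// through MatchType.reduced (see the new MatchType cases in `recur` in this file)
// rather than by substituting the right-hand side.
val c: Elem[String] = 'x'   // accepted because Elem[String] reduces to Char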
- val base = tp1.baseType(cls2) - if (base.exists && base.ne(tp1)) - return isSubType(base, tp2, if (tp1.isRef(cls2)) approx else approx.addLow) if (cls2 == defn.SingletonClass && tp1.isStable) return true + return tryBaseType(cls2) } else if (cls2.is(JavaDefined)) { // If `cls2` is parameterized, we are seeing a raw type, so we need to compare only the symbol @@ -532,6 +548,9 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case _ => } either(recur(tp1, tp21), recur(tp1, tp22)) || fourthTry + case tp2: MatchType => + val reduced = tp2.reduced + if (reduced.exists) recur(tp1, reduced) else fourthTry case tp2: MethodType => def compareMethod = tp1 match { case tp1: MethodType => @@ -589,12 +608,23 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { fourthTry } + def tryBaseType(cls2: Symbol) = { + val base = tp1.baseType(cls2) + if (base.exists && (base `ne` tp1)) + isSubType(base, tp2, if (tp1.isRef(cls2)) approx else approx.addLow) || + base.isInstanceOf[OrType] && fourthTry + // if base is a disjunction, this might have come from a tp1 type that + // expands to a match type. In this case, we should try to reduce the type + // and compare the redux. This is done in fourthTry + else fourthTry + } + def fourthTry: Boolean = tp1 match { case tp1: TypeRef => tp1.info match { case TypeBounds(_, hi1) => def compareGADT = { - val gbounds1 = ctx.gadt.bounds(tp1.symbol) + val gbounds1 = gadtBounds(tp1.symbol) (gbounds1 != null) && (isSubTypeWhenFrozen(gbounds1.hi, tp2) || narrowGADTBounds(tp1, tp2, approx, isUpper = true)) && @@ -665,6 +695,14 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case _ => } either(recur(tp11, tp2), recur(tp12, tp2)) + case tp1: MatchType => + def compareMatch = tp2 match { + case tp2: MatchType => + isSameType(tp1.scrutinee, tp2.scrutinee) && + tp1.cases.corresponds(tp2.cases)(isSubType) + case _ => false + } + recur(tp1.underlying, tp2) || compareMatch case tp1: AnnotatedType if tp1.isRefining => isNewSubType(tp1.parent) case JavaArrayType(elem1) => @@ -792,7 +830,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { * tp1 <:< app2 using isSubType (this might instantiate params in tp2) */ def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean = - if (tycon2bounds.lo eq tycon2bounds.hi) + if ((tycon2bounds.lo `eq` tycon2bounds.hi) && !tycon2bounds.isInstanceOf[MatchAlias]) if (tyconIsTypeRef) recur(tp1, tp2.superType) else isSubApproxHi(tp1, tycon2bounds.lo.applyIfParameterized(args2)) else @@ -804,15 +842,15 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { canConstrain(param2) && canInstantiate(param2) || compareLower(bounds(param2), tyconIsTypeRef = false) case tycon2: TypeRef => - isMatchingApply(tp1) || { + isMatchingApply(tp1) || + defn.isTypelevel_S(tycon2.symbol) && compareS(tp2, tp1, fromBelow = true) || { tycon2.info match { case info2: TypeBounds => compareLower(info2, tyconIsTypeRef = true) case info2: ClassInfo => - val base = tp1.baseType(info2.cls) - if (base.exists && base.ne(tp1)) - isSubType(base, tp2, if (tp1.isRef(info2.cls)) approx else approx.addLow) - else fourthTry + tycon2.name.toString.startsWith("Tuple") && + defn.isTupleType(tp2) && isSubType(tp1, tp2.toNestedPairs) || + tryBaseType(info2.cls) case _ => fourthTry } @@ -841,14 +879,39 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } canConstrain(param1) && canInstantiate || isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2, approx.addLow) - case 
tycon1: TypeRef if tycon1.symbol.isClass => - false + case tycon1: TypeRef => + val sym = tycon1.symbol + !sym.isClass && ( + defn.isTypelevel_S(sym) && compareS(tp1, tp2, fromBelow = false) || + recur(tp1.superType, tp2)) case tycon1: TypeProxy => recur(tp1.superType, tp2) case _ => false } + /** Compare `tp` of form `S[arg]` with `other`, via ">:>` if fromBelow is true, "<:<" otherwise. + * If `arg` is a Nat constant `n`, proceed with comparing `n + 1` and `other`. + * Otherwise, if `other` is a Nat constant `n`, proceed with comparing `arg` and `n - 1`. + */ + def compareS(tp: AppliedType, other: Type, fromBelow: Boolean): Boolean = tp.args match { + case arg :: Nil => + natValue(arg) match { + case Some(n) => + val succ = ConstantType(Constant(n + 1)) + if (fromBelow) recur(other, succ) else recur(succ, other) + case none => + natValue(other) match { + case Some(n) if n > 0 => + val pred = ConstantType(Constant(n - 1)) + if (fromBelow) recur(pred, arg) else recur(arg, pred) + case none => + false + } + } + case _ => false + } + /** Like tp1 <:< tp2, but returns false immediately if we know that * the case was covered previously during subtyping. */ @@ -890,6 +953,23 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } } + /** Optionally, the `n` such that `tp <:< ConstantType(Constant(n: Int))` */ + def natValue(tp: Type): Option[Int] = constValue(tp) match { + case Some(Constant(n: Int)) if n >= 0 => Some(n) + case _ => None + } + + /** Optionally, the constant `c` such that `tp <:< ConstantType(c)` */ + def constValue(tp: Type): Option[Constant] = { + val ct = new AnyConstantType + if (isSubTypeWhenFrozen(tp, ct)) + ct.tpe match { + case ConstantType(c) => Some(c) + case _ => None + } + else None + } + /** Subtype test for corresponding arguments in `args1`, `args2` according to * variances in type parameters `tparams`. */ @@ -1131,12 +1211,12 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { gadts.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.toString} ${bound.isRef(tparam)}") if (bound.isRef(tparam)) false else { - val oldBounds = ctx.gadt.bounds(tparam) + val oldBounds = gadtBounds(tparam) val newBounds = if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound) else TypeBounds(oldBounds.lo | bound, oldBounds.hi) isSubType(newBounds.lo, newBounds.hi) && - { ctx.gadt.setBounds(tparam, newBounds); true } + { gadtSetBounds(tparam, newBounds); true } } } } @@ -1689,6 +1769,11 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { object TypeComparer { + /** Class for unification variables used in `natValue`. 
*/ + private class AnyConstantType extends UncachedGroundType with ValueType { + var tpe: Type = NoType + } + private[core] def show(res: Any)(implicit ctx: Context) = res match { case res: printing.Showable if !ctx.settings.YexplainLowlevel.value => res.show case _ => String.valueOf(res) @@ -1719,6 +1804,77 @@ object TypeComparer { } } +class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { + import state.constraint + + val footprint = mutable.Set[Type]() + + override def bounds(param: TypeParamRef): TypeBounds = { + if (param.binder `ne` caseLambda) footprint += param + super.bounds(param) + } + + override def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean): Boolean = { + if (param.binder `ne` caseLambda) footprint += param + super.addOneBound(param, bound, isUpper) + } + + override def gadtBounds(sym: Symbol)(implicit ctx: Context) = { + footprint += sym.typeRef + super.gadtBounds(sym) + } + + override def gadtSetBounds(sym: Symbol, b: TypeBounds) = { + footprint += sym.typeRef + super.gadtSetBounds(sym, b) + } + + def matchCase(scrut: Type, cas: Type, instantiate: Boolean)(implicit ctx: Context): Type = { + + def paramInstances = new TypeAccumulator[Array[Type]] { + def apply(inst: Array[Type], t: Type) = t match { + case t @ TypeParamRef(b, n) if b `eq` caseLambda => + inst(n) = approximation(t, fromBelow = variance >= 0).simplified + inst + case _ => + foldOver(inst, t) + } + } + + def instantiateParams(inst: Array[Type]) = new TypeMap { + def apply(t: Type) = t match { + case t @ TypeParamRef(b, n) if b `eq` caseLambda => inst(n) + case t: LazyRef => apply(t.ref) + case _ => mapOver(t) + } + } + + val saved = constraint + try { + inFrozenConstraint { + val cas1 = cas match { + case cas: HKTypeLambda => + caseLambda = constrained(cas) + caseLambda.resultType + case _ => + cas + } + val defn.FunctionOf(pat :: Nil, body, _, _) = cas1 + if (isSubType(scrut, pat)) + caseLambda match { + case caseLambda: HKTypeLambda if instantiate => + val instances = paramInstances(new Array(caseLambda.paramNames.length), pat) + instantiateParams(instances)(body) + case _ => + body + } + else NoType + } + } + finally constraint = saved + } +} + /** A type comparer that can record traces of subtype operations */ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { import TypeComparer._ diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 91703bb22dbf..23ddd3b31afa 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -80,35 +80,41 @@ trait TypeOps { this: Context => // TODO: Make standalone object. 
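// Illustration (not part of the patch): what `compareS`, `natValue` and the
// AnyConstantType "unification variable" above provide at the source level.
// `S` and `constValue` are the members of the scala.typelevel package object
// referenced in Definitions earlier in this diff; the precise signature of
// `constValue` is an assumption, it is not shown in this excerpt.
import scala.typelevel.{S, constValue}
val a: S[3] = 4                // 4 <: S[3]: the argument 3 is a Nat constant, so S[3] compares like 4
val b: 4 = a                   // S[3] <: 4 via the other direction of compareS
val n: Int = constValue[S[2]]  // assumed to yield 3 once S[2] normalizes to ConstantType(3)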
pre.isStable || !ctx.phase.isTyper /** Implementation of Types#simplified */ - final def simplify(tp: Type, theMap: SimplifyMap): Type = tp match { - case tp: NamedType => - if (tp.symbol.isStatic || (tp.prefix `eq` NoPrefix)) tp - else tp.derivedSelect(simplify(tp.prefix, theMap)) match { - case tp1: NamedType if tp1.denotationIsCurrent => - val tp2 = tp1.reduceProjection - //if (tp2 ne tp1) println(i"simplified $tp1 -> $tp2") - tp2 - case tp1 => tp1 - } - case tp: TypeParamRef => - if (tp.paramName.is(DepParamName)) { - val bounds = ctx.typeComparer.bounds(tp) - if (bounds.lo.isRef(defn.NothingClass)) bounds.hi else bounds.lo - } - else { - val tvar = typerState.constraint.typeVarOfParam(tp) - if (tvar.exists) tvar else tp - } - case _: ThisType | _: BoundType => - tp - case tp: TypeAlias => - tp.derivedTypeAlias(simplify(tp.alias, theMap)) - case AndType(l, r) if !ctx.mode.is(Mode.Type) => - simplify(l, theMap) & simplify(r, theMap) - case OrType(l, r) if !ctx.mode.is(Mode.Type) => - simplify(l, theMap) | simplify(r, theMap) - case _ => - (if (theMap != null) theMap else new SimplifyMap).mapOver(tp) + final def simplify(tp: Type, theMap: SimplifyMap): Type = { + def mapOver = (if (theMap != null) theMap else new SimplifyMap).mapOver(tp) + tp match { + case tp: NamedType => + if (tp.symbol.isStatic || (tp.prefix `eq` NoPrefix)) tp + else tp.derivedSelect(simplify(tp.prefix, theMap)) match { + case tp1: NamedType if tp1.denotationIsCurrent => + val tp2 = tp1.reduceProjection + //if (tp2 ne tp1) println(i"simplified $tp1 -> $tp2") + tp2 + case tp1 => tp1 + } + case tp: TypeParamRef => + if (tp.paramName.is(DepParamName)) { + val bounds = ctx.typeComparer.bounds(tp) + if (bounds.lo.isRef(defn.NothingClass)) bounds.hi else bounds.lo + } + else { + val tvar = typerState.constraint.typeVarOfParam(tp) + if (tvar.exists) tvar else tp + } + case _: ThisType | _: BoundType => + tp + case tp: AliasingBounds => + tp.derivedAlias(simplify(tp.alias, theMap)) + case AndType(l, r) if !ctx.mode.is(Mode.Type) => + simplify(l, theMap) & simplify(r, theMap) + case OrType(l, r) if !ctx.mode.is(Mode.Type) => + simplify(l, theMap) | simplify(r, theMap) + case _: AppliedType | _: MatchType => + val normed = tp.tryNormalize + if (normed.exists) normed else mapOver + case _ => + mapOver + } } class SimplifyMap extends TypeMap { diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 40b843cee067..e5c0c24daa5c 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -34,6 +34,7 @@ import annotation.tailrec import language.implicitConversions import scala.util.hashing.{ MurmurHash3 => hashing } import config.Printers.{core, typr} +import reporting.trace import java.lang.ref.WeakReference import scala.annotation.internal.sharable @@ -68,6 +69,7 @@ object Types { * | +- AnnotatedType * | +- TypeVar * | +- HKTypeLambda + * | +- MatchType * | * +- GroundType -+- AndType * +- OrType @@ -117,7 +119,7 @@ object Types { t.symbol.is(Provisional) || apply(x, t.prefix) || { t.info match { - case TypeAlias(alias) => apply(x, alias) + case info: AliasingBounds => apply(x, info.alias) case TypeBounds(lo, hi) => apply(apply(x, lo), hi) case _ => false } @@ -207,6 +209,10 @@ object Types { case tp: TypeRef => val sym = tp.symbol if (sym.isClass) sym.derivesFrom(cls) else loop(tp.superType): @tailrec + case tp: AppliedType => + tp.superType.derivesFrom(cls) + case tp: MatchType => + tp.bound.derivesFrom(cls) || 
tp.reduced.derivesFrom(cls) case tp: TypeProxy => loop(tp.underlying): @tailrec case tp: AndType => @@ -318,7 +324,7 @@ object Types { } /** Is this an alias TypeBounds? */ - final def isAlias: Boolean = this.isInstanceOf[TypeAlias] + final def isTypeAlias: Boolean = this.isInstanceOf[TypeAlias] /** Is this a MethodType which is from Java */ def isJavaMethod: Boolean = false @@ -329,6 +335,14 @@ object Types { /** Is this a MethodType for which the parameters will not be used */ def isErasedMethod: Boolean = false + /** Is this a match type or a higher-kinded abstraction of one? + */ + def isMatch(implicit ctx: Context): Boolean = stripTypeVar.stripAnnots match { + case _: MatchType => true + case tp: HKTypeLambda => tp.resType.isMatch + case _ => false + } + // ----- Higher-order combinators ----------------------------------- /** Returns true if there is a part of this type that satisfies predicate `p`. @@ -619,8 +633,8 @@ object Types { val rinfo = tp.refinedInfo if (name.isTypeName && !pinfo.isInstanceOf[ClassInfo]) { // simplified case that runs more efficiently val jointInfo = - if (rinfo.isAlias) rinfo - else if (pinfo.isAlias) pinfo + if (rinfo.isTypeAlias) rinfo + else if (pinfo.isTypeAlias) pinfo else if (ctx.base.pendingMemberSearches.contains(name)) pinfo safe_& rinfo else pinfo recoverable_& rinfo pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, jointInfo) @@ -1061,6 +1075,20 @@ object Types { /** Like `dealiasKeepAnnots`, but keeps only refining annotations */ final def dealiasKeepRefiningAnnots(implicit ctx: Context): Type = dealias1(keepIfRefining) + /** The result of normalization using `tryNormalize`, or the type itself if + * tryNormlize yields NoType + */ + final def normalized(implicit ctx: Context) = { + val normed = tryNormalize + if (normed.exists) normed else this + } + + /** If this type can be normalized at the top-level by rewriting match types + * of S[n] types, the result after applying all toplevel normalizations, + * otherwise NoType + */ + def tryNormalize(implicit ctx: Context): Type = NoType + private def widenDealias1(keep: AnnotatedType => Context => Boolean)(implicit ctx: Context): Type = { val res = this.widen.dealias1(keep) if (res eq this) res else res.widenDealias1(keep) @@ -1229,7 +1257,7 @@ object Types { */ @tailrec final def normalizedPrefix(implicit ctx: Context): Type = this match { case tp: NamedType => - if (tp.symbol.info.isAlias) tp.info.normalizedPrefix else tp.prefix + if (tp.symbol.info.isTypeAlias) tp.info.normalizedPrefix else tp.prefix case tp: ClassInfo => tp.prefix case tp: TypeProxy => @@ -3076,8 +3104,8 @@ object Types { def derivedLambdaAbstraction(paramNames: List[TypeName], paramInfos: List[TypeBounds], resType: Type)(implicit ctx: Context): Type = resType match { - case resType @ TypeAlias(alias) => - resType.derivedTypeAlias(newLikeThis(paramNames, paramInfos, alias)) + case resType: AliasingBounds => + resType.derivedAlias(newLikeThis(paramNames, paramInfos, resType.alias)) case resType @ TypeBounds(lo, hi) => resType.derivedTypeBounds( if (lo.isRef(defn.NothingClass)) lo else newLikeThis(paramNames, paramInfos, lo), @@ -3178,8 +3206,8 @@ object Types { override def fromParams[PI <: ParamInfo.Of[TypeName]](params: List[PI], resultType: Type)(implicit ctx: Context): Type = { def expand(tp: Type) = super.fromParams(params, tp) resultType match { - case rt: TypeAlias => - rt.derivedTypeAlias(expand(rt.alias)) + case rt: AliasingBounds => + rt.derivedAlias(expand(rt.alias)) case rt @ TypeBounds(lo, hi) => 
rt.derivedTypeBounds( if (lo.isRef(defn.NothingClass)) lo else expand(lo), expand(hi)) @@ -3251,6 +3279,29 @@ object Types { cachedSuper } + override def tryNormalize(implicit ctx: Context): Type = tycon match { + case tycon: TypeRef => + def tryMatchAlias = tycon.info match { + case MatchAlias(alias) => + trace("normalize $this", typr, show = true) { + alias.applyIfParameterized(args).tryNormalize + } + case _ => + NoType + } + if (defn.isTypelevel_S(tycon.symbol) && args.length == 1) { + trace("normalize S $this", typr, show = true) { + args.head.normalized match { + case ConstantType(Constant(n: Int)) => ConstantType(Constant(n + 1)) + case none => tryMatchAlias + } + } + } + else tryMatchAlias + case _ => + NoType + } + def lowerBound(implicit ctx: Context) = tycon.stripTypeVar match { case tycon: TypeRef => tycon.info match { @@ -3515,7 +3566,130 @@ object Types { type TypeVars = SimpleIdentitySet[TypeVar] - // ------ ClassInfo, Type Bounds ------------------------------------------------------------ + // ------ MatchType --------------------------------------------------------------- + + /** scrutinee match { case_1 ... case_n } + * + * where + * + * case_i = [X1, ..., Xn] patternType => resultType + * + * and `X_1,...X_n` are the type variables bound in `patternType` + */ + abstract case class MatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends CachedProxyType with ValueType { + + def derivedMatchType(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = + if (bound.eq(this.bound) && scrutinee.eq(this.scrutinee) && cases.eqElements(this.cases)) this + else MatchType(bound, scrutinee, cases) + + def caseType(tp: Type)(implicit ctx: Context): Type = tp match { + case tp: HKTypeLambda => caseType(tp.resType) + case defn.FunctionOf(_, restpe, _, _) => restpe + } + + def alternatives(implicit ctx: Context): List[Type] = cases.map(caseType) + def underlying(implicit ctx: Context): Type = bound + + private[this] var myReduced: Type = null + private[this] var reductionContext: mutable.Map[Type, TypeBounds] = null + + override def tryNormalize(implicit ctx: Context): Type = reduced.normalized + + /** Switch to choose parallel or sequential reduction */ + private final val reduceInParallel = false + + final def cantPossiblyMatch(cas: Type)(implicit ctx: Context) = + true // should be refined if we allow overlapping cases + + def reduced(implicit ctx: Context): Type = { + val trackingCtx = ctx.fresh.setTypeComparerFn(new TrackingTypeComparer(_)) + val cmp = trackingCtx.typeComparer.asInstanceOf[TrackingTypeComparer] + + def reduceSequential(cases: List[Type])(implicit ctx: Context): Type = cases match { + case Nil => NoType + case cas :: cases1 => + val r = cmp.matchCase(scrutinee, cas, instantiate = true) + if (r.exists) r + else if (cantPossiblyMatch(cas)) reduceSequential(cases1) + else NoType + } + + def reduceParallel(implicit ctx: Context) = { + val applicableBranches = cases + .map(cmp.matchCase(scrutinee, _, instantiate = true)(trackingCtx)) + .filter(_.exists) + applicableBranches match { + case Nil => NoType + case applicableBranch :: Nil => applicableBranch + case _ => + record(i"MatchType.multi-branch") + ctx.typeComparer.glb(applicableBranches) + } + } + + def isRelevant(tp: Type) = tp match { + case tp: TypeParamRef => ctx.typerState.constraint.entry(tp).exists + case tp: TypeRef => ctx.gadt.bounds.contains(tp.symbol) + } + + def contextBounds(tp: Type): TypeBounds = tp match { + case tp: TypeParamRef => ctx.typerState.constraint.fullBounds(tp) 
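// Worked example (illustrative, not from the patch) of how `reduced` proceeds for
//   Elem[Array[Int]]  with  type Elem[X] = X match { case String => Char; case Array[t] => t }
// 1. AppliedType.tryNormalize sees the MatchAlias and asks this MatchType for its
//    reduction, with scrutinee Array[Int].
// 2. reduceSequential tries `case String => Char` first: matchCase fails because
//    Array[Int] is not a subtype of String, and cantPossiblyMatch (currently always
//    true) lets reduction move on to the next case.
// 3. For `case Array[t] => t`, matchCase turns the case's HKTypeLambda into a
//    constrained caseLambda; isSubType(Array[Int], Array[t]) instantiates t := Int,
//    and the instantiated body Int is returned as the reduction.
// 4. The result is cached (Config.cacheMatchReduced): `footprint` records which
//    constraint entries and GADT bounds were consulted, and `upToDate` drops the
//    cache when any of those bounds have changed since.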
+ case tp: TypeRef => ctx.gadt.bounds(tp.symbol) + } + + def updateReductionContext() = { + reductionContext = new mutable.HashMap + for (tp <- cmp.footprint if isRelevant(tp)) + reductionContext(tp) = contextBounds(tp) + } + + def upToDate = + cmp.footprint.forall { tp => + !isRelevant(tp) || { + reductionContext.get(tp) match { + case Some(bounds) => bounds `eq` contextBounds(tp) + case None => false + } + } + } + + record("MatchType.reduce called") + if (!Config.cacheMatchReduced || myReduced == null || !upToDate) { + record("MatchType.reduce computed") + if (myReduced != null) record("MatchType.reduce cache miss") + myReduced = + trace(i"reduce match type $this", typr, show = true) { + try + if (defn.isBottomType(scrutinee)) defn.NothingType + else if (reduceInParallel) reduceParallel(trackingCtx) + else reduceSequential(cases)(trackingCtx) + catch { + case ex: Throwable => + handleRecursive("reduce type ", i"$scrutinee match ...", ex) + } + } + updateReductionContext() + } + myReduced + } + + override def computeHash(bs: Binders) = doHash(bs, scrutinee, bound :: cases) + + override def eql(that: Type) = that match { + case that: MatchType => + bound.eq(that.bound) && scrutinee.eq(that.scrutinee) && cases.eqElements(that.cases) + case _ => false + } + } + + class CachedMatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends MatchType(bound, scrutinee, cases) + + object MatchType { + def apply(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = + unique(new CachedMatchType(bound, scrutinee, cases)) + } + + // ------ ClassInfo, Type Bounds -------------------------------------------------- type TypeOrSymbol = AnyRef /* should be: Type | Symbol */ @@ -3682,13 +3856,13 @@ object Types { override def equals(that: Any): Boolean = equals(that, null) override def iso(that: Any, bs: BinderPairs): Boolean = that match { - case that: TypeAlias => false + case that: AliasingBounds => false case that: TypeBounds => lo.equals(that.lo, bs) && hi.equals(that.hi, bs) case _ => false } override def eql(that: Type) = that match { - case that: TypeAlias => false + case that: AliasingBounds => false case that: TypeBounds => lo.eq(that.lo) && hi.eq(that.hi) case _ => false } @@ -3696,28 +3870,44 @@ object Types { class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) - abstract class TypeAlias(val alias: Type) extends TypeBounds(alias, alias) { + /** Common supertype of `TypeAlias` and `MatchAlias` */ + abstract class AliasingBounds(val alias: Type) extends TypeBounds(alias, alias) { - /** pre: this is a type alias */ - def derivedTypeAlias(alias: Type)(implicit ctx: Context) = - if (alias eq this.alias) this else TypeAlias(alias) + def derivedAlias(alias: Type)(implicit ctx: Context): AliasingBounds override def computeHash(bs: Binders) = doHash(bs, alias) override def stableHash = alias.stableHash override def iso(that: Any, bs: BinderPairs): Boolean = that match { - case that: TypeAlias => alias.equals(that.alias, bs) + case that: AliasingBounds => this.isTypeAlias == that.isTypeAlias && alias.equals(that.alias, bs) case _ => false } // equals comes from case class; no matching override is needed override def eql(that: Type): Boolean = that match { - case that: TypeAlias => alias.eq(that.alias) + case that: AliasingBounds => this.isTypeAlias == that.isTypeAlias && alias.eq(that.alias) case _ => false } } - class CachedTypeAlias(alias: Type) extends TypeAlias(alias) + /** = T + */ + class TypeAlias(alias: Type) extends AliasingBounds(alias) { + def 
derivedAlias(alias: Type)(implicit ctx: Context) = + if (alias eq this.alias) this else TypeAlias(alias) + } + + /** = T where `T` is a `MatchType` + * + * Match aliases are treated differently from type aliases. Their sides are mutually + * subtypes of each other but one side is not generally substitutable for the other. + * If we assumed full substitutivity, we would have to reject all recursive match + * aliases (or else take the jump and allow full recursive types). + */ + class MatchAlias(alias: Type) extends AliasingBounds(alias) { + def derivedAlias(alias: Type)(implicit ctx: Context) = + if (alias eq this.alias) this else MatchAlias(alias) + } object TypeBounds { def apply(lo: Type, hi: Type)(implicit ctx: Context): TypeBounds = @@ -3728,11 +3918,15 @@ object Types { } object TypeAlias { - def apply(alias: Type)(implicit ctx: Context) = - unique(new CachedTypeAlias(alias)) + def apply(alias: Type)(implicit ctx: Context) = unique(new TypeAlias(alias)) def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias) } + object MatchAlias { + def apply(alias: Type)(implicit ctx: Context) = unique(new MatchAlias(alias)) + def unapply(tp: MatchAlias): Option[Type] = Some(tp.alias) + } + // ----- Annotated and Import types ----------------------------------------------- /** An annotated type tpe @ annot */ @@ -3934,8 +4128,8 @@ object Types { def apply(tp: Type): Type = tp match { case tp: TypeRef if tp.symbol.is(ClassTypeParam) && tp.symbol.owner == cls => tp.info match { - case TypeAlias(alias) => - mapOver(alias) + case info: AliasingBounds => + mapOver(info.alias) case TypeBounds(lo, hi) => range(atVariance(-variance)(apply(lo)), apply(hi)) case _ => @@ -3991,8 +4185,8 @@ object Types { tp.derivedRefinedType(parent, tp.refinedName, info) protected def derivedRecType(tp: RecType, parent: Type): Type = tp.rebind(parent) - protected def derivedTypeAlias(tp: TypeAlias, alias: Type): Type = - tp.derivedTypeAlias(alias) + protected def derivedAlias(tp: AliasingBounds, alias: Type): Type = + tp.derivedAlias(alias) protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type = tp.derivedTypeBounds(lo, hi) protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type = @@ -4003,6 +4197,8 @@ object Types { tp.derivedAndType(tp1, tp2) protected def derivedOrType(tp: OrType, tp1: Type, tp2: Type): Type = tp.derivedOrType(tp1, tp2) + protected def derivedMatchType(tp: MatchType, bound: Type, scrutinee: Type, cases: List[Type]): Type = + tp.derivedMatchType(bound, scrutinee, cases) protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type = tp.derivedAnnotatedType(underlying, annot) protected def derivedWildcardType(tp: WildcardType, bounds: Type): Type = @@ -4057,8 +4253,8 @@ object Types { case tp: RefinedType => derivedRefinedType(tp, this(tp.parent), this(tp.refinedInfo)) - case tp: TypeAlias => - derivedTypeAlias(tp, atVariance(0)(this(tp.alias))) + case tp: AliasingBounds => + derivedAlias(tp, atVariance(0)(this(tp.alias))) case tp: TypeBounds => variance = -variance @@ -4100,6 +4296,9 @@ object Types { case tp: OrType => derivedOrType(tp, this(tp.tp1), this(tp.tp2)) + case tp: MatchType => + derivedMatchType(tp, this(tp.bound), this(tp.scrutinee), tp.cases.mapConserve(this)) + case tp: SkolemType => tp @@ -4274,7 +4473,7 @@ object Types { else info match { case Range(infoLo: TypeBounds, infoHi: TypeBounds) => assert(variance == 0) - if (!infoLo.isAlias && !infoHi.isAlias) propagate(infoLo, infoHi) + if (!infoLo.isTypeAlias && 
!infoHi.isTypeAlias) propagate(infoLo, infoHi) else range(defn.NothingType, tp.parent) case Range(infoLo, infoHi) => propagate(infoLo, infoHi) @@ -4290,13 +4489,13 @@ object Types { case _ => tp.rebind(parent) } - override protected def derivedTypeAlias(tp: TypeAlias, alias: Type) = + override protected def derivedAlias(tp: AliasingBounds, alias: Type) = if (alias eq tp.alias) tp else alias match { case Range(lo, hi) => if (variance > 0) TypeBounds(lo, hi) - else range(TypeAlias(lo), TypeAlias(hi)) - case _ => tp.derivedTypeAlias(alias) + else range(tp.derivedAlias(lo), tp.derivedAlias(hi)) + case _ => tp.derivedAlias(alias) } override protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type) = @@ -4474,6 +4673,9 @@ object Types { case tp: OrType => this(this(x, tp.tp1), tp.tp2) + case tp: MatchType => + foldOver(this(this(x, tp.bound), tp.scrutinee), tp.cases) + case AnnotatedType(underlying, annot) => this(applyToAnnot(x, annot), underlying) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index f14f0c76019a..7de5d2f76aa4 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -101,11 +101,12 @@ Standard-Section: "ASTs" TopLevelStat* SINGLETONtpt ref_Term REFINEDtpt Length underlying_Term refinement_Stat* APPLIEDtpt Length tycon_Term arg_Term* - POLYtpt Length TypeParam* body_Term + LAMBDAtpt Length TypeParam* body_Term TYPEBOUNDStpt Length low_Term high_Term? ANNOTATEDtpt Length underlying_Term fullAnnotation_Term ANDtpt Length left_Term right_Term ORtpt Length left_Term right_Term + MATCHtpt Length bound_Term? sel_Term CaseDef* BYNAMEtpt underlying_Term EMPTYTREE SHAREDterm term_ASTRef @@ -157,6 +158,7 @@ Standard-Section: "ASTs" TopLevelStat* ANNOTATEDtype Length underlying_Type fullAnnotation_Term ANDtype Length left_Type right_Type ORtype Length left_Type right_Type + MATCHtype Length bound_Type sel_Type case_Type* BIND Length boundName_NameRef bounds_Type // for type-variables defined in a type pattern BYNAMEtype underlying_Type @@ -431,6 +433,9 @@ object TastyFormat { final val ERASEDMETHODtype = 178 final val ERASEDIMPLICITMETHODtype = 179 + final val MATCHtype = 180 + final val MATCHtpt = 181 + final val UNTYPEDSPLICE = 199 // Tags for untyped trees only: @@ -459,7 +464,7 @@ object TastyFormat { firstNatTreeTag <= tag && tag <= SYMBOLconst || firstASTTreeTag <= tag && tag <= SINGLETONtpt || firstNatASTTreeTag <= tag && tag <= NAMEDARG || - firstLengthTreeTag <= tag && tag <= TYPEREFin || + firstLengthTreeTag <= tag && tag <= MATCHtpt || tag == HOLE def isParamTag(tag: Int) = tag == PARAM || tag == TYPEPARAM @@ -513,6 +518,7 @@ object TastyFormat { | ANDtpt | ORtpt | BYNAMEtpt + | MATCHtpt | BIND => true case _ => false } @@ -648,6 +654,8 @@ object TastyFormat { case ERASEDIMPLICITMETHODtype => "ERASEDIMPLICITMETHODtype" case TYPELAMBDAtype => "TYPELAMBDAtype" case LAMBDAtpt => "LAMBDAtpt" + case MATCHtype => "MATCHtype" + case MATCHtpt => "MATCHtpt" case PARAMtype => "PARAMtype" case ANNOTATION => "ANNOTATION" case PRIVATEqualified => "PRIVATEqualified" diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 3df4dca3f33d..ed4ae37592c7 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -248,6 +248,13 @@ class TreePickler(pickler: 
TastyPickler) { pickleType(tpe.underlying) case tpe: HKTypeLambda => pickleMethodic(TYPELAMBDAtype, tpe) + case tpe: MatchType => + writeByte(MATCHtype) + withLength { + pickleType(tpe.bound) + pickleType(tpe.scrutinee) + tpe.cases.foreach(pickleType(_)) + } case tpe: PolyType if richTypes => pickleMethodic(POLYtype, tpe) case tpe: MethodType if richTypes => @@ -531,6 +538,13 @@ class TreePickler(pickler: TastyPickler) { case OrTypeTree(tp1, tp2) => writeByte(ORtpt) withLength { pickleTree(tp1); pickleTree(tp2) } + case MatchTypeTree(bound, selector, cases) => + writeByte(MATCHtpt) + withLength { + if (!bound.isEmpty) pickleTree(bound) + pickleTree(selector) + cases.foreach(pickleTree) + } case ByNameTypeTree(tp) => writeByte(BYNAMEtpt) pickleTree(tp) @@ -838,6 +852,13 @@ class TreePickler(pickler: TastyPickler) { case Annotated(tree, annot) => writeByte(ANNOTATEDtpt) withLength { pickleTpt(tree); pickleTerm(annot) } + case MatchTypeTree(bound, selector, cases) => + writeByte(MATCHtpt) + withLength { + if (!bound.isEmpty) pickleTpt(bound) + pickleTpt(selector) + cases.foreach(pickleUntyped) + } case LambdaTypeTree(tparams, body) => writeByte(LAMBDAtpt) withLength { pickleParams(tparams); pickleTpt(body) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 767edd9a6a84..a453fc5184e1 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -325,7 +325,10 @@ class TreeUnpickler(reader: TastyReader, case APPLIEDtype => readType().appliedTo(until(end)(readType())) case TYPEBOUNDS => - TypeBounds(readType(), readType()) + val lo = readType() + val hi = readType() + if (lo.isMatch && (lo `eq` hi)) MatchAlias(lo) + else TypeBounds(lo, hi) case ANNOTATEDtype => AnnotatedType(readType(), Annotation(readTerm())) case ANDtype => @@ -334,6 +337,8 @@ class TreeUnpickler(reader: TastyReader, OrType(readType(), readType()) case SUPERtype => SuperType(readType(), readType()) + case MATCHtype => + MatchType(readType(), readType(), until(end)(readType())) case POLYtype => readMethodic(PolyType, _.toTypeName) case METHODtype => @@ -795,7 +800,7 @@ class TreeUnpickler(reader: TastyReader, } sym.info = rhs.tpe match { case _: TypeBounds | _: ClassInfo => checkNonCyclic(sym, rhs.tpe, reportErrors = false) - case _ => TypeAlias(rhs.tpe) + case _ => rhs.tpe.toBounds } sym.resetFlag(Provisional) TypeDef(rhs) @@ -1127,6 +1132,11 @@ class TreeUnpickler(reader: TastyReader, val tparams = readParams[TypeDef](TYPEPARAM) val body = readTpt() LambdaTypeTree(tparams, body) + case MATCHtpt => + val fst = readTpt() + val (bound, scrut) = + if (nextUnsharedTag == CASEDEF) (EmptyTree, fst) else (fst, readTpt()) + MatchTypeTree(bound, scrut, readCases(end)) case TYPEBOUNDStpt => val lo = readTpt() val hi = if (currentAddr == end) lo else readTpt() @@ -1369,6 +1379,11 @@ class TreeUnpickler(reader: TastyReader, val tparams = readParams[TypeDef](TYPEPARAM) val body = readUntyped() untpd.LambdaTypeTree(tparams, body) + case MATCHtpt => + val fst = readUntyped() + val (bound, scrut) = + if (nextUnsharedTag == CASEDEF) (EmptyTree, fst) else (fst, readUntyped()) + untpd.MatchTypeTree(bound, scrut, readCases(end)) case TYPEBOUNDStpt => val lo = readUntyped() val hi = ifBefore(end)(readUntyped(), lo) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index f4abf16104bf..b9478b373a9c 100644 --- 
a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -819,6 +819,7 @@ object Parsers { in.token match { case ARROW => functionRest(t :: Nil) + case MATCH => matchType(EmptyTree, t) case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t case _ => if (imods.is(Implicit) && !t.isInstanceOf[FunctionWithMods]) @@ -1288,7 +1289,7 @@ object Parsers { */ def matchExpr(t: Tree, start: Offset, mkMatch: (Tree, List[CaseDef]) => Match) = atPos(start, in.skipToken()) { - inBraces(mkMatch(t, caseClauses())) + inBraces(mkMatch(t, caseClauses(caseClause))) } /** `match' { ImplicitCaseClauses } @@ -1315,6 +1316,13 @@ object Parsers { result } + /** `match' { TypeCaseClauses } + */ + def matchType(bound: Tree, t: Tree) = + atPos((if (bound.isEmpty) t else bound).pos.start, accept(MATCH)) { + inBraces(MatchTypeTree(bound, t, caseClauses(typeCaseClause))) + } + /** FunParams ::= Bindings * | id * | `_' @@ -1531,7 +1539,7 @@ object Parsers { */ def blockExpr(): Tree = atPos(in.offset) { inDefScopeBraces { - if (in.token == CASE) Match(EmptyTree, caseClauses()) + if (in.token == CASE) Match(EmptyTree, caseClauses(caseClause)) else block() } } @@ -1621,22 +1629,34 @@ object Parsers { /** CaseClauses ::= CaseClause {CaseClause} * ImplicitCaseClauses ::= ImplicitCaseClause {ImplicitCaseClause} + * TypeCaseClauses ::= TypeCaseClause {TypeCaseClause} */ - def caseClauses(): List[CaseDef] = { + def caseClauses(clause: () => CaseDef): List[CaseDef] = { val buf = new ListBuffer[CaseDef] - buf += caseClause() - while (in.token == CASE) buf += caseClause() + buf += clause() + while (in.token == CASE) buf += clause() buf.toList } - /** CaseClause ::= case Pattern [Guard] `=>' Block - * ImplicitCaseClause ::= case PatVar [Ascription] [Guard] `=>' Block + /** CaseClause ::= ‘case’ Pattern [Guard] `=>' Block + * ImplicitCaseClause ::= ‘case’ PatVar [Ascription] [Guard] `=>' Block */ - def caseClause(): CaseDef = atPos(in.offset) { + val caseClause = () => atPos(in.offset) { accept(CASE) CaseDef(pattern(), guard(), atPos(accept(ARROW)) { block() }) } + /** TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [nl] + */ + val typeCaseClause = () => atPos(in.offset) { + accept(CASE) + CaseDef(infixType(), EmptyTree, atPos(accept(ARROW)) { + val t = typ() + if (isStatSep) in.nextToken() + t + }) + } + /* -------- PATTERNS ------------------------------------------- */ /** Pattern ::= Pattern1 { `|' Pattern1 } @@ -2261,20 +2281,30 @@ object Parsers { Block(stats, Literal(Constant(()))) } - /** TypeDef ::= type id [TypeParamClause] `=' Type - * TypeDcl ::= type id [TypeParamClause] TypeBounds + /** TypeDcl ::= id [TypeParamClause] (TypeBounds | ‘=’ Type) + * | id [TypeParamClause] <: Type = MatchType */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atPos(start, nameStart) { val name = ident().toTypeName val tparams = typeParamClauseOpt(ParamOwner.Type) + def makeTypeDef(rhs: Tree): Tree = + TypeDef(name, lambdaAbstract(tparams, rhs)).withMods(mods).setComment(in.getDocComment(start)) in.token match { case EQUALS => in.nextToken() - TypeDef(name, lambdaAbstract(tparams, toplevelTyp())).withMods(mods).setComment(in.getDocComment(start)) - case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF => - TypeDef(name, lambdaAbstract(tparams, typeBounds())).withMods(mods).setComment(in.getDocComment(start)) + makeTypeDef(toplevelTyp()) + case SUBTYPE => + in.nextToken() + val bound = toplevelTyp() + if (in.token == EQUALS) { + 
in.nextToken() + makeTypeDef(matchType(bound, infixType())) + } + else makeTypeDef(TypeBoundsTree(EmptyTree, bound)) + case SUPERTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF => + makeTypeDef(typeBounds()) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) EmptyTree diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 778650f1ba1b..19ed444e5e7a 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -164,6 +164,14 @@ class PlainPrinter(_ctx: Context) extends Printer { changePrec(AndTypePrec) { toText(tp1) ~ " & " ~ atPrec(AndTypePrec + 1) { toText(tp2) } } case OrType(tp1, tp2) => changePrec(OrTypePrec) { toText(tp1) ~ " | " ~ atPrec(OrTypePrec + 1) { toText(tp2) } } + case MatchType(bound, scrutinee, cases) => + changePrec(GlobalPrec) { + def caseText(tp: Type): Text = "case " ~ toText(tp) + def casesText = Text(cases.map(caseText), "\n") + atPrec(InfixPrec) { toText(scrutinee) } ~ + keywordStr(" match ") ~ "{" ~ casesText ~ "}" ~ + (" <: " ~ toText(bound) provided !bound.isRef(defn.AnyClass)) + }.close case tp: ErrorType => s"" case tp: WildcardType => @@ -198,7 +206,7 @@ class PlainPrinter(_ctx: Context) extends Printer { val bounds = if (constr.contains(tp)) constr.fullBounds(tp.origin)(ctx.addMode(Mode.Printing)) else TypeBounds.empty - if (bounds.isAlias) toText(bounds.lo) ~ (Str("^") provided ctx.settings.YprintDebug.value) + if (bounds.isTypeAlias) toText(bounds.lo) ~ (Str("^") provided ctx.settings.YprintDebug.value) else if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" ~ toText(bounds) ~ ")" else toText(tp.origin) } @@ -310,7 +318,7 @@ class PlainPrinter(_ctx: Context) extends Printer { /** String representation of a definition's type following its name */ protected def toTextRHS(tp: Type): Text = controlled { homogenize(tp) match { - case tp: TypeAlias => + case tp: AliasingBounds => " = " ~ toText(tp.alias) case tp @ TypeBounds(lo, hi) => (if (lo isRef defn.NothingClass) Text() else " >: " ~ toText(lo)) ~ diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 5f42fa346f27..ecf85a649c88 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -24,7 +24,8 @@ import TypeApplications._ import Decorators._ import config.Config import util.Positions._ -import dotty.tools.dotc.transform.SymUtils._ +import transform.SymUtils._ +import transform.TypeUtils._ import dotty.tools.dotc.transform.FirstTransform import scala.annotation.switch @@ -176,11 +177,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } homogenize(tp) match { - case AppliedType(tycon, args) => + case tp @ AppliedType(tycon, args) => val cls = tycon.typeSymbol if (tycon.isRepeatedParam) return toTextLocal(args.head) ~ "*" if (defn.isFunctionClass(cls)) return toTextFunction(args, cls.name.isImplicitFunction, cls.name.isErasedFunction) - if (defn.isTupleClass(cls)) return toTextTuple(args) + if (tp.tupleArity >= 2) return toTextTuple(tp.tupleElementTypes) if (isInfixType(tp)) { val l :: r :: Nil = args val opName = tyconName(tycon) @@ -412,6 +413,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec) { tparamsText(tparams) ~ " -> " ~ toText(body) } + case MatchTypeTree(bound, sel, 
cases) => + changePrec(GlobalPrec) { + toText(sel) ~ keywordStr(" match ") ~ blockText(cases) ~ + (" <: " ~ toText(bound) provided !bound.isEmpty) + } case ByNameTypeTree(tpt) => "=> " ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi) => diff --git a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala index 84d0cae359cd..700bc8bcccb4 100644 --- a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala @@ -32,7 +32,7 @@ class ReplPrinter(_ctx: Context) extends DecompilerPrinter(_ctx) { override def dclText(sym: Symbol): Text = { toText(sym) ~ { if (sym.is(Method)) toText(sym.info) - else if (sym.isType && sym.info.isInstanceOf[TypeAlias]) toText(sym.info) + else if (sym.isType && sym.info.isTypeAlias) toText(sym.info) else if (sym.isType || sym.isClass) "" else ":" ~~ toText(sym.info) } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index ce3ce629a70a..d1fbf9f2e39a 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -166,7 +166,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder api.Annotation.of(api.Constant.of(Constants.emptyType, name), Array()) val orMarker = marker("Or") val byNameMarker = marker("ByName") - + val matchMarker = marker("Match") /** Extract the API representation of a source file */ def apiSource(tree: Tree): Seq[api.ClassLike] = { @@ -507,6 +507,9 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder withMarker(s, orMarker) case ExprType(resultType) => withMarker(apiType(resultType), byNameMarker) + case MatchType(bound, scrut, cases) => + val s = combineApiTypes(apiType(bound) :: apiType(scrut) :: cases.map(apiType): _*) + withMarker(s, matchMarker) case ConstantType(constant) => api.Constant.of(apiType(constant.tpe), constant.stringValue) case AnnotatedType(tpe, annot) => diff --git a/compiler/src/dotty/tools/dotc/tastyreflect/StandardDefinitions.scala b/compiler/src/dotty/tools/dotc/tastyreflect/StandardDefinitions.scala index 00cc6df81ddb..9d540858dab1 100644 --- a/compiler/src/dotty/tools/dotc/tastyreflect/StandardDefinitions.scala +++ b/compiler/src/dotty/tools/dotc/tastyreflect/StandardDefinitions.scala @@ -59,7 +59,6 @@ trait StandardDefinitions extends scala.tasty.reflect.StandardDefinitions { defn.FunctionClass(arity, isImplicit, isErased).asClass def TupleClass(arity: Int): Symbol = defn.TupleType(arity).classSymbol.asClass - def ScalaPrimitiveValueClasses: List[Symbol] = UnitClass :: BooleanClass :: ScalaNumericValueClasses def ScalaNumericValueClasses: List[Symbol] = diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 41dd824027d2..e95f90348e03 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -141,6 +141,7 @@ class Erasure extends Phase with DenotTransformer { assert(isErasedType(tp) || isAllowed(defn.ArrayClass, "Array.scala") || isAllowed(defn.TupleClass, "Tuple.scala") || + isAllowed(defn.NonEmptyTupleClass, "Tuple.scala") || isAllowed(defn.PairClass, "Tuple.scala"), i"The type $tp - ${tp.toString} of class ${tp.getClass} of tree $tree : ${tree.tpe} / ${tree.getClass} is illegal after erasure, phase = ${ctx.phase.prev}") } diff --git 
a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index dce23de8f09d..baa1cbe926c5 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -198,7 +198,7 @@ object GenericSignatures { assert(!sym.isAliasType, "Unexpected alias type: " + sym) typeParamSig(sym.name.lastPart) } - else if (sym == defn.AnyClass || sym == defn.AnyValClass || sym == defn.SingletonClass) + else if (defn.erasedToObject.contains(sym)) jsig(defn.ObjectType) else if (sym == defn.UnitClass || sym == defn.BoxedUnitModule) jsig(defn.BoxedUnitType) diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala index f32b9df793e8..d1e7dce35c53 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala @@ -50,5 +50,9 @@ object TypeUtils { else if (defn.isTupleClass(tp1.classSymbol)) tp1.dealias.argInfos else throw new AssertionError("not a tuple") } + + /** The `*:` equivalent of an instance of a Tuple class */ + def toNestedPairs(implicit ctx: Context): Type = + (tupleElementTypes :\ (defn.UnitType: Type))(defn.PairType.appliedTo(_, _)) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index bce6fb03d7eb..7a1a3564bb56 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -877,7 +877,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic => val ttree = typedType(untpd.rename(tree, tree.name.toTypeName))(nestedCtx) ttree.tpe match { - case alias: TypeRef if alias.info.isAlias && !nestedCtx.reporter.hasErrors => + case alias: TypeRef if alias.info.isTypeAlias && !nestedCtx.reporter.hasErrors => companionRef(alias) match { case companion: TermRef => return untpd.ref(companion) withPos tree.pos case _ => diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 31281b413f88..de7b21f30366 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -165,29 +165,30 @@ object Checking { /** The last type top-level type checked when a CyclicReference occurs. */ var lastChecked: Type = NoType + private def checkPart(tp: Type, w: String) = + try apply(tp) + finally { + where = w + lastChecked = tp + } + + private def checkUpper(tp: Type, w: String) = { + val saved = nestedCycleOK + nestedCycleOK = true + try checkPart(tp, w) + finally nestedCycleOK = saved + } + + /** Check info `tp` for cycles. Throw CyclicReference for illegal cycles, * break direct cycle with a LazyRef for legal, F-bounded cycles.
*/ def checkInfo(tp: Type): Type = tp match { case tp @ TypeAlias(alias) => - try tp.derivedTypeAlias(apply(alias)) - finally { - where = "alias" - lastChecked = alias - } + tp.derivedAlias(checkPart(alias, "alias")) + case tp @ MatchAlias(alias) => + tp.derivedAlias(checkUpper(alias, "match")) case tp @ TypeBounds(lo, hi) => - val lo1 = try apply(lo) finally { - where = "lower bound" - lastChecked = lo - } - val saved = nestedCycleOK - nestedCycleOK = true - try tp.derivedTypeBounds(lo1, apply(hi)) - finally { - nestedCycleOK = saved - where = "upper bound" - lastChecked = hi - } + tp.derivedTypeBounds(checkPart(lo, "lower bound"), checkUpper(hi, "upper bound")) case _ => tp } @@ -209,7 +210,7 @@ object Checking { this(tp.info) mapOver(tp) case tp @ AppliedType(tycon, args) => - tp.derivedAppliedType(this(tycon), args.map(this(_, nestedCycleOK, nestedCycleOK))) + tp.derivedAppliedType(this(tycon), args.mapConserve(this(_, nestedCycleOK, nestedCycleOK))) case tp @ RefinedType(parent, name, rinfo) => tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK)) case tp: RecType => @@ -476,7 +477,7 @@ object Checking { tp } else mapOver(tp) - if ((errors ne prevErrors) && !sym.isType && tp.info.isAlias) { + if ((errors ne prevErrors) && !sym.isType && tp.info.isTypeAlias) { // try to dealias to avoid a leak error val savedErrors = errors errors = prevErrors diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 476f1b7c079f..2e210cdb7db5 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1202,7 +1202,7 @@ class SearchHistory(val searchDepth: Int, val seen: Map[ClassSymbol, Int]) { foldOver(n + 1, tp) case tp: RefinedType => foldOver(n + 1, tp) - case tp: TypeRef if tp.info.isAlias => + case tp: TypeRef if tp.info.isTypeAlias => apply(n, tp.superType) case _ => foldOver(n, tp) diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 5f4b1d5a76b9..4dca9ab9483f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -349,8 +349,29 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { else result } + def tryConstValue: Tree = + ctx.typeComparer.constValue(callTypeArgs.head.tpe) match { + case Some(c) => Literal(c).withPos(call.pos) + case _ => EmptyTree + } + /** The Inlined node representing the inlined call */ - def inlined(pt: Type) = { + def inlined(pt: Type): Tree = { + + if (callTypeArgs.length == 1) + if (inlinedMethod == defn.Typelevel_constValue) { + val constVal = tryConstValue + if (!constVal.isEmpty) return constVal + ctx.error(i"not a constant type: ${callTypeArgs.head}; cannot take constValue", call.pos) + } + else if (inlinedMethod == defn.Typelevel_constValueOpt) { + val constVal = tryConstValue + return ( + if (constVal.isEmpty) ref(defn.NoneModuleRef) + else New(defn.SomeClass.typeRef.appliedTo(constVal.tpe), constVal :: Nil) + ) + } + // Compute bindings for all parameters, appending them to bindingsBuf computeParamBindings(inlinedMethod.info, callTypeArgs, callValueArgss) @@ -393,7 +414,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } case tree: Ident => paramProxy.get(tree.tpe) match { - case Some(t) if tree.isTerm && t.isSingleton => singleton(t).withPos(tree.pos) + case Some(t) if tree.isTerm && t.isSingleton => 
singleton(t.dealias).withPos(tree.pos) + case Some(t) if tree.isType => TypeTree(t).withPos(tree.pos) + case _ => tree } @@ -425,6 +446,34 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { expansion } + def issueError() = callValueArgss match { + case (msgArg :: rest) :: Nil => + msgArg.tpe match { + case ConstantType(Constant(msg: String)) => + // Usually `error` is called from within a rewrite method. In this + // case we need to report the error at the point of the outermost enclosing inline + // call. This way, a defensively written rewrite method can always + // report bad inputs at the point of call instead of revealing its internals. + val callToReport = if (enclosingInlineds.nonEmpty) enclosingInlineds.last else call + val ctxToReport = ctx.outersIterator.dropWhile(enclosingInlineds(_).nonEmpty).next + def issueInCtx(implicit ctx: Context) = { + def decompose(arg: Tree): String = arg match { + case Typed(arg, _) => decompose(arg) + case SeqLiteral(elems, _) => elems.map(decompose).mkString(", ") + case arg => + arg.tpe.widenTermRefExpr match { + case ConstantType(Constant(c)) => c.toString + case _ => arg.show + } + } + ctx.error(s"$msg${rest.map(decompose).mkString(", ")}", callToReport.pos) + } + issueInCtx(ctxToReport) + case _ => + } + case _ => + } + trace(i"inlining $call", inlining, show = true) { // The normalized bindings collected in `bindingsBuf` @@ -444,6 +493,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { val (finalBindings, finalExpansion) = dropUnusedDefs(bindingsBuf.toList ++ matchBindings, expansion1) val (finalMatchBindings, finalArgBindings) = finalBindings.partition(matchBindings.contains(_)) + if (inlinedMethod == defn.Typelevel_error) issueError() + // Take care that only argument bindings go into `bindings`, since positions are // different for bindings from arguments and bindings from body. tpd.Inlined(call, finalArgBindings, seq(finalMatchBindings, finalExpansion)) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 1421bad81b34..9dcf519b157f 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -447,14 +447,17 @@ object ProtoTypes { * If the constraint contains already some of these parameters in its domain, * make a copy of the type lambda and add the copy's type parameters instead. * Return either the original type lambda, or the copy, if one was made. - * Also, if `owningTree` is non-empty, add a type variable for each parameter. + * Also, if `owningTree` is non-empty or `alwaysAddTypeVars` is true, add a type variable + * for each parameter. + * @return The added type lambda, and the list of created type variables.
*/ def constrained(tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean = false)(implicit ctx: Context): (TypeLambda, List[TypeTree]) = { val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty - assert(!(ctx.typerState.isCommittable && !addTypeVars), - s"inconsistent: no typevars were added to committable constraint ${state.constraint}") + if (tl.isInstanceOf[PolyType]) + assert(!(ctx.typerState.isCommittable && !addTypeVars), + s"inconsistent: no typevars were added to committable constraint ${state.constraint}") + // hk type lambdas can be added to constraints without typevars during match reduction def newTypeVars(tl: TypeLambda): List[TypeTree] = for (paramRef <- tl.paramRefs) @@ -573,8 +576,8 @@ object ProtoTypes { wildApprox(tp.parent, theMap, seen), tp.refinedName, wildApprox(tp.refinedInfo, theMap, seen)) - case tp: TypeAlias => // default case, inlined for speed - tp.derivedTypeAlias(wildApprox(tp.alias, theMap, seen)) + case tp: AliasingBounds => // default case, inlined for speed + tp.derivedAlias(wildApprox(tp.alias, theMap, seen)) case tp @ TypeParamRef(poly, pnum) => def wildApproxBounds(bounds: TypeBounds) = if (seen.contains(tp)) WildcardType diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 1f2a7a4813d1..9bad43a3d740 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -99,8 +99,8 @@ trait TypeAssigner { } case tp: TypeRef if toAvoid(tp.symbol) => tp.info match { - case TypeAlias(alias) => - apply(alias) + case info: AliasingBounds => + apply(info.alias) case TypeBounds(lo, hi) => range(atVariance(-variance)(apply(lo)), apply(hi)) case info: ClassInfo => @@ -460,10 +460,24 @@ trait TypeAssigner { if (target.isEmpty) meth.tpe.widen.toFunctionType(tree.env.length) else target.tpe) - def assignType(tree: untpd.CaseDef, body: Tree)(implicit ctx: Context) = - tree.withType(body.tpe) + def assignType(tree: untpd.CaseDef, pat: Tree, body: Tree)(implicit ctx: Context) = { + val ownType = + if (body.isType) { + val params = new TreeAccumulator[mutable.ListBuffer[TypeSymbol]] { + def apply(ps: mutable.ListBuffer[TypeSymbol], t: Tree)(implicit ctx: Context) = t match { + case t: Bind if t.symbol.isType => foldOver(ps += t.symbol.asType, t) + case _ => foldOver(ps, t) + } + } + HKTypeLambda.fromParams( + params(new mutable.ListBuffer[TypeSymbol](), pat).toList, + defn.FunctionOf(pat.tpe :: Nil, body.tpe)) + } + else body.tpe + tree.withType(ownType) + } - def assignType(tree: untpd.Match, cases: List[CaseDef])(implicit ctx: Context) = + def assignType(tree: untpd.Match, scrutinee: Tree, cases: List[CaseDef])(implicit ctx: Context) = tree.withType(ctx.typeComparer.lub(cases.tpes)) def assignType(tree: untpd.Labeled)(implicit ctx: Context) = @@ -520,6 +534,11 @@ trait TypeAssigner { def assignType(tree: untpd.LambdaTypeTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) = tree.withType(HKTypeLambda.fromParams(tparamDefs.map(_.symbol.asType), body.tpe)) + def assignType(tree: untpd.MatchTypeTree, bound: Tree, scrutinee: Tree, cases: List[CaseDef])(implicit ctx: Context) = { + val boundType = if (bound.isEmpty) defn.AnyType else bound.tpe + tree.withType(MatchType(boundType, scrutinee.tpe, cases.tpes)) + } + def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) = tree.withType(ExprType(result.tpe)) diff --git 
a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index bf9c173ba3ab..1ec4b28eb8c4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -488,7 +488,9 @@ class Typer extends Namer } def typedLiteral(tree: untpd.Literal)(implicit ctx: Context): Tree = track("typedLiteral") { - assignType(tree) + val tree1 = assignType(tree) + if (ctx.mode.is(Mode.Type)) tpd.SingletonTypeTree(tree1) // this ensures that tree is classified as a type tree + else tree1 } def typedNew(tree: untpd.New, pt: Type)(implicit ctx: Context) = track("typedNew") { @@ -990,7 +992,7 @@ class Typer extends Namer def typedMatchFinish(tree: untpd.Match, sel: Tree, selType: Type, pt: Type)(implicit ctx: Context): Tree = { val cases1 = harmonic(harmonize)(typedCases(tree.cases, selType, pt.notApplied)) .asInstanceOf[List[CaseDef]] - assignType(cpy.Match(tree)(sel, cases1), cases1) + assignType(cpy.Match(tree)(sel, cases1), sel, cases1) } /** gadtSyms = "all type parameters of enclosing methods that appear @@ -1027,43 +1029,42 @@ class Typer extends Namer cases.mapconserve(typedCase(_, selType, pt, gadts)) } + /** - strip all instantiated TypeVars from pattern types. + * run/reducable.scala is a test case that shows stripping typevars is necessary. + * - enter all symbols introduced by a Bind in current scope + */ + private def indexPattern(cdef: untpd.CaseDef)(implicit ctx: Context) = new TreeMap { + val stripTypeVars = new TypeMap { + def apply(t: Type) = mapOver(t) + } + override def transform(trt: Tree)(implicit ctx: Context) = + super.transform(trt.withType(stripTypeVars(trt.tpe))) match { + case b: Bind => + val sym = b.symbol + if (sym.name != tpnme.WILDCARD) + if (ctx.scope.lookup(b.name) == NoSymbol) ctx.enter(sym) + else ctx.error(new DuplicateBind(b, cdef), b.pos) + if (!ctx.isAfterTyper) { + val bounds = ctx.gadt.bounds(sym) + if (bounds != null) sym.info = bounds + } + b + case t => t + } + } + /** Type a case. */ def typedCase(tree: untpd.CaseDef, selType: Type, pt: Type, gadtSyms: Set[Symbol])(implicit ctx: Context): CaseDef = track("typedCase") { val originalCtx = ctx - val gadtCtx = gadtContext(gadtSyms) - /** - strip all instantiated TypeVars from pattern types. - * run/reducable.scala is a test case that shows stripping typevars is necessary. 
- * - enter all symbols introduced by a Bind in current scope - */ - val indexPattern = new TreeMap { - val stripTypeVars = new TypeMap { - def apply(t: Type) = mapOver(t) - } - override def transform(trt: Tree)(implicit ctx: Context) = - super.transform(trt.withType(stripTypeVars(trt.tpe))) match { - case b: Bind => - val sym = b.symbol - if (sym.name != tpnme.WILDCARD) - if (ctx.scope.lookup(b.name) == NoSymbol) ctx.enter(sym) - else ctx.error(new DuplicateBind(b, tree), b.pos) - if (!ctx.isAfterTyper) { - val bounds = ctx.gadt.bounds(sym) - if (bounds != null) sym.info = bounds - } - b - case t => t - } - } - def caseRest(pat: Tree)(implicit ctx: Context) = { - val pat1 = indexPattern.transform(pat) + val pat1 = indexPattern(tree).transform(pat) val guard1 = typedExpr(tree.guard, defn.BooleanType) var body1 = ensureNoLocalRefs(typedExpr(tree.body, pt), pt, ctx.scope.toList) if (pt.isValueType) // insert a cast if body does not conform to expected type if we disregard gadt bounds body1 = body1.ensureConforms(pt)(originalCtx) - assignType(cpy.CaseDef(tree)(pat1, guard1, body1), body1) + assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1) } val pat1 = typedPattern(tree.pat, selType)(gadtCtx) @@ -1076,6 +1077,20 @@ class Typer extends Namer assignType(cpy.Labeled(tree)(bind1, expr1)) } + /** Type a case of a type match */ + def typedTypeCase(cdef: untpd.CaseDef, selType: Type, pt: Type)(implicit ctx: Context): CaseDef = { + def caseRest(implicit ctx: Context) = { + val pat1 = checkSimpleKinded(typedType(cdef.pat)(ctx.addMode(Mode.Pattern))) + if (!ctx.isAfterTyper) + constrainPatternType(pat1.tpe, selType)(ctx.addMode(Mode.GADTflexible)) + val pat2 = indexPattern(cdef).transform(pat1) + val body1 = typedType(cdef.body, pt) + assignType(cpy.CaseDef(cdef)(pat2, EmptyTree, body1), pat2, body1) + } + caseRest(ctx.fresh.setFreshGADTBounds.setNewScope) + } + + def typedReturn(tree: untpd.Return)(implicit ctx: Context): Return = track("typedReturn") { def returnProto(owner: Symbol, locals: Scope): Type = if (owner.isConstructor) defn.UnitType @@ -1301,6 +1316,16 @@ class Typer extends Namer assignType(cpy.LambdaTypeTree(tree)(tparams1, body1), tparams1, body1) } + def typedMatchTypeTree(tree: untpd.MatchTypeTree, pt: Type)(implicit ctx: Context): Tree = { + val bound1 = + if (tree.bound.isEmpty && isFullyDefined(pt, ForceDegree.none)) TypeTree(pt) + else typed(tree.bound) + val sel1 = typed(tree.selector) + val pt1 = if (bound1.isEmpty) pt else bound1.tpe + val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1.tpe, pt1)) + assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) + } + def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") { val result1 = typed(tree.result) assignType(cpy.ByNameTypeTree(tree)(result1), result1) @@ -1877,6 +1902,7 @@ class Typer extends Namer case tree: untpd.RefinedTypeTree => typedRefinedTypeTree(tree) case tree: untpd.AppliedTypeTree => typedAppliedTypeTree(tree) case tree: untpd.LambdaTypeTree => typedLambdaTypeTree(tree)(ctx.localContext(tree, NoSymbol).setNewScope) + case tree: untpd.MatchTypeTree => typedMatchTypeTree(tree, pt) case tree: untpd.ByNameTypeTree => typedByNameTypeTree(tree) case tree: untpd.TypeBoundsTree => typedTypeBoundsTree(tree, pt) case tree: untpd.Alternative => typedAlternative(tree, pt) @@ -2626,7 +2652,8 @@ class Typer extends Namer tree match { case _: RefTree | _: Literal if !isVarPattern(tree) && - !(tree.tpe <:< pt) 
(ctx.addMode(Mode.GADTflexible)) => + !(pt <:< tree.tpe) && + !(tree.tpe <:< pt)(ctx.addMode(Mode.GADTflexible)) => val cmp = untpd.Apply( untpd.Select(untpd.TypedSplice(tree), nme.EQ), diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index a5d3a1878d3b..869ed3e87d56 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -95,6 +95,8 @@ class VarianceChecker()(implicit ctx: Context) { this(status, tp.resultType) // params will be checked in their TypeDef or ValDef nodes. case AnnotatedType(_, annot) if annot.symbol == defn.UncheckedVarianceAnnot => status + case tp: MatchType => + apply(status, tp.bound) case tp: ClassInfo => foldOver(status, tp.classParents) case _ => diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 514287a7ad52..063d88497b88 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -263,7 +263,7 @@ class ReplDriver(settings: Array[String], .filter(_.symbol.name.is(SimpleNameKind)) val typeAliases = - info.bounds.hi.typeMembers.filter(_.symbol.info.isInstanceOf[TypeAlias]) + info.bounds.hi.typeMembers.filter(_.symbol.info.isTypeAlias) ( typeAliases.map("// defined alias " + _.symbol.showUser) ++ diff --git a/compiler/test/dotc/pos-from-tasty.blacklist b/compiler/test/dotc/pos-from-tasty.blacklist index a78823e99acd..a48abcd017a0 100644 --- a/compiler/test/dotc/pos-from-tasty.blacklist +++ b/compiler/test/dotc/pos-from-tasty.blacklist @@ -15,3 +15,6 @@ repeatedArgs213.scala # Error printing parent constructors that are blocks default-super.scala + +# Need to implement printing of match types +matchtype.scala \ No newline at end of file diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 005c95099377..778062c31a6c 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -161,6 +161,7 @@ class CompilationTests extends ParallelTesting { compileFile("tests/neg-custom-args/i1754.scala", allowDeepSubtypes) + compileFilesInDir("tests/neg-custom-args/isInstanceOf", allowDeepSubtypes and "-Xfatal-warnings") + compileFile("tests/neg-custom-args/i3627.scala", allowDeepSubtypes) + + compileFile("tests/neg-custom-args/matchtype-loop.scala", allowDeepSubtypes) + compileFile("tests/neg-custom-args/completeFromSource/nested/Test1.scala", defaultOptions.and("-sourcepath", "tests/neg-custom-args", "-scansource")) }.checkExpectedErrors() diff --git a/docs/docs/internals/syntax.md b/docs/docs/internals/syntax.md index 4dd9c3e8ff20..83438de57693 100644 --- a/docs/docs/internals/syntax.md +++ b/docs/docs/internals/syntax.md @@ -119,12 +119,14 @@ ClassQualifier ::= ‘[’ id ‘]’ ```ebnf Type ::= [FunArgMods] FunArgTypes ‘=>’ Type Function(ts, t) | HkTypeParamClause ‘=>’ Type TypeLambda(ps, t) + | MatchType | InfixType FunArgMods ::= { ‘implicit’ | ‘erased’ } FunArgTypes ::= InfixType | ‘(’ [ FunArgType {‘,’ FunArgType } ] ‘)’ | ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ TypedFunParam ::= id ‘:’ Type +MatchType ::= InfixType `match` TypeCaseClauses InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= WithType {[nl] Refinement} RefinedTypeTree(t, ds) WithType ::= AnnotType {‘with’ AnnotType} (deprecated) @@ -227,6 +229,8 @@ CaseClauses ::= 
CaseClause { CaseClause } CaseClause ::= ‘case’ (Pattern [Guard] ‘=>’ Block | INT) CaseDef(pat, guard?, block) // block starts at => ImplicitCaseClauses ::= ImplicitCaseClause { ImplicitCaseClause } ImplicitCaseClause ::= ‘case’ PatVar [‘:’ RefinedType] [Guard] ‘=>’ Block +TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } +TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [nl] Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) Pattern1 ::= PatVar ‘:’ RefinedType Bind(name, Typed(Ident(wildcard), tpe)) @@ -316,8 +320,8 @@ ValDcl ::= ids ‘:’ Type VarDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) DefDcl ::= DefSig [‘:’ Type] DefDef(_, name, tparams, vparamss, tpe, EmptyTree) DefSig ::= id [DefTypeParamClause] DefParamClauses -TypeDcl ::= id [TypTypeParamClause] [‘=’ Type] TypeDefTree(_, name, tparams, tpt) - | id [HkTypeParamClause] TypeBounds TypeDefTree(_, name, tparams, bounds) +TypeDcl ::= id [TypeParamClause] (TypeBounds | ‘=’ Type) TypeDefTree(_, name, tparams, bounds) + | id [TypeParamClause] <: Type = MatchType Def ::= ‘val’ PatDef | ‘var’ VarDef diff --git a/docs/docs/reference/match-types.md b/docs/docs/reference/match-types.md new file mode 100644 index 000000000000..03fa5b4b677e --- /dev/null +++ b/docs/docs/reference/match-types.md @@ -0,0 +1,250 @@ +--- +layout: doc-page +title: "Match Types" +--- + +A match type reduces to one of a number of right hand sides, depending on a scrutinee type. Example: + +```scala +type Elem[X] = X match { + case String => Char + case Array[t] => t + case Iterable[t] => t +} +``` +This defines a type that, depending on the scrutinee type `X`, can reduce to one of its right hand sides. For instance, +```scala + Elem[String] =:= Char + Elem[Array[Int]] =:= Int + Elem[List[Float]] =:= Float + Elem[Nil.type] =:= Nothing +``` +Here `=:=` is understood to mean that left and right hand sides are mutual subtypes of each other. + +In general, a match type is of the form +```scala + S match { P1 => T1 ... Pn => Tn } +``` +where `S`, `T1`, ..., `Tn` are types and `P1`, ..., `Pn` are type patterns. Type variables +in patterns start as usual with a lower case letter. + +Match types can form part of recursive type definitions. Example: +```scala + type LeafElem[X] = X match { + case String => Char + case Array[t] => LeafElem[t] + case Iterable[t] => LeafElem[t] + case t <: AnyVal => t + } +``` +Recursive match type definitions can also be given an upper bound, like this: +```scala + type Concat[+Xs <: Tuple, +Ys <: Tuple] <: Tuple = Xs match { + case Unit => Ys + case x *: xs => x *: Concat[xs, Ys] + } +``` +In this definition, every instance of `Concat[A, B]`, whether reducible or not, is known to be a subtype of `Tuple`. This is necessary to make the recursive invocation `x *: Concat[xs, Ys]` type check, since `*:` demands a `Tuple` as its right operand. + +## Representation of Match Types + +The internal representation of a match type +``` + S match { P1 => T1 ... Pn => Tn } +``` +is `Match(S, C1, ..., Cn) <: B` where each case `Ci` is of the form +``` + [Xs] => P => T +``` +Here, `[Xs]` is a type parameter clause of the variables bound in pattern `P`. If there are no bound type variables in a case, the type parameter clause is omitted and only the function type `P => T` is kept. So each case is either a unary function type or a type lambda over a unary function type. + +`B` is the declared upper bound of the match type, or `Any` if no such bound is given. +We will leave it out in places where it does not matter for the discussion.
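+
+For instance, the right hand side of the `Elem` definition at the start of this page would be represented as
+(schematically, in the notation of this section, not as source syntax):
+```
+  Match(X, String => Char, [t] => Array[t] => t, [t] => Iterable[t] => t) <: Any
+```
+The first case binds no type variables, so it stays a plain function type; the other two bind `t` and are
+therefore type lambdas over function types.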
Scrutinee, bound and pattern types must be first-order types. + +## Match type reduction + +We define match type reduction in terms of an auxiliary relation, `can-reduce`: + +``` + Match(S, C1, ..., Cn) can-reduce i, T' +``` +if `Ci = [Xs] => P => T` and there are minimal instantiations `Is` of the type variables `Xs` such that +``` + S <: [Xs := Is] P + T' = [Xs := Is] T +``` +An instantiation `Is` is _minimal_ for `Xs` if all type variables in `Xs` that appear +covariantly and nonvariantly in `Is` are as small as possible and all type variables in `Xs` that appear contravariantly in `Is` are as large as possible. Here, "small" and "large" are understood with respect to `<:`. + +For simplicity, we have omitted constraint handling so far. The full formulation of subtyping tests describes them as a function from a constraint and a pair of types to +either _success_ and a new constraint or _failure_. In the context of reduction, the subtyping test `S <: [Xs := Is] P` is understood to leave the bounds of all variables +in the input constraint unchanged, i.e. existing variables in the constraint cannot be instantiated by matching the scrutinee against the patterns. + +Using `can-reduce`, we can now define match type reduction proper in the `reduces-to` relation: +``` + Match(S, C1, ..., Cn) reduces-to T +``` +if +``` + Match(S, C1, ..., Cn) can-reduce i, T +``` +and, for `j` in `1..i-1`: `C_j` is disjoint from `C_i`, or else `S` cannot possibly match `C_j`. +See the section on overlapping patterns for an elaboration of "disjoint" and "cannot possibly match". + +## Subtyping Rules for Match Types + +The following rules apply to match types. For simplicity, we omit environments and constraints. + +The first rule is a structural comparison between two match types: +``` + Match(S, C1, ..., Cn) <: Match(T, D1, ..., Dm) +``` +` `if +``` + S <: T, m <= n, Ci <: Di for i in 1..m +``` +I.e. scrutinees and corresponding cases must be subtypes, no case re-ordering is allowed, but the subtype can have more cases than the supertype. + +The second rule states that a match type and its redux are mutual subtypes +``` + Match(S, Cs) <: T + T <: Match(S, Cs) +``` +` `if +``` + Match(S, Cs) reduces-to T +``` + +The third rule states that a match type conforms to its upper bound +``` + (Match(S, Cs) <: B) <: B +``` + +## Variance Laws for Match Types + +Within a match type `Match(S, Cs) <: B`, all occurrences of type variables count as covariant. By the nature of the cases `Ci` this means that occurrences in pattern position are contravariant (since patterns are represented as function type arguments). + +## Typing Rules for Match Expressions + +Typing rules for match expressions are tricky. First, they need some new form of GADT matching for value parameters. +Second, they have to account for the difference between sequential match on the term level and parallel match on the type level. As a running example consider: +```scala + type M[+X] = X match { + case A => 1 + case B => 2 + } +``` +We'd like to be able to typecheck +```scala + def m[X](x: X): M[X] = x match { + case _: A => 1 // type error + case _: B => 2 // type error + } +``` +Unfortunately, this goes nowhere. Let's try the first case. We have: `x.type <: A` and `x.type <: X`. This tells +us nothing useful about `X`, so we cannot reduce `M` in order to show that the right hand side of the case is valid.
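+
+One way to force such a definition through is an explicit cast on each right hand side (a sketch of ours,
+assuming `A` and `B` are classes as in the running example; it simply gives up the checking we are after):
+```scala
+  def m[X](x: X): M[X] = x match {
+    case _: A => 1.asInstanceOf[M[X]]
+    case _: B => 2.asInstanceOf[M[X]]
+  }
+```
+The typing rules discussed here aim to make such casts unnecessary.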
+ +The following variant is more promising: +```scala + def m(x: Any): M[x.type] = x match { + case _: A => 1 + case _: B => 2 + } +``` +To make this work, we'd need a new form of GADT checking: If the scrutinee is a term variable `s`, we can make use of +the fact that `s.type` must conform to the pattern's type and derive a GADT constraint from that. For the first case above, +this would be the constraint `x.type <: A`. The new aspect here is that we need GADT constraints over singleton types where +before we just had constraints over type parameters. + +Assuming this extension, we can then try to typecheck as usual. E.g. to typecheck the first case `case _: A => 1` of the definition of `m` above, GADT matching will produce the constraint `x.type <: A`. Therefore, `M[x.type]` reduces to the singleton type `1`. The right hand side `1` of the case conforms to this type, so the case typechecks. + +Typechecking the second case hits a snag, though. In general, the assumption `x.type <: B` is not enough to prove that +`M[x.type]` reduces to `2`. However, we can reduce `M[x.type]` to `2` if the types `A` and `B` do not overlap. +So correspondence of match terms to match types is feasible only in the case of non-overlapping patterns. + +For simplicity, we have disregarded the `null` value in this discussion. `null` does not cause a fundamental problem but complicates things somewhat because some forms of patterns do not match `null`. + +## Overlapping Patterns + +A complete definition of when two patterns or types overlap still needs to be worked out. Some examples we want to cover are: + + - Two classes overlap only if one is a subtype of the other + - A final class `C` overlaps with a trait `T` only if `C` extends `T` directly or indirectly. + - A class overlaps with a sealed trait `T` only if it overlaps with one of the known subclasses of `T`. + - An abstract type or type parameter `A` overlaps with a type `B` only if `A`'s upper bound overlaps with `B`. + - A union type `A_1 | A_2` overlaps with `B` only if `A_1` overlaps with `B` or `A_2` overlaps with `B`. + - An intersection type `A_1 & A_2` overlaps with `B` only if both `A_1` and `A_2` overlap with `B`. + - If `C[X_1, ..., X_n]` is a case class, then the instance type `C[A_1, ..., A_n]` overlaps with the instance type `C[B_1, ..., B_n]` only if for every index `i` in `1..n`, + if `X_i` is the type of a parameter of the class, then `A_i` overlaps with `B_i`. + + The last rule in particular is needed to detect non-overlaps for cases where the scrutinee and the patterns are tuples. E.g. `(Int, String)` does not overlap `(Int, Int)` since +`String` does not overlap `Int`. + +## Handling Termination + +Match type definitions can be recursive, which raises the question whether and how to check +that reduction terminates. This is currently an open question. We should investigate whether +there are workable ways to enforce that recursion is primitive. + +Note that, since reduction is linked to subtyping, we already have a cycle detection mechanism in place. +So the following will already give a reasonable error message: +```scala + type L[X] = X match { + case Int => L[X] + } + def g[X]: L[X] = ??? +``` + +``` + | val x: Int = g[Int] + | ^^^^^^ + | found: Test.L[Int] + | required: Int +``` + +The subtype cycle test can be circumvented by producing larger types in each recursive invocation, as in the following definitions: +```scala + type LL[X] = X match { + case Int => LL[LL[X]] + } + def gg[X]: LL[X] = ???
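+
+  // Calling gg, as in the commented line below, is what produces the error shown after this
+  // snippet: the subtype check LL[Int] <:< Int recurses without ever getting smaller.
+  // val xx: Int = gg[Int]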
+``` +In this case subtyping enters into an infinite recursion. This is not as bad as it looks, however, because +`dotc` turns selected stack overflows into type errors. If there is a stack overflow during subtyping, +the exception will be caught and turned into a compile-time error that indicates +a trace of the subtype tests that caused the overflow without showing a full stacktrace. +Concretely: +``` + | val xx: Int = gg[Int] + | ^ + |Recursion limit exceeded. + |Maybe there is an illegal cyclic reference? + |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. + |A recurring operation is (inner to outer): + | + | subtype Test.LL[Int] <:< Int + | subtype Test.LL[Int] <:< Int + | ... + | subtype Test.LL[Int] <:< Int +``` +(The actual error message shows some additional lines in the stacktrace). + +## Related Work + +Match types have similarities with [closed type families](https://wiki.haskell.org/GHC/Type_families) in Haskell. Some differences are: + + - Subtyping instead of type equalities. + - Match type reduction does not tighten the underlying constraint, whereas type family reduction does unify. This difference in approach mirrors the difference between local type inference in Scala and global type inference in Haskell. + - No a-priori requirement that cases are non-overlapping. Uses parallel reduction + instead of always choosing a unique branch. + +Match types are also similar to TypeScript's [conditional types](https://github.com/Microsoft/TypeScript/pull/21316). The main differences here are: + + - Conditional types only reduce if scrutinee and pattern are ground, whereas + match types also work for type parameters and abstract types. + - Match types can bind variables in type patterns. + - Match types support direct recursion. + +Conditional types in TypeScript distribute through union types. We should evaluate whether match types should support this as well.
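+
+To make the last point concrete (an illustration of ours, not part of this proposal): distribution through
+union types would mean that, e.g.,
+```scala
+  Elem[String | Array[Int]]  =:=  Char | Int
+```
+whereas under the reduction rules given above this match type does not reduce at all, since the scrutinee
+`String | Array[Int]` is a subtype of none of the patterns `String`, `Array[t]` and `Iterable[t]`.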
+ + diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 1da6f5358340..396bd336ed34 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -25,6 +25,8 @@ sidebar: url: docs/reference/union-types.html - title: Type lambdas url: docs/reference/type-lambdas.html + - title: Match types + url: docs/reference/match-types.html - title: Implicit Function Types url: docs/reference/implicit-function-types.html - title: Dependent Function Types diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala index f058ec07d43a..a6f0a5196055 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -4,91 +4,144 @@ import typelevel._ sealed trait Tuple extends Any { import Tuple._ - rewrite def toArray: Array[Object] = rewrite _size(this) match { - case 0 => + + rewrite def toArray: Array[Object] = rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(0) => $emptyArray - case 1 => + case Some(1) => val t = asInstanceOf[Tuple1[Object]] Array(t._1) - case 2 => + case Some(2) => val t = asInstanceOf[Tuple2[Object, Object]] Array(t._1, t._2) - case 3 => + case Some(3) => val t = asInstanceOf[Tuple3[Object, Object, Object]] Array(t._1, t._2, t._3) - case 4 => + case Some(4) => val t = asInstanceOf[Tuple4[Object, Object, Object, Object]] Array(t._1, t._2, t._3, t._4) - case n if n <= $MaxSpecialized => + case Some(n) if n <= $MaxSpecialized => $toArray(this, n) - case n => + case Some(n) => asInstanceOf[TupleXXL].elems + case None => + dynamicToArray(this) } - rewrite def *: [H] (x: H): Tuple = { - erased val resTpe = Typed(_pair(x, this)) - rewrite _size(this) match { - case 0 => - Tuple1(x).asInstanceOf[resTpe.Type] - case 1 => - Tuple2(x, asInstanceOf[Tuple1[_]]._1).asInstanceOf[resTpe.Type] - case 2 => + rewrite def *: [H] (x: H): H *: this.type = { + type Result = H *: this.type + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(0) => + Tuple1(x).asInstanceOf[Result] + case Some(1) => + Tuple2(x, asInstanceOf[Tuple1[_]]._1).asInstanceOf[Result] + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] - Tuple3(x, t._1, t._2).asInstanceOf[resTpe.Type] - case 3 => + Tuple3(x, t._1, t._2).asInstanceOf[Result] + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] - Tuple4(x, t._1, t._2, t._3).asInstanceOf[resTpe.Type] - case 4 => + Tuple4(x, t._1, t._2, t._3).asInstanceOf[Result] + case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] - Tuple5(x, t._1, t._2, t._3, t._4).asInstanceOf[resTpe.Type] - case n => - fromArray[resTpe.Type]($consArray(x, toArray)) + Tuple5(x, t._1, t._2, t._3, t._4).asInstanceOf[Result] + case Some(n) => + fromArray[Result]($consArray(x, toArray)) + case _ => + dynamic_*:[this.type, H](this, x) } } - rewrite def ++(that: Tuple): Tuple = { - erased val resTpe = Typed(_concat(this, that)) - rewrite _size(this) match { - case 0 => - that - case 1 => - if (_size(that) == 0) this - else (asInstanceOf[Tuple1[_]]._1 *: that).asInstanceOf[resTpe.Type] - case 2 => + rewrite def ++(that: Tuple): Concat[this.type, that.type] = { + type Result = Concat[this.type, that.type] + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(0) => + that.asInstanceOf[Result] + case Some(1) => + if (constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] + else (asInstanceOf[Tuple1[_]]._1 *: that).asInstanceOf[Result] + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] - rewrite _size(that) match { - case 0 => this + rewrite constValue[BoundedSize[that.type]] match { + case 0 => 
this.asInstanceOf[Result] case 1 => val u = that.asInstanceOf[Tuple1[_]] - Tuple3(t._1, t._2, u._1).asInstanceOf[resTpe.Type] + Tuple3(t._1, t._2, u._1).asInstanceOf[Result] case 2 => val u = that.asInstanceOf[Tuple2[_, _]] - Tuple4(t._1, t._2, u._1, u._2).asInstanceOf[resTpe.Type] + Tuple4(t._1, t._2, u._1, u._2).asInstanceOf[Result] case _ => - genericConcat[resTpe.Type](this, that) + genericConcat[Result](this, that).asInstanceOf[Result] } - case 3 => + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] - rewrite _size(that) match { - case 0 => this + rewrite constValue[BoundedSize[that.type]] match { + case 0 => this.asInstanceOf[Result] case 1 => val u = that.asInstanceOf[Tuple1[_]] - Tuple4(t._1, t._2, t._3, u._1).asInstanceOf[resTpe.Type] + Tuple4(t._1, t._2, t._3, u._1).asInstanceOf[Result] case _ => - genericConcat[resTpe.Type](this, that) + genericConcat[Result](this, that).asInstanceOf[Result] } - case _ => - if (_size(that) == 0) this - else genericConcat[resTpe.Type](this, that) + case Some(_) => + if (constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] + else genericConcat[Result](this, that).asInstanceOf[Result] + case None => + dynamic_++[this.type, that.type](this, that) } } rewrite def genericConcat[T <: Tuple](xs: Tuple, ys: Tuple): Tuple = fromArray[T](xs.toArray ++ ys.toArray) + + rewrite def size: Size[this.type] = { + type Result = Size[this.type] + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(n) => n.asInstanceOf[Result] + case _ => dynamicSize(this).asInstanceOf[Result] + } + } } object Tuple { transparent val $MaxSpecialized = 22 + transparent private val XXL = $MaxSpecialized + 1 + + type Head[+X <: NonEmptyTuple] = X match { + case x *: _ => x + } + + type Tail[+X <: NonEmptyTuple] <: Tuple = X match { + case _ *: xs => xs + } + + type Concat[+X <: Tuple, +Y <: Tuple] <: Tuple = X match { + case Unit => Y + case x1 *: xs1 => x1 *: Concat[xs1, Y] + } + + type Elem[+X <: Tuple, +N] = X match { + case x *: xs => + N match { + case 0 => x + case S[n1] => Elem[xs, n1] + } + } + + type Size[+X] <: Int = X match { + case Unit => 0 + case x *: xs => S[Size[xs]] + } + + private[scala] type BoundedSizeRecur[X, L <: Int] <: Int = X match { + case Unit => 0 + case x *: xs => + L match { + case 0 => 0 + case S[n] => S[BoundedSizeRecur[xs, n]] + } + } + + private[scala] type BoundedSize[X] = BoundedSizeRecur[X, 23] val $emptyArray = Array[Object]() @@ -110,35 +163,8 @@ object Tuple { elems1 } - private[scala] rewrite def _pair[H, T <: Tuple] (x: H, xs: T): Tuple = - erasedValue[H *: T] - - private[scala] rewrite def _size(xs: Tuple): Int = - rewrite xs match { - case _: Unit => 0 - case _: (_ *: xs1) => _size(erasedValue[xs1]) + 1 - } - - private[scala] rewrite def _head(xs: Tuple): Any = rewrite xs match { - case _: (x *: _) => erasedValue[x] - } - - private[scala] rewrite def _tail(xs: Tuple): Tuple = rewrite xs match { - case _: (_ *: xs1) => erasedValue[xs1] - } - - private[scala] rewrite def _index(xs: Tuple, n: Int): Any = rewrite xs match { - case _: (x *: _) if n == 0 => erasedValue[x] - case _: (_ *: xs1) if n > 0 => _index(erasedValue[xs1], n - 1) - } - - private[scala] rewrite def _concat(xs: Tuple, ys: Tuple): Tuple = rewrite xs match { - case _: Unit => ys - case _: (x1 *: xs1) => _pair(erasedValue[x1], _concat(erasedValue[xs1], ys)) - } - rewrite def fromArray[T <: Tuple](xs: Array[Object]): T = - rewrite _size(erasedValue[T]) match { + rewrite constValue[BoundedSize[T]] match { case 0 => ().asInstanceOf[T] case 1 => 
Tuple1(xs(0)).asInstanceOf[T] case 2 => Tuple2(xs(0), xs(1)).asInstanceOf[T] @@ -164,95 +190,246 @@ object Tuple { case 22 => Tuple22(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20), xs(21)).asInstanceOf[T] case _ => TupleXXL(xs).asInstanceOf[T] } + + def dynamicFromArray[T <: Tuple](xs: Array[Object]): T = xs.length match { + case 0 => ().asInstanceOf[T] + case 1 => Tuple1(xs(0)).asInstanceOf[T] + case 2 => Tuple2(xs(0), xs(1)).asInstanceOf[T] + case 3 => Tuple3(xs(0), xs(1), xs(2)).asInstanceOf[T] + case 4 => Tuple4(xs(0), xs(1), xs(2), xs(3)).asInstanceOf[T] + case 5 => Tuple5(xs(0), xs(1), xs(2), xs(3), xs(4)).asInstanceOf[T] + case 6 => Tuple6(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5)).asInstanceOf[T] + case 7 => Tuple7(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6)).asInstanceOf[T] + case 8 => Tuple8(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7)).asInstanceOf[T] + case 9 => Tuple9(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8)).asInstanceOf[T] + case 10 => Tuple10(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9)).asInstanceOf[T] + case 11 => Tuple11(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10)).asInstanceOf[T] + case 12 => Tuple12(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11)).asInstanceOf[T] + case 13 => Tuple13(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12)).asInstanceOf[T] + case 14 => Tuple14(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13)).asInstanceOf[T] + case 15 => Tuple15(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14)).asInstanceOf[T] + case 16 => Tuple16(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15)).asInstanceOf[T] + case 17 => Tuple17(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16)).asInstanceOf[T] + case 18 => Tuple18(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17)).asInstanceOf[T] + case 19 => Tuple19(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18)).asInstanceOf[T] + case 20 => Tuple20(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19)).asInstanceOf[T] + case 21 => Tuple21(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20)).asInstanceOf[T] + case 22 => Tuple22(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20), xs(21)).asInstanceOf[T] + case _ => TupleXXL(xs).asInstanceOf[T] + } + + def dynamicToArray(self: Tuple): Array[Object] = (self: Any) match { + case self: Unit => + $emptyArray + case self: Tuple1[_] => + val t = self.asInstanceOf[Tuple1[Object]] + Array(t._1) + case self: Tuple2[_, _] => + val t = self.asInstanceOf[Tuple2[Object, Object]] + Array(t._1, t._2) + case self: Tuple3[_, _, _] => + val t = self.asInstanceOf[Tuple3[Object, Object, Object]] + 
Array(t._1, t._2, t._3) + case self: Tuple4[_, _, _, _] => + val t = self.asInstanceOf[Tuple4[Object, Object, Object, Object]] + Array(t._1, t._2, t._3, t._4) + case self: TupleXXL => + asInstanceOf[TupleXXL].elems + case self: Product => + val arr = new Array[Object](self.productArity) + for (i <- 0 until arr.length) arr(i) = self.productElement(i).asInstanceOf[Object] + arr + } + + def dynamic_*: [This <: Tuple, H] (self: Tuple, x: H): H *: This = { + type Result = H *: This + (self: Any) match { + case Unit => + Tuple1(x).asInstanceOf[Result] + case self: Tuple1[_] => + Tuple2(x, self._1).asInstanceOf[Result] + case self: Tuple2[_, _] => + Tuple3(x, self._1, self._2).asInstanceOf[Result] + case self: Tuple3[_, _, _] => + Tuple4(x, self._1, self._2, self._3).asInstanceOf[Result] + case self: Tuple4[_, _, _, _] => + Tuple5(x, self._1, self._2, self._3, self._4).asInstanceOf[Result] + case _ => + dynamicFromArray[Result]($consArray(x, dynamicToArray(self))) + } + } + + def dynamic_++[This <: Tuple, That <: Tuple](self: This, that: That): Concat[This, That] = { + type Result = Concat[This, That] + (this: Any) match { + case self: Unit => return self.asInstanceOf[Result] + case _ => + } + (that: Any) match { + case that: Unit => return self.asInstanceOf[Result] + case _ => + } + dynamicFromArray[Result](dynamicToArray(self) ++ dynamicToArray(that)) + } + + def dynamicSize[This <: Tuple](self: This) = (self: Any) match { + case self: Unit => 0 + case self: TupleXXL => self.elems.length + case self: Product => self.productArity + } } -@showAsInfix -sealed class *:[+H, +T <: Tuple] extends Tuple { +abstract sealed class NonEmptyTuple extends Tuple { import Tuple._ + import NonEmptyTuple._ - rewrite def head: Any = { - erased val resTpe = Typed(_head(this)) - val resVal = rewrite _size(this) match { - case 1 => + rewrite def head: Head[this.type] = { + type Result = Head[this.type] + val resVal = rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(1) => val t = asInstanceOf[Tuple1[_]] t._1 - case 2 => + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] t._1 - case 3 => + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] t._1 - case 4 => + case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] t._1 - case n if n > 4 && n <= $MaxSpecialized => + case Some(n) if n > 4 && n <= $MaxSpecialized => asInstanceOf[Product].productElement(0) - case n if n > $MaxSpecialized => + case Some(n) if n > $MaxSpecialized => val t = asInstanceOf[TupleXXL] t.elems(0) + case None => + dynamicHead[this.type](this) } - resVal.asInstanceOf[resTpe.Type] + resVal.asInstanceOf[Result] } - rewrite def tail: Tuple = { - erased val resTpe = Typed(_tail(this)) - rewrite _size(this) match { - case 1 => - () - case 2 => + rewrite def tail: Tail[this.type] = { + type Result = Tail[this.type] + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(1) => + ().asInstanceOf[Result] + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] - Tuple1(t._2).asInstanceOf[resTpe.Type] - case 3 => + Tuple1(t._2).asInstanceOf[Result] + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] - Tuple2(t._2, t._3).asInstanceOf[resTpe.Type] - case 4 => + Tuple2(t._2, t._3).asInstanceOf[Result] + case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] - Tuple3(t._2, t._3, t._4).asInstanceOf[resTpe.Type] - case 5 => + Tuple3(t._2, t._3, t._4).asInstanceOf[Result] + case Some(5) => val t = asInstanceOf[Tuple5[_, _, _, _, _]] - Tuple4(t._2, t._3, t._4, t._5).asInstanceOf[resTpe.Type] - case n if n > 5 => - 
fromArray[resTpe.Type](toArray.tail) + Tuple4(t._2, t._3, t._4, t._5).asInstanceOf[Result] + case Some(n) if n > 5 => + fromArray[Result](toArray.tail) + case None => + dynamicTail[this.type](this) } } - rewrite def apply(n: Int): Any = { - erased val resTpe = Typed(_index(this, n)) - rewrite _size(this) match { - case 1 => + rewrite def fallbackApply(n: Int) = + rewrite constValueOpt[n.type] match { + case Some(n: Int) => error("index out of bounds", n) + case None => dynamicApply[this.type](this, n) + } + + rewrite def apply(n: Int): Elem[this.type, n.type] = { + type Result = Elem[this.type, n.type] + rewrite constValueOpt[Size[this.type]] match { + case Some(1) => val t = asInstanceOf[Tuple1[_]] - rewrite n match { - case 0 => t._1.asInstanceOf[resTpe.Type] + rewrite constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] } - case 2 => + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] - rewrite n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] + rewrite constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] } - case 3 => + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] - rewrite n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] - case 2 => t._3.asInstanceOf[resTpe.Type] + rewrite constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case Some(2) => t._3.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] } - case 4 => + case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] - rewrite n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] - case 2 => t._3.asInstanceOf[resTpe.Type] - case 3 => t._4.asInstanceOf[resTpe.Type] + rewrite constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case Some(2) => t._3.asInstanceOf[Result] + case Some(3) => t._4.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case Some(s) if s > 4 && s <= $MaxSpecialized => + val t = asInstanceOf[Product] + rewrite constValueOpt[n.type] match { + case Some(n) if n >= 0 && n < s => t.productElement(n).asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] } - case s if s > 4 && s <= $MaxSpecialized && n >= 0 && n < s => - asInstanceOf[Product].productElement(n).asInstanceOf[resTpe.Type] - case s if s > $MaxSpecialized && n >= 0 && n < s => - asInstanceOf[TupleXXL].elems(n).asInstanceOf[resTpe.Type] + case Some(s) if s > $MaxSpecialized => + val t = asInstanceOf[TupleXXL] + rewrite constValueOpt[n.type] match { + case Some(n) if n >= 0 && n < s => t.elems(n).asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case _ => fallbackApply(n).asInstanceOf[Result] + } + } +} + +object NonEmptyTuple { + import Tuple._ + + def dynamicHead[This <: NonEmptyTuple] (self: This): Head[This] = { + type Result = Head[This] + val res = (self: Any) match { + case self: Tuple1[_] => self._1 + case self: Tuple2[_, _] => self._1 + case self: Tuple3[_, _, _] => self._1 + case self: Tuple4[_, _, _, _] => self._1 + case self: TupleXXL => self.elems(0) + case self: Product => self.productElement(0) } + res.asInstanceOf[Result] + } + + def dynamicTail[This <: NonEmptyTuple] (self: This): Tail[This] = { + 
+    type Result = Tail[This]
+    val res = (self: Any) match {
+      case self: Tuple1[_] => ()
+      case self: Tuple2[_, _] => Tuple1(self._2)
+      case self: Tuple3[_, _, _] => Tuple2(self._2, self._3)
+      case self: Tuple4[_, _, _, _] => Tuple3(self._2, self._3, self._4)
+      case _ => dynamicFromArray[Result](self.toArray.tail)
+    }
+    res.asInstanceOf[Result]
+  }
+
+  def dynamicApply[This <: NonEmptyTuple] (self: This, n: Int): Elem[This, n.type] = {
+    type Result = Elem[This, n.type]
+    val res = (self: Any) match {
+      case self: TupleXXL => self.elems(n)
+      case self: Product => self.productElement(n)
+    }
+    res.asInstanceOf[Result]
+  }
 }
+@showAsInfix
+sealed class *:[+H, +T <: Tuple] extends NonEmptyTuple
+
 object *: {
   rewrite def unapply[H, T <: Tuple](x: H *: T) = (x.head, x.tail)
 }
diff --git a/library/src-scala3/scala/typelevel/package.scala b/library/src-scala3/scala/typelevel/package.scala
index 17220ab3187c..adfc59329b8a 100644
--- a/library/src-scala3/scala/typelevel/package.scala
+++ b/library/src-scala3/scala/typelevel/package.scala
@@ -1,6 +1,16 @@
 package scala
 package object typelevel {
+  erased def erasedValue[T]: T = ???
+  case class Typed[T](val value: T) { type Type = T }
+
+  rewrite def error(transparent msg: String, objs: Any*): Nothing = ???
+
+  rewrite def constValueOpt[T]: Option[T] = ???
+
+  rewrite def constValue[T]: T = ???
+
+  type S[X <: Int] <: Int
 }
\ No newline at end of file
diff --git a/library/src/scala/TupleXXL.scala b/library/src/scala/TupleXXL.scala
index 48a1410574f0..fe46225a206b 100644
--- a/library/src/scala/TupleXXL.scala
+++ b/library/src/scala/TupleXXL.scala
@@ -2,6 +2,7 @@ package scala
 import java.util.Arrays.{deepEquals, deepHashCode}
 final class TupleXXL private (es: Array[Object]) {
+  assert(es.length > 22)
   override def toString = elems.mkString("(", ",", ")")
   override def hashCode = getClass.hashCode * 41 + deepHashCode(elems)
   override def equals(that: Any) = that match {
diff --git a/tests/neg-custom-args/matchtype-loop.scala b/tests/neg-custom-args/matchtype-loop.scala
new file mode 100644
index 000000000000..316897b808a5
--- /dev/null
+++ b/tests/neg-custom-args/matchtype-loop.scala
@@ -0,0 +1,17 @@
+object Test {
+  type L[X] = X match {
+    case Int => L[X]
+  }
+  type LL[X] = X match {
+    case Int => LL[LL[X]]
+  }
+  def a: L[Boolean] = ???
+  def b: L[Int] = ???
+  def g[X]: L[X] = ???
+  val x: Int = g[Int] // error: found: L[Int], required: Int
+
+  def aa: LL[Boolean] = ???
+  def bb: LL[Int] = ??? // error: recursion limit exceeded with reduce type LazyRef(Test.LL[Int]) match ...
+  def gg[X]: LL[X] = ???
+  val xx: Int = gg[Int] // error: recursion limit exceeded with reduce type LazyRef(Test.LL[Int]) match ...
+}
diff --git a/tests/neg/NoneMatch.scala b/tests/neg/NoneMatch.scala
new file mode 100644
index 000000000000..7a17dd74103c
--- /dev/null
+++ b/tests/neg/NoneMatch.scala
@@ -0,0 +1,7 @@
+object Test {
+
+  None match {
+    case Some(0) => ???
// error: unreachable + } + +} diff --git a/tests/neg/ensureReported.scala b/tests/neg/ensureReported.scala index b40f8837511c..38e1e1307fd4 100644 --- a/tests/neg/ensureReported.scala +++ b/tests/neg/ensureReported.scala @@ -1,6 +1,6 @@ object AnonymousF { val f = { - case l @ List(1) => // error: missing parameter type // error: Ambiguous overload + case l @ List(1) => // error: missing parameter type Some(l) } } diff --git a/tests/neg/tuple-nonconstant3.scala b/tests/neg/tuple-nonconstant3.scala new file mode 100644 index 000000000000..c6f5da1f024d --- /dev/null +++ b/tests/neg/tuple-nonconstant3.scala @@ -0,0 +1,3 @@ +object Test { + def elem[Xs <: NonEmptyTuple](xs: Xs) = xs(1) // error: selection (...) cannot be applied to tuple of unknown size +} \ No newline at end of file diff --git a/tests/neg/tuple-oob1.scala b/tests/neg/tuple-oob1.scala new file mode 100644 index 000000000000..aacde0aeed2b --- /dev/null +++ b/tests/neg/tuple-oob1.scala @@ -0,0 +1,3 @@ +object Test { + def elem(xs: (Int, String)) = xs(2) // error: index out of bounds: 2 +} \ No newline at end of file diff --git a/tests/pending/pos/matchterm.scala b/tests/pending/pos/matchterm.scala new file mode 100644 index 000000000000..cb84e3efedfb --- /dev/null +++ b/tests/pending/pos/matchterm.scala @@ -0,0 +1,12 @@ +case class A() +case class B() +object Test { + type T[X] = X match { + case A => Int + case B => String + } + def f(x: Any): T[x.type] = x match { + case A() => 1 + case B() => "" + } +} \ No newline at end of file diff --git a/tests/pos-deep-subtype/tuples2.scala b/tests/pos-deep-subtype/tuples2.scala new file mode 100644 index 000000000000..6d4e78edf1f5 --- /dev/null +++ b/tests/pos-deep-subtype/tuples2.scala @@ -0,0 +1,40 @@ +object Test extends App { + val xs0 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) + assert(xs0(15) == 16) + // 2.787s + + val xs1 = xs0 ++ xs0 + assert(xs1(31) == 16) + // 3.354s + + val xs2 = xs1 ++ xs1 + assert(xs2(63) == 16) + // 3.523s + + val xs3 = xs2 ++ xs2 + assert(xs3(127) == 16) + // 3.722s + +/* The following operations exhaust the standard stack, but succeed with -Xs10m: + + val xs4 = xs3 ++ xs3 + assert(xs4(255) == 16) + // 4.023s + + val xs5a = xs3 ++ xs4 + assert(xs5a(383) == 16) + // 4.243s + + val xs5 = xs4 ++ xs4 + assert(xs5(511) == 16) + // 4.416s + + val xs6 = xs5 ++ xs5 + assert(xs6(1023) == 16) + // 4.900s + + val xs7 = xs6 ++ xs6 + assert(xs7(2047) == 16) + // 5.538s +*/ +} diff --git a/tests/pos-deep-subtype/tuples23.scala b/tests/pos-deep-subtype/tuples23.scala new file mode 100644 index 000000000000..e442842e834b --- /dev/null +++ b/tests/pos-deep-subtype/tuples23.scala @@ -0,0 +1,27 @@ +object Test extends App { + val x23 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) + type T23 = (Int, Int, Int, Int, Int, + Int, Int, Int, Int, Int, + Int, Int, Int, Int, Int, + Int, Int, Int, Int, Int, + Int, Int, Int) + val x23c: T23 = x23 + println(x23) + assert(x23(0) == 1) + assert(x23(22) == 23) + + x23 match { + case (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => + println(x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + x12 + x13 + x14 + x15 + x16 + x17 + x18 + x19 + x20 + x21 + x22 + x23) + } + rewrite def decompose3 = rewrite x23 match { case x *: y *: xs => (x, y, xs) } + + { val (x, y, xs) = decompose3 + val xc: Int = x + val yc: Int = y + val xsc: Unit = xs + println(s"$x23 -> $x, $y, $xs") + } + + val x23s: 23 = x23.size +} \ No newline at 
end of file diff --git a/tests/pos/matchtype.scala b/tests/pos/matchtype.scala new file mode 100644 index 000000000000..d3aa07a2561f --- /dev/null +++ b/tests/pos/matchtype.scala @@ -0,0 +1,73 @@ +import typelevel._ +object Test { + type T[X] = X match { + case String => Int + case Int => String + } + + type Len[X] <: Int = X match { + case Unit => 0 + case x *: xs => S[Len[xs]] + } + + type T2 = Len[(1, 2, 3)] + erased val x: 3 = erasedValue[T2] + + type T1 = S[0] + + erased val x2: 1 = erasedValue[T1] + + rewrite def checkSub[T1, T2] = + rewrite typelevel.erasedValue[T1] match { + case _: T2 => // OK + case _ => error("not a subtype T1/T2") + } + + rewrite def checkSame[T1, T2] = { + checkSub[T1, T2] + checkSub[T2, T1] + } + + checkSame[T2, S[S[S[0]]]] + + type Head[X <: Tuple] = X match { + case (x1, _) => x1 + } + + checkSame[Head[(Int, String)], Int] + + type Concat[X <: Tuple, Y <: Tuple] <: Tuple = X match { + case Unit => Y + case x1 *: xs1 => x1 *: Concat[xs1, Y] + } + + type Elem[X <: Tuple, N] = X match { + case x *: xs => + N match { + case 0 => x + case S[n1] => Elem[xs, n1] + } + } + + type Elem1[X <: Tuple, N] = (X, N) match { + case (x *: xs, 0) => x + case (x *: xs, S[n1]) => Elem1[xs, n1] + } + + erased val x3: String = erasedValue[Elem[(String, Int), 0]] + erased val x4: Int = erasedValue[Elem1[(String, Int), 1]] + + checkSame[Elem[(String, Int, Boolean), 0], String] + checkSame[Elem1[(String, Int, Boolean), 1], Int] + checkSame[Elem[(String, Int, Boolean), 2], Boolean] + + checkSame[Concat[Unit, (String, Int)], (String, Int)] + checkSame[Concat[(Boolean, Boolean), (String, Int)], Boolean *: Boolean *: (String, Int)] + checkSub[(Boolean, Boolean, String, Int), Concat[(Boolean, Boolean), String *: Int *: Unit]] + + rewrite def index[Xs <: NonEmptyTuple](xs: Xs, n: Int): Elem[Xs, n.type] = xs(n).asInstanceOf + + val test = (1, "hi", true, 2.0) + index(test, 0): Int + index(test, 1): String +} \ No newline at end of file diff --git a/tests/run/tuples1.check b/tests/run/tuples1.check index db564031fb7e..5fea28566dba 100644 --- a/tests/run/tuples1.check +++ b/tests/run/tuples1.check @@ -32,8 +32,5 @@ c2_1 = (A,1,1) c2_2 = (A,1,A,1) c2_3 = (A,1,2,A,1) c3_3 = (2,A,1,2,A,1) -(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) -276 (A,1) -> A, (1) (A,1) -> A, 1, () -(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) -> 1, 2, (3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) diff --git a/tests/run/tuples1.scala b/tests/run/tuples1.scala index 3ba09dd116fa..dd1ce0c2d554 100644 --- a/tests/run/tuples1.scala +++ b/tests/run/tuples1.scala @@ -14,7 +14,7 @@ object Test extends App { val h8 = x8.head; val h8c: String = h8; println(s"h8 = $h8") val t1 = x1.tail; val t1c: Unit = t1; println(s"t1 = $t1") val t2 = x2.tail; val t2c: Int *: Unit = t2; println(s"t2 = $t2") - val t7 = x7.tail; val t7c: String *: Int *: Unit = t7.tail.tail.tail.tail; println(s"t7 = $t7") + val t7 = x7.tail; val t7c: (String, Int) = t7.tail.tail.tail.tail; println(s"t7 = $t7") val t8 = x8.tail; val t8c: Int = t8(6); println(s"t8 = $t8") val a1_0 = x1(0); val a1_0c: Int = a1_0; println(s"a1_0 = $a1_0") val a2_0 = x2(0); val a2_0c: String = a2_0; println(s"a2_0 = $a2_0") @@ -25,33 +25,17 @@ object Test extends App { val c0_0 = x0 ++ x0; val c0_0c: Unit = c0_0; println(s"c0_0 = $c0_0") val c0_1 = x0 ++ x1; val c0_1c: Int *: Unit = c0_1c; println(s"c0_1 = $c0_1") val c1_0 = x1 ++ x0; val c1_0c: Int *: Unit = c1_0c; println(s"c1_0 = $c1_0") - val c0_4 = x0 ++ x4; val c0_4c: String *: Int 
*: String *: Int *: Unit = c0_4; println(s"c0_4 = $c0_4") - val c4_0 = x4 ++ x0; val c4_0c: String *: Int *: String *: Int *: Unit = c4_0; println(s"c4_0 = $c4_0") - val c1_1 = x1 ++ x1; val c1_1c: Int *: Int *: Unit = c1_1; println(s"c1_1 = $c1_1") - val c1_8 = x1 ++ x8; val c1_8c: Int *: String *: Int *: String *: Int *: String *: Int *: String *: Int *: Unit = c1_8; println(s"c1_8 = $c1_8") - val c2_1 = x2 ++ x1; val c2_1c: String *: Int *: Int *: Unit = c2_1; println(s"c2_1 = $c2_1") - val c2_2 = x2 ++ x2; val c2_2c: String *: Int *: String *: Int *: Unit = c2_2; println(s"c2_2 = $c2_2") - val c2_3 = x2 ++ x3; val c2_3c: String *: Int *: Int *: String *: Int *: Unit = c2_3; println(s"c2_3 = $c2_3") - val c3_3 = x3 ++ x3; val c3_3c: Int *: String *: Int *: Int *: String *: Int *: Unit = c3_3; println(s"c3_3 = $c3_3") + val c0_4 = x0 ++ x4; val c0_4c: (String, Int, String, Int) = c0_4; println(s"c0_4 = $c0_4") + val c4_0 = x4 ++ x0; val c4_0c: (String, Int, String, Int) = c4_0; println(s"c4_0 = $c4_0") + val c1_1 = x1 ++ x1; val c1_1c: (Int, Int) = c1_1; println(s"c1_1 = $c1_1") + val c1_8 = x1 ++ x8; val c1_8c: (Int, String, Int, String, Int, String, Int, String, Int) = c1_8; println(s"c1_8 = $c1_8") + val c2_1 = x2 ++ x1; val c2_1c: (String, Int, Int) = c2_1; println(s"c2_1 = $c2_1") + val c2_2 = x2 ++ x2; val c2_2c: (String, Int, String, Int) = c2_2; println(s"c2_2 = $c2_2") + val c2_3 = x2 ++ x3; val c2_3c: (String, Int, Int, String, Int) = c2_3; println(s"c2_3 = $c2_3") + val c3_3 = x3 ++ x3; val c3_3c: (Int, String, Int, Int, String, Int) = c3_3; println(s"c3_3 = $c3_3") - val x23 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) - type T23 = (Int, Int, Int, Int, Int, - Int, Int, Int, Int, Int, - Int, Int, Int, Int, Int, - Int, Int, Int, Int, Int, - Int, Int, Int) - val x23c: T23 = x23 - println(x23) - assert(x23(0) == 1) - assert(x23(22) == 23) - - x23 match { - case (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => - println(x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + x12 + x13 + x14 + x15 + x16 + x17 + x18 + x19 + x20 + x21 + x22 + x23) - } rewrite def decompose1 = rewrite x2 match { case x *: xs => (x, xs) } rewrite def decompose2 = rewrite x2 match { case x *: y *: xs => (x, y, xs) } - rewrite def decompose3 = rewrite x23 match { case x *: y *: xs => (x, y, xs) } { val (x, xs) = decompose1 val xc: String = x @@ -66,10 +50,45 @@ object Test extends App { println(s"$x2 -> $x, $y, $xs") } - { val (x, y, xs) = decompose3 - val xc: Int = x - val yc: Int = y - val xsc: Unit = xs - println(s"$x23 -> $x, $y, $xs") - } + val x3s: 3 = x3.size + val us: 0 = ().size + +// dynamic operations + + def head1(x: NonEmptyTuple): Tuple.Head[x.type] = x.head + def head2[X <: NonEmptyTuple](x: X): Tuple.Head[X] = x.head + + val hd1: Int = head1(x3) + val hd2: Int = head2(x3) + + def tail1(x: NonEmptyTuple): Tuple.Tail[x.type] = x.tail + def tail2[X <: NonEmptyTuple](x: X): Tuple.Tail[X] = x.tail + + val tl1: (String, Int) = tail1(x3) + val tl2: (String, Int) = tail2(x3) + + def elem[X <: NonEmptyTuple](x: X, n: Int): Tuple.Elem[X, n.type] = x(n) + val elem1: String = x3(1) + + def toArray[X <: Tuple](x: X): Array[Object] = x.toArray + val toArray1 = x3.toArray + + def cons[X, Y <: Tuple](x: X, y: Y): X *: Y = x *: y + val cons1: Boolean *: Int *: (String, Int) = cons(true, x3) + + def concat[X <: Tuple, Y <: Tuple](x: X, y: Y): Tuple.Concat[X, Y] = x ++ y + def concat0(x: Tuple, y: Tuple): 
Tuple.Concat[x.type, y.type] = x ++ y + val conc1: (String, Int) = concat((), tl1) + val conc2: (String, Int) = concat(tl1, ()) + val conc3: (String, Int, String, Int) = concat(tl1, tl1) + val conc4: (String, Int) = concat0((), tl1) + val conc5: (String, Int) = concat0(tl1, ()) + val conc6: (String, Int, String, Int) = concat0(tl1, tl1) + + def size[X <: Tuple](x: X): Tuple.Size[X] = x.size + def size0(x: Tuple): Tuple.Size[x.type] = x.size + val x3s0: 3 = size(x3) + val us0: 0 = size(()) + val x3s1: 3 = size0(x3) + val us1: 0 = size0(()) } \ No newline at end of file
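
A minimal usage sketch of the match-type syntax exercised by the tests above (illustrative only, not part of the patch; the `Flip` type and the `MatchTypeSketch` object are hypothetical names, written in the same style as tests/pos/matchtype.scala and assuming the `typelevel.erasedValue` helper shown in the diff):

  import typelevel._

  object MatchTypeSketch {
    // A match type reduces to a different result type depending on the scrutinee type.
    type Flip[X] = X match {
      case Int => String
      case String => Int
    }

    // Flip[Int] reduces to String and Flip[String] reduces to Int, so both
    // ascriptions are expected to compile without casts (compare the
    // erasedValue ascriptions x3/x4 in tests/pos/matchtype.scala).
    erased val a: String = erasedValue[Flip[Int]]
    erased val b: Int = erasedValue[Flip[String]]
  }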