diff --git a/community-build/community-projects/shapeless-3 b/community-build/community-projects/shapeless-3 index d27c5ba1ae51..90f0c977b536 160000 --- a/community-build/community-projects/shapeless-3 +++ b/community-build/community-projects/shapeless-3 @@ -1 +1 @@ -Subproject commit d27c5ba1ae5111b85df2cfb65a26b9246c52570c +Subproject commit 90f0c977b536c06305496600b8b2014c9e8e3d86 diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 6ed05976a19e..6a988f4e375f 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -10,7 +10,7 @@ import Annotations.Annotation import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, Chars} -import config.Feature.{sourceVersion, migrateTo3, enabled} +import config.{Feature, Config} import config.SourceVersion.* import collection.mutable.ListBuffer import reporting.* @@ -46,6 +46,11 @@ object desugar { */ val UntupledParam: Property.Key[Unit] = Property.StickyKey() + /** An attachment key to indicate that a ValDef is an evidence parameter + * for a context bound. + */ + val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() + /** What static check should be applied to a Match? */ enum MatchCheck { case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom @@ -195,17 +200,6 @@ object desugar { else vdef1 end valDef - def makeImplicitParameters( - tpts: List[Tree], implicitFlag: FlagSet, - mkParamName: Int => TermName, - forPrimaryConstructor: Boolean = false - )(using Context): List[ValDef] = - for (tpt, i) <- tpts.zipWithIndex yield { - val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param - val epname = mkParamName(i) - ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) - } - def mapParamss(paramss: List[ParamClause]) (mapTypeParam: TypeDef => TypeDef) (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = @@ -232,34 +226,66 @@ object desugar { private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) - private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = - val DefDef(_, paramss, tpt, rhs) = meth - val evidenceParamBuf = ListBuffer[ValDef]() + private def desugarContextBounds( + tdef: TypeDef, + evidenceBuf: ListBuffer[ValDef], + flags: FlagSet, + freshName: untpd.Tree => TermName, + allParamss: List[ParamClause])(using Context): TypeDef = - var seenContextBounds: Int = 0 - def desugarContextBounds(rhs: Tree): Tree = rhs match + val evidenceNames = ListBuffer[TermName]() + + def desugarRhs(rhs: Tree): Tree = rhs match case ContextBounds(tbounds, cxbounds) => - val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit - evidenceParamBuf ++= makeImplicitParameters( - cxbounds, iflag, - // Just like with `makeSyntheticParameter` on nameless parameters of - // using clauses, we only need names that are unique among the - // parameters of the method since shadowing does not affect - // implicit resolution in Scala 3. - mkParamName = i => - val index = seenContextBounds + 1 // Start at 1 like FreshNameCreator. 
- val ret = ContextBoundParamName(EmptyTermName, index) - seenContextBounds += 1 - ret, - forPrimaryConstructor = isPrimaryConstructor) + val isMember = flags.isAllOf(DeferredGivenFlags) + for bound <- cxbounds do + val evidenceName = bound match + case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => + ownName + case _ if !isMember && cxbounds.tail.isEmpty + && Feature.enabled(Feature.modularity) && Config.nameSingleContextBounds => + tdef.name.toTermName + case _ => + freshName(bound) + evidenceNames += evidenceName + val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(flags) + evidenceParam.pushAttachment(ContextBoundParam, ()) + evidenceBuf += evidenceParam tbounds case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) + cpy.LambdaTypeTree(rhs)(tparams, desugarRhs(body)) case _ => rhs + + val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + if Feature.enabled(Feature.modularity) + && evidenceNames.nonEmpty + && !evidenceNames.contains(tdef.name.toTermName) + && !allParamss.nestedExists(_.name == tdef.name.toTermName) + then + tdef1.withAddedAnnotation: + WitnessNamesAnnot(evidenceNames.toList).withSpan(tdef.span) + else + tdef1 + end desugarContextBounds + + private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = + val DefDef(_, paramss, tpt, rhs) = meth + val evidenceParamBuf = ListBuffer[ValDef]() + var seenContextBounds: Int = 0 + def freshName(unused: Tree) = + seenContextBounds += 1 // Start at 1 like FreshNameCreator. + ContextBoundParamName(EmptyTermName, seenContextBounds) + // Just like with `makeSyntheticParameter` on nameless parameters of + // using clauses, we only need names that are unique among the + // parameters of the method since shadowing does not affect + // implicit resolution in Scala 3. + val paramssNoContextBounds = + val iflag = if Feature.sourceVersion.isAtLeast(`future`) then Given else Implicit + val flags = if isPrimaryConstructor then iflag | LocalParamAccessor else iflag | Param mapParamss(paramss) { - tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) + tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) }(identity) rhs match @@ -305,9 +331,9 @@ object desugar { def getterParamss(n: Int): List[ParamClause] = mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) + tparam => dropContextBounds(toDefParam(tparam, KeepAnnotations.All)) } { - vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) + vparam => toDefParam(vparam, KeepAnnotations.All, keepDefault = false) } def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match @@ -399,43 +425,85 @@ object desugar { (Nil, tree) /** Add all evidence parameters in `params` as implicit parameters to `meth`. - * If the parameters of `meth` end in an implicit parameter list or using clause, - * evidence parameters are added in front of that list. Otherwise they are added - * as a separate parameter clause. + * The position of the added parameters is determined as follows: + * + * - If there is an existing parameter list that refers to one of the added + * parameters in one of its parameter types, add the new parameters + * in front of the first such parameter list. 
+ * - Otherwise, if the last parameter list consists implicit or using parameters, + * join the new parameters in front of this parameter list, creating one + * parameter list (this is equilavent to Scala 2's scheme). + * - Otherwise, add the new parameter list at the end as a separate parameter clause. */ private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = - params match + if params.isEmpty then return meth + + var boundNames = params.map(_.name).toSet + for mparams <- meth.paramss; mparam <- mparams do + mparam match + case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => + boundNames += tparam.name.toTermName + case _ => + + //println(i"add ev params ${meth.name}, ${boundNames.toList}") + + def references(vdef: ValDef): Boolean = + vdef.tpt.existsSubTree: + case Ident(name: TermName) => boundNames.contains(name) + case _ => false + + def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match + case ValDefs(mparams) :: _ if mparams.exists(references) => + params :: mparamss + case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => + (params ++ mparams) :: Nil + case mparams :: mparamss1 => + mparams :: recur(mparamss1) case Nil => - meth - case evidenceParams => - val paramss1 = meth.paramss.reverse match - case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => - ((evidenceParams ++ vparams) :: rparamss).reverse - case _ => - meth.paramss :+ evidenceParams - cpy.DefDef(meth)(paramss = paramss1) + params :: Nil + + cpy.DefDef(meth)(paramss = recur(meth.paramss)) + end addEvidenceParams /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = meth.paramss.reverse match { case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.name.is(ContextBoundParamName)) + vparams.takeWhile(_.hasAttachment(ContextBoundParam)) case _ => Nil } @sharable private val synthetic = Modifiers(Synthetic) - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { - var mods = tparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) - tparam.withMods(mods & (EmptyFlags | Sealed) | Param) - } - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { - var mods = vparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) + /** Which annotations to keep in derived parameters */ + private enum KeepAnnotations: + case None, All, WitnessOnly + + /** Filter annotations in `mods` according to `keep` */ + private def filterAnnots(mods: Modifiers, keep: KeepAnnotations)(using Context) = keep match + case KeepAnnotations.None => mods.withAnnotations(Nil) + case KeepAnnotations.All => mods + case KeepAnnotations.WitnessOnly => + mods.withAnnotations: + mods.annotations.filter: + case WitnessNamesAnnot(_) => true + case _ => false + + private def toDefParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = + val mods = filterAnnots(tparam.rawMods, keep) + tparam.withMods(mods & EmptyFlags | Param) + + private def toDefParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { + val mods = filterAnnots(vparam.rawMods, keep) val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | 
Param) + // Need to ensure that tree is duplicated since term parameters can be watched + // and cloning a term parameter will copy its watchers to the clone, which means + // we'd get cross-talk between the original parameter and the clone. + ValDef(vparam.name, vparam.tpt, vparam.rhs) + .withSpan(vparam.span) + .withAttachmentsFrom(vparam) + .withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = @@ -448,6 +516,14 @@ object desugar { Apply(fn, params.map(refOfDef)) } + def typeDef(tdef: TypeDef)(using Context): Tree = + val evidenceBuf = new ListBuffer[ValDef] + val result = desugarContextBounds( + tdef, evidenceBuf, + (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, + inventGivenOrExtensionName, Nil) + if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) + /** The expansion of a class definition. See inline comments for what is involved */ def classDef(cdef: TypeDef)(using Context): Tree = { val impl @ Template(constr0, _, self, _) = cdef.rhs: @unchecked @@ -520,7 +596,7 @@ object desugar { // Annotations on class _type_ parameters are set on the derived parameters // but not on the constructor parameters. The reverse is true for // annotations on class _value_ parameters. - val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) + val constrTparams = impliedTparams.map(toDefParam(_, KeepAnnotations.WitnessOnly)) val constrVparamss = if (originalVparamss.isEmpty) { // ensure parameter list is non-empty if (isCaseClass) @@ -531,7 +607,7 @@ object desugar { report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) ListOfNil } - else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) + else originalVparamss.nestedMap(toDefParam(_, KeepAnnotations.All, keepDefault = true)) val derivedTparams = constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) @@ -553,7 +629,7 @@ object desugar { defDef( addEvidenceParams( cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) + evidenceParams(constr1).map(toDefParam(_, KeepAnnotations.None, keepDefault = false))))) case stat => stat } @@ -609,6 +685,11 @@ object desugar { case _ => false } + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + case PostfixOp(_, Ident(tpnme.raw.STAR)) => true + case _ => false + } + def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { val targs = for (tparam <- tparams) yield { val targ = refOfDef(tparam) @@ -625,11 +706,6 @@ object desugar { appliedTypeTree(tycon, targs) } - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { - case PostfixOp(_, Ident(tpnme.raw.STAR)) => true - case _ => false - } - // a reference to the class type bound by `cdef`, with type parameters coming from the constructor val classTypeRef = appliedRef(classTycon) @@ -667,7 +743,7 @@ object desugar { } ensureApplied(nu) - val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags + val copiedAccessFlags = if Feature.migrateTo3 then EmptyFlags else AccessFlags // Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) // def _1: T1 = this.p1 @@ -850,19 +926,17 @@ object desugar { Nil } else { - val defParamss = constrVparamss match { + val 
defParamss = constrVparamss match case Nil :: paramss => paramss // drop leading () that got inserted by class // TODO: drop this once we do not silently insert empty class parameters anymore case paramss => paramss - } val finalFlag = if ctx.settings.YcompileScala2Library.value then EmptyFlags else Final // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. DefDef( - className.toTermName, joinParams(constrTparams, defParamss), - classTypeRef, creatorExpr) - .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) + className.toTermName, joinParams(constrTparams, defParamss), classTypeRef, creatorExpr + ) .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) .withSpan(cdef.span) :: Nil } @@ -890,7 +964,9 @@ object desugar { } if mods.isAllOf(Given | Inline | Transparent) then report.error("inline given instances cannot be trasparent", cdef) - val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + var classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + if vparamAccessors.exists(_.mods.is(Tracked)) then + classMods |= Dependent cpy.TypeDef(cdef: TypeDef)( name = className, rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, @@ -1143,6 +1219,8 @@ object desugar { case tree: TypeDef => tree.name.toString case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" + case ContextBoundTypeTree(tycon, paramName, _) => + s"${apply(x, tycon)}_$paramName" case InfixOp(left, op, right) => if followArgs then s"${op.name}_${extractArgs(List(left, right))}" else op.name.toString @@ -1379,7 +1457,7 @@ object desugar { case tree: TypeDef => if (tree.isClassDef) classDef(tree) else if (ctx.mode.isQuotedPattern) quotedPatternTypeDef(tree) - else tree + else typeDef(tree) case tree: DefDef => if (tree.name.isConstructorName) tree // was already handled by enclosing classDef else defDef(tree) @@ -1600,14 +1678,13 @@ object desugar { .collect: case vd: ValDef => vd - def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = { - val mods = Given - val params = makeImplicitParameters(formals, mods, - mkParamName = i => - if paramNamesOrNil.isEmpty then ContextFunctionParamName.fresh() - else paramNamesOrNil(i)) - FunctionWithMods(params, body, Modifiers(mods), erasedParams) - } + def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = + val paramNames = + if paramNamesOrNil.nonEmpty then paramNamesOrNil + else formals.map(_ => ContextFunctionParamName.fresh()) + val params = for (tpt, pname) <- formals.zip(paramNames) yield + ValDef(pname, tpt, EmptyTree).withFlags(Given | Param) + FunctionWithMods(params, body, Modifiers(Given), erasedParams) private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { val vdef = ValDef(named.name.asTermName, tpt, rhs) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 28d3ef6daaef..21f1a44220cf 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -5,6 +5,8 @@ package ast import core.* import Flags.*, Trees.*, 
Types.*, Contexts.* import Names.*, StdNames.*, NameOps.*, Symbols.* +import Annotations.Annotation +import NameKinds.ContextBoundParamName import typer.ConstFold import reporting.trace @@ -376,6 +378,35 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree.tpe.isInstanceOf[ThisType] } + + /** Extractor for annotation.internal.WitnessNames(name_1, ..., name_n)` + * represented as an untyped or typed tree. + */ + object WitnessNamesAnnot: + def apply(names0: List[TermName])(using Context): untpd.Tree = + untpd.TypedSplice(tpd.New( + defn.WitnessNamesAnnot.typeRef, + tpd.SeqLiteral(names0.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil + )) + + def unapply(tree: Tree)(using Context): Option[List[TermName]] = + def isWitnessNames(tp: Type) = tp match + case tp: TypeRef => + tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot + case _ => + false + unsplice(tree) match + case Apply( + Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), + SeqLiteral(elems, _) :: Nil + ) if isWitnessNames(tpt.tpe) => + Some: + elems.map: + case Literal(Constant(str: String)) => + ContextBoundParamName.unmangle(str.toTermName.asSimpleName) + case _ => + None + end WitnessNamesAnnot } trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 08f3db4981ff..9abb818cb567 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -118,6 +118,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -230,6 +231,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) + case class Tracked()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Tracked) + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } @@ -675,6 +678,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def ContextBoundTypeTree(tree: Tree)(tycon: Tree, paramName: TypeName, ownName: TermName)(using Context): Tree = tree match + case tree: ContextBoundTypeTree if (tycon eq tree.tycon) && paramName == tree.paramName && ownName == tree.ownName => tree + case _ => finalize(tree, 
untpd.ContextBoundTypeTree(tycon, paramName, ownName)(tree.source)) def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) @@ -740,6 +746,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) case ExtMethods(paramss, methods) => cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case ContextBoundTypeTree(tycon, paramName, ownName) => + cpy.ContextBoundTypeTree(tree)(transform(tycon), paramName, ownName) case ImportSelector(imported, renamed, bound) => cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) case Number(_, _) | TypedSplice(_) => @@ -795,6 +803,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case ExtMethods(paramss, methods) => this(paramss.foldLeft(x)(apply), methods) + case ContextBoundTypeTree(tycon, paramName, ownName) => + this(x, tycon) case ImportSelector(imported, renamed, bound) => this(this(this(x, imported), renamed), bound) case Number(_, _) => diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 2746476261e5..a7f9bdb2022d 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -235,4 +235,11 @@ object Config { */ inline val checkLevelsOnConstraints = false inline val checkLevelsOnInstantiation = true + + /** If a type parameter `X` has a single context bounf `X: C`, should the + * witness parameter be named `X`? This would prevent the creation of a + * context bound companion. + */ + inline val nameSingleContextBounds = true } + diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 7eb95badd4d0..5efd0f51cb1c 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -32,6 +32,7 @@ object Feature: val pureFunctions = experimental("pureFunctions") val captureChecking = experimental("captureChecking") val into = experimental("into") + val modularity = experimental("modularity") /** Is `feature` enabled by by a command-line setting? 
The enabling setting is * diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index ae21c6fb8763..97779e7c6957 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -12,6 +12,7 @@ import Symbols.* import Scopes.* import Uniques.* import ast.Trees.* +import Flags.ParamAccessor import ast.untpd import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} @@ -399,7 +400,8 @@ object Contexts { * * - as owner: The primary constructor of the class * - as outer context: The context enclosing the class context - * - as scope: The parameter accessors in the class context + * - as scope: type parameters, the parameter accessors, and + * the context bound companions in the class context, * * The reasons for this peculiar choice of attributes are as follows: * @@ -413,10 +415,11 @@ object Contexts { * context see the constructor parameters instead, but then we'd need a final substitution step * from constructor parameters to class parameter accessors. */ - def superCallContext: Context = { - val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors*) - superOrThisCallContext(owner.primaryConstructor, locals) - } + def superCallContext: Context = + val locals = owner.typeParams + ++ owner.asClass.unforcedDecls.filter: sym => + sym.is(ParamAccessor) || sym.isContextBoundCompanion + superOrThisCallContext(owner.primaryConstructor, newScopeWith(locals*)) /** The context for the arguments of a this(...) constructor call. * The context is computed from the local auxiliary constructor context. diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 789e744fbfc9..9740a0568b39 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -240,6 +240,7 @@ class Definitions { @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") + @tu lazy val Compiletime_deferred : Symbol = CompiletimePackageClass.requiredMethod("deferred") @tu lazy val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") @@ -458,6 +459,13 @@ class Definitions { @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) + @tu lazy val CBCompanion: TypeSymbol = // type ``[-Refs] + enterPermanentSymbol(tpnme.CBCompanion, + TypeBounds(NothingType, + HKTypeLambda(tpnme.syntheticTypeParamName(0) :: Nil, Contravariant :: Nil)( + tl => TypeBounds.empty :: Nil, + tl => AnyType))).asType + /** Method representing a throw */ @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, MethodType(List(ThrowableType), NothingType)) @@ -1063,6 +1071,7 @@ class Definitions { @tu lazy val RetainsCapAnnot: ClassSymbol = requiredClass("scala.annotation.retainsCap") @tu lazy val 
RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") @tu lazy val PublicInBinaryAnnot: ClassSymbol = requiredClass("scala.annotation.publicInBinary") + @tu lazy val WitnessNamesAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WitnessNames") @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") @@ -2140,6 +2149,7 @@ class Definitions { NullClass, NothingClass, SingletonClass, + CBCompanion, MaybeCapabilityAnnot) @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 249940d8ff99..0549245d6fcd 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -377,6 +377,9 @@ object Flags { /** Symbol cannot be found as a member during typer */ val (Invisible @ _, _, _) = newFlags(45, "") + /** Tracked modifier for class parameter / a class with some tracked parameters */ + val (Tracked @ _, _, Dependent @ _) = newFlags(46, "tracked") + // ------------ Flags following this one are not pickled ---------------------------------- /** Symbol is not a member of its owner */ @@ -452,7 +455,7 @@ object Flags { CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open val TermSourceModifierFlags: FlagSet = - CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy + CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy | Tracked /** Flags representing modifiers that can appear in trees */ val ModifierFlags: FlagSet = @@ -466,7 +469,7 @@ object Flags { val FromStartFlags: FlagSet = commonFlags( Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessor, Scala2SpecialFlags, MutableOrOpen, Opaque, Touched, JavaStatic, - OuterOrCovariant, LabelOrContravariant, CaseAccessor, + OuterOrCovariant, LabelOrContravariant, CaseAccessor, Tracked, Extension, NonMember, Implicit, Given, Permanent, Synthetic, Exported, SuperParamAliasOrScala2x, Inline, Macro, ConstructorProxy, Invisible) @@ -477,7 +480,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent, Tracked) /** A value that's unstable unless complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method @@ -540,8 +543,6 @@ object Flags { /** Flags retained in type export forwarders */ val RetainedExportTypeFlags = Infix - val MandatoryExportTypeFlags = Exported | Final - /** Flags that apply only to classes */ val ClassOnlyFlags = Sealed | Open | Abstract.toTypeFlags @@ -569,6 +570,7 @@ object Flags { val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without right-hand sides val DeferredOrTypeParam: FlagSet = Deferred | TypeParam // type symbols without right-hand sides + val DeferredGivenFlags = Deferred | Given | HasDefault val EnumValue: FlagSet = Enum | StableRealizable // A Scala enum value val FinalOrInline: FlagSet = Final | Inline val FinalOrModuleClass: FlagSet = Final | ModuleClass // A module class or a final class diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 71b49394ae14..17a36e4fee32 100644 --- 
a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -41,6 +41,8 @@ object Mode { val Pattern: Mode = newMode(0, "Pattern") val Type: Mode = newMode(1, "Type") + val PatternOrTypeBits: Mode = Pattern | Type + val ImplicitsEnabled: Mode = newMode(2, "ImplicitsEnabled") val InferringReturnType: Mode = newMode(3, "InferringReturnType") @@ -101,16 +103,19 @@ object Mode { */ val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") - /** Use Scala2 scheme for overloading and implicit resolution */ - val OldOverloadingResolution: Mode = newMode(15, "OldOverloadingResolution") + /** Use previous Scheme for implicit resolution. Currently significant + * in 3.0-migration where we use Scala-2's scheme instead and in 3.5-migration + * where we use the previous scheme up to 3.4 instead. + */ + val OldImplicitResolution: Mode = newMode(15, "OldImplicitResolution") /** Treat CapturingTypes as plain AnnotatedTypes even in phase CheckCaptures. - * Reuses the value of OldOverloadingResolution to save Mode bits. - * This is OK since OldOverloadingResolution only affects implicit search, which + * Reuses the value of OldImplicitResolution to save Mode bits. + * This is OK since OldImplicitResolution only affects implicit search, which * is done during phases Typer and Inlinig, and IgnoreCaptures only has an * effect during phase CheckCaptures. */ - val IgnoreCaptures = OldOverloadingResolution + val IgnoreCaptures = OldImplicitResolution /** Allow hk applications of type lambdas to wildcard arguments; * used for checking that such applications do not normally arise @@ -120,8 +125,6 @@ object Mode { /** Read original positions when unpickling from TASTY */ val ReadPositions: Mode = newMode(17, "ReadPositions") - val PatternOrTypeBits: Mode = Pattern | Type - /** We are elaborating the fully qualified name of a package clause. * In this case, identifiers should never be imported. */ diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index d4f009cbbbd5..74d440562824 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -182,13 +182,13 @@ object NameKinds { case DerivedName(underlying, info: this.NumberedInfo) => Some((underlying, info.num)) case _ => None } - protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = { + protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = var i = name.length - while (i > 0 && name(i - 1).isDigit) i -= 1 - if (i > separator.length && i < name.length && - name.slice(i - separator.length, i).toString == separator) i + while i > 0 && name(i - 1).isDigit do i -= 1 + if i >= separator.length && i < name.length + && name.slice(i - separator.length, i).toString == separator + then i else -1 - } numberedNameKinds(tag) = this: @unchecked } @@ -240,6 +240,16 @@ object NameKinds { } } + /** Unique names that can be unmangled */ + class UniqueNameKindWithUnmangle(separator: String) extends UniqueNameKind(separator): + override def unmangle(name: SimpleName): TermName = + val i = skipSeparatorAndNum(name, separator) + if i > 0 then + val index = name.drop(i).toString.toInt + val original = name.take(i - separator.length).asTermName + apply(original, index) + else name + /** Names of the form `prefix . 
name` */ val QualifiedName: QualifiedNameKind = new QualifiedNameKind(QUALIFIED, ".") @@ -288,7 +298,7 @@ object NameKinds { * * The "evidence$" prefix is a convention copied from Scala 2. */ - val ContextBoundParamName: UniqueNameKind = new UniqueNameKind("evidence$") + val ContextBoundParamName: UniqueNameKind = new UniqueNameKindWithUnmangle("evidence$") /** The name of an inferred contextual function parameter: * @@ -323,20 +333,7 @@ object NameKinds { val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") - /** A kind of unique extension methods; Unlike other unique names, these can be - * unmangled. - */ - val UniqueExtMethName: UniqueNameKind = new UniqueNameKind("$extension") { - override def unmangle(name: SimpleName): TermName = { - val i = skipSeparatorAndNum(name, separator) - if (i > 0) { - val index = name.drop(i).toString.toInt - val original = name.take(i - separator.length).asTermName - apply(original, index) - } - else name - } - } + val UniqueExtMethName: UniqueNameKind = new UniqueNameKindWithUnmangle("$extension") /** Kinds of unique names generated by the pattern matcher */ val PatMatStdBinderName: UniqueNameKind = new UniqueNameKind("x") diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 75a135826785..58b4ad681c6f 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -4,7 +4,10 @@ package core import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme +import ContextOps.enter import TypeApplications.EtaExpansion +import collection.mutable +import config.Printers.typr /** Operations that are shared between Namer and TreeUnpickler */ object NamerOps: @@ -15,8 +18,41 @@ object NamerOps: */ def effectiveResultType(ctor: Symbol, paramss: List[List[Symbol]])(using Context): Type = paramss match - case TypeSymbols(tparams) :: _ => ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)) - case _ => ctor.owner.typeRef + case TypeSymbols(tparams) :: rest => + addParamRefinements(ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)), rest) + case _ => + addParamRefinements(ctor.owner.typeRef, paramss) + + /** Given a method with tracked term-parameters `p1, ..., pn`, and result type `R`, add the + * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the term parameter ref + * of the parameter and pi is its name. This matters only under experimental.modularity, + * since wothout it there are no tracked parameters. Parameter refinements are added for + * constructors and given companion methods. + */ + def addParamRefinements(resType: Type, paramss: List[List[Symbol]])(using Context): Type = + paramss.flatten.foldLeft(resType): (rt, param) => + if param.is(Tracked) then RefinedType(rt, param.name, param.termRef) + else rt + + /** Split dependent class refinements off parent type. Add them to `refinements`, + * unless it is null. 
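[Reviewer note] To make the intent of `addParamRefinements` above concrete, here is a small, hypothetical usage sketch. The class and value names are invented; it assumes the `experimental.modularity` import and the `tracked` modifier introduced elsewhere in this patch, and only illustrates the refinement scheme described in the doc comment.

```scala
import scala.language.experimental.modularity

// `tracked` asks the compiler to keep the precise type of the argument:
// per the doc comment of addParamRefinements, the constructor's result type
// is refined with `{ val width: width.type }`.
class Column(tracked val width: Int)

val c = Column(80)
// If the refinement is applied as documented, `c.width` should keep the
// singleton type of its argument instead of widening to Int:
val w: 80 = c.width
```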
+ */ + extension (tp: Type) + def separateRefinements(cls: ClassSymbol, refinements: mutable.LinkedHashMap[Name, Type] | Null)(using Context): Type = + tp match + case RefinedType(tp1, rname, rinfo) => + try tp1.separateRefinements(cls, refinements) + finally + if refinements != null then + refinements(rname) = refinements.get(rname) match + case Some(tp) => tp & rinfo + case None => rinfo + case tp @ AnnotatedType(tp1, ann) => + tp.derivedAnnotatedType(tp1.separateRefinements(cls, refinements), ann) + case tp: RecType => + tp.parent.substRecThis(tp, cls.thisType).separateRefinements(cls, refinements) + case tp => + tp /** If isConstructor, make sure it has at least one non-implicit parameter list * This is done by adding a () in front of a leading old style implicit parameter, @@ -222,4 +258,55 @@ object NamerOps: rhsCtx.gadtState.addBound(psym, tr, isUpper = true) } + /** Create a context-bound companion for type symbol `tsym`, which has a context + * bound that defines a set of witnesses with names `witnessNames`. + * + * @param parans If `tsym` is a type parameter, a list of parameter symbols + * that include all witnesses, otherwise the empty list. + * + * The context-bound companion has as name the name of `tsym` translated to + * a term name. We create a synthetic val of the form + * + * val A: ``[witnessRef1 | ... | witnessRefN] + * + * where + * + * is the CBCompanion type created in Definitions + * withnessRefK is a refence to the K'th witness. + * + * The companion has the same access flags as the original type. + */ + def addContextBoundCompanionFor(tsym: Symbol, witnessNames: List[TermName], params: List[Symbol])(using Context): Unit = + val prefix = ctx.owner.thisType + val companionName = tsym.name.toTermName + val witnessRefs = + if params.nonEmpty then + witnessNames.map: witnessName => + prefix.select(params.find(_.name == witnessName).get) + else + witnessNames.map(TermRef(prefix, _)) + val cbtype = defn.CBCompanion.typeRef.appliedTo: + witnessRefs.reduce[Type](OrType(_, _, soft = false)) + val cbc = newSymbol( + ctx.owner, companionName, + (tsym.flagsUNSAFE & (AccessFlags)).toTermFlags | Synthetic, + cbtype) + typr.println(s"context bound companion created $cbc for $witnessNames in ${ctx.owner}") + ctx.enter(cbc) + end addContextBoundCompanionFor + + /** Add context bound companions to all context-bound types declared in + * this class. This assumes that these types already have their + * WitnessNames annotation set even before they are completed. This is + * the case for unpickling but currently not for Namer. So the method + * is only called during unpickling, and is not part of NamerOps. 
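[Reviewer note] As an illustration of what the synthetic context-bound companion `val A: ``[...]` is for — a sketch only; `Ord`, `max`, and `compare` are assumptions, not part of this patch — selecting on the bound type's name is meant to route through the companion to the witness:

```scala
import scala.language.experimental.modularity

trait Ord[T]:
  def compare(x: T, y: T): Int

// With a context bound `X: Ord`, the generated companion value `X` of type
// ``[...] lets the witness's members be selected through the
// type name itself:
def max[X: Ord](x: X, y: X): X =
  if X.compare(x, y) > 0 then x else y
```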
+ */ + def addContextBoundCompanions(cls: ClassSymbol)(using Context): Unit = + for sym <- cls.info.decls do + if sym.isType && !sym.isClass then + for ann <- sym.annotationsUNSAFE do + if ann.symbol == defn.WitnessNamesAnnot then + ann.tree match + case ast.tpd.WitnessNamesAnnot(witnessNames) => + addContextBoundCompanionFor(sym, witnessNames, Nil) end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 38f8e19e2737..9273c2ec59b3 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -88,11 +88,6 @@ trait PatternTypeConstrainer { self: TypeComparer => } } - def stripRefinement(tp: Type): Type = tp match { - case tp: RefinedOrRecType => stripRefinement(tp.parent) - case tp => tp - } - def tryConstrainSimplePatternType(pat: Type, scrut: Type) = { val patCls = pat.classSymbol val scrCls = scrut.classSymbol @@ -181,14 +176,14 @@ trait PatternTypeConstrainer { self: TypeComparer => case AndType(scrut1, scrut2) => constrainPatternType(pat, scrut1) && constrainPatternType(pat, scrut2) case scrut: RefinedOrRecType => - constrainPatternType(pat, stripRefinement(scrut)) + constrainPatternType(pat, scrut.stripRefinement) case scrut => dealiasDropNonmoduleRefs(pat) match { case OrType(pat1, pat2) => either(constrainPatternType(pat1, scrut), constrainPatternType(pat2, scrut)) case AndType(pat1, pat2) => constrainPatternType(pat1, scrut) && constrainPatternType(pat2, scrut) case pat: RefinedOrRecType => - constrainPatternType(stripRefinement(pat), scrut) + constrainPatternType(pat.stripRefinement, scrut) case pat => tryConstrainSimplePatternType(pat, scrut) || classesMayBeCompatible && constrainUpcasted(scrut) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 9772199678d7..d819bdaeeb6e 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -288,6 +288,7 @@ object StdNames { // Compiler-internal val CAPTURE_ROOT: N = "cap" + val CBCompanion: N = "" val CONSTRUCTOR: N = "" val STATIC_CONSTRUCTOR: N = "" val EVT2U: N = "evt2u$" @@ -384,6 +385,7 @@ object StdNames { val RootPackage: N = "RootPackage" val RootClass: N = "RootClass" val Select: N = "Select" + val Self: N = "Self" val Shape: N = "Shape" val StringContext: N = "StringContext" val This: N = "This" @@ -393,6 +395,7 @@ object StdNames { val TypeApply: N = "TypeApply" val TypeRef: N = "TypeRef" val UNIT : N = "UNIT" + val WitnessNames: N = "WitnessNames" val acc: N = "acc" val adhocExtensions: N = "adhocExtensions" val andThen: N = "andThen" @@ -452,6 +455,7 @@ object StdNames { val create: N = "create" val currentMirror: N = "currentMirror" val curried: N = "curried" + val deferred: N = "deferred" val definitions: N = "definitions" val delayedInit: N = "delayedInit" val delayedInitArg: N = "delayedInit$body" @@ -625,6 +629,7 @@ object StdNames { val toString_ : N = "toString" val toTypeConstructor: N = "toTypeConstructor" val tpe : N = "tpe" + val tracked: N = "tracked" val transparent : N = "transparent" val tree : N = "tree" val true_ : N = "true" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 5304c9efadc0..78637bc20727 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ 
b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1190,21 +1190,25 @@ object SymDenotations { final def isExtensibleClass(using Context): Boolean = isClass && !isOneOf(FinalOrModuleClass) && !isAnonymousClass - /** A symbol is effectively final if it cannot be overridden in a subclass */ + /** A symbol is effectively final if it cannot be overridden */ final def isEffectivelyFinal(using Context): Boolean = isOneOf(EffectivelyFinalFlags) || is(Inline, butNot = Deferred) || is(JavaDefinedVal, butNot = Method) || isConstructor - || !owner.isExtensibleClass + || !owner.isExtensibleClass && !is(Deferred) + // Deferred symbols can arise through parent refinements. + // For them, the overriding relationship reverses anyway, so + // being in a final class does not mean the symbol cannot be + // implemented concretely in a superclass. /** A class is effectively sealed if has the `final` or `sealed` modifier, or it * is defined in Scala 3 and is neither abstract nor open. */ final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) - || isClass && (!isOneOf(EffectivelyOpenFlags) - || isLocalToCompilationUnit) + || isClass + && (!isOneOf(EffectivelyOpenFlags) || isLocalToCompilationUnit) final def isLocalToCompilationUnit(using Context): Boolean = is(Private) diff --git a/compiler/src/dotty/tools/dotc/core/SymUtils.scala b/compiler/src/dotty/tools/dotc/core/SymUtils.scala index 65634241b790..3a97a0053dbd 100644 --- a/compiler/src/dotty/tools/dotc/core/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/SymUtils.scala @@ -87,6 +87,9 @@ class SymUtils: !d.isPrimitiveValueClass } + def isContextBoundCompanion(using Context): Boolean = + self.is(Synthetic) && self.infoOrCompleter.typeSymbol == defn.CBCompanion + /** Is this a case class for which a product mirror is generated? * Excluded are value classes, abstract classes and case classes with more than one * parameter section. 
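[Reviewer note] A brief, hedged sketch of the surface feature behind the `Compiletime_deferred` symbol and `DeferredGivenFlags` added earlier in this patch. The trait and class names are invented, and the exact surface syntax is my reading of those hooks rather than something shown in the diff:

```scala
import scala.language.experimental.modularity
import scala.compiletime.deferred

trait SortedSet[Elem]:
  // A deferred given: left abstract here, expected to be filled in by
  // implicit search at the point where a concrete subclass is defined.
  given Ordering[Elem] = deferred

// Assuming that scheme, the standard Ordering[Int] instance is picked up here.
class SortedIntSet extends SortedSet[Int]
```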
diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 78c736649605..49fc53c172df 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -308,7 +308,6 @@ object Symbols extends SymUtils { * With the given setup, all such calls will give implicit-not found errors */ final def symbol(implicit ev: DontUseSymbolOnSymbol): Nothing = unsupported("symbol") - type DontUseSymbolOnSymbol final def source(using Context): SourceFile = { def valid(src: SourceFile): SourceFile = @@ -398,13 +397,12 @@ object Symbols extends SymUtils { flags: FlagSet = this.flags, info: Type = this.info, privateWithin: Symbol = this.privateWithin, - coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap - compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.associatedFile` once we bootstrap + coord: Coord = NoCoord, // Can be `= owner.coord` once we have new default args + compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.compilationUnitInfo` once we have new default args ): Symbol = { val coord1 = if (coord == NoCoord) owner.coord else coord val compilationUnitInfo1 = if (compilationUnitInfo == null) owner.compilationUnitInfo else compilationUnitInfo - if isClass then newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, compilationUnitInfo1) else @@ -932,6 +930,8 @@ object Symbols extends SymUtils { case (x: Symbol) :: _ if x.isType => Some(xs.asInstanceOf[List[TypeSymbol]]) case _ => None + type DontUseSymbolOnSymbol + // ----- Locating predefined symbols ---------------------------------------- def requiredPackage(path: PreName)(using Context): TermSymbol = { diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index c76b5117dc89..7c4acaebb18b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -6,11 +6,11 @@ import TypeErasure.ErasedValueType import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* import Names.Name -class TypeUtils { +class TypeUtils: /** A decorator that provides methods on types * that are needed in the transformer pipeline. */ - extension (self: Type) { + extension (self: Type) def isErasedValueType(using Context): Boolean = self.isInstanceOf[ErasedValueType] @@ -150,5 +150,11 @@ class TypeUtils { case _ => val cls = self.underlyingClassRef(refinementOK = false).typeSymbol cls.isTransparentClass && (!traitOnly || cls.is(Trait)) - } -} + + /** Strip all outer refinements off this type */ + def stripRefinement: Type = self match + case self: RefinedOrRecType => self.parent.stripRefinement + case seld => self + +end TypeUtils + diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 62844a54bf48..997614fdbb4e 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1631,17 +1631,19 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Does not perform the reduction if the resulting type would contain - * a reference to the "this" of the current refined type, except in the following situation + * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` is `S` + * is a singleton type. * - * (1) The "this" reference can be avoided by following an alias. 
Example: + * Does not perform the reduction if the resulting type would contain + * a reference to the "this" of the current refined type, except if the "this" + * reference can be avoided by following an alias. Example: * * P { type T = String, type R = P{...}.T } # R --> String * * (*) normalizes means: follow instantiated typevars and aliases. */ - def lookupRefined(name: Name)(using Context): Type = { - @tailrec def loop(pre: Type): Type = pre.stripTypeVar match { + def lookupRefined(name: Name)(using Context): Type = + @tailrec def loop(pre: Type): Type = pre match case pre: RefinedType => pre.refinedInfo match { case tp: AliasingBounds => @@ -1664,12 +1666,13 @@ object Types extends TypeUtils { case TypeAlias(alias) => loop(alias) case _ => NoType } + case pre: (TypeVar | AnnotatedType) => + loop(pre.underlying) case _ => NoType - } loop(this) - } + end lookupRefined /** The type , reduced if possible */ def select(name: Name)(using Context): Type = @@ -2809,35 +2812,30 @@ object Types extends TypeUtils { def derivedSelect(prefix: Type)(using Context): Type = if prefix eq this.prefix then this else if prefix.isExactlyNothing then prefix - else { - val res = - if (isType && currentValidSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) + else + val reduced = + if isType && currentValidSymbol.isAllOf(ClassTypeParam) then argForParam(prefix) else prefix.lookupRefined(name) - if (res.exists) return res - if (isType) { - if (Config.splitProjections) - prefix match { - case prefix: AndType => - def isMissing(tp: Type) = tp match { - case tp: TypeRef => !tp.info.exists - case _ => false - } - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return ( - if (isMissing(derived1)) derived2 - else if (isMissing(derived2)) derived1 - else prefix.derivedAndType(derived1, derived2)) - case prefix: OrType => - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return prefix.derivedOrType(derived1, derived2) - case _ => - } - } - if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) + if reduced.exists then return reduced + if Config.splitProjections && isType then + prefix match + case prefix: AndType => + def isMissing(tp: Type) = tp match + case tp: TypeRef => !tp.info.exists + case _ => false + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return + if isMissing(derived1) then derived2 + else if isMissing(derived2) then derived1 + else prefix.derivedAndType(derived1, derived2) + case prefix: OrType => + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return prefix.derivedOrType(derived1, derived2) + case _ => + if prefix.isInstanceOf[WildcardType] then WildcardType.sameKindAs(this) else withPrefix(prefix) - } /** A reference like this one, but with the given symbol, if it exists */ private def withSym(sym: Symbol)(using Context): ThisType = @@ -4921,20 +4919,21 @@ object Types extends TypeUtils { def isInstantiated(using Context): Boolean = instanceOpt.exists /** Instantiate variable with given type */ - def instantiateWith(tp: Type)(using Context): Type = { - assert(tp ne this, i"self instantiation of $origin, constraint = ${ctx.typerState.constraint}") - assert(!myInst.exists, i"$origin is already instantiated to $myInst but we attempted to instantiate it to $tp") - typr.println(i"instantiating $this with $tp") + def instantiateWith(tp: Type)(using Context): Type = + if myInst.exists then myInst + else + assert(tp ne this, i"self 
instantiation of $origin, constraint = ${ctx.typerState.constraint}") + assert(!myInst.exists, i"$origin is already instantiated to $myInst but we attempted to instantiate it to $tp") + typr.println(i"instantiating $this with $tp") - if Config.checkConstraintsSatisfiable then - assert(currentEntry.bounds.contains(tp), - i"$origin is constrained to be $currentEntry but attempted to instantiate it to $tp") + if Config.checkConstraintsSatisfiable then + assert(currentEntry.bounds.contains(tp), + i"$origin is constrained to be $currentEntry but attempted to instantiate it to $tp") - if ((ctx.typerState eq owningState.nn.get.uncheckedNN) && !TypeComparer.subtypeCheckInProgress) - setInst(tp) - ctx.typerState.constraint = ctx.typerState.constraint.replace(origin, tp) - tp - } + if ((ctx.typerState eq owningState.nn.get.uncheckedNN) && !TypeComparer.subtypeCheckInProgress) + setInst(tp) + ctx.typerState.constraint = ctx.typerState.constraint.replace(origin, tp) + tp def typeToInstantiateWith(fromBelow: Boolean)(using Context): Type = TypeComparer.instanceType(origin, fromBelow, widenUnions, nestingLevel) @@ -4947,11 +4946,7 @@ object Types extends TypeUtils { * is also a singleton type. */ def instantiate(fromBelow: Boolean)(using Context): Type = - val tp = typeToInstantiateWith(fromBelow) - if myInst.exists then // The line above might have triggered instantiation of the current type variable - myInst - else - instantiateWith(tp) + instantiateWith(typeToInstantiateWith(fromBelow)) /** Widen unions when instantiating this variable in the current context? */ def widenUnions(using Context): Boolean = !ctx.typerState.constraint.isHard(this) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 7d2d95aa9601..47cb0ba3d4d6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -807,6 +807,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { if (flags.is(Exported)) writeModTag(EXPORTED) if (flags.is(Given)) writeModTag(GIVEN) if (flags.is(Implicit)) writeModTag(IMPLICIT) + if (flags.is(Tracked)) writeModTag(TRACKED) if (isTerm) { if (flags.is(Lazy, butNot = Module)) writeModTag(LAZY) if (flags.is(AbsOverride)) { writeModTag(ABSTRACT); writeModTag(OVERRIDE) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 57c0b2217e9d..3f8d78663762 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -31,7 +31,8 @@ import util.{SourceFile, Property} import ast.{Trees, tpd, untpd} import Trees.* import Decorators.* -import dotty.tools.dotc.quoted.QuotePatterns +import config.Feature +import quoted.QuotePatterns import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer.* @@ -751,6 +752,7 @@ class TreeUnpickler(reader: TastyReader, case INVISIBLE => addFlag(Invisible) case TRANSPARENT => addFlag(Transparent) case INFIX => addFlag(Infix) + case TRACKED => addFlag(Tracked) case PRIVATEqualified => readByte() privateWithin = readWithin @@ -918,6 +920,8 @@ class TreeUnpickler(reader: TastyReader, val resType = if name == nme.CONSTRUCTOR then effectiveResultType(sym, paramss) + else if sym.isAllOf(Given | Method) && Feature.enabled(Feature.modularity) then + addParamRefinements(tpt.tpe, paramss) else tpt.tpe sym.info = methodType(paramss, 
resType) @@ -1063,7 +1067,7 @@ class TreeUnpickler(reader: TastyReader, } val parentReader = fork val parents = readParents(withArgs = false)(using parentCtx) - val parentTypes = parents.map(_.tpe.dealias) + val parentTypes = parents.map(_.tpe.dealiasKeepAnnots.separateRefinements(cls, null)) if cls.is(JavaDefined) && parentTypes.exists(_.derivesFrom(defn.JavaAnnotationClass)) then cls.setFlag(JavaAnnotation) val self = @@ -1123,6 +1127,7 @@ class TreeUnpickler(reader: TastyReader, }) defn.patchStdLibClass(cls) NamerOps.addConstructorProxies(cls) + NamerOps.addContextBoundCompanions(cls) setSpan(start, untpd.Template(constr, mappedParents, self, lazyStats) .withType(localDummy.termRef)) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index addd54df9d69..63c0e6facc23 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -62,7 +62,7 @@ object Parsers { case ExtensionFollow // extension clause, following extension parameter def isClass = // owner is a class - this == Class || this == CaseClass + this == Class || this == CaseClass || this == Given def takesOnlyUsingClauses = // only using clauses allowed for this owner this == Given || this == ExtensionFollow def acceptsVariance = @@ -948,12 +948,14 @@ object Parsers { * i.e. an identifier followed by type and value parameters, followed by `:`? * @pre The current token is an identifier */ - def followingIsGivenSig() = + def followingIsOldStyleGivenSig() = val lookahead = in.LookaheadScanner() if lookahead.isIdent then lookahead.nextToken() + var paramsSeen = false def skipParams(): Unit = if lookahead.token == LPAREN || lookahead.token == LBRACKET then + paramsSeen = true lookahead.skipParens() skipParams() else if lookahead.isNewLine then @@ -961,6 +963,16 @@ object Parsers { skipParams() skipParams() lookahead.isColon + && { + !in.featureEnabled(Feature.modularity) + || { // with modularity language import, a `:` at EOL after an identifier represents a single identifier given + // Example: + // given C: + // def f = ... 
+ lookahead.nextToken() + !lookahead.isAfterLineEnd + } + } def followingIsExtension() = val next = in.lookahead.token @@ -1771,9 +1783,11 @@ object Parsers { */ def infixType(): Tree = infixTypeRest(refinedType()) - def infixTypeRest(t: Tree): Tree = - infixOps(t, canStartInfixTypeTokens, refinedTypeFn, Location.ElseWhere, ParseKind.Type, - isOperator = !followingIsVararg() && !isPureArrow + def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = + infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, + isOperator = !followingIsVararg() + && !isPureArrow + && !(isIdent(nme.as) && in.featureEnabled(Feature.modularity)) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -1837,6 +1851,10 @@ object Parsers { */ def annotType(): Tree = annotTypeRest(simpleType()) + /** AnnotType1 ::= SimpleType1 {Annotation} + */ + def annotType1(): Tree = annotTypeRest(simpleType1()) + def annotTypeRest(t: Tree): Tree = if (in.token == AT) annotTypeRest(atSpan(startOffset(t)) { @@ -2141,20 +2159,33 @@ object Parsers { if (in.token == tok) { in.nextToken(); toplevelTyp() } else EmptyTree - /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} + /** TypeAndCtxBounds ::= TypeBounds {`<%' Type} [`:` ContextBounds] */ - def typeParamBounds(pname: TypeName): Tree = { + def typeAndCtxBounds(pname: TypeName): Tree = { val t = typeBounds() val cbs = contextBounds(pname) if (cbs.isEmpty) t else atSpan((t.span union cbs.head.span).start) { ContextBounds(t, cbs) } } + /** ContextBound ::= Type [`as` id] */ + def contextBound(pname: TypeName): Tree = + val t = toplevelTyp() + val ownName = + if isIdent(nme.as) && in.featureEnabled(Feature.modularity) then + in.nextToken() + ident() + else EmptyTermName + ContextBoundTypeTree(t, pname, ownName) + + /** ContextBounds ::= ContextBound | `{` ContextBound {`,` ContextBound} `}` + */ def contextBounds(pname: TypeName): List[Tree] = if in.isColon then - atSpan(in.skipToken()) { - AppliedTypeTree(toplevelTyp(), Ident(pname)) - } :: contextBounds(pname) + in.nextToken() + if in.token == LBRACE && in.featureEnabled(Feature.modularity) + then inBraces(commaSeparated(() => contextBound(pname))) + else contextBound(pname) :: contextBounds(pname) else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", @@ -3147,6 +3178,7 @@ object Parsers { case nme.open => Mod.Open() case nme.transparent => Mod.Transparent() case nme.infix => Mod.Infix() + case nme.tracked => Mod.Tracked() } } @@ -3213,7 +3245,8 @@ object Parsers { * | AccessModifier * | override * | opaque - * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | inline | transparent | infix | erased + * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | erased | + * inline | transparent */ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { @tailrec @@ -3330,7 +3363,7 @@ object Parsers { val isAbstractOwner = paramOwner == ParamOwner.Type || paramOwner == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param - if paramOwner == ParamOwner.Class || paramOwner == ParamOwner.CaseClass then + if paramOwner.isClass then mods |= PrivateLocal if isIdent(nme.raw.PLUS) && checkVarianceOK() then mods |= Covariant @@ -3344,7 +3377,7 @@ object Parsers { } else ident().toTypeName val hkparams = typeParamClauseOpt(ParamOwner.Type) - val 
bounds = if (isAbstractOwner) typeBounds() else typeParamBounds(name) + val bounds = if (isAbstractOwner) typeBounds() else typeAndCtxBounds(name) TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) } } @@ -3366,8 +3399,8 @@ object Parsers { /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} - * ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’)] Param - * + * ClsParam ::= {Annotation} + * [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param * TypelessClause ::= DefTermParamClause * | UsingParamClause * @@ -3403,6 +3436,8 @@ object Parsers { if isErasedKw then mods = addModifier(mods) if paramOwner.isClass then + if isIdent(nme.tracked) && in.featureEnabled(Feature.modularity) && !in.lookahead.isColon then + mods = addModifier(mods) mods = addFlag(modifiers(start = mods), ParamAccessor) mods = if in.token == VAL then @@ -3474,7 +3509,8 @@ object Parsers { val isParams = !impliedMods.is(Given) || startParamTokens.contains(in.token) - || isIdent && (in.name == nme.inline || in.lookahead.isColon) + || isIdent + && (in.name == nme.inline || in.name == nme.tracked || in.lookahead.isColon) (mods, isParams) (if isParams then commaSeparated(() => param()) else contextTypes(paramOwner, numLeadParams, impliedMods)) match { @@ -3852,14 +3888,16 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] + /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atSpan(start, nameStart) { val nameIdent = typeIdent() + val tname = nameIdent.name.asTypeName val tparams = typeParamClauseOpt(ParamOwner.Type) val vparamss = funParamClauses() + def makeTypeDef(rhs: Tree): Tree = { val rhs1 = lambdaAbstractAll(tparams :: vparamss, rhs) val tdef = TypeDef(nameIdent.name.toTypeName, rhs1) @@ -3867,36 +3905,37 @@ object Parsers { tdef.pushAttachment(Backquoted, ()) finalizeDef(tdef, mods, start) } + in.token match { case EQUALS => in.nextToken() makeTypeDef(toplevelTyp()) case SUBTYPE | SUPERTYPE => - val bounds = typeBounds() - if (in.token == EQUALS) { - val eqOffset = in.skipToken() - var rhs = toplevelTyp() - rhs match { - case mtt: MatchTypeTree => - bounds match { - case TypeBoundsTree(EmptyTree, upper, _) => - rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) - case _ => - syntaxError(em"cannot combine lower bound and match type alias", eqOffset) - } - case _ => - if mods.is(Opaque) then - rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) - else - syntaxError(em"cannot combine bound and alias", eqOffset) - } - makeTypeDef(rhs) - } - else makeTypeDef(bounds) + typeAndCtxBounds(tname) match + case bounds: TypeBoundsTree if in.token == EQUALS => + val eqOffset = in.skipToken() + var rhs = toplevelTyp() + rhs match { + case mtt: MatchTypeTree => + bounds match { + case TypeBoundsTree(EmptyTree, upper, _) => + rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) + case _ => + syntaxError(em"cannot combine lower bound and match type alias", eqOffset) + } + case _ => + if mods.is(Opaque) then + rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) + else + syntaxError(em"cannot combine bound and alias", eqOffset) + } + makeTypeDef(rhs) + case bounds => makeTypeDef(bounds) case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => - makeTypeDef(typeBounds()) - 
case _ if (staged & StageKind.QuotedPattern) != 0 => - makeTypeDef(typeBounds()) + makeTypeDef(typeAndCtxBounds(tname)) + case _ if (staged & StageKind.QuotedPattern) != 0 + || in.featureEnabled(Feature.modularity) && in.isColon => + makeTypeDef(typeAndCtxBounds(tname)) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) return EmptyTree // return to avoid setting the span to EmptyTree @@ -4050,13 +4089,38 @@ object Parsers { syntaxError(em"extension clause can only define methods", stat.span) } - /** GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) - * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + /** GivenDef ::= OldGivenDef | NewGivenDef + * OldGivenDef ::= [OldGivenSig] (GivenType [‘=’ Expr] | StructuralInstance) + * OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + * + * NewGivenDef ::= [GivenConditional '=>'] NewGivenSig + * GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} + * NewGivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + * | ConstrApps ['as' id] TemplateBody + * + * GivenType ::= AnnotType1 {id [nl] AnnotType1} */ def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) val nameStart = in.offset - val name = if isIdent && followingIsGivenSig() then ident() else EmptyTermName + var name = if isIdent && followingIsOldStyleGivenSig() then ident() else EmptyTermName + var newSyntaxAllowed = in.featureEnabled(Feature.modularity) + + // TODO Change syntax description + def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = + paramss.nestedMap: param => + if !param.mods.isAllOf(PrivateLocal) then + syntaxError(em"method parameter ${param.name} may not be a `val`", param.span) + param.withMods(param.mods &~ (AccessFlags | ParamAccessor | Tracked | Mutable) | Param) + .asInstanceOf[List[ParamClause]] + + def moreConstrApps() = + if newSyntaxAllowed && in.token == COMMA then + in.nextToken() + constrApps() + else // need to be careful with last `with` + withConstrApps() val gdef = val tparams = typeParamClauseOpt(ParamOwner.Given) @@ -4067,10 +4131,24 @@ object Parsers { else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty - if !(name.isEmpty && noParams) then acceptColon() + if !(name.isEmpty && noParams) then + if in.isColon then + newSyntaxAllowed = false + in.nextToken() + else if newSyntaxAllowed then accept(ARROW) + else acceptColon() val parents = - if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else refinedTypeRest(constrApp()) :: withConstrApps() + if isSimpleLiteral then + rejectWildcardType(annotType()) :: Nil + else constrApp() match + case parent: Apply => parent :: moreConstrApps() + case parent if in.isIdent => + infixTypeRest(parent, _ => annotType1()) :: Nil + case parent => parent :: moreConstrApps() + if newSyntaxAllowed && in.isIdent(nme.as) then + in.nextToken() + name = ident() + val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) @@ -4079,19 +4157,25 @@ object Parsers { mods1 |= Lazy ValDef(name, parents.head, subExpr()) else - DefDef(name, joinParams(tparams, vparamss), parents.head, subExpr()) - else if (isStatSep || isStatSeqEnd) && parentsIsType then + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) + else if (isStatSep || isStatSeqEnd) 
&& parentsIsType && !newSyntaxAllowed then if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") - DefDef(name, joinParams(tparams, vparamss), parents.head, EmptyTree) + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else - val tparams1 = tparams.map(tparam => tparam.withMods(tparam.mods | PrivateLocal)) - val vparamss1 = vparamss.map(_.map(vparam => - vparam.withMods(vparam.mods &~ Param | ParamAccessor | Protected))) - val constr = makeConstructor(tparams1, vparamss1) + val vparamss1 = vparamss.nestedMap: vparam => + if vparam.mods.is(Private) + then vparam.withMods(vparam.mods &~ PrivateLocal | Protected) + else vparam + val constr = makeConstructor(tparams, vparamss1) val templ = - if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) - else withTemplate(constr, parents) + if isStatSep || isStatSeqEnd then + Template(constr, parents, Nil, EmptyValDef, Nil) + else if !newSyntaxAllowed || in.token == WITH then + withTemplate(constr, parents) + else + possibleTemplateStart() + templateBodyOpt(constr, parents, Nil) if noParams && !mods.is(Inline) then ModuleDef(name, templ) else TypeDef(name.toTypeName, templ) end gdef @@ -4163,10 +4247,10 @@ object Parsers { /* -------- TEMPLATES ------------------------------------------- */ - /** ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} + /** ConstrApp ::= AnnotType1 {ParArgumentExprs} */ val constrApp: () => Tree = () => - val t = rejectWildcardType(annotTypeRest(simpleType1()), + val t = rejectWildcardType(annotType1(), fallbackTree = Ident(tpnme.ERROR)) // Using Ident(tpnme.ERROR) to avoid causing cascade errors on non-user-written code if in.token == LPAREN then parArgumentExprss(wrapNew(t)) else t diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index ac7b4ef39604..6847b76f485e 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -112,7 +112,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Text = + def toTextRefinement(rt: RefinedType): Text = val keyword = rt.refinedInfo match { case _: ExprType | _: MethodOrPoly => "def " case _: TypeBounds => "type " @@ -430,11 +430,11 @@ class PlainPrinter(_ctx: Context) extends Printer { sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName /** String representation of a definition's type following its name, - * if symbol is completed, "?" otherwise. + * if symbol is completed, ": ?" otherwise. */ protected def toTextRHS(optType: Option[Type]): Text = optType match { case Some(tp) => toTextRHS(tp) - case None => "?" + case None => ": ?" 
} protected def decomposeLambdas(bounds: TypeBounds): (Text, TypeBounds) = diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 8687925ed5fb..297dc31ea94a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -4,7 +4,7 @@ package printing import core.* import Texts.*, ast.Trees.* -import Types.{Type, SingletonType, LambdaParam, NamedType}, +import Types.{Type, SingletonType, LambdaParam, NamedType, RefinedType}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context import typer.Implicits.* @@ -104,6 +104,9 @@ abstract class Printer { /** Textual representation of a prefix of some reference, ending in `.` or `#` */ def toTextPrefixOf(tp: NamedType): Text + /** textual representation of a refinement, with no enclosing {...} */ + def toTextRefinement(rt: RefinedType): Text + /** Textual representation of a reference in a capture set */ def toTextCaptureRef(tp: Type): Text diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 93e280f8a13c..01765cd95388 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -373,7 +373,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec) { keywordStr("for ") ~ Text(enums map enumText, "; ") ~ sep ~ toText(expr) } def cxBoundToText(bound: untpd.Tree): Text = bound match { // DD - case AppliedTypeTree(tpt, _) => " : " ~ toText(tpt) + case ContextBoundTypeTree(tpt, _, _) => " : " ~ toText(tpt) case untpd.Function(_, tpt) => " <% " ~ toText(tpt) } @@ -640,7 +640,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextAnnot = toTextLocal(arg) ~~ annotText(annot.symbol.enclosingClass, annot) def toTextRetainsAnnot = - try changePrec(GlobalPrec)(toText(arg) ~ "^" ~ toTextCaptureSet(captureSet)) + try changePrec(GlobalPrec)(toTextLocal(arg) ~ "^" ~ toTextCaptureSet(captureSet)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner.isRetains && Feature.ccEnabled && !printDebug @@ -729,9 +729,18 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case GenAlias(pat, expr) => toText(pat) ~ " = " ~ toText(expr) case ContextBounds(bounds, cxBounds) => - cxBounds.foldLeft(toText(bounds)) {(t, cxb) => - t ~ cxBoundToText(cxb) - } + if Feature.enabled(Feature.modularity) then + def boundsText(bounds: Tree) = bounds match + case ContextBoundTypeTree(tpt, _, ownName) => + toText(tpt) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) + case bounds => toText(bounds) + cxBounds match + case bound :: Nil => ": " ~ boundsText(bound) + case _ => ": {" ~ Text(cxBounds.map(boundsText), ", ") ~ "}" + else + cxBounds.foldLeft(toText(bounds)) {(t, cxb) => + t ~ cxBoundToText(cxb) + } case PatDef(mods, pats, tpt, rhs) => modText(mods, NoSymbol, keywordStr("val"), isType = false) ~~ toText(pats, ", ") ~ optAscription(tpt) ~ optText(rhs)(" = " ~ _) @@ -776,6 +785,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { prefix ~~ idx.toString ~~ "|" ~~ tpeText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix case CapturesAndResult(refs, parent) => changePrec(GlobalPrec)("^{" ~ Text(refs.map(toText), ", ") ~ "}" ~ toText(parent)) + case ContextBoundTypeTree(tycon, pname, ownName) => + 
toText(pname) ~ " : " ~ toText(tycon) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) case _ => tree.fallbackToText(this) } diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 6011587a7100..fe0778d5eadb 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -207,6 +207,8 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case MatchTypeLegacyPatternID // errorNumber: 191 case UnstableInlineAccessorID // errorNumber: 192 case VolatileOnValID // errorNumber: 193 + case ConstructorProxyNotValueID // errorNumber: 194 + case ContextBoundCompanionNotValueID // errorNumber: 195 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 484bc88c0983..e0d9d06bdee6 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3159,3 +3159,39 @@ class VolatileOnVal()(using Context) extends SyntaxMsg(VolatileOnValID): protected def msg(using Context): String = "values cannot be volatile" protected def explain(using Context): String = "" + +class ConstructorProxyNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"constructor proxy $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A constructor proxy is a symbol made up by the compiler to represent a non-existent + |factory method of a class. For instance, in + | + | class C(x: Int) + | + |C does not have an apply method since it is not a case class. Yet one can + |still create instances with applications like `C(3)` which expand to `new C(3)`. + |The `C` in this call is a constructor proxy. It can only be used in applications, + |not as a stand-alone value.""" + +class ContextBoundCompanionNotValue(sym: Symbol)(using Context) +extends TypeMsg(ContextBoundCompanionNotValueID): + protected def msg(using Context): String = + i"context bound companion $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A context bound companion is a symbol made up by the compiler to represent the + |witness or witnesses generated for the context bound(s) of a type parameter or type. + |For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + |there is just a type `A` declared but not a value `A`. Nevertheless, one can write + |the selection `A.unit`, which works because the compiler created a context bound + |companion value with the (term-)name `A`. 
However, these context bound companions + |are not values themselves, they can only be referred to in selections.""" + diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 01fc423b0076..3daa32af7695 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -567,7 +567,13 @@ object Erasure { case Some(annot) => val message = annot.argumentConstant(0) match case Some(c) => - c.stringValue.toMessage + val addendum = tree match + case tree: RefTree + if tree.symbol == defn.Compiletime_deferred && tree.name != nme.deferred => + i".\nNote that `deferred` can only be used under its own name when implementing a given in a trait; `${tree.name}` is not accepted." + case _ => + "" + (c.stringValue ++ addendum).toMessage case _ => em"""Reference to ${tree.symbol.showLocated} should not have survived, |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 3bcec80b5b10..62f188320f2c 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -261,9 +261,13 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } } - def checkNoConstructorProxy(tree: Tree)(using Context): Unit = + def checkUsableAsValue(tree: Tree)(using Context): Unit = + def unusable(msg: Symbol => Message) = + report.error(msg(tree.symbol), tree.srcPos) if tree.symbol.is(ConstructorProxy) then - report.error(em"constructor proxy ${tree.symbol} cannot be used as a value", tree.srcPos) + unusable(ConstructorProxyNotValue(_)) + if tree.symbol.isContextBoundCompanion then + unusable(ContextBoundCompanionNotValue(_)) def checkStableSelection(tree: Tree)(using Context): Unit = def check(qual: Tree) = @@ -293,7 +297,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if tree.isType then checkNotPackage(tree) else - checkNoConstructorProxy(tree) + checkUsableAsValue(tree) registerNeedsInlining(tree) tree.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) @@ -305,7 +309,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) else - checkNoConstructorProxy(tree) + checkUsableAsValue(tree) transformSelect(tree, Nil) case tree: Apply => val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] @@ -336,11 +340,15 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => case Select(nu: New, nme.CONSTRUCTOR) if isCheckable(nu) => // need to check instantiability here, because the type of the New itself // might be a type constructor. 
- ctx.typer.checkClassType(tree.tpe, tree.srcPos, traitReq = false, stablePrefixReq = true) + def checkClassType(tpe: Type, stablePrefixReq: Boolean) = + ctx.typer.checkClassType(tpe, tree.srcPos, + traitReq = false, stablePrefixReq = stablePrefixReq, + refinementOK = Feature.enabled(Feature.modularity)) + checkClassType(tree.tpe, true) if !nu.tpe.isLambdaSub then // Check the constructor type as well; it could be an illegal singleton type // which would not be reflected as `tree.tpe` - ctx.typer.checkClassType(nu.tpe, tree.srcPos, traitReq = false, stablePrefixReq = false) + checkClassType(nu.tpe, false) Checking.checkInstantiable(tree.tpe, nu.tpe, nu.srcPos) withNoCheckNews(nu :: Nil)(app1) case _ => @@ -416,8 +424,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => // Constructor parameters are in scope when typing a parent. // While they can safely appear in a parent tree, to preserve // soundness we need to ensure they don't appear in a parent - // type (#16270). - val illegalRefs = parent.tpe.namedPartsWith(p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym)) + // type (#16270). We can strip any refinement of a parent type since + // these refinements are split off from the parent type constructor + // application `parent` in Namer and don't show up as parent types + // of the class. + val illegalRefs = parent.tpe.dealias.stripRefinement.namedPartsWith: + p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym) if illegalRefs.nonEmpty then report.error( em"The type of a class parent cannot refer to constructor parameters, but ${parent.tpe} refers to ${illegalRefs.map(_.name.show).mkString(",")}", parent.srcPos) @@ -429,8 +441,14 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) sym.addAnnotation(Annotation(defn.SourceFileAnnot, Literal(Constants.Constant(relativePath)), tree.span)) else - if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then - Checking.checkGoodBounds(tree.symbol) + if !sym.is(Param) then + if !sym.owner.isOneOf(AbstractOrTrait) then + Checking.checkGoodBounds(tree.symbol) + if sym.owner.isClass && sym.hasAnnotation(defn.WitnessNamesAnnot) then + val decls = sym.owner.info.decls + for cbCompanion <- decls.lookupAll(sym.name.toTermName) do + if cbCompanion.isContextBoundCompanion then + decls.openForMutations.unlink(cbCompanion) (tree.rhs, sym.info) match case (rhs: LambdaTypeTree, bounds: TypeBounds) => VarianceChecker.checkLambda(rhs, bounds) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 4a7548f40f43..dea2b3ce0769 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -310,9 +310,11 @@ object TreeChecker { def assertDefined(tree: untpd.Tree)(using Context): Unit = if (tree.symbol.maybeOwner.isTerm) { val sym = tree.symbol + def isAllowed = // constructor proxies and context bound companions are flagged at PostTyper + isSymWithoutDef(sym) && ctx.phase.id < postTyperPhase.id assert( - nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym), - i"undefined symbol ${sym} at line " + tree.srcPos.line + nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym) || isAllowed, + i"undefined symbol ${sym} in ${sym.owner} at line " + tree.srcPos.line ) if (!ctx.phase.patternTranslated) @@ -383,6 +385,9 @@ object TreeChecker { case _ => } + def 
isSymWithoutDef(sym: Symbol)(using Context): Boolean = + sym.is(ConstructorProxy) || sym.isContextBoundCompanion + /** Exclude from double definition checks any erased symbols that were * made `private` in phase `UnlinkErasedDecls`. These symbols will be removed * completely in phase `Erasure` if they are defined in a currently compiled unit. @@ -609,14 +614,12 @@ object TreeChecker { val decls = cls.classInfo.decls.toList.toSet.filter(isNonMagicalMember) val defined = impl.body.map(_.symbol) - def isAllowed(sym: Symbol): Boolean = sym.is(ConstructorProxy) - - val symbolsNotDefined = (decls -- defined - constr.symbol).filterNot(isAllowed) + val symbolsMissingDefs = (decls -- defined - constr.symbol).filterNot(isSymWithoutDef) - assert(symbolsNotDefined.isEmpty, - i" $cls tree does not define members: ${symbolsNotDefined.toList}%, %\n" + - i"expected: ${decls.toList}%, %\n" + - i"defined: ${defined}%, %") + assert(symbolsMissingDefs.isEmpty, + i"""$cls tree does not define members: ${symbolsMissingDefs.toList}%, % + |expected: ${decls.toList}%, % + |defined: ${defined}%, %""") super.typedClassDef(cdef, cls) } diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala index 756fd1a0a8e7..e11d0e1e21a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -20,6 +20,7 @@ object Util: def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match case tref: TypeRef => tref + case RefinedType(parent, _, _) => typeRefOf(parent) case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 82f4c89ae203..fb750f6aec63 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -22,7 +22,7 @@ import ProtoTypes.* import Inferencing.* import reporting.* import Nullables.*, NullOpsDecorator.* -import config.Feature +import config.{Feature, SourceVersion} import collection.mutable import config.Printers.{overload, typr, unapp} @@ -1657,6 +1657,12 @@ trait Applications extends Compatibility { /** Compare two alternatives of an overloaded call or an implicit search. * * @param alt1, alt2 Non-overloaded references indicating the two choices + * @param preferGeneral When comparing two value types, prefer the more general one + * over the more specific one iff `preferGeneral` is true. + * `preferGeneral` is set to `true` when we compare two given values, since + * then we want the most general evidence that matches the target + * type. It is set to `false` for overloading resolution, when we want the + * most specific type instead. * @return 1 if 1st alternative is preferred over 2nd * -1 if 2nd alternative is preferred over 1st * 0 if neither alternative is preferred over the other @@ -1672,27 +1678,28 @@ trait Applications extends Compatibility { * an alternative that takes more implicit parameters wins over one * that takes fewer. 
*/ - def compare(alt1: TermRef, alt2: TermRef)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { + def compare(alt1: TermRef, alt2: TermRef, preferGeneral: Boolean = false)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { record("resolveOverloaded.compare") - /** Is alternative `alt1` with type `tp1` as specific as alternative + val compareGivens = alt1.symbol.is(Given) || alt2.symbol.is(Given) + + /** Is alternative `alt1` with type `tp1` as good as alternative * `alt2` with type `tp2` ? * - * 1. A method `alt1` of type `(p1: T1, ..., pn: Tn)U` is as specific as `alt2` + * 1. A method `alt1` of type `(p1: T1, ..., pn: Tn)U` is as good as `alt2` * if `alt1` is nullary or `alt2` is applicable to arguments (p1, ..., pn) of * types T1,...,Tn. If the last parameter `pn` has a vararg type T*, then * `alt1` must be applicable to arbitrary numbers of `T` parameters (which * implies that it must be a varargs method as well). * 2. A polymorphic member of type [a1 >: L1 <: U1, ..., an >: Ln <: Un]T is as - * specific as `alt2` of type `tp2` if T is as specific as `tp2` under the + * good as `alt2` of type `tp2` if T is as good as `tp2` under the * assumption that for i = 1,...,n each ai is an abstract type name bounded * from below by Li and from above by Ui. * 3. A member of any other type `tp1` is: - * a. always as specific as a method or a polymorphic method. - * b. as specific as a member of any other type `tp2` if `tp1` is compatible - * with `tp2`. + * a. always as good as a method or a polymorphic method. + * b. as good as a member of any other type `tp2` if `isAsGoodValueType(tp1, tp2)` is true. */ - def isAsSpecific(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { + def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { tp1 match case tp1: MethodType => // (1) tp1.paramInfos.isEmpty && tp2.isInstanceOf[LambdaType] @@ -1714,69 +1721,89 @@ trait Applications extends Compatibility { fullyDefinedType(tp1Params, "type parameters of alternative", alt1.symbol.srcPos) val tparams = newTypeParams(alt1.symbol, tp1.paramNames, EmptyFlags, tp1.instantiateParamInfos(_)) - isAsSpecific(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) + isAsGood(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) } case _ => // (3) tp2 match case tp2: MethodType => true // (3a) case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a) case tp2: PolyType => // (3b) - explore(isAsSpecificValueType(tp1, instantiateWithTypeVars(tp2))) + explore(isAsGoodValueType(tp1, instantiateWithTypeVars(tp2))) case _ => // 3b) - isAsSpecificValueType(tp1, tp2) + isAsGoodValueType(tp1, tp2) } - /** Test whether value type `tp1` is as specific as value type `tp2`. - * Let's abbreviate this to `tp1 <:s tp2`. - * Previously, `<:s` was the same as `<:`. This behavior is still - * available under mode `Mode.OldOverloadingResolution`. The new behavior - * is different, however. Here, `T <:s U` iff + /** Test whether value type `tp1` is as good as value type `tp2`. + * Let's abbreviate this to `tp1 <:p tp2`. The behavior depends on the Scala version + * and mode. * - * flip(T) <: flip(U) + * - In Scala 2, `<:p` was the same as `<:`. This behavior is still + * available in 3.0-migration if mode `Mode.OldImplicitResolution` is turned on as well. + * It is used to highlight differences between Scala 2 and 3 behavior. 
* - * where `flip` changes covariant occurrences of contravariant type parameters to - * covariant ones. Intuitively `<:s` means subtyping `<:`, except that all arguments - * to contravariant parameters are compared as if they were covariant. E.g. given class + * - In Scala 3.0-3.4, the behavior is as follows: `T <:p U` iff there is an implicit conversion + * from `T` to `U`, or * - * class Cmp[-X] + * flip(T) <: flip(U) * - * `Cmp[T] <:s Cmp[U]` if `T <: U`. On the other hand, non-variant occurrences - * of parameters are not affected. So `T <: U` would imply `Set[Cmp[U]] <:s Set[Cmp[T]]`, - * as usual, because `Set` is non-variant. + * where `flip` changes covariant occurrences of contravariant type parameters to + * covariant ones. Intuitively `<:p` means subtyping `<:`, except that all arguments + * to contravariant parameters are compared as if they were covariant. E.g. given class * - * This relation might seem strange, but it models closely what happens for methods. - * Indeed, if we integrate the existing rules for methods into `<:s` we have now that + * class Cmp[-X] * - * (T)R <:s (U)R + * `Cmp[T] <:p Cmp[U]` if `T <: U`. On the other hand, non-variant occurrences + * of parameters are not affected. So `T <: U` would imply `Set[Cmp[U]] <:p Set[Cmp[T]]`, + * as usual, because `Set` is non-variant. * - * iff + * - From Scala 3.5, `T <:p U` means `T <: U` or `T` convertible to `U` + * for overloading resolution (when `preferGeneral` is false), and the opposite relation + * `U <: T` or `U` convertible to `T` for implicit disambiguation between givens + * (when `preferGeneral` is true). For old-style implicit values, the 3.4 behavior is kept. * - * T => R <:s U => R + * - In Scala 3.5-migration, use the 3.5 scheme normally, and the 3.4 scheme if + * `Mode.OldImplicitResolution` is on. This is used to highlight differences in the + * two resolution schemes. * - * Also: If a compared type refers to a given or its module class, use + * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. */ - def isAsSpecificValueType(tp1: Type, tp2: Type)(using Context) = - if (ctx.mode.is(Mode.OldOverloadingResolution)) + def isAsGoodValueType(tp1: Type, tp2: Type)(using Context) = + val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) + if !preferGeneral || Feature.migrateTo3 && oldResolution then + // Normal specificity test for overloading resolution (where `preferGeneral` is false) + // and in mode Scala3-migration when we compare with the old Scala 2 rules. 
isCompatible(tp1, tp2) - else { - val flip = new TypeMap { - def apply(t: Type) = t match { - case t @ AppliedType(tycon, args) => - def mapArg(arg: Type, tparam: TypeParamInfo) = - if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionNOf(arg :: Nil, defn.UnitType) - else arg - mapOver(t.derivedAppliedType(tycon, args.zipWithConserve(tycon.typeParams)(mapArg))) - case _ => mapOver(t) - } - } - def prepare(tp: Type) = tp.stripTypeVar match { + else + def prepare(tp: Type) = tp.stripTypeVar match case tp: NamedType if tp.symbol.is(Module) && tp.symbol.sourceModule.is(Given) => - flip(tp.widen.widenToParents) - case _ => flip(tp) - } - (prepare(tp1) relaxed_<:< prepare(tp2)) || viewExists(tp1, tp2) - } + tp.widen.widenToParents + case _ => + tp + + val tp1p = prepare(tp1) + val tp2p = prepare(tp2) + + if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) + || oldResolution + || !compareGivens + then + // Intermediate rules: better means specialize, but map all type arguments downwards + // These are enabled for 3.0-3.4, and for all comparisons between old-style implicits, + // and in 3.5-migration when we compare with previous rules. + val flip = new TypeMap: + def apply(t: Type) = t match + case t @ AppliedType(tycon, args) => + def mapArg(arg: Type, tparam: TypeParamInfo) = + if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionNOf(arg :: Nil, defn.UnitType) + else arg + mapOver(t.derivedAppliedType(tycon, args.zipWithConserve(tycon.typeParams)(mapArg))) + case _ => mapOver(t) + (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) + else + // New rules: better means generalize + (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) + end isAsGoodValueType /** Widen the result type of synthetic given methods from the implementation class to the * type that's implemented. Example @@ -1809,8 +1836,8 @@ trait Applications extends Compatibility { def compareWithTypes(tp1: Type, tp2: Type) = { val ownerScore = compareOwner(alt1.symbol.maybeOwner, alt2.symbol.maybeOwner) - def winsType1 = isAsSpecific(alt1, tp1, alt2, tp2) - def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) + val winsType1 = isAsGood(alt1, tp1, alt2, tp2) + def winsType2 = isAsGood(alt2, tp2, alt1, tp1) overload.println(i"compare($alt1, $alt2)? $tp1 $tp2 $ownerScore $winsType1 $winsType2") if winsType1 && winsType2 diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 56f67574a72d..edb66e829c6b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -33,8 +33,7 @@ import Applications.unapplyArgs import Inferencing.isFullyDefined import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern} import transform.ValueClasses.underlyingOfValueClass -import config.Feature -import config.Feature.sourceVersion +import config.Feature, Feature.{sourceVersion, modularity} import config.SourceVersion.* import config.MigrationVersion import printing.Formatting.hlAsKeyword @@ -197,7 +196,7 @@ object Checking { * and that the instance conforms to the self type of the created class. 
*/ def checkInstantiable(tp: Type, srcTp: Type, pos: SrcPos)(using Context): Unit = - tp.underlyingClassRef(refinementOK = false) match + tp.underlyingClassRef(refinementOK = Feature.enabled(modularity)) match case tref: TypeRef => val cls = tref.symbol if (cls.isOneOf(AbstractOrTrait)) { @@ -605,6 +604,7 @@ object Checking { // The issue with `erased inline` is that the erased semantics get lost // as the code is inlined and the reference is removed before the erased usage check. checkCombination(Erased, Inline) + checkNoConflict(Tracked, Mutable, em"mutable variables may not be `tracked`") checkNoConflict(Lazy, ParamAccessor, em"parameter may not be `lazy`") } @@ -1058,8 +1058,8 @@ trait Checking { * check that class prefix is stable. * @return `tp` itself if it is a class or trait ref, ObjectType if not. */ - def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = - tp.underlyingClassRef(refinementOK = false) match { + def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean = false)(using Context): Type = + tp.underlyingClassRef(refinementOK) match case tref: TypeRef => if (traitReq && !tref.symbol.is(Trait)) report.error(TraitIsExpected(tref.symbol), pos) if (stablePrefixReq && ctx.phase <= refchecksPhase) checkStable(tref.prefix, pos, "class prefix") @@ -1067,7 +1067,6 @@ trait Checking { case _ => report.error(NotClassType(tp), pos) defn.ObjectType - } /** If `sym` is an old-style implicit conversion, check that implicit conversions are enabled. * @pre sym.is(GivenOrImplicit) @@ -1323,20 +1322,20 @@ trait Checking { } /** Check that user-defined (result) type is fully applied */ - def checkFullyAppliedType(tree: Tree)(using Context): Unit = tree match + def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = tree match case TypeBoundsTree(lo, hi, alias) => - checkFullyAppliedType(lo) - checkFullyAppliedType(hi) - checkFullyAppliedType(alias) + checkFullyAppliedType(lo, prefix) + checkFullyAppliedType(hi, prefix) + checkFullyAppliedType(alias, prefix) case Annotated(arg, annot) => - checkFullyAppliedType(arg) + checkFullyAppliedType(arg, prefix) case LambdaTypeTree(_, body) => - checkFullyAppliedType(body) + checkFullyAppliedType(body, prefix) case _: TypeTree => case _ => if tree.tpe.typeParams.nonEmpty then val what = if tree.symbol.exists then tree.symbol.show else i"type $tree" - report.error(em"$what takes type parameters", tree.srcPos) + report.error(em"$prefix$what takes type parameters", tree.srcPos) /** Check that we are in an inline context (inside an inline method or in inline code) */ def checkInInlineContext(what: String, pos: SrcPos)(using Context): Unit = @@ -1601,7 +1600,7 @@ trait ReChecking extends Checking { override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = () - override def checkFullyAppliedType(tree: Tree)(using Context): Unit = () + override def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = () override def checkEnumCaseRefsLegal(cdef: TypeDef, enumCtx: Context)(using Context): Unit = () override def checkAnnotApplicable(annot: Tree, sym: Symbol)(using Context): Boolean = true override def checkMatchable(tp: Type, pos: SrcPos, pattern: 
Boolean)(using Context): Unit = () @@ -1617,7 +1616,7 @@ trait NoChecking extends ReChecking { override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = () override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () - override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp + override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () override def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = () override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5162b3fed1b9..a8fa0fa93b74 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -922,10 +922,10 @@ trait Implicits: /** Search an implicit argument and report error if not found */ - def implicitArgTree(formal: Type, span: Span)(using Context): Tree = { + def implicitArgTree(formal: Type, span: Span, where: => String = "")(using Context): Tree = { val arg = inferImplicitArg(formal, span) if (arg.tpe.isInstanceOf[SearchFailureType]) - report.error(missingArgMsg(arg, formal, ""), ctx.source.atSpan(span)) + report.error(missingArgMsg(arg, formal, where), ctx.source.atSpan(span)) arg } @@ -1105,8 +1105,8 @@ trait Implicits: case result: SearchFailure if result.isAmbiguous => val deepPt = pt.deepenProto if (deepPt ne pt) inferImplicit(deepPt, argument, span) - else if (migrateTo3 && !ctx.mode.is(Mode.OldOverloadingResolution)) - withMode(Mode.OldOverloadingResolution)(inferImplicit(pt, argument, span)) match { + else if (migrateTo3 && !ctx.mode.is(Mode.OldImplicitResolution)) + withMode(Mode.OldImplicitResolution)(inferImplicit(pt, argument, span)) match { case altResult: SearchSuccess => report.migrationWarning( result.reason.msg @@ -1221,7 +1221,7 @@ trait Implicits: assert(argument.isEmpty || argument.tpe.isValueType || argument.tpe.isInstanceOf[ExprType], em"found: $argument: ${argument.tpe}, expected: $pt") - private def nestedContext() = + private def searchContext() = ctx.fresh.setMode(ctx.mode &~ Mode.ImplicitsEnabled) private def isCoherent = pt.isRef(defn.CanEqualClass) @@ -1265,7 +1265,7 @@ trait Implicits: else val history = ctx.searchHistory.nest(cand, pt) val typingCtx = - nestedContext().setNewTyperState().setFreshGADTBounds.setSearchHistory(history) + searchContext().setNewTyperState().setFreshGADTBounds.setSearchHistory(history) val result = typedImplicit(cand, pt, argument, span)(using typingCtx) result match case res: SearchSuccess => @@ -1290,9 +1290,24 @@ trait Implicits: * 0 if neither alternative is preferred over the other */ def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = + def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level - else explore(compare(alt1.ref, alt2.ref))(using nestedContext()) + else + val cmp = comp(using searchContext()) + if Feature.sourceVersion == 
SourceVersion.`3.5-migration` then + val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) + if cmp != prev then + def choice(c: Int) = c match + case -1 => "the second alternative" + case 1 => "the first alternative" + case _ => "none - it's ambiguous" + report.warning( + em"""Change in given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} + |Previous choice: ${choice(prev)} + |New choice : ${choice(cmp)}""", srcPos) + cmp + end compareAlternatives /** If `alt1` is also a search success, try to disambiguate as follows: * - If alt2 is preferred over alt1, pick alt2, otherwise return an @@ -1328,8 +1343,8 @@ trait Implicits: else ctx.typerState - diff = inContext(ctx.withTyperState(comparisonState)): - compare(ref1, ref2) + diff = inContext(searchContext().withTyperState(comparisonState)): + compare(ref1, ref2, preferGeneral = true) else // alt1 is a conversion, prefer extension alt2 over it diff = -1 if diff < 0 then alt2 diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala index 78cba674bfff..04e6f03565d5 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala @@ -180,8 +180,6 @@ class ImportInfo(symf: Context ?=> Symbol, assert(myUnimported != null) myUnimported.uncheckedNN - private val isLanguageImport: Boolean = untpd.languageImport(qualifier).isDefined - private var myUnimported: Symbol | Null = uninitialized private var featureCache: SimpleIdentityMap[TermName, java.lang.Boolean] = SimpleIdentityMap.empty diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 33643a0fae2f..5ab6a4a5fae6 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -296,7 +296,7 @@ trait ImportSuggestions: var i = 0 var diff = 0 while i < filled && diff == 0 do - diff = compare(ref, top(i))(using noImplicitsCtx) + diff = compare(ref, top(i), preferGeneral = true)(using noImplicitsCtx) if diff > 0 then rest += top(i) top(i) = ref diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index c1bde8d4fd3c..e101ff906522 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -7,7 +7,7 @@ import ast.* import Trees.*, StdNames.*, Scopes.*, Denotations.*, NamerOps.*, ContextOps.* import Contexts.*, Symbols.*, Types.*, SymDenotations.*, Names.*, NameOps.*, Flags.* import Decorators.*, Comments.{_, given} -import NameKinds.DefaultGetterName +import NameKinds.{DefaultGetterName, ContextBoundParamName} import ast.desugar, ast.desugar.* import ProtoTypes.* import util.Spans.* @@ -26,7 +26,7 @@ import Nullables.* import transform.ValueClasses.* import TypeErasure.erasure import reporting.* -import config.Feature.sourceVersion +import config.Feature.{sourceVersion, modularity} import config.SourceVersion.* import scala.compiletime.uninitialized @@ -55,11 +55,12 @@ class Namer { typer: Typer => import untpd.* - val TypedAhead : Property.Key[tpd.Tree] = new Property.Key - val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key - val ExportForwarders: Property.Key[List[tpd.MemberDef]] = new Property.Key - val SymOfTree : Property.Key[Symbol] = new Property.Key - val AttachedDeriver : Property.Key[Deriver] = new Property.Key + val TypedAhead : 
Property.Key[tpd.Tree] = new Property.Key + val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key + val ExportForwarders : Property.Key[List[tpd.MemberDef]] = new Property.Key + val ParentRefinements: Property.Key[List[Symbol]] = new Property.Key + val SymOfTree : Property.Key[Symbol] = new Property.Key + val AttachedDeriver : Property.Key[Deriver] = new Property.Key // was `val Deriver`, but that gave shadowing problems with constructor proxies /** A partial map from unexpanded member and pattern defs and to their expansions. @@ -121,7 +122,8 @@ class Namer { typer: Typer => /** Record `sym` as the symbol defined by `tree` */ def recordSym(sym: Symbol, tree: Tree)(using Context): Symbol = { - for (refs <- tree.removeAttachment(References); ref <- refs) ref.watching(sym) + for refs <- tree.removeAttachment(References); ref <- refs do + ref.watching(sym) tree.pushAttachment(SymOfTree, sym) sym } @@ -294,12 +296,15 @@ class Namer { typer: Typer => createOrRefine[Symbol](tree, name, flags, ctx.owner, _ => info, (fs, _, pwithin) => newSymbol(ctx.owner, name, fs, info, pwithin, tree.nameSpan)) case tree: Import => - recordSym(newImportSymbol(ctx.owner, Completer(tree)(ctx), tree.span), tree) + recordSym(newImportSym(tree), tree) case _ => NoSymbol } } + private def newImportSym(imp: Import)(using Context): Symbol = + newImportSymbol(ctx.owner, Completer(imp)(ctx), imp.span) + /** If `sym` exists, enter it in effective scope. Check that * package members are not entered twice in the same run. */ @@ -401,6 +406,11 @@ class Namer { typer: Typer => enterSymbol(sym) setDocstring(sym, origStat) addEnumConstants(mdef, sym) + mdef match + case tdef: TypeDef if ctx.owner.isClass => + for case WitnessNamesAnnot(witnessNames) <- tdef.mods.annotations do + addContextBoundCompanionFor(symbolOfTree(tdef), witnessNames, Nil) + case _ => ctx case stats: Thicket => stats.toList.foreach(recur) @@ -524,11 +534,9 @@ class Namer { typer: Typer => } /** Transfer all references to `from` to `to` */ - def transferReferences(from: ValDef, to: ValDef): Unit = { - val fromRefs = from.removeAttachment(References).getOrElse(Nil) - val toRefs = to.removeAttachment(References).getOrElse(Nil) - to.putAttachment(References, fromRefs ++ toRefs) - } + def transferReferences(from: ValDef, to: ValDef): Unit = + for ref <- from.removeAttachment(References).getOrElse(Nil) do + ref.watching(to) /** Merge the module class `modCls` in the expanded tree of `mdef` with the * body and derived clause of the synthetic module class `fromCls`. @@ -706,7 +714,18 @@ class Namer { typer: Typer => enterSymbol(companion) end addAbsentCompanions - stats.foreach(expand) + /** Expand each statement, keeping track of language imports in the context. This is + * necessary since desugaring might depend on language imports. 
+ */ + def expandTopLevel(stats: List[Tree])(using Context): Unit = stats match + case (imp @ Import(qual, _)) :: stats1 if untpd.languageImport(qual).isDefined => + expandTopLevel(stats1)(using ctx.importContext(imp, newImportSym(imp))) + case stat :: stats1 => + expand(stat) + expandTopLevel(stats1) + case Nil => + + expandTopLevel(stats) mergeCompanionDefs() val ctxWithStats = stats.foldLeft(ctx)((ctx, stat) => indexExpanded(stat)(using ctx)) inContext(ctxWithStats) { @@ -1203,7 +1222,9 @@ class Namer { typer: Typer => target = target.etaExpand newSymbol( cls, forwarderName, - MandatoryExportTypeFlags | (sym.flags & RetainedExportTypeFlags), + Exported + | (sym.flags & RetainedExportTypeFlags) + | (if Feature.enabled(modularity) then EmptyFlags else Final), TypeAlias(target), coord = span) // Note: This will always create unparameterzied aliases. So even if the original type is @@ -1500,6 +1521,7 @@ class Namer { typer: Typer => /** The type signature of a ClassDef with given symbol */ override def completeInCreationContext(denot: SymDenotation): Unit = { val parents = impl.parents + val parentRefinements = new mutable.LinkedHashMap[Name, Type] /* The type of a parent constructor. Types constructor arguments * only if parent type contains uninstantiated type parameters. @@ -1513,8 +1535,9 @@ class Namer { typer: Typer => core match case Select(New(tpt), nme.CONSTRUCTOR) => val targs1 = targs map (typedAheadType(_)) - val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes - if (ptype.typeParams.isEmpty) ptype + val ptype = typedAheadType(tpt).tpe.appliedTo(targs1.tpes) + if ptype.typeParams.isEmpty && !ptype.dealias.typeSymbol.is(Dependent) then + ptype else if (denot.is(ModuleClass) && denot.sourceModule.isOneOf(GivenOrImplicit)) missingType(denot.symbol, "parent ")(using creationContext) @@ -1554,8 +1577,13 @@ class Namer { typer: Typer => val ptype = parentType(parent)(using completerCtx.superCallContext).dealiasKeepAnnots if (cls.isRefinementClass) ptype else { - val pt = checkClassType(ptype, parent.srcPos, - traitReq = parent ne parents.head, stablePrefixReq = !isJava) + val pt = checkClassType( + if Feature.enabled(modularity) + then ptype.separateRefinements(cls, parentRefinements) + else ptype, + parent.srcPos, + traitReq = parent ne parents.head, + stablePrefixReq = !isJava) if (pt.derivesFrom(cls)) { val addendum = parent match { case Select(qual: Super, _) if Feature.migrateTo3 => @@ -1582,6 +1610,22 @@ class Namer { typer: Typer => } } + /** Enter all parent refinements as public class members, unless a definition + * with the same name already exists in the class. + */ + def enterParentRefinementSyms(refinements: List[(Name, Type)]) = + val refinedSyms = mutable.ListBuffer[Symbol]() + for (name, tp) <- refinements do + if decls.lookupEntry(name) == null then + val flags = tp match + case tp: MethodOrPoly => Method | Synthetic | Deferred | Tracked + case _ if name.isTermName => Synthetic | Deferred | Tracked + case _ => Synthetic | Deferred + refinedSyms += newSymbol(cls, name, flags, tp, coord = original.rhs.span.startPos).entered + if refinedSyms.nonEmpty then + typr.println(i"parent refinement symbols: ${refinedSyms.toList}") + original.pushAttachment(ParentRefinements, refinedSyms.toList) + /** If `parents` contains references to traits that have supertraits with implicit parameters * add those supertraits in linearization order unless they are already covered by other * parent types. 
For instance, in @@ -1623,11 +1667,9 @@ class Namer { typer: Typer => val parentTypes = defn.adjustForTuple(cls, cls.typeParams, defn.adjustForBoxedUnit(cls, - addUsingTraits( - locally: - val isJava = ctx.isJava - ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) - ) + addUsingTraits: + val isJava = ctx.isJava + ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) ) ) typr.println(i"completing $denot, parents = $parents%, %, parentTypes = $parentTypes%, %") @@ -1652,6 +1694,7 @@ class Namer { typer: Typer => cls.invalidateMemberCaches() // we might have checked for a member when parents were not known yet. cls.setNoInitsFlags(parentsKind(parents), untpd.bodyKind(rest)) cls.setStableConstructor() + enterParentRefinementSyms(parentRefinements.toList) processExports(using localCtx) defn.patchStdLibClass(cls) addConstructorProxies(cls) @@ -1698,12 +1741,6 @@ class Namer { typer: Typer => val sym = tree.symbol if sym.isConstructor then sym.owner else sym - /** Enter and typecheck parameter list */ - def completeParams(params: List[MemberDef])(using Context): Unit = { - index(params) - for (param <- params) typedAheadExpr(param) - } - /** The signature of a module valdef. * This will compute the corresponding module class TypeRef immediately * without going through the defined type of the ValDef. This is necessary @@ -1779,6 +1816,18 @@ class Namer { typer: Typer => case _ => WildcardType } + + // translate `given T = deferred` to an abstract given with HasDefault flag + if sym.is(Given) then + mdef.rhs match + case rhs: RefTree + if rhs.name == nme.deferred + && typedAheadExpr(rhs).symbol == defn.Compiletime_deferred + && sym.maybeOwner.is(Trait) => + sym.resetFlag(Final) + sym.setFlag(Deferred | HasDefault) + case _ => + val mbrTpe = paramFn(checkSimpleKinded(typedAheadType(mdef.tpt, tptProto)).tpe) if (ctx.explicitNulls && mdef.mods.is(JavaDefined)) JavaNullInterop.nullifyMember(sym, mbrTpe, mdef.mods.isAllOf(JavaEnumValue)) @@ -1786,10 +1835,34 @@ class Namer { typer: Typer => } /** The type signature of a DefDef with given symbol */ - def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = { + def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR + val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() + if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then + for params <- ddef.paramss; case tdef: TypeDef <- params do + for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do + witnessNamesOfParam(tdef) = ws + + /** Are all names in `wnames` defined by the longest prefix of all `params` + * that have been typed ahead (i.e. that carry the TypedAhead attachment)? + */ + def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = + (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty + + /** Enter and typecheck parameter list, adding context bound companions as needed. + * Once all witness parameters for a context bound are seen, create a + * context bound companion for it. 
+ */ + def completeParams(params: List[MemberDef])(using Context): Unit = + index(params) + for param <- params do + typedAheadExpr(param) + for (tdef, wnames) <- witnessNamesOfParam do + if wnames.contains(param.name) && allParamsSeen(wnames, params) then + addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) + // The following 3 lines replace what was previously just completeParams(tparams). // But that can cause bad bounds being computed, as witnessed by // tests/pos/paramcycle.scala. The problematic sequence is this: @@ -1822,16 +1895,52 @@ class Namer { typer: Typer => ddef.trailingParamss.foreach(completeParams) val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) + + /** We add `tracked` to context bound witnesses that have abstract type members */ + def needsTracked(sym: Symbol, param: ValDef)(using Context) = + !sym.is(Tracked) + && param.hasAttachment(ContextBoundParam) + && sym.info.memberNames(abstractTypeNameFilter).nonEmpty + + /** Set every context bound evidence parameter of a class to be tracked, + * provided it has a type that has an abstract type member. Reset private and local flags + * so that the parameter becomes a `val`. + */ + def setTracked(param: ValDef): Unit = + val sym = symbolOfTree(param) + sym.maybeOwner.maybeOwner.infoOrCompleter match + case info: TempClassInfo if needsTracked(sym, param) => + typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") + for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do + acc.resetFlag(PrivateLocal) + acc.setFlag(Tracked) + sym.setFlag(Tracked) + case _ => + def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) + + def wrapRefinedMethType(restpe: Type): Type = + wrapMethType(addParamRefinements(restpe, paramSymss)) + if isConstructor then + if sym.isPrimaryConstructor && Feature.enabled(modularity) then + ddef.termParamss.foreach(_.foreach(setTracked)) // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) wrapMethType(effectiveResultType(sym, paramSymss)) + else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then + // set every context bound evidence parameter of a given companion method + // to be tracked, provided it has a type that has an abstract type member. + // Add refinements for all tracked parameters to the result type. 
+ for params <- ddef.termParamss; param <- params do + val psym = symbolOfTree(param) + if needsTracked(psym, param) then psym.setFlag(Tracked) + valOrDefDefSig(ddef, sym, paramSymss, wrapRefinedMethType) else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) - } + end defDefSig def inferredResultType( mdef: ValOrDefDef, diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index e152b5e6b9c7..253c4fda9396 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -189,4 +189,5 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override protected def checkEqualityEvidence(tree: tpd.Tree, pt: Type)(using Context): Unit = () override protected def matchingApply(methType: MethodOrPoly, pt: FunProto)(using Context): Boolean = true override protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = promote(call) + override protected def migrate[T](migration: => T, disabled: => T = ()): T = disabled } diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 173d5e6b1f7e..12848e26cf26 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -552,7 +552,7 @@ object RefChecks { overrideError("is an extension method, cannot override a normal method") else if (other.is(ExtensionMethod) && !member.is(ExtensionMethod)) // (1.3) overrideError("is a normal method, cannot override an extension method") - else if !other.is(Deferred) + else if (!other.is(Deferred) || other.isAllOf(Given | HasDefault)) && !member.is(Deferred) && !other.name.is(DefaultGetterName) && !member.isAnyOverride @@ -610,8 +610,13 @@ object RefChecks { overrideError("is not inline, cannot implement an inline method") else if (other.isScala2Macro && !member.isScala2Macro) // (1.11) overrideError("cannot be used here - only Scala-2 macros can override Scala-2 macros") - else if (!compatTypes(memberTp(self), otherTp(self)) && - !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf))) + else if !compatTypes(memberTp(self), otherTp(self)) + && !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf)) + && !member.is(Tracked) + // Tracked members need to be excluded since they are abstract type members with + // singleton types. Concrete overrides usually have a wider type. + // TODO: Should we exclude all refinements inherited from parents? 
+ then overrideError("has incompatible type", compareTypes = true) else if (member.targetName != other.targetName) if (other.targetName != other.name) @@ -620,7 +625,9 @@ object RefChecks { overrideError("cannot have a @targetName annotation since external names would be different") else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then overrideError("has different occurrences of `into` modifiers", compareTypes = true) - else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.12) + else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) + && !member.is(Tracked) + then // (1.12) report.errorOrMigrationWarning( em"cannot override val parameter ${other.showLocated}", member.srcPos, @@ -670,6 +677,10 @@ object RefChecks { mbr.isType || mbr.isSuperAccessor // not yet synthesized || mbr.is(JavaDefined) && hasJavaErasedOverriding(mbr) + || mbr.is(Tracked) + // Tracked members correspond to existing val parameters, so they don't + // count as deferred. The val parameter could not implement the tracked + // refinement since it usually has a wider type. def isImplemented(mbr: Symbol) = val mbrDenot = mbr.asSeenFrom(clazz.thisType) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 0b05bcd078ff..db2924ce3c13 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,8 +40,7 @@ import annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature -import config.Feature.{sourceVersion, migrateTo3} +import config.Feature, Feature.{sourceVersion, migrateTo3, modularity} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -158,6 +157,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Overridden in derived typers def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel) + // Overridden to do nothing in derived typers + protected def migrate[T](migration: => T, disabled: => T = ()): T = migration + /** Find the type of an identifier with given `name` in given context `ctx`. * @param name the name of the identifier * @param pt the expected type @@ -760,6 +762,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else typedDynamicSelect(tree2, Nil, pt) else + if qual.tpe.typeSymbol == defn.CBCompanion then + val witnessSelection = typedCBSelect(tree0, pt, qual) + if !witnessSelection.isEmpty then return witnessSelection assignType(tree, rawType match case rawType: NamedType => @@ -768,6 +773,76 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer notAMemberErrorType(tree, qual, pt)) end typedSelect + /** Expand a selection A.m on a context bound companion A with type + * `[ref_1 | ... | ref_N]` as described by + * Step 3 of the doc comment of annotation.internal.WitnessNames. + * @return the best alternative if it exists, + * or EmptyTree if no witness admits selecting with the given name, + * or EmptyTree and report an ambiguity error of there are several + * possible witnesses and no selection is better than the other + * according to the critera given in Step 3. 
+ */ + def typedCBSelect(tree: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = + + type Alts = List[(/*prev: */Tree, /*prevState: */TyperState, /*prevWitness: */TermRef)] + + /** Compare two alternative selections `alt1` and `alt2` from witness types + * `wit1`, `wit2` according to the 3 criteria in the enclosing doc comment. I.e. + * + * alt1 = qual1.m, alt2 = qual2.m, qual1: wit1, qual2: wit2 + * + * @return 1 if 1st alternative is preferred over 2nd + * -1 if 2nd alternative is preferred over 1st + * 0 if neither alternative is preferred over the other + */ + def compareAlts(alt1: Tree, alt2: Tree, wit1: TermRef, wit2: TermRef): Int = + val cmpPrefix = compare(wit1, wit2, preferGeneral = true) + typr.println(i"compare witnesses $wit1: ${wit1.info}, $wit2: ${wit2.info} = $cmpPrefix") + if cmpPrefix != 0 then cmpPrefix + else (alt1.tpe, alt2.tpe) match + case (tp1: TypeRef, tp2: TypeRef) => + if tp1.dealias == tp2.dealias then 1 else 0 + case (tp1: TermRef, tp2: TermRef) => + if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 + else compare(tp1, tp2, preferGeneral = false) + case (tp1: TermRef, _) => 1 + case (_, tp2: TermRef) => -1 + case _ => 0 + + /** Find the set of maximally preferred alternative among `prev` and the + * remaining alternatives generated from `witnesses` with is a union type + * of witness references. + */ + def tryAlts(prevs: Alts, witnesses: Type): Alts = witnesses match + case OrType(wit1, wit2) => + tryAlts(tryAlts(prevs, wit1), wit2) + case witness: TermRef => + val altQual = tpd.ref(witness).withSpan(qual.span) + val altCtx = ctx.fresh.setNewTyperState() + val alt = typedSelect(tree, pt, altQual)(using altCtx) + def current = (alt, altCtx.typerState, witness) + if altCtx.reporter.hasErrors then prevs + else + val cmps = prevs.map: (prevTree, prevState, prevWitness) => + compareAlts(prevTree, alt, prevWitness, witness) + if cmps.exists(_ == 1) then prevs + else current :: prevs.zip(cmps).collect{ case (prev, cmp) if cmp != -1 => prev } + + qual.tpe.widen match + case AppliedType(_, arg :: Nil) => + tryAlts(Nil, arg) match + case Nil => EmptyTree + case (best @ (bestTree, bestState, _)) :: Nil => + bestState.commit() + bestTree + case multiAlts => + report.error( + em"""Ambiguous witness reference. 
None of the following alternatives is more specific than the other: + |${multiAlts.map((alt, _, witness) => i"\n $witness.${tree.name}: ${alt.tpe.widen}")}""", + tree.srcPos) + EmptyTree + end typedCBSelect + def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { record("typedSelect") @@ -925,10 +1000,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tp.exists && !tp.typeSymbol.is(Final) && (!tp.isTopType || tp.isAnyRef) // Object is the only toplevel class that can be instantiated - if (templ1.parents.isEmpty && - isFullyDefined(pt, ForceDegree.flipBottom) && - isSkolemFree(pt) && - isEligible(pt.underlyingClassRef(refinementOK = false))) + if templ1.parents.isEmpty + && isFullyDefined(pt, ForceDegree.flipBottom) + && isSkolemFree(pt) + && isEligible(pt.underlyingClassRef(refinementOK = Feature.enabled(modularity))) + then templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) for case parent: RefTree <- templ1.parents do typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) @@ -2201,6 +2277,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.tpFun(tsyms, vsyms) completeTypeTree(InferredTypeTree(), tp, tree) + def typedContextBoundTypeTree(tree: untpd.ContextBoundTypeTree)(using Context): Tree = + val tycon = typedType(tree.tycon) + val tyconSplice = untpd.TypedSplice(tycon) + val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) + if tycon.tpe.typeParams.nonEmpty then + typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) + else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractType then + val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) + typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) + else + errorTree(tree, + em"""Illegal context bound: ${tycon.tpe} does not take type parameters and + |does not have an abstract type member named `Self` either.""") + def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") @@ -2208,7 +2298,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = { - val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) + val tpt1 = if tree.tpt == EmptyTree then TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements).withSpan(tree.span) val refineCls = createSymbol(refineClsDef).asClass val TypeDef(_, impl: Template) = typed(refineClsDef): @unchecked @@ -2556,12 +2646,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) - if (sym.isOneOf(GivenOrImplicit)) checkImplicitConversionDefOK(sym) + if sym.is(Implicit) then checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) val tpt1 = checkSimpleKinded(typedType(tpt)) val rhs1 = vdef.rhs match { - case rhs @ Ident(nme.WILDCARD) => rhs withType tpt1.tpe - case rhs => typedExpr(rhs, tpt1.tpe.widenExpr) + case rhs @ Ident(nme.WILDCARD) => + rhs.withType(tpt1.tpe) + case Ident(nme.deferred) if sym.isAllOf(DeferredGivenFlags, butNot = Param) => + EmptyTree + case rhs => + typedExpr(rhs, tpt1.tpe.widenExpr) } val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) 
postProcessInfo(vdef1, sym) @@ -2622,9 +2716,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.isInlineMethod then rhsCtx.addMode(Mode.InlineableBody) if sym.is(ExtensionMethod) then rhsCtx.addMode(Mode.InExtensionMethod) - val rhs1 = PrepareInlineable.dropInlineIfError(sym, - if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) - else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) + val rhs1 = ddef.rhs match + case Ident(nme.deferred) if sym.isAllOf(DeferredGivenFlags) => + EmptyTree + case rhs => + PrepareInlineable.dropInlineIfError(sym, + if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) + else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then if StagingLevel.level > 0 then @@ -2697,8 +2795,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typeIndexedLambdaTypeTree(rhs, tparams, body) case rhs => typedType(rhs) - checkFullyAppliedType(rhs1) - if sym.isOpaqueAlias then checkNoContextFunctionType(rhs1) + if sym.isOpaqueAlias then + checkFullyAppliedType(rhs1, "Opaque type alias must be fully applied, but ") + checkNoContextFunctionType(rhs1) assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } @@ -2791,6 +2890,72 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } + /** Add all parent refinement symbols as declarations to this class */ + def addParentRefinements(body: List[Tree])(using Context): List[Tree] = + cdef.getAttachment(ParentRefinements) match + case Some(refinedSyms) => + val refinements = refinedSyms.map: sym => + ( if sym.isType then TypeDef(sym.asType) + else if sym.is(Method) then DefDef(sym.asTerm) + else ValDef(sym.asTerm) + ).withSpan(impl.span.startPos) + body ++ refinements + case None => + body + + /** Implement givens that were declared with a `deferred` rhs. + * The a given value matching the declared type is searched in a + * context directly enclosing the current class, in which all given + * parameters of the current class are also defined. + */ + def implementDeferredGivens(body: List[Tree]): List[Tree] = + if cls.is(Trait) || ctx.isAfterTyper then body + else + def isGivenValue(mbr: TermRef) = + val dcl = mbr.symbol + if dcl.is(Method) then + report.error( + em"""Cannnot infer the implementation of the deferred ${dcl.showLocated} + |since that given is parameterized. 
An implementing given needs to be written explicitly.""", + cdef.srcPos) + false + else true + + def givenImpl(mbr: TermRef): ValDef = + val dcl = mbr.symbol + val target = dcl.info.asSeenFrom(cls.thisType, dcl.owner) + val constr = cls.primaryConstructor + val usingParamAccessors = cls.paramAccessors.filter(_.is(Given)) + val paramScope = newScopeWith(usingParamAccessors*) + val searchCtx = ctx.outer.fresh.setScope(paramScope) + val rhs = implicitArgTree(target, cdef.span, + where = i"inferring the implementation of the deferred ${dcl.showLocated}" + )(using searchCtx) + + val impl = dcl.copy(cls, + flags = dcl.flags &~ (HasDefault | Deferred) | Final | Override, + info = target, + coord = rhs.span).entered.asTerm + + def anchorParams = new TreeMap: + override def transform(tree: Tree)(using Context): Tree = tree match + case id: Ident if usingParamAccessors.contains(id.symbol) => + cpy.Select(id)(This(cls), id.name) + case _ => + super.transform(tree) + ValDef(impl, anchorParams.transform(rhs)) + end givenImpl + + val givenImpls = + cls.thisType.implicitMembers + //.showing(i"impl def givens for $cls/$result") + .filter(_.symbol.isAllOf(DeferredGivenFlags, butNot = Param)) + //.showing(i"impl def filtered givens for $cls/$result") + .filter(isGivenValue) + .map(givenImpl) + body ++ givenImpls + end implementDeferredGivens + ensureCorrectSuperClass() completeAnnotations(cdef, cls) val constr1 = typed(constr).asInstanceOf[DefDef] @@ -2811,7 +2976,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cdef.withType(UnspecifiedErrorType) else { val dummy = localDummy(cls, impl) - val body1 = addAccessorDefs(cls, typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1) + val body1 = + implementDeferredGivens( + addParentRefinements( + addAccessorDefs(cls, + typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1))) checkNoDoubleDeclaration(cls) val impl1 = cpy.Template(impl)(constr1, parents1, Nil, self1, body1) @@ -3174,6 +3343,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.UnApply => typedUnApply(tree, pt) case tree: untpd.Tuple => typedTuple(tree, pt) case tree: untpd.InLambdaTypeTree => typedInLambdaTypeTree(tree, pt) + case tree: untpd.ContextBoundTypeTree => typedContextBoundTypeTree(tree) case tree: untpd.InfixOp => typedInfixOp(tree, pt) case tree: untpd.ParsedTry => typedTry(tree, pt) case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt) @@ -4342,7 +4512,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = false) + val tycon = ctorResultType.underlyingClassRef(refinementOK = true) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 0bd407261125..736633e0f6a7 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -495,8 +495,8 @@ object Signatures { case res => List(tpe) def isSyntheticEvidence(name: String) = - if !name.startsWith(NameKinds.ContextBoundParamName.separator) then false else - symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) + 
name.startsWith(NameKinds.ContextBoundParamName.separator) + && symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) def toTypeParam(tpe: PolyType): List[Param] = val evidenceParams = (tpe.paramNamess.flatten zip tpe.paramInfoss.flatten).flatMap: diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 3785f8fa6e06..80f5846a4798 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -118,3 +118,12 @@ i7445b.scala # more aggresive reduce projection makes a difference i15525.scala +# alias types at different levels of dereferencing +parsercombinators-givens.scala +parsercombinators-givens-2.scala +parsercombinators-ctx-bounds.scala +parsercombinators-this.scala +parsercombinators-arrow.scala +parsercombinators-new-syntax.scala +hylolib + diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 10f068e53c7f..0245ce802243 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -191,6 +191,7 @@ MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) +AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) SimpleType ::= SimpleLiteral SingletonTypeTree(l) | ‘?’ TypeBounds @@ -220,7 +221,9 @@ IntoTargetType ::= Type TypeArgs ::= ‘[’ Types ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) -TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] ContextBounds(typeBounds, tps) +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} ``` @@ -351,7 +354,7 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ```ebnf ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) - id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) + id [HkTypeParamClause] TypeAndCtxBounds Bound(below, above, context) TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -365,7 +368,7 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var - [{Modifier} (‘val’ | ‘var’)] Param + [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent DefParamClause ::= DefTypeParamClause @@ -376,7 +379,7 @@ TypelessClause ::= DefTermParamClause | UsingParamClause DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ @@ -446,7 +449,7 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses 
[DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef @@ -458,9 +461,13 @@ ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present -StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType1 {id [nl] AnnotType1} + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/docs/_docs/reference/changed-features/implicit-resolution.md b/docs/_docs/reference/changed-features/implicit-resolution.md index 1396ed04b6d3..0df8d2d60a7a 100644 --- a/docs/_docs/reference/changed-features/implicit-resolution.md +++ b/docs/_docs/reference/changed-features/implicit-resolution.md @@ -165,7 +165,22 @@ Condition (*) is new. It is necessary to ensure that the defined relation is tra [//]: # todo: expand with precise rules -**9.** The following change is currently enabled in `-source future`: + +**9.** Given disambiguation has changed. When comparing two givens that both match an expected type, we used to pick the most specific one, in alignment with +overloading resolution. From Scala 3.5 on, we pick the most general one instead. Compiling with Scala 3.5-migration will print a warning in all cases where the preference has changed. Example: +```scala +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +summon[A] // was ambiguous, will now return `given_A` +``` + +**10.** The following change is currently enabled in `-source future`: Implicit resolution now avoids generating recursive givens that can lead to an infinite loop at runtime. Here is an example: diff --git a/docs/_docs/reference/experimental/modularity.md b/docs/_docs/reference/experimental/modularity.md new file mode 100644 index 000000000000..1936bcb34ee6 --- /dev/null +++ b/docs/_docs/reference/experimental/modularity.md @@ -0,0 +1,189 @@ +--- +layout: doc-page +title: "Modularity Improvements" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/modularity.html +--- + +# Modularity Improvements + +Martin Odersky, 7.1.2024 + +Scala is a language in the SML tradition, in the sense that it has +abstract and alias types as members of modules (which in Scala take the form of objects and classes). This leads to a simple dependently +typed system, where dependencies in types are on paths instead of full terms. + +So far, some key ingredients were lacking which meant that module composition with functors is harder in Scala than in SML. 
In particular, one often needs to resort to the infamous `Aux` pattern that lifts type members into type parameters so that they can be tracked across class instantiations. This makes modular, dependently typed programs +much harder to write and read, and makes such programming only accessible to experts. + +In this note I propose some small changes to Scala's dependent typing that make +modular programming much more straightforward. + +The suggested improvements have been implemented and are available +in source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +## Tracked Parameters + +Scala is dependently typed for functions, but unfortunately not for classes. +For instance, consider the following definitions: + +```scala + class C: + type T + ... + + def f(x: C): x.T = ... + + val y: C { type T = Int } +``` +Then `f(y)` would have type `Int`, since the compiler will substitute the +concrete parameter reference `y` for the formal parameter `x` in the result +type of `f`, and `y.T = Int`. + +However, if we use a class `F` instead of a method `f`, things go wrong. + +```scala + class F(val x: C): + val result: x.T = ... +``` +Now `F(y).result` would not have type `Int` but instead the rather less useful type `?1.T` where `?1` is a so-called skolem constant of type `C` (a skolem represents an unknown value). + +This shortcoming means that classes cannot really be used for advanced +modularity constructs that rely on dependent typing. + +**Proposal:** Introduce a `tracked` modifier that can be added to +a `val` parameter of a class or trait. For every tracked class parameter of a class `C`, add a refinement in the constructor type of `C` that the class member is the same as the parameter. + +**Example:** In the setting above, assume `F` is instead declared like this: +```scala + class F(tracked val x: C): + val result: x.T = ... +``` +Then the constructor `F` would get roughly the following type: +```scala + F(x1: C): F { val x: x1.type } +``` +_Aside:_ More precisely, both parameter and refinement would apply to the same name `x` but the refinement still refers to the parameter. We unfortunately can't express that in source, however, so we chose the new name `x1` for the parameter in the explanation. + +With the new constructor type, the expression `F(y).result` would now have the type `Int`, as hoped for. The reasoning to get there is as follows: + + - The result of the constructor `F(y)` has type `F { val x: y.type }` by + the standard typing for dependent functions. + - The type of `result` inside `F` is `x.T`. + - Hence, the type of `result` as a member of `F { val x: y.type }` is `y.T`, which is equal to `Int`. + +The addition of tracked parameters makes classes suitable as a fundamental modularity construct supporting dependent typing.
Here is an example, taken from issue #3920: + +```scala +trait Ordering: + type T + def compare(t1: T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) +``` +This works as it should now. Without the addition of `tracked` to the +parameter of `SetFunctor`, typechecking would immediately lose track of +the element type `T` after an `add`, and would therefore fail. + +**Syntax Change** + +``` +ClsParam ::= {Annotation} [{Modifier | ‘tracked’} (‘val’ | ‘var’) | ‘inline’] Param +``` + +The (soft) `tracked` modifier is only allowed for `val` parameters of classes. + +**Discussion** + +Since `tracked` is so useful, why not assume it by default? First, `tracked` makes sense only for `val` parameters. If a class parameter is not also a field declared using `val` then there's nothing to refine in the constructor result type. One could think of at least making all `val` parameters tracked by default, but that would be a backwards incompatible change. For instance, the following code would break: + +```scala +case class Foo(x: Int) +var foo = Foo(1) +if someCondition then foo = Foo(2) +``` +If we assume `tracked` for parameter `x` (which is implicitly a `val`), +then `foo` would get inferred type `Foo { val x: 1 }`, so it could not +be reassigned to a value of type `Foo { val x: 2 }` on the next line. + +Another approach might be to assume `tracked` for a `val` parameter `x` +only if the class refers to a type member of `x`. But it turns out that this +scheme is unimplementable since it would quickly lead to cyclic references +when typechecking recursive class graphs. So an explicit `tracked` looks like the best available option. + +## Allow Class Parents to be Refined Types + +Since `tracked` parameters create refinements in constructor types, +it is now possible that a class has a parent that is a refined type. +Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing to +admit such types. + +**Proposal** Allow refined types as parent types of classes. All refinements that are inherited in this way become synthetic members of the class. + +**Example** + +```scala +class C: + type T + def m(): T + +type R = C: + type T = Int + def m(): 22 + +class D extends R: + def next(): D +``` +This code now compiles. The definition of `D` is expanded as follows: + +```scala +class D extends C: + def next(): D + /*synthetic*/ type T = Int + /*synthetic*/ def m(): 22 +``` +Note how class refinements are moved from the parent constructor of `D` into the body of class `D` itself. + +This change does not entail a syntax change. Syntactically, parent types cannot be refined types themselves.
So the following would be illegal: +```scala +class D extends C { type T = Int; def m(): 22 }: // error + def next(): D +``` +If a refined type should be used directly as a parent type of a class, it needs to come in parentheses: +```scala +class D extends (C { type T = Int; def m(): 22 }) // ok + def next(): D +``` + +## A Small Relaxation To Export Rules + +The rules for export forwarders are changed as follows. + +Previously, all export forwarders were declared `final`. Now, only term members are declared `final`. Type aliases are left aside. + +This makes it possible to export the same type member into several traits and then mix these traits in the same class. The test file `tests/pos/typeclass-aggregates.scala` shows why this is essential if we want to combine multiple givens with type members in a new given that aggregates all these givens in an intersection type. + +The change does not lose safety since different type aliases would in any case lead to uninstantiatable classes. \ No newline at end of file diff --git a/docs/_docs/reference/experimental/typeclasses-syntax.md b/docs/_docs/reference/experimental/typeclasses-syntax.md new file mode 100644 index 000000000000..1264bcec269d --- /dev/null +++ b/docs/_docs/reference/experimental/typeclasses-syntax.md @@ -0,0 +1,352 @@ + +# Pre-SIP: Improve Syntax for Context Bounds and Givens + +Some months ago I experimented with some small extensions and tweaks to Scala that would make Scala's support for type classes much more pleasant. It would put it on a par of what is supported by Rust or Swift but at the same time be distinctly Scala like, with simple and concise syntax and clear semantic foundations. + +That experiment consisted of three areas which are independent of each other: + + 1. Allow context bounds also for types with `Self` type members. + 2. Better support for modularity: keep track of the types of certain class arguments. + 3. Syntactic improvements: Named context bounds, simpler and more regular syntax for givens. + +I would like to propose each of these areas as separate SIPs. That makes them easier to review and discuss. Furthermore, each change set has value independently of what happens to the other proposals. + +I'll start with (3. syntactic improvements) which is the largest chunk of changes and also the most time sensitive. It contains new syntax that supersedes some existing syntax that was introduced in 3.0, so it's better to make the change at a time when not that much code using the new syntax is written yet. +By contrast the other two areas are less urgent. The set of changes proposed here are valuable to have, even if the other two areas are not, or not yet, accepted. + +The proposal is structured in three parts, covering named context bounds, +context bounds for type members, and changes to the given syntax. + +## 1. Named Context Bounds + +Context bounds are a convenient and legible abbreviation. A problem so far is that they are always anonymous, one cannot name the implicit parameter to which a context bound expands. For instance, consider the classical pair of type classes +```scala + trait SemiGroup[A]: + extension (x: A) def combine(y: A): A + + trait Monoid[A] extends SemiGroup[A]: + def unit: A +``` +and a `reduce` method defined like this: +```scala +def reduce[A : Monoid](xs: List[A]): A = ??? 
+``` +Since we don't have a name for the `Monoid` instance of `A`, we need to resort to `summon` in the body of `reduce`: +```scala +def reduce[A : Monoid](xs: List[A]): A = + xs.foldLeft(summon[Monoid[A]].unit)(_ `combine` _) +``` +That's generally considered too painful to write and read, hence people usually adopt one of two alternatives. Either, eschew context bounds and switch to using clauses: +```scala +def reduce[A](xs: List[A])(using m: Monoid[A]): A = + xs.foldLeft(m.unit)(_ `combine` _) +``` +Or, plan ahead and define a "trampoline" method in `Monoid`'s companion object: +```scala + trait Monoid[A] extends SemiGroup[A]: + def unit: A + object Monoid: + def unit[A](using m: Monoid[A]): A = m.unit + ... + def reduce[A : Monoid](xs: List[A]): A = + xs.foldLeft(Monoid.unit)(_ `combine` _) +``` +This is all accidental complexity which can be avoided by the following proposal. + +**Proposal:** Allow to name a context bound, like this: +```scala + def reduce[A : Monoid as m](xs: List[A]): A = + xs.foldLeft(m.unit)(_ `combine` _) +``` + +We use `as x` after the type to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before. + +**Benefits:** The new syntax is simple and clear. It avoids the awkward choice between concise context bounds that can't be named and verbose using clauses that can. + +### New Syntax for Aggregate Context Bounds + +Aggregate context bounds like `A : X : Y` are not obvious to read, and it becomes worse when we add names, e.g. `A : X as x : Y as y`. + +**Proposal:** Allow to combine several context bounds inside `{...}`, analogous +to import clauses. Example: + +```scala + trait A: + def showMax[X : {Ordering, Show}](x: X, y: X): String + class B extends A: + def showMax[X : {Ordering as ordering, Show as show}](x: X, y: X): String = + show.asString(ordering.max(x, y)) +``` + +The old syntax with multiple `:` should be phased out over time. There's more about migration at the end of this Pre-SIP. + +**Benefits:** The new syntax is much clearer than the old one, in particular for newcomers that don't know context bounds well. + +### Better Default Names for Context Bounds + +So far, an unnamed context bound for a type parameter gets a synthesized fresh name. It would be much more useful if it got the name of the constrained type parameter instead, translated to be a term name. This means our `reduce` method over monoids would not even need an `as` binding. We could simply formulate it as follows: +``` + def reduce[A : Monoid](xs: List[A]) = + xs.foldLeft(A.unit)(_ `combine` _) +``` + + + +The use of a name like `A` above in two variants, both as a type name and as a term name is of course familiar to Scala programmers. We use the same convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before. + +**Proposed Rules** + + 1. The generated evidence parameter for a context bound `A : C as a` has name `a`. + 2. The generated evidence for a context bound `A : C` without an `as` binding has name `A` (seen as a term name). So, `A : C` is equivalent to `A : C as A`. + 3. If there are multiple context bounds for a type parameter, as in `A : {C_1, ..., C_n}`, the generated evidence parameter for every context bound `C_i` has a fresh synthesized name, unless the context bound carries an `as` clause, in which case rule (1) applies.
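+
+As a rough sketch of how rule (3) plays out (the method name is made up, and the `Ordering`/`Show` signatures are the ones assumed in the `showMax` example above):
+```scala
+def describeMax[X : {Ordering as ord, Show}](x: X, y: X): String =
+  // `ord` is bound by its `as` clause, so rule (1) applies to it; the unnamed
+  // `Show` bound gets a fresh synthesized name by rule (3) and can therefore
+  // only be reached via `summon`.
+  summon[Show[X]].asString(ord.max(x, y))
+```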
+ +The default naming convention reduces the need for named context bounds. But named context bounds are still essential, for at least two reasons: + + - They are needed to give names to multiple context bounds. + - They give an explanation what a single unnamed context bound expands to. + + +### Expansion of Context Bounds + +Context bounds are currently translated to implicit parameters in the last parameter list of a method or class. This is a problem if a context bound is mentioned in one of the preceding parameter types. For example, consider a type class of parsers with associated type members `Input` and `Result` describing the input type on which the parsers operate and the type of results they produce: +```scala +trait Parser[P]: + type Input + type Result +``` +Here is a method `run` that runs a parser on an input of the required type: + +```scala +def run[P : Parser](in: P.Input): P.Result +``` +Or, making clearer what happens by using an explicit name for the context bound: +```scala +def run[P : Parser as p](in: p.Input): p.Result +``` +With the current translation this does not work since it would be expanded to: +```scala + def run[P](x: p.Input)(using p: Parser[P]): p.Result +``` +Note that the `p` in `p.Input` refers to the `p` introduced in the using clause, which comes later. So this is ill-formed. + +This problem would be fixed by changing the translation of context bounds so that they expand to using clauses immediately after the type parameter. But such a change is infeasible, for two reasons: + + 1. It would be a binary-incompatible change. + 2. Putting using clauses earlier can impair type inference. A type in + a using clause can be constrained by term arguments coming before that + clause. Moving the using clause first would miss those constraints, which could cause ambiguities in implicit search. + +But there is an alternative which is feasible: + +**Proposal:** Map the context bounds of a method or class as follows: + + 1. If one of the bounds is referred to by its term name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. + 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. + 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. + +Rules (2) and (3) are the status quo, and match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility is maintained. + +**Discussion** More refined rules could be envisaged where context bounds are spread over different using clauses so that each comes as late as possible. But it would make matters more complicated and the gain in expressiveness is not clear to me. + + +## 2. Context Bounds for Type Members + +It's not very orthogonal to allow subtype bounds for both type parameters and abstract type members, but context bounds only for type parameters. What's more, we don't even have the fallback of an explicit using clause for type members. The only alternative is to also introduce a set of abstract givens that get implemented in each subclass. This is extremely heavyweight and opaque to newcomers. + +**Proposal**: Allow context bounds for type members. Example: + +```scala + class Collection: + type Element : Ord +``` + +The question is how these bounds are expanded. 
Context bounds on type parameters +are expanded into using clauses. But for type members this does not work, since we cannot refer to a member type of a class in a parameter type of that class. What we are after is an equivalent of using parameter clauses but represented as class members. + +**Proposal:** ~~Introduce a new kind of given definition of the form:~~ +Introduce a new way to implement a given definition in a trait like this: +```scala +given T = deferred +``` +~~`deferred` is a soft keyword which has special meaning only in this context. +A given with `deferred` right hand side can appear only as a member definition of some trait.~~ +`deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. + +Deferred givens allow a clean implementation of context bounds in traits, +as in the following example: +```scala +trait Sorted: + type Element : Ord + +class SortedSet[A : Ord] extends Sorted: + type Element = A +``` +The compiler expands this to the following implementation: +```scala +trait Sorted: + type Element + given Ord[Element] = compiletime.deferred + +class SortedSet[A](using A: Ord[A]) extends Sorted: + type Element = A + override given Ord[Element] = A // i.e. the A defined by the using clause +``` + +The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. + +**Benefits:** + + - Better orthogonality, type parameters and abstract type members now accept the same kinds of bounds. + - Better ergonomics, since deferred givens get naturally implemented in inheriting classes, no need for boilerplate to fill in definitions of abstract givens. + +**Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. +It is a concrete definition where the compiler will provide the correct implementation. + +## 3. Cleanup of Given Syntax + +A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. Dissonances and irregularities should be avoided. + +When designing Scala 3, I believe that, by and large, we achieved that goal, except in one area, which is the syntax of givens. There _are_ some glaring dissonances, as seen in this code for defining an ordering on lists: +```scala +given [A](using Ord[A]): Ord[List[A]] with + def compare(x: List[A], y: List[A]) = ... +``` +The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. 
Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need of `with` sticks out like a sore thumb. + +We arrived at that syntax not because of a flight of fancy but because even after trying for about a year to find other solutions it seemed like the least bad alternative. The awkwardness of the given syntax arose because we insisted that givens could be named or anonymous, with the default on anonymous, that we would not use underscore for an anonymous given, and that the name, if present, had to come first, and have the form `name [parameters] :`. In retrospect, that last requirement showed a lack of creativity on our part. + +Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. Since the current syntax is unnatural and esoteric, this means it's difficult to discover and very foreign even after that. This makes it much harder to learn and apply givens than it need be. + +### New Given Syntax + +Things become much simpler if we introduce the optional name instead with an `as name` clause at the end, just like we did for context bounds. We can then use a more intuitive syntax for givens like this: +```scala +given Ord[String]: + def compare(x: String, y: String) = ... + +given [A : Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + +given Monoid[Int]: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` +If explicit names are desired, we add them with `as` clauses: +```scala +given Ord[String] as stringOrd: + def compare(x: String, y: String) = ... + +given [A : Ord] => Ord[List[A]] as listOrd: + def compare(x: List[A], y: List[A]) = ... + +given Monoid[Int] as intMonoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` + +The underlying principles are: + + - A `given` clause consists of the following elements: + + - An optional _precondition_, which introduces type parameters and/or using clauses and which ends in `=>`, + - the implemented _type_, + - an optional name binding using `as`, + - an implementation which consists of either an `=` and an expression, + or a template body. + + - Since there is no longer a middle `:` separating name and parameters from the implemented type, we can use a `:` to start the class body without looking unnatural, as is done everywhere else. That eliminates the special case where `with` was used before. + +This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about a 900K definitions of `implicit def`s +in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again. + +Changing something introduced just recently in Scala 3 is not fun, +but I believe these adjustments are preferable to let bad syntax +sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code +starts migrating. 
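+
+To make the size of the change concrete, here is the list-ordering given from the beginning of this section in both styles (a sketch; the `compare` bodies are elided as before). The current syntax
+```scala
+given [A](using Ord[A]): Ord[List[A]] with
+  def compare(x: List[A], y: List[A]) = ...
+```
+would be written under the proposed syntax as
+```scala
+given [A : Ord] => Ord[List[A]]:
+  def compare(x: List[A], y: List[A]) = ...
+```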
+ +Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time. + + +### Abolish Abstract Givens + +Another simplification is possible. So far we have special syntax for abstract givens: +```scala +given x: T +``` +The problem is that this syntax clashes with the quite common case where we want to establish a given without any nested definitions. For instance +consider a given that constructs a type tag: +```scala +class Tag[T] +``` +Then this works: +```scala +given Tag[String]() +given Tag[String] with {} +``` +But the following more natural syntax fails: +```scala +given Tag[String] +``` +The last line gives a rather cryptic error: +``` +1 |given Tag[String] + | ^ + | anonymous given cannot be abstract +``` +The problem is that the compiler thinks that the last given is intended to be abstract, and complains since abstract givens need to be named. This is another annoying dissonance. Nowhere else in Scala's syntax does adding a +`()` argument to a class cause a drastic change in meaning. And it's also a violation of the principle that it should be possible to define all givens without providing names for them. + +Fortunately, abstract givens are no longer necessary since they are superseded by the new `deferred` scheme. So we can deprecate that syntax over time. Abstract givens are a highly specialized mechanism with a so far non-obvious syntax. We have seen that this syntax clashes with reasonable expectations of Scala programmers. My estimate is that maybe a dozen people world-wide have used abstract givens in anger so far. + +**Proposal** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. + +This is less of a disruption than it might appear at first: + + - `given T` was illegal before since abstract givens could not be anonymous. + It now means a concrete given of class `T` with no member definitions. + - `given x: T` is legacy syntax for an abstract given. + - `given T as x = deferred` is the analogous new syntax, which is more powerful since + it allows for automatic instantiation. + - `given T = deferred` is the anonymous version in the new syntax, which was not expressible before. + +**Benefits:** + + - Simplification of the language since a feature is dropped + - Eliminate non-obvious and misleading syntax. + +## Summary of Syntax Changes + +Here is the complete context-free syntax for all proposed features. +Overall the syntax for givens becomes a lot simpler than what it was before. 
+ +``` +TmplDef ::= 'given' GivenDef +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType {id [nl] AnnotType} + +TypeDef ::= id [TypeParamClause] TypeAndCtxBounds +TypeParamBounds ::= TypeAndCtxBounds +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] +``` + +## Summary + +The proposed set of changes removes awkward syntax and makes dealing with context bounds and givens a lot more regular and pleasant. In summary, the proposed changes are: + + 1. Allow to name context bounds with `as` clauses. + 2. Give useful default names to other context bounds. + 3. Introduce a less cryptic syntax for multiple context bounds. + 4. Allow context bounds on type members which expand to deferred givens. + 5. Introduce a more regular and clearer syntax for givens. + 6. Eliminate abstract givens. + +These changes were implemented as part of a [draft PR](https://github.com/lampepfl/dotty/pulls/odersky) +which also covers the other prospective changes slated to be proposed in two future SIPs. The new system has proven to work well and to address several fundamental issues people were having with +existing implementation techniques for type classes. + +The changes proposed in this pre-SIP are time-sensitive since we would like to correct some awkward syntax choices in Scala 3 before more code migrates to the new constructs (so far, it seems most code still uses Scala 2 style implicits, which will eventually be phased out). It is easy to migrate to the new syntax and to support both old and new for a transition period. diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md new file mode 100644 index 000000000000..79abed7be6a8 --- /dev/null +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -0,0 +1,776 @@ + +--- +layout: doc-page +title: "Type Classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html +--- + +# Some Proposed Changes for Better Support of Type Classes + +Martin Odersky, 8.1.2024 + +A type class in Scala is a pattern where we define + + - a trait with one type parameter (the _type class_) + - given instances at specific instantiations of that trait, + - using clauses or context bounds abstracting over that trait. + +Type classes as a pattern work overall OK, but if we compare them to native implementations in Haskell, or protocols in Swift, or traits in Rust, then there are some idiosyncrasies and rough corners which in the end make them +a bit cumbersome and limiting for standard generic programming patterns. Much has improved since Scala 2's implicits, but there is still some gap to bridge to get to parity with these languages. + +This note shows that with some fairly small and reasonable tweaks to Scala's syntax and typing rules we can obtain a much better scheme for working with type classes, or do generic programming in general. + +The bulk of the suggested improvements has been implemented and is available +under source version `future` if the additional experimental language import `modularity` is present. 
For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +## Generalizing Context Bounds + + The only place in Scala's syntax where the type class pattern is relevant is + in context bounds. A context bound such as + +```scala + def min[A: Ordering](x: List[A]): A +``` +requires that `Ordering` is a trait or class with a single type parameter (which makes it a type class) and expands to a `using` clause that instantiates that parameter. Here is the expansion of `min`: +```scala + def min[A](x: List[A])(using Ordering[A]): A +``` + +**Proposal** Allow type classes to define an abstract type member named `Self` instead of a type parameter. + +**Example** + +```scala + trait Ord: + type Self + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + def reduce[A: Monoid](xs: List[A]): A = + xs.foldLeft(Monoid.unit)(_ `combine` _) + + trait ParserCombinator: + type Self + type Input + type Result + extension (self: Self) + def parse(input: Input): Option[Result] = ... + + def combine[A: ParserCombinator, B: ParserCombinator { type Input = A.Input }] = ... +``` + +**Advantages** + + - Avoids repetitive type parameters, concentrates on what's essential, namely the type class hierarchy. + - Gives a clear indication of traits intended as type classes. A trait is a type class + if it has type `Self` as a member + - Allows to create aggregate type classes that combine givens via intersection types. + - Allows to use refinements in context bounds (the `combine` example above would be very awkward to express using the old way of context bounds expanding to type constructors). + +`Self`-based context bounds are a better fit for a dependently typed language like Scala than parameter-based ones. The main reason is that we are dealing with proper types, not type constructors. Proper types can be parameterized, intersected, or refined. This makes `Self`-based designs inherently more compositional than parameterized ones. + + + +**Details** + +When a trait has both a type parameter and an abstract `Self` type, we + resolve a context bound to the `Self` type. This allows type classes + that carry type parameters, as in + +```scala +trait Sequential[E]: + type Self +``` + +Here, +```scala +[S: Sequential[Int]] +``` +should resolve to: +```scala +[S](using Sequential[Int] { type Self = S }) +``` +and not to: +```scala +[S](using Sequential[S]) +``` + +**Discussion** + + Why not use `This` for the self type? The name `This` suggests that it is the type of `this`. But this is not true for type class traits. `Self` is the name of the type implementing a distinguished _member type_ of the trait in a `given` definition. `Self` is an established term in both Rust and Swift with the meaning used here. + + One possible objection to the `Self` based design is that it does not cover "multi-parameter" type classes. But neither do context bounds! "Multi-parameter" type classes in Scala are simply givens that can be synthesized with the standard mechanisms. Type classes in the strict sense abstract only over a single type, namely the implementation type of a trait. 
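+
+To illustrate that last point, here is a minimal sketch (the trait and method names below are made up for this illustration and are not part of the proposal): a relation between two types is just an ordinary trait with two type parameters, and its "instances" are ordinary givens resolved by the standard mechanisms.
+
+```scala
+trait ConvertsTo[A, B]:
+  def convert(a: A): B
+
+// An ordinary given instance of the two-parameter trait
+given ConvertsTo[Int, String] = new ConvertsTo[Int, String]:
+  def convert(a: Int): String = a.toString
+
+// Abstracting over the relation uses a plain using clause
+def describe[A, B](a: A)(using c: ConvertsTo[A, B]): B = c.convert(a)
+```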
+
+
+## Auxiliary Type Alias `forms`
+
+We introduce a standard type alias `forms` in the Scala package or in `Predef`, defined like this:
+
+```scala
+  infix type forms[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A }
+```
+
+This makes writing instance definitions quite pleasant. Examples:
+
+```scala
+  given Int forms Ord ...
+  given Int forms Monoid ...
+
+  type Reader = [X] =>> Env => X
+  given Reader forms Monad ...
+```
+
+(more examples will follow below)
+
+
+
+## Naming Context Bounds
+
+Context bounds are a convenient and legible abbreviation. A problem so far is that they are always anonymous,
+one cannot name the using parameter to which a context bound expands.
+
+For instance, consider a `reduce` method over `Monoid`s defined like this:
+
+```scala
+def reduce[A : Monoid](xs: List[A]): A = ???
+```
+Since we don't have a name for the `Monoid` instance of `A`, we need to resort to `summon` in the body of `reduce`:
+```scala
+def reduce[A : Monoid](xs: List[A]): A =
+  xs.foldLeft(summon[Monoid[A]].unit)(_ `combine` _)
+```
+That's generally considered too painful to write and read, hence people usually adopt one of two alternatives. Either, eschew context bounds and switch to using clauses:
+```scala
+def reduce[A](xs: List[A])(using m: Monoid[A]): A =
+  xs.foldLeft(m.unit)(_ `combine` _)
+```
+Or, plan ahead and define a "trampoline" method in `Monoid`'s companion object:
+```scala
+  trait Monoid[A] extends SemiGroup[A]:
+    def unit: A
+  object Monoid:
+    def unit[A](using m: Monoid[A]): A = m.unit
+  ...
+  def reduce[A : Monoid](xs: List[A]): A =
+    xs.foldLeft(Monoid.unit)(_ `combine` _)
+```
+This is all accidental complexity which can be avoided by the following proposal.
+
+**Proposal:** Allow to name a context bound, like this:
+```scala
+  def reduce[A : Monoid as m](xs: List[A]): A =
+    xs.foldLeft(m.unit)(_ `combine` _)
+```
+
+We use `as x` after the type to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before.
+
+**Benefits:** The new syntax is simple and clear.
+It avoids the awkward choice between concise context bounds that can't be named and verbose using clauses that can.
+
+### New Syntax for Aggregate Context Bounds
+
+Aggregate context bounds like `A : X : Y` are not obvious to read, and it becomes worse when we add names, e.g. `A : X as x : Y as y`.
+
+**Proposal:** Allow to combine several context bounds inside `{...}`, analogous
+to import clauses. Example:
+
+```scala
+  trait A:
+    def showMax[X : {Ordering, Show}](x: X, y: X): String
+  class B extends A:
+    def showMax[X : {Ordering as ordering, Show as show}](x: X, y: X): String =
+      show.asString(ordering.max(x, y))
+```
+
+The old syntax with multiple `:` should be phased out over time.
+
+**Benefits:** The new syntax is much clearer than the old one, in particular for newcomers who don't know context bounds well.
+
+### Better Default Names for Context Bounds
+
+So far, an unnamed context bound for a type parameter gets a synthesized fresh name. It would be much more useful if it got the name of the constrained type parameter instead, translated to be a term name. This means our `reduce` method over monoids would not even need an `as` binding. We could simply formulate it as follows:
+```
+  def reduce[A : Monoid](xs: List[A]) =
+    xs.foldLeft(A.unit)(_ `combine` _)
+```
+
+The use of a name like `A` above in two variants, both as a type name and as a term name, is of course familiar to Scala programmers.
We use the same convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before. + +**Proposed Rules** + + 1. The generated evidence parameter for a context bound `A : C as a` has name `a` + 2. The generated evidence for a context bound `A : C` without an `as` binding has name `A` (seen as a term name). So, `A : C` is equivalent to `A : C as A`. + 3. If there are multiple context bounds for a type parameter, as in `A : {C_1, ..., C_n}`, the generated evidence parameter for every context bound `C_i` has a fresh synthesized name, unless the context bound carries an `as` clause, in which case rule (1) applies. + +The default naming convention reduces the need for named context bounds. But named context bounds are still essential, for at least two reasons: + + - They are needed to give names to multiple context bounds. + - They give an explanation what a single unnamed context bound expands to. + + +### Expansion of Context Bounds + +Context bounds are currently translated to implicit parameters in the last parameter list of a method or class. This is a problem if a context bound is mentioned in one of the preceding parameter types. For example, consider a type class of parsers with associated type members `Input` and `Result` describing the input type on which the parsers operate and the type of results they produce: +```scala +trait Parser[P]: + type Input + type Result +``` +Here is a method `run` that runs a parser on an input of the required type: + +```scala +def run[P : Parser](in: P.Input): P.Result +``` +Or, making clearer what happens by using an explicit name for the context bound: +```scala +def run[P : Parser as p](in: p.Input): p.Result +``` +With the current translation this does not work since it would be expanded to: +```scala + def run[P](x: p.Input)(using p: Parser[P]): p.Result +``` +Note that the `p` in `p.Input` refers to the `p` introduced in the using clause, which comes later. So this is ill-formed. + +This problem would be fixed by changing the translation of context bounds so that they expand to using clauses immediately after the type parameter. But such a change is infeasible, for two reasons: + + 1. It would be a binary-incompatible change. + 2. Putting using clauses earlier can impair type inference. A type in + a using clause can be constrained by term arguments coming before that + clause. Moving the using clause first would miss those constraints, which could cause ambiguities in implicit search. + +But there is an alternative which is feasible: + +**Proposal:** Map the context bounds of a method or class as follows: + + 1. If one of the bounds is referred to by its term name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. + 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. + 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. + +Rules (2) and (3) are the status quo, and match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility is maintained. 
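+
+For instance, under rule (1) the `run` method above would be expanded along these lines (a sketch of the intended scheme, not literal compiler output):
+
+```scala
+// The evidence parameter for `Parser` is placed in a using clause that comes
+// immediately before the first parameter clause referring to it.
+def run[P](using p: Parser[P])(in: p.Input): p.Result
+```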
+ +**Discussion** More refined rules could be envisaged where context bounds are spread over different using clauses so that each comes as late as possible. But it would make matters more complicated and the gain in expressiveness is not clear to me. + +Named (either explicitly, or by default) context bounds in givens that produce classes are mapped to tracked val's of these classes (see #18958). This allows +references to these parameters to be precise, so that information about dependent type members is preserved. + + +## Context Bounds for Type Members + +It's not very orthogonal to allow subtype bounds for both type parameters and abstract type members, but context bounds only for type parameters. What's more, we don't even have the fallback of an explicit using clause for type members. The only alternative is to also introduce a set of abstract givens that get implemented in each subclass. This is extremely heavyweight and opaque to newcomers. + +**Proposal**: Allow context bounds for type members. Example: + +```scala + class Collection: + type Element : Ord +``` + +The question is how these bounds are expanded. Context bounds on type parameters +are expanded into using clauses. But for type members this does not work, since we cannot refer to a member type of a class in a parameter type of that class. What we are after is an equivalent of using parameter clauses but represented as class members. + +**Proposal:** Introduce a new way to implement a given definition in a trait like this: +```scala +given T = deferred +``` +`deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. + +Deferred givens allow a clean implementation of context bounds in traits, +as in the following example: +```scala +trait Sorted: + type Element : Ord + +class SortedSet[A : Ord] extends Sorted: + type Element = A +``` +The compiler expands this to the following implementation: +```scala +trait Sorted: + type Element + given Ord[Element] = compiletime.deferred + +class SortedSet[A](using A: Ord[A]) extends Sorted: + type Element = A + override given Ord[Element] = A // i.e. the A defined by the using clause +``` + +The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. + +**Benefits:** + + - Better orthogonality, type parameters and abstract type members now accept the same kinds of bounds. + - Better ergonomics, since deferred givens get naturally implemented in inheriting classes, no need for boilerplate to fill in definitions of abstract givens. + +**Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. 
In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. +It is a concrete definition where the compiler will provide the correct implementation. + +## New Given Syntax + +A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. Dissonances and irregularities should be avoided. + +When designing Scala 3, I believe that, by and large, we achieved that goal, except in one area, which is the syntax of givens. There _are_ some glaring dissonances, as seen in this code for defining an ordering on lists: +```scala +given [A](using Ord[A]): Ord[List[A]] with + def compare(x: List[A], y: List[A]) = ... +``` +The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need of `with` sticks out like a sore thumb. + +We arrived at that syntax not because of a flight of fancy but because even after trying for about a year to find other solutions it seemed like the least bad alternative. The awkwardness of the given syntax arose because we insisted that givens could be named or anonymous, with the default on anonymous, that we would not use underscore for an anonymous given, and that the name, if present, had to come first, and have the form `name [parameters] :`. In retrospect, that last requirement showed a lack of creativity on our part. + +Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. Since the current syntax is unnatural and esoteric, this means it's difficult to discover and very foreign even after that. This makes it much harder to learn and apply givens than it need be. + +Things become much simpler if we introduce the optional name instead with an `as name` clause at the end, just like we did for context bounds. We can then use a more intuitive syntax for givens like this: +```scala +given String forms Ord: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] forms Ord: + def compare(x: List[A], y: List[A]) = ... + +given Int forms Monoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` +If explicit names are desired, we add them with `as` clauses: +```scala +given String forms Ord as intOrd: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] forms Ord as listOrd: + def compare(x: List[A], y: List[A]) = ... + +given Int forms Monoid as intMonoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` + +The underlying principles are: + + - A `given` clause consists of the following elements: + + - An optional _precondition_, which introduces type parameters and/or using clauses and which ends in `=>`, + - the implemented _type_, + - an optional name binding using `as`, + - an implementation which consists of either an `=` and an expression, + or a template body. + + - Since there is no longer a middle `:` separating name and parameters from the implemented type, we can use a `:` to start the class body without looking unnatural, as is done everywhere else. 
That eliminates the special case where `with` was used before.
+
+This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about 900K definitions of `implicit def`s
+in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again.
+
+Changing something introduced just recently in Scala 3 is not fun,
+but I believe these adjustments are preferable to letting bad syntax
+sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code
+starts migrating.
+
+Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time.
+
+
+### Abolish Abstract Givens
+
+Another simplification is possible. So far we have special syntax for abstract givens:
+```scala
+given x: T
+```
+The problem is that this syntax clashes with the quite common case where we want to establish a given without any nested definitions. For instance
+consider a given that constructs a type tag:
+```scala
+class Tag[T]
+```
+Then this works:
+```scala
+given Tag[String]()
+given Tag[String] with {}
+```
+But the following more natural syntax fails:
+```scala
+given Tag[String]
+```
+The last line gives a rather cryptic error:
+```
+1 |given Tag[String]
+  |      ^
+  |      anonymous given cannot be abstract
+```
+The problem is that the compiler thinks that the last given is intended to be abstract, and complains since abstract givens need to be named. This is another annoying dissonance. Nowhere else in Scala's syntax does adding a
+`()` argument to a class cause a drastic change in meaning. And it's also a violation of the principle that it should be possible to define all givens without providing names for them.
+
+Fortunately, abstract givens are no longer necessary since they are superseded by the new `deferred` scheme. So we can deprecate that syntax over time. Abstract givens are a highly specialized mechanism with a so far non-obvious syntax. We have seen that this syntax clashes with reasonable expectations of Scala programmers. My estimate is that maybe a dozen people world-wide have used abstract givens in anger so far.
+
+**Proposal** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens.
+
+This is less of a disruption than it might appear at first:
+
+ - `given T` was illegal before since abstract givens could not be anonymous.
+   It now means a concrete given of class `T` with no member definitions.
+ - `given x: T` is legacy syntax for an abstract given.
+ - `given T as x = deferred` is the analogous new syntax, which is more powerful since
+   it allows for automatic instantiation.
+ - `given T = deferred` is the anonymous version in the new syntax, which was not expressible before.
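+
+As a small sketch of what this migration looks like in practice (reusing the `Tag` class from above; the trait names are made up for illustration):
+
+```scala
+import scala.compiletime.deferred
+
+trait LegacyStyle:
+  given tag: Tag[String]                // legacy syntax: abstract given, must be named
+
+trait ProposedStyle:
+  given Tag[String] as tag = deferred   // proposed syntax: deferred given, filled in automatically in subclasses
+
+trait ProposedAnonymous:
+  given Tag[String] = deferred          // anonymous variant, not expressible before
+```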
+ +**Benefits:** + + - Simplification of the language since a feature is dropped + - Eliminate non-obvious and misleading syntax. + +## Summary of Syntax Changes + +Here is the complete context-free syntax for all proposed features. +Overall the syntax for givens becomes a lot simpler than what it was before. + +``` +TmplDef ::= 'given' GivenDef +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType {id [nl] AnnotType} + +TypeDef ::= id [TypeParamClause] TypeAndCtxBounds +TypeParamBounds ::= TypeAndCtxBounds +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] +``` + + + +## Examples + + +### Example 1 + +Here are some standard type classes, which were mostly already introduced at the start of this note, now with associated instance givens and some test code: + +```scala + // Type classes + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] // Here, Self is a type constructor with parameter A + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + // Instances + + given Int forms Ord: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] forms Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = + (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List forms Monad: + extension [A](xs: List[A]) + def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] forms Monad: + extension [A](r: Ctx => A) + def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + // Usages + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T forms Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) +``` + + +### Example 2 + +The following contributed code by @LPTK (issue #10929) did _not_ work at first since +references were not tracked correctly. The version below adds explicit tracked parameters which makes the code compile. 
+```scala +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: tup.map(x.tail)(f) +``` + +Note the quite convoluted syntax, which makes the code hard to understand. Here is the same example in the new type class syntax, which also compiles correctly: +```scala +//> using options -language:experimental.modularity -source future + +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple forms TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest forms TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: Rest.map(x.tail)(f) +``` +Note in particular the following points: + + - In the original code, it was not clear that `TupleOf` is a type class, + since it contained two type parameters, one of which played the role + of the instance type `Self`. The new version is much clearer: `TupleOf` is + a type class over `Self` with one additional parameter, the common type of all tuple elements. + - The two given definitions are obfuscated in the old code. Their version + in the new code makes it clear what kind of instances they define: + + - `EmptyTuple` is a tuple of `Nothing`. + - if `Rest` is a tuple of `A`, then `A *: Rest` is also a tuple of `A`. + + - There's no need to introduce names for parameter instances in using clauses; the default naming scheme for context bound evidences works fine, and is more concise. + - There's no need to manually declare implicit parameters as `tracked`, + context bounds provide that automatically. + - Everything in the new code feels like idiomatic Scala 3, whereas the original code exhibits the awkward corner case that requires a `with` in + front of given definitions. + +### Example 3 + +Dimi Racordon tried to [define parser combinators](https://users.scala-lang.org/t/create-an-instance-of-a-type-class-with-methods-depending-on-type-members/9613) in Scala that use dependent type members for inputs and results. It was intended as a basic example of type class constraints, but it did not work in current Scala. + +Here is the problem solved with the new syntax. Note how much clearer that syntax is compared to Dimi's original version, which did not work out in the end. 
+
+```scala
+/** A parser combinator */
+trait Combinator:
+  type Self
+
+  type Input
+  type Result
+
+  extension (self: Self)
+    /** Parses and returns an element from input `in` */
+    def parse(in: Input): Option[Result]
+end Combinator
+
+case class Apply[I, R](action: I => Option[R])
+case class Combine[A, B](a: A, b: B)
+
+given [I, R] => Apply[I, R] forms Combinator:
+  type Input = I
+  type Result = R
+  extension (self: Apply[I, R])
+    def parse(in: I): Option[R] = self.action(in)
+
+given [A: Combinator, B: Combinator { type Input = A.Input }]
+    => Combine[A, B] forms Combinator:
+  type Input = A.Input
+  type Result = (A.Result, B.Result)
+  extension (self: Combine[A, B])
+    def parse(in: Input): Option[Result] =
+      for
+        x <- self.a.parse(in)
+        y <- self.b.parse(in)
+      yield (x, y)
+```
+The example is now expressed as straightforwardly as it should be:
+
+ - `Combinator` is a type class with two associated types, `Input` and `Result`, and a `parse` method.
+ - `Apply` and `Combine` are two data constructors representing parser combinators. They are declared to be `Combinators` in the two subsequent `given` declarations.
+ - `Apply`'s `parse` method applies the `action` function to the input.
+ - `Combine[A, B]` is a parser combinator provided `A` and `B` are parser combinators
+   that process the same type of `Input`, which is also the input type of
+   `Combine[A, B]`. Its `Result` type is a pair of the `Result` types of `A` and `B`.
+   Results are produced by a simple for-expression.
+
+Compared to the original example, which required serious contortions, this is now all completely straightforward.
+
+_Note 1:_ One could also explore improvements, for instance making this purely functional. But that's not the point of the demonstration here, where I wanted
+to take the original example and show how it can be made to work with the new constructs, and be expressed more clearly as well.
+
+_Note 2:_ One could improve the notation even further by adding equality constraints in the style of Swift, which in turn resemble the _sharing constraints_ of SML. A hypothetical syntax applied to the second given would be:
+```scala
+given [A: Combinator, B: Combinator with A.Input == B.Input]
+    => Combine[A, B] forms Combinator:
+```
+This variant is aesthetically pleasing since it makes the equality constraint symmetric. The original version had to use an asymmetric refinement on the second type parameter bound instead. For now, such constraints are neither implemented nor proposed. This is left as a possibility for future work. Note also the analogy with
+the work of @mbovel and @Sporarum on refinement types, where similar `with` clauses can appear for term parameters. If that work goes ahead, we could possibly revisit the issue of `with` clauses also for type parameters.
+
+### Example 4
+
+Dimi Racordon tried to [port some core elements](https://github.com/kyouko-taiga/scala-hylolib) of the type class based [Hylo standard library to Scala](https://github.com/hylo-lang/hylo/tree/main/StandardLibrary/Sources). It worked to some degree, but there were some things that could not be expressed, and more things that could be expressed only awkwardly.
+
+With the improvements proposed here, the library can now be expressed quite clearly and straightforwardly. See tests/pos/hylolib in this PR for details.
+
+## Suggested Improvements unrelated to Type Classes
+
+The following improvements elsewhere would make sense alongside the suggested changes to type classes.
But they are currently not part of this proposal or implementation. + +### Fixing Singleton + +We know the current treatment of `Singleton` as a type bound is broken since +`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. + +A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. + +We can do this in a backwards-compatible way by defining `Singleton` like this: + +```scala +trait Singleton: + type Self +``` + +Then, instead of using an unsound upper bound we can use a context bound: + +```scala +def f[X: Singleton](x: X) = ... +``` + +The context bound would be treated specially by the compiler so that no using clause is generated at runtime. + +_Aside_: This can also lead to a solution how to express precise type variables. We can introduce another special type class `Precise` and use it like this: + +```scala +def f[X: Precise](x: X) = ... +``` +This would disable automatic widening of singleton types in inferred instances of type variable `X`. + +### Using `as` also in Patterns + +Since we have now more precedents of `as` as a postfix binder, I want to come back to the proposal to use it in patterns as well, in favor of `@`, which should be deprecated. + +Examples: + +```scala + xs match + case (Person(name, age) as p) :: rest => ... + + tp match + case Param(tl, _) :: _ as tparams => ... + + val x :: xs1 as xs = ys.checkedCast +``` + +These would replace the previous syntax using `@`: + +```scala + xs match + case p @ Person(name, age) :: rest => ... + + tp match + case tparams @ (Param(tl, _) :: _) => ... + + val xs @ (x :: xs1) = ys.checkedCast +``` +**Advantages:** No unpronounceable and non-standard symbol like `@`. More regularity. + +Generally, we want to use `as name` to attach a name for some entity that could also have been used stand-alone. + +**Proposed Syntax Change** + +``` +Pattern2 ::= InfixPattern ['as' id] +``` + +## Summary + +I have proposed some tweaks to Scala 3, which would greatly increase its usability for modular, type class based, generic programming. The proposed changes are: + + 1. Allow context bounds over classes that define a `Self` member type. + 1. Allow context bounds to be named with `as`. Use the bound parameter name as a default name for the generated context bound evidence. + 1. Add a new `{...}` syntax for multiple context bounds. + 1. Make context bounds also available for type members, which expand into a new form of deferred given. Phase out the previous abstract givens in favor of the new form. + 1. Add a predefined type alias `forms`. + 1. Introduce a new cleaner syntax of given clauses. + +It's interesting that givens, which are a very general concept in Scala, were "almost there" when it comes to full support of concepts and generic programming. We only needed to add a few usability tweaks to context bounds, +alongside two syntactic changes that supersede the previous forms of `given .. with` clauses and abstract givens. Also interesting is that the superseded syntax constructs were the two areas where we collectively felt that the previous solutions were a bit awkward, but we could not think of better ones at the time. It's very nice that more satisfactory solutions are now emerging. + +## Conclusion + +Generic programming can be expressed in a number of languages. For instance, with +type classes in Haskell, or with traits in Rust, or with protocols in Swift, or with concepts in C++. 
Each of these is constructed from a fairly heavyweight set of new constructs, different from expressions and types. By contrast, equivalent solutions in Scala rely on regular types. Type classes are simply traits that define a `Self` type member.
+
+The proposed scheme has similar expressiveness to Protocols in Swift or Traits in Rust. Both of these were largely influenced by Jeremy Siek's PhD thesis "[A language for generic programming](https://scholarworks.iu.edu/dspace/handle/2022/7067)", which was first proposed as a way to implement concepts in C++. C++ did not follow Siek's approach, but Swift and Rust did.
+
+In Siek's thesis and in the formal treatments of Rust and Swift,
+ type class concepts are explained by mapping them to a lower level language of explicit dictionaries with representations for terms and types. Crucially, that lower level is not expressible without loss of granularity in the source language itself, since type representations are mapped to term dictionaries. By contrast, the current proposal expands type class concepts into other well-typed Scala constructs, which ultimately map into well-typed DOT programs. Type classes are simply a convenient notation for something that can already be expressed in Scala. In that sense, we stay true to the philosophy of a _scalable language_, where a small core can support a large range of advanced use cases.
+
diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md
index 98e9a7d3d711..e21d369b6b5e 100644
--- a/docs/_docs/reference/other-new-features/export.md
+++ b/docs/_docs/reference/other-new-features/export.md
@@ -37,7 +37,12 @@ final def print(bits: BitMap): Unit = printUnit.print(bits)
 final type PrinterType = printUnit.PrinterType
 ```
 
-They can be accessed inside `Copier` as well as from outside:
+With the experimental `modularity` language import, only exported methods and values are final, whereas the generated `PrinterType` would be a simple type alias
+```scala
+  type PrinterType = printUnit.PrinterType
+```
+
+These aliases can be accessed inside `Copier` as well as from outside:
 
 ```scala
 val copier = new Copier
@@ -90,12 +95,17 @@ export O.*
 ```
 Export aliases copy the type and value parameters of the members they refer to.
-Export aliases are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding:
+Export aliases of term members are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding:
 
- - Export aliases cannot be overridden, since they are final.
+ - Export aliases of methods or fields cannot be overridden, since they are final.
 - Export aliases cannot override concrete members in base classes, since they are not marked
   `override`.
 - However, export aliases can implement deferred members of base classes.
+ - Export type aliases are normally also final, except when the experimental
+   language import `modularity` is present.
The general + rules for type aliases ensure in any case that if there are several type aliases in a class, + they must agree on their right hand sides, or the class could not be instantiated. + So dropping the `final` for export type aliases is safe. Export aliases for public value definitions that are accessed without referring to private values in the qualifier path diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index 1980bc4e0ab2..b07bc174162e 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -200,8 +200,8 @@ SimpleType ::= SimpleLiteral | Singleton ‘.’ ‘type’ | ‘(’ Types ‘)’ | Refinement - | SimpleType1 TypeArgs - | SimpleType1 ‘#’ id + | SimpleType TypeArgs + | SimpleType ‘#’ id Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id @@ -392,7 +392,7 @@ LocalModifier ::= ‘abstract’ AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ id ‘]’ -Annotation ::= ‘@’ SimpleType1 {ParArgumentExprs} +Annotation ::= ‘@’ SimpleType {ParArgumentExprs} Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} @@ -443,6 +443,7 @@ ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +GivenType ::= AnnotType {id [nl] AnnotType} StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods @@ -452,7 +453,7 @@ ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef Template ::= InheritClauses [TemplateBody] InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] ConstrApps ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp}) -ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} +ConstrApp ::= SimpleType {Annotation} {ParArgumentExprs} ConstrExpr ::= SelfInvocation | <<< SelfInvocation {semi BlockStat} >>> SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 65d7ac2f9ee4..6b8f8d561f82 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -153,6 +153,8 @@ subsection: - page: reference/experimental/cc.md - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md + - page: reference/experimental/modularity.md + - page: reference/experimental/typeclasses.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala new file mode 100644 index 000000000000..f859cda96d06 --- /dev/null +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -0,0 +1,53 @@ +package scala.annotation +package internal + +/** An annotation that is used for marking type definitions that should get + * context bound companions. The scheme is as follows: + * + * 1. When desugaring a context-bounded type A, add a @WitnessNames(n_1, ... , n_k) + * annotation to the type declaration node, where n_1, ..., n_k are the names of + * all the witnesses generated for the context bounds of A. This annotation will + * be pickled as usual. + * + * 2. During Namer or Unpickling, when encountering a type declaration A with + * a WitnessNames(n_1, ... 
, n_k) annotation, create a CB companion `val A` with
+ *     type `<context-bound-companion>`[ref_1 | ... | ref_k] where ref_i is a TermRef
+ *     with the same prefix as A and name n_i. Except, don't do this if the type in
+ *     question is a type parameter and there is already a term parameter with name A
+ *     defined for the same method.
+ *
+ *     ContextBoundCompanion is defined as an internal abstract type like this:
+ *
+ *        type `<context-bound-companion>`[-Refs]
+ *
+ *     The context bound companion's variance is negative, so that unions in the
+ *     arguments are joined when encountering multiple definitions and forming a glb.
+ *
+ *  3. Add a special case for typing a selection A.m on a value A of type
+ *     ContextBoundCompanion[ref_1, ..., ref_k]. Namely, try to typecheck all
+ *     selections ref_1.m, ..., ref_k.m with the expected type. There must be
+ *     a unique selection ref_i.m that typechecks and such that for all other
+ *     selections ref_j.m that also typecheck one of the following three criteria
+ *     applies:
+ *
+ *      1. ref_i.m and ref_j.m are the same. This means: If they are types then
+ *         ref_i.m is an alias of ref_j.m. If they are terms then they are both
+ *         singleton types and ref_i.m =:= ref_j.m.
+ *      2. The underlying type (under widen) of ref_i is a true supertype of the
+ *         underlying type of ref_j.
+ *      3. ref_i.m is a term, the underlying type of ref_j is not a strict subtype
+ *         of the underlying type of ref_i, and the underlying type of ref_i.m is a
+ *         strict subtype of the underlying type of ref_j.m.
+ *
+ *  If there is such a selection, map A.m to ref_i.m, otherwise report an error.
+ *
+ *  (2) might surprise. It is the analogue of given disambiguation, where we also
+ *  pick the most general candidate that matches the expected type. E.g. we have
+ *  context bounds for Functor, Monad, and Applicative. In this case we want to
+ *  select the `map` method of `Functor`.
+ *
+ *  4. At PostTyper, issue an error when encountering any reference to a CB companion.
+ */
+class WitnessNames(names: String*) extends StaticAnnotation
+
+
diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala
index 3eca997554a0..be76941a680b 100644
--- a/library/src/scala/compiletime/package.scala
+++ b/library/src/scala/compiletime/package.scala
@@ -42,6 +42,19 @@ def erasedValue[T]: T = erasedValue[T]
 @compileTimeOnly("`uninitialized` can only be used as the right hand side of a mutable field definition")
 def uninitialized: Nothing = ???
 
+/** Used as the right hand side of a given in a trait, like this
+ *
+ *  ```
+ *     given T = deferred
+ *  ```
+ *
+ *  This signifies that the given will get a synthesized definition in all classes
+ *  that implement the enclosing trait and that do not contain an explicit overriding
+ *  definition of that given.
+ */
+@compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait")
+def deferred: Nothing = ???
+
 /** The error method is used to produce user-defined compile errors during inline expansion.
  *  If an inline expansion results in a call error(msgStr) the compiler produces an error message containing the given msgStr.
  *
diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala
index 09feaf11c31d..43ec109a120f 100644
--- a/library/src/scala/runtime/stdLibPatches/Predef.scala
+++ b/library/src/scala/runtime/stdLibPatches/Predef.scala
@@ -61,4 +61,16 @@ object Predef:
     inline def ne(inline y: AnyRef | Null): Boolean =
       !(x eq y)
 
+  /** A type supporting Self-based type classes.
+ * + * A is TC + * + * expands to + * + * TC { type Self = A } + * + * which is what is needed for a context bound `[A: TC]`. + */ + infix type forms[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } + end Predef diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 70d5f2d41907..f1bd43cf7fb7 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -91,6 +91,18 @@ object language: @compileTimeOnly("`into` can only be used at compile time in import statements") object into + /** Experimental support for new features for better modularity, including + * - better tracking of dependencies through classes + * - better usability of context bounds + * - better syntax and conventions for type classes + * - ability to merge exported types in intersections + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/modularity]] + * @see [[https://dotty.epfl.ch/docs/reference/experimental/typeclasses]] + */ + @compileTimeOnly("`modularity` can only be used at compile time in import statements") + object modularity + /** Was needed to add support for relaxed imports of extension methods. * The language import is no longer needed as this is now a standard feature since SIP was accepted. * @see [[http://dotty.epfl.ch/docs/reference/contextual/extension-methods]] diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 7565d23b2c1b..22591104ea41 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -8,6 +8,10 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of the library Build.previousDottyVersion -> Seq( + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.WitnessNames"), ), // Additions since last LTS @@ -71,6 +75,8 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of tasty core Build.previousDottyVersion -> Seq( + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.TRACKED"), + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.TRACKED"), ), // Additions since last LTS diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index e17c98234691..a93198e4e3e4 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -223,6 +223,7 @@ Standard-Section: "ASTs" TopLevelStat* EXPORTED -- An export forwarder OPEN -- an open class INVISIBLE -- invisible during typechecking + TRACKED -- a tracked class parameter / a dependent class Annotation Variance = STABLE -- invariant @@ -504,6 +505,7 @@ object TastyFormat { final val INVISIBLE = 44 final val EMPTYCLAUSE = 45 final val SPLITCLAUSE = 46 + final val TRACKED = 47 // Tree Cat. 
2: tag Nat final val firstNatTreeTag = SHAREDterm @@ -693,7 +695,8 @@ object TastyFormat { | INVISIBLE | ANNOTATION | PRIVATEqualified - | PROTECTEDqualified => true + | PROTECTEDqualified + | TRACKED => true case _ => false } diff --git a/tests/neg/cb-companion-leaks.check b/tests/neg/cb-companion-leaks.check new file mode 100644 index 000000000000..b865d058f0e7 --- /dev/null +++ b/tests/neg/cb-companion-leaks.check @@ -0,0 +1,66 @@ +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:11:23 --------------------------------------------------------- +11 | def foo[A: {C, D}] = A // error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:15:10 --------------------------------------------------------- +15 | val x = A // error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:17:9 ---------------------------------------------------------- +17 | val y: A.type = ??? 
// error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/cb-companion-leaks.scala b/tests/neg/cb-companion-leaks.scala new file mode 100644 index 000000000000..24aa36406253 --- /dev/null +++ b/tests/neg/cb-companion-leaks.scala @@ -0,0 +1,18 @@ +//> using options -language:experimental.modularity -source future -explain + +class C: + type Self + +class D: + type Self + +trait Test: + + def foo[A: {C, D}] = A // error + + type A: C + + val x = A // error + + val y: A.type = ??? // error + diff --git a/tests/neg/deferred-givens.check b/tests/neg/deferred-givens.check new file mode 100644 index 000000000000..cc15901d087f --- /dev/null +++ b/tests/neg/deferred-givens.check @@ -0,0 +1,13 @@ +-- [E172] Type Error: tests/neg/deferred-givens.scala:11:6 ------------------------------------------------------------- +11 |class B extends A // error + |^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- [E172] Type Error: tests/neg/deferred-givens.scala:13:15 ------------------------------------------------------------ +13 |abstract class C extends A // error + |^^^^^^^^^^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- Error: tests/neg/deferred-givens.scala:26:8 ------------------------------------------------------------------------- +26 | class E extends A2 // error, can't summon polymorphic given + | ^^^^^^^^^^^^^^^^^^ + | Cannnot infer the implementation of the deferred given instance given_Ctx3_T in trait A2 + | since that given is parameterized. An implementing given needs to be written explicitly. 
diff --git a/tests/neg/deferred-givens.scala b/tests/neg/deferred-givens.scala new file mode 100644 index 000000000000..7ff67d784714 --- /dev/null +++ b/tests/neg/deferred-givens.scala @@ -0,0 +1,30 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +class Ctx +class Ctx2 + +trait A: + given Ctx as ctx = deferred + given Ctx2 = deferred + +class B extends A // error + +abstract class C extends A // error + +class D extends A: + given Ctx as ctx = Ctx() // ok, was implemented + given Ctx2 = Ctx2() // ok + +class Ctx3[T] + +trait A2: + given [T] => Ctx3[T] = deferred + +object O: + given [T] => Ctx3[T] = Ctx3[T]() + class E extends A2 // error, can't summon polymorphic given + +class E extends A2: + given [T] => Ctx3[T] = Ctx3[T]() // ok + diff --git a/tests/neg/deferredSummon.check b/tests/neg/deferredSummon.check new file mode 100644 index 000000000000..bd76ad73467e --- /dev/null +++ b/tests/neg/deferredSummon.check @@ -0,0 +1,17 @@ +-- Error: tests/neg/deferredSummon.scala:4:26 -------------------------------------------------------------------------- +4 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:7:26 -------------------------------------------------------------------------- +7 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:12:16 ------------------------------------------------------------------------- +12 | given Int = deferred // error + | ^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:16:14 ------------------------------------------------------------------------- +16 | given Int = defered // error + | ^^^^^^^ + |`deferred` can only be used as the right hand side of a given definition in a trait. + |Note that `deferred` can only be used under its own name when implementing a given in a trait; `defered` is not accepted. 
diff --git a/tests/neg/deferredSummon.scala b/tests/neg/deferredSummon.scala new file mode 100644 index 000000000000..cddde82535fb --- /dev/null +++ b/tests/neg/deferredSummon.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity + +object Test: + given Int = compiletime.deferred // error + +abstract class C: + given Int = compiletime.deferred // error + +trait A: + import compiletime.deferred + locally: + given Int = deferred // error + +trait B: + import compiletime.deferred as defered + given Int = defered // error + + + diff --git a/tests/neg/i0248-inherit-refined.check b/tests/neg/i0248-inherit-refined.check new file mode 100644 index 000000000000..4e14c3c6f14b --- /dev/null +++ b/tests/neg/i0248-inherit-refined.check @@ -0,0 +1,12 @@ +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:8:18 ------------------------------------------------------- +8 | class C extends Y // error + | ^ + | test.A & test.B is not a class type + | + | longer explanation available when compiling with `-explain` +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:10:18 ------------------------------------------------------ +10 | class D extends Z // error + | ^ + | test.A | test.B is not a class type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i0248-inherit-refined.scala b/tests/neg/i0248-inherit-refined.scala index 97b6f5cdab73..f7cd6375afc9 100644 --- a/tests/neg/i0248-inherit-refined.scala +++ b/tests/neg/i0248-inherit-refined.scala @@ -1,10 +1,12 @@ +//> using options -source future -language:experimental.modularity + object test { class A { type T } type X = A { type T = Int } - class B extends X // error + class B extends X // was error, now OK type Y = A & B class C extends Y // error type Z = A | B class D extends Z // error - abstract class E extends ({ val x: Int }) // error + abstract class E extends ({ val x: Int }) // was error, now OK } diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index ccc2b9f7ed00..eded51f70f31 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -1,8 +1,8 @@ --- [E040] Syntax Error: tests/neg/i12348.scala:2:15 -------------------------------------------------------------------- -2 | given inline x: Int = 0 // error - | ^ - | 'with' expected, but identifier found --- [E040] Syntax Error: tests/neg/i12348.scala:3:10 -------------------------------------------------------------------- -3 |} // error - | ^ - | '}' expected, but eof found +-- [E040] Syntax Error: tests/neg/i12348.scala:2:16 -------------------------------------------------------------------- +2 | given inline x: Int = 0 // error // error + | ^ + | an identifier expected, but ':' found +-- [E067] Syntax Error: tests/neg/i12348.scala:2:8 --------------------------------------------------------------------- +2 | given inline x: Int = 0 // error // error + | ^ + |Declaration of given instance given_x_inline_ not allowed here: only classes can have declared but undefined members diff --git a/tests/neg/i12348.scala b/tests/neg/i12348.scala index 69fc77fb532e..43daf9a2801b 100644 --- a/tests/neg/i12348.scala +++ b/tests/neg/i12348.scala @@ -1,3 +1,2 @@ object A { - given inline x: Int = 0 // error -} // error \ No newline at end of file + given inline x: Int = 0 // error // error diff --git a/tests/neg/i12456.scala b/tests/neg/i12456.scala index b9fb0283dcd7..c1a3ada5a420 100644 --- a/tests/neg/i12456.scala +++ b/tests/neg/i12456.scala @@ -1 +1 @@ -object F { type T[G[X] <: X, F <: G[F]] } // error // 
error +object F { type T[G[X] <: X, F <: G[F]] } // error diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index a80e8b2b289b..998c54292b15 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -8,7 +8,7 @@ object Test: type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType4[X <: Option] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded diff --git a/tests/neg/i15264.scala b/tests/neg/i15264.scala new file mode 100644 index 000000000000..e13e1089dba3 --- /dev/null +++ b/tests/neg/i15264.scala @@ -0,0 +1,59 @@ +import language.`3.5` +object priority: + // lower number = higher priority + class Prio0 extends Prio1 + object Prio0 { given Prio0() } + + class Prio1 extends Prio2 + object Prio1 { given Prio1() } + + class Prio2 + object Prio2 { given Prio2() } + +object repro: + // analogous to cats Eq, Hash, Order: + class A[V] + class B[V] extends A[V] + class C[V] extends A[V] + + class Q[V] + + object context: + // prios work here, which is cool + given[V](using priority.Prio0): C[V] = new C[V] + given[V](using priority.Prio1): B[V] = new B[V] + given[V](using priority.Prio2): A[V] = new A[V] + + object exports: + // so will these exports + export context.given + + // if you import these don't import from 'context' above + object qcontext: + // base defs, like what you would get from cats + given gb: B[Int] = new B[Int] + given gc: C[Int] = new C[Int] + + // these seem like they should work but don't + given gcq[V](using p0: priority.Prio0)(using c: C[V]): C[Q[V]] = new C[Q[V]] + given gbq[V](using p1: priority.Prio1)(using b: B[V]): B[Q[V]] = new B[Q[V]] + given gaq[V](using p2: priority.Prio2)(using a: A[V]): A[Q[V]] = new A[Q[V]] + +object test1: + import repro.* + import repro.exports.given + + // these will work + val a = summon[A[Int]] + +object test2: + import repro.* + import repro.qcontext.given + + // This one will fail as ambiguous - prios aren't having an effect. + // Priorities indeed don't have an effect if the result is already decided + // without using clauses; they only act as a tie breaker. + // With the new resolution rules, it's ambiguous since we pick `gaq` for + // summon, and that needs an A[Int], but there are only the two competing choices + // qb and qc.
+ val a = summon[A[Q[Int]]] // error: ambiguous between qb and qc for A[Int] diff --git a/tests/neg/i3964.scala b/tests/neg/i3964.scala new file mode 100644 index 000000000000..eaf3953bc230 --- /dev/null +++ b/tests/neg/i3964.scala @@ -0,0 +1,12 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test1: + + abstract class Bar { val x: Animal } + val bar: Bar { val x: Cat } = new Bar { val x = new Cat } // error, but should work + + trait Foo { val x: Animal } + val foo: Foo { val x: Cat } = new Foo { val x = new Cat } // error, but should work diff --git a/tests/neg/i7045.scala b/tests/neg/i7045.scala new file mode 100644 index 000000000000..b4c6d60cd35a --- /dev/null +++ b/tests/neg/i7045.scala @@ -0,0 +1,7 @@ +trait Bar { type Y } +trait Foo { type X } + +class Test: + given a1(using b: Bar): Foo = new Foo { type X = b.Y } // ok + given a2(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } // ok + given a3(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } // error \ No newline at end of file diff --git a/tests/neg/i9328.scala b/tests/neg/i9328.scala index dabde498e1dc..c13d33e103b9 100644 --- a/tests/neg/i9328.scala +++ b/tests/neg/i9328.scala @@ -3,7 +3,7 @@ type Id[T] = T match { case _ => T } -class Foo2[T <: Id[T]] // error // error +class Foo2[T <: Id[T]] // error object Foo { // error object Foo { } diff --git a/tests/neg/i9330.scala b/tests/neg/i9330.scala index ca25582ef7e8..6ba57c033473 100644 --- a/tests/neg/i9330.scala +++ b/tests/neg/i9330.scala @@ -1,4 +1,4 @@ val x = { - () == "" // error + () == "" implicit def foo[A: A] // error // error // error } diff --git a/tests/neg/parent-refinement-access.check b/tests/neg/parent-refinement-access.check new file mode 100644 index 000000000000..5cde9d51558f --- /dev/null +++ b/tests/neg/parent-refinement-access.check @@ -0,0 +1,7 @@ +-- [E164] Declaration Error: tests/neg/parent-refinement-access.scala:6:6 ---------------------------------------------- +6 |trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error + | ^ + | error overriding value x in trait Year2 of type Int; + | value x in trait Gen of type Any has weaker access privileges; it should be public + | (Note that value x in trait Year2 of type Int is abstract, + | and is therefore overridden by concrete value x in trait Gen of type Any) diff --git a/tests/neg/parent-refinement-access.scala b/tests/neg/parent-refinement-access.scala new file mode 100644 index 000000000000..57d45f4fb201 --- /dev/null +++ b/tests/neg/parent-refinement-access.scala @@ -0,0 +1,6 @@ +//> using options -source future -language:experimental.modularity + +trait Gen: + private[Gen] val x: Any = () + +trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error diff --git a/tests/neg/parent-refinement.check b/tests/neg/parent-refinement.check index 550430bd35a7..cf9a57bc7821 100644 --- a/tests/neg/parent-refinement.check +++ b/tests/neg/parent-refinement.check @@ -1,4 +1,25 @@ --- Error: tests/neg/parent-refinement.scala:5:2 ------------------------------------------------------------------------ -5 | with Ordered[Year] { // error - | ^^^^ - | end of toplevel definition expected but 'with' found +-- Error: tests/neg/parent-refinement.scala:11:6 ----------------------------------------------------------------------- +11 |class Bar extends IdOf[Int], (X { type Value = String }) // error + | ^^^ + |class Bar cannot be instantiated since it 
has a member Value with possibly conflicting bounds Int | String <: ... <: Int & String +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:15:17 ------------------------------------------------- +15 | val x: Value = 0 // error + | ^ + | Found: (0 : Int) + | Required: Baz.this.Value + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:21:6 -------------------------------------------------- +21 | foo(2) // error + | ^ + | Found: (2 : Int) + | Required: Boolean + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:17:22 ------------------------------------------------- +17 |val x: IdOf[Int] = Baz() // error + | ^^^^^ + | Found: Baz + | Required: IdOf[Int] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/parent-refinement.scala b/tests/neg/parent-refinement.scala index ca2b88a75fd8..868747faba57 100644 --- a/tests/neg/parent-refinement.scala +++ b/tests/neg/parent-refinement.scala @@ -1,7 +1,21 @@ +//> using options -source future -language:experimental.modularity trait Id { type Value } +trait X { type Value } +type IdOf[T] = Id { type Value = T } + case class Year(value: Int) extends AnyVal - with Id { type Value = Int } - with Ordered[Year] { // error + with (Id { type Value = Int }) + with Ordered[Year] + +class Bar extends IdOf[Int], (X { type Value = String }) // error + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = 0 // error + +val x: IdOf[Int] = Baz() // error -} \ No newline at end of file +object Clash extends ({ def foo(x: Int): Int }): + def foo(x: Boolean): Int = 1 + foo(2) // error diff --git a/tests/neg/parser-stability-12.scala b/tests/neg/parser-stability-12.scala index 78ff178d010c..17a611d70e34 100644 --- a/tests/neg/parser-stability-12.scala +++ b/tests/neg/parser-stability-12.scala @@ -1,4 +1,4 @@ trait x0[]: // error - trait x1[x1 <:x0] // error: type x0 takes type parameters + trait x1[x1 <:x0] extends x1[ // error // error \ No newline at end of file diff --git a/tests/neg/showtest.scala b/tests/neg/showtest.scala new file mode 100644 index 000000000000..31f3d07d1204 --- /dev/null +++ b/tests/neg/showtest.scala @@ -0,0 +1,10 @@ +trait Show[T]: + extension (x: T) def show: String + +given showAnx: Show[Any] = _.toString + +def print[T: Show](x: T) = println(x.show) + +@main def Test = + println("hello".show) // ok + print("hello".show) // error diff --git a/tests/neg/tracked.check b/tests/neg/tracked.check new file mode 100644 index 000000000000..ae734e7aa0b4 --- /dev/null +++ b/tests/neg/tracked.check @@ -0,0 +1,50 @@ +-- Error: tests/neg/tracked.scala:2:16 --------------------------------------------------------------------------------- +2 |class C(tracked x: Int) // error + | ^ + | `val` or `var` expected +-- [E040] Syntax Error: tests/neg/tracked.scala:7:18 ------------------------------------------------------------------- +7 | def foo(tracked a: Int) = // error + | ^ + | ':' expected, but identifier found +-- Error: tests/neg/tracked.scala:8:12 --------------------------------------------------------------------------------- +8 | tracked val b: Int = 2 // error + | ^^^ + | end of statement expected but 'val' found +-- Error: tests/neg/tracked.scala:11:10 -------------------------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^ + | end of statement expected but 
'object' found +-- Error: tests/neg/tracked.scala:14:10 -------------------------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^ + | end of statement expected but 'class' found +-- Error: tests/neg/tracked.scala:17:10 -------------------------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^ + | end of statement expected but 'type' found +-- Error: tests/neg/tracked.scala:20:29 -------------------------------------------------------------------------------- +20 | given g2(using tracked val x: Int): C = C(x) // error + | ^^^^^^^^^^^^^^^^^^ + | method parameter x may not be a `val` +-- Error: tests/neg/tracked.scala:4:21 --------------------------------------------------------------------------------- +4 |class C2(tracked var x: Int) // error + | ^ + | mutable variables may not be `tracked` +-- [E006] Not Found Error: tests/neg/tracked.scala:11:2 ---------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:14:2 ---------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:17:2 ---------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/tracked.scala b/tests/neg/tracked.scala new file mode 100644 index 000000000000..8d315a7b89ac --- /dev/null +++ b/tests/neg/tracked.scala @@ -0,0 +1,20 @@ +//> using options -source future -language:experimental.modularity +class C(tracked x: Int) // error + +class C2(tracked var x: Int) // error + +object A: + def foo(tracked a: Int) = // error + tracked val b: Int = 2 // error + +object B: + tracked object Foo // error // error + +object C: + tracked class D // error // error + +object D: + tracked type T = Int // error // error + +object E: + given g2(using tracked val x: Int): C = C(x) // error diff --git a/tests/neg/tracked2.scala b/tests/neg/tracked2.scala new file mode 100644 index 000000000000..2e6fa8cf6045 --- /dev/null +++ b/tests/neg/tracked2.scala @@ -0,0 +1 @@ +class C(tracked val x: Int) // error diff --git a/tests/neg/unapplied-types.scala b/tests/neg/unapplied-types.scala deleted file mode 100644 index 2f2339baa026..000000000000 --- a/tests/neg/unapplied-types.scala +++ /dev/null @@ -1,7 +0,0 @@ -trait T { - type L[X] = List[X] - type T1 <: L // error: takes type parameters - type T2 = L // error: takes type parameters - type T3 = List // error: takes type parameters - type T4 <: List // error: takes type parameters -} diff --git a/tests/new/test.scala b/tests/new/test.scala index e6bfc29fd808..8ba66903f581 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -1,2 +1,21 @@ -object Test: - def f: Any = 1 +trait Monoid[T]{ + def unit: T + type TypeMember = Int +} +object Monoid{ + implicit object intM extends Monoid[Int]{ def unit = 0 } +} +object Main{ + opaque type ABound = Unit + def reduceSort[A](xs: List[A])(using monoid_a: Monoid[A], ordering_a: scala.math.Ordering[A]) = + inline implicit def f(v: ABound): ordering_a.type = ordering_a + inline 
implicit def g(v: ABound): monoid_a.type = monoid_a + val A: ABound = null.asInstanceOf[ABound] + val i: A.TypeMember = 123 // THIS WORKS + A.unit +: xs.sorted(A) + + def main(args: Array[String]): Unit = { + println("Hello World") + println(reduceSort(List(1, 2, 3, 2, 1))) + } +} \ No newline at end of file diff --git a/tests/new/tracked-mixin-traits.scala b/tests/new/tracked-mixin-traits.scala new file mode 100644 index 000000000000..21d890d44f42 --- /dev/null +++ b/tests/new/tracked-mixin-traits.scala @@ -0,0 +1,16 @@ +trait A: + type T +object a extends A: + type T = Int + +trait B(tracked val b: A): + type T = b.T + +trait C(tracked val c: A): + type T = c.T + +class D extends B(a), C(a): + val x: T = 2 + + + diff --git a/tests/pos-macros/i8325/Macro_1.scala b/tests/pos-macros/i8325/Macro_1.scala index 18466e17b3df..92a54d21b00a 100644 --- a/tests/pos-macros/i8325/Macro_1.scala +++ b/tests/pos-macros/i8325/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -15,7 +15,7 @@ object A: import quotes.reflect.* expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) - case Apply(fun,args) => '{ A.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } + case Apply(fun,args) => '{ O.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325/Test_2.scala b/tests/pos-macros/i8325/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325/Test_2.scala +++ b/tests/pos-macros/i8325/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos-macros/i8325b/Macro_1.scala b/tests/pos-macros/i8325b/Macro_1.scala index 181efa260f9b..139abed94078 100644 --- a/tests/pos-macros/i8325b/Macro_1.scala +++ b/tests/pos-macros/i8325b/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -16,7 +16,7 @@ object A: expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) case r@Apply(fun,args) => '{ - A.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } + O.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325b/Test_2.scala b/tests/pos-macros/i8325b/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325b/Test_2.scala +++ b/tests/pos-macros/i8325b/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos/FromString.scala b/tests/pos/FromString.scala new file mode 100644 index 000000000000..892bc28d9e8d --- /dev/null +++ b/tests/pos/FromString.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future + +trait FromString: + type Self + def fromString(s: String): Self + +given Int forms FromString = _.toInt + +given Double forms FromString = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/cb-companion-joins.scala b/tests/pos/cb-companion-joins.scala new file mode 100644 index 000000000000..e36ed2d4d864 --- /dev/null +++ b/tests/pos/cb-companion-joins.scala @@ -0,0 +1,23 @@ +import language.experimental.modularity +import language.future + 
+trait M: + type Self + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num: + type Self + def zero: Self + +trait A extends M +trait B extends M + +trait AA: + type X: M +trait BB: + type X: Num +class CC[X1: {M, Num}] extends AA, BB: + type X = X1 + X.zero + X.unit diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala new file mode 100644 index 000000000000..b9018c97e151 --- /dev/null +++ b/tests/pos/deferred-givens.scala @@ -0,0 +1,36 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +class Ord[Elem] +given Ord[Double] + +trait A: + type Elem : Ord + def foo = summon[Ord[Elem]] + +class AC extends A: + type Elem = Double + override given Ord[Elem] = ??? + +class AD extends A: + type Elem = Double + +trait B: + type Elem + given Ord[Elem] = deferred + def foo = summon[Ord[Elem]] + +class C extends B: + type Elem = String + override given Ord[Elem] = ??? + +def bar(using Ord[String]) = 1 + +class D(using Ord[String]) extends B: + type Elem = String + +class E(using x: Ord[String]) extends B: + type Elem = String + override given Ord[Elem] = x + +class F[X: Ord] extends B: + type Elem = X diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala new file mode 100644 index 000000000000..494b5d84df21 --- /dev/null +++ b/tests/pos/deferredSummon.scala @@ -0,0 +1,48 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +trait Ord: + type Self + def less(x: Self, y: Self): Boolean + +trait A: + type Elem + given Elem forms Ord = deferred + def foo = summon[Elem forms Ord] + +trait B: + type Elem: Ord + def foo = summon[Elem forms Ord] + +object Inst: + given Int forms Ord: + def less(x: Int, y: Int) = x < y + +object Test1: + import Inst.given + class C extends A: + type Elem = Int + object E extends A: + type Elem = Int + given A: + type Elem = Int + +class D1[T: Ord] extends B: + type Elem = T + +object Test2: + import Inst.given + class C extends B: + type Elem = Int + object E extends B: + type Elem = Int + given B: + type Elem = Int + +class D2[T: Ord] extends B: + type Elem = T + + + + + diff --git a/tests/pos/dep-context-bounds.scala b/tests/pos/dep-context-bounds.scala new file mode 100644 index 000000000000..c724d92e9809 --- /dev/null +++ b/tests/pos/dep-context-bounds.scala @@ -0,0 +1,17 @@ +//> using options -language:experimental.modularity -source future +trait A: + type Self + +object Test1: + def foo[X: A](x: X.Self) = ??? + + def bar[X: A](a: Int) = ??? + + def baz[X: A](a: Int)(using String) = ??? + +object Test2: + def foo[X: A as x](a: x.Self) = ??? + + def bar[X: A as x](a: Int) = ??? + + def baz[X: A as x](a: Int)(using String) = ??? 
diff --git a/tests/pos/depclass-1.scala b/tests/pos/depclass-1.scala new file mode 100644 index 000000000000..38daef85ae98 --- /dev/null +++ b/tests/pos/depclass-1.scala @@ -0,0 +1,19 @@ +//> using options -source future -language:experimental.modularity +class A(tracked val source: String) + +class B(x: Int, tracked val source1: String) extends A(source1) + +class C(tracked val source2: String) extends B(1, source2) + +//class D(source1: String) extends C(source1) +val x = C("hello") +val _: A{ val source: "hello" } = x + +class Vec[Elem](tracked val size: Int) +class Vec8 extends Vec[Float](8) + +val v = Vec[Float](10) +val v2 = Vec8() +val xx: 10 = v.size +val x2: 8 = v2.size + diff --git a/tests/pos/hylolib-extract.scala b/tests/pos/hylolib-extract.scala new file mode 100644 index 000000000000..ea7987dc95a0 --- /dev/null +++ b/tests/pos/hylolib-extract.scala @@ -0,0 +1,29 @@ +//> using options -language:experimental.modularity -source future +package hylotest + +trait Value: + type Self + extension (self: Self) def eq(other: Self): Boolean + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. */ + type Element: Value + +class BitArray + +given Boolean forms Value: + extension (self: Self) def eq(other: Self): Boolean = + self == other + +given BitArray forms Collection: + type Element = Boolean + +extension [Self: Value](self: Self) + def neq(other: Self): Boolean = !self.eq(other) + +extension [Self: Collection](self: Self) + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + ??? diff --git a/tests/pos/hylolib/AnyCollection.scala b/tests/pos/hylolib/AnyCollection.scala new file mode 100644 index 000000000000..c1ae21e71122 --- /dev/null +++ b/tests/pos/hylolib/AnyCollection.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base: Collection](base: Base): AnyCollection[Base.Element] = + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[Base.Position])) + + def at(p: AnyValue): Base.Element = + base.at(p.unsafelyUnwrappedAs[Base.Position]) + + new AnyCollection[Base.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given [T: Value] => AnyCollection[T] forms Collection: + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) + def startPosition = self._start() + def endPosition = self._end() + def positionAfter(p: Position) = self._after(p) + def at(p: Position) = self._at(p) + diff --git a/tests/pos/hylolib/AnyValue.scala b/tests/pos/hylolib/AnyValue.scala new file mode 100644 index 000000000000..27a368cbcd8f --- /dev/null +++ b/tests/pos/hylolib/AnyValue.scala @@ -0,0 +1,67 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. 
+ * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. */ + def apply[T: Value](wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given AnyValue forms Value: + + extension (self: AnyValue) + def copy(): AnyValue = self.copy() + def eq(other: AnyValue): Boolean = self `eq` other + def hashInto(hasher: Hasher): Hasher = self.hashInto(hasher) + diff --git a/tests/pos/hylolib/AnyValueTests.scala b/tests/pos/hylolib/AnyValueTests.scala new file mode 100644 index 000000000000..96d3563f4f53 --- /dev/null +++ b/tests/pos/hylolib/AnyValueTests.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class AnyValueTests extends munit.FunSuite: + + test("eq"): + val a = AnyValue(1) + assert(a `eq` a) + assert(!(a `neq` a)) + + val b = AnyValue(2) + assert(!(a `eq` b)) + assert(a `neq` b) + diff --git a/tests/pos/hylolib/BitArray.scala b/tests/pos/hylolib/BitArray.scala new file mode 100644 index 000000000000..851f7d02e783 --- /dev/null +++ b/tests/pos/hylolib/BitArray.scala @@ -0,0 +1,362 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. 
*/ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. + */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). 
+ */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. 
*/ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. */ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given BitArray.Position forms Value: + + extension (self: BitArray.Position) + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + +given BitArray forms Collection: + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + +given BitArray forms StringConvertible: + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + diff --git a/tests/pos/hylolib/Collection.scala b/tests/pos/hylolib/Collection.scala new file mode 100644 index 000000000000..ccc3b6d6a16e --- /dev/null +++ b/tests/pos/hylolib/Collection.scala @@ -0,0 +1,267 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value + + extension (self: Self) + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def loop(p: Position, n: Int): Int = + if p `eq` e then n else loop(self.positionAfter(p), n + 1) + loop(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if i `eq` e then false + else if j `eq` e then true + else + def recur(n: Position): Boolean = + if n `eq` j then true + else if n `eq` e then false + else recur(self.positionAfter(n)) + recur(self.positionAfter(i)) + + class Slice2(val base: Self, val bounds: Range[Position]): + + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Position = + bounds.lowerBound + + def endPosition: Position = + bounds.upperBound + + def at(p: Position): Element = + base.at(p) + end Slice2 + +end Collection + +extension [Self: Collection](self: Self) + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(Self.Element, Slice[Self])] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + def headAndTail2: Option[(Self.Element, Self.Slice2)] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Self.Slice2(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T)(combine: (T, Self.Element) => T): T = + val e = self.endPosition + def loop(p: Self.Position, r: T): T = + if p `eq` e then r + else loop(self.positionAfter(p), combine(r, self.at(p))) + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: Self.Element => Boolean): Boolean = + val e = self.endPosition + def loop(p: Self.Position): Boolean = + if p `eq` e then true + else if !action(self.at(p)) then false + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T: Value](transform: Self.Element => T): HyArray[T] = + self.reduce(HyArray[T]()): (r, e) => + r.append(transform(e), assumeUniqueness = true) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: Self.Element => Boolean): HyArray[Self.Element] = + self.reduce(HyArray[Self.Element]()): (r, e) => + if isIncluded(e) then r.append(e, assumeUniqueness = true) else r + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def allSatisfy(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def firstPositionWhere(predicate: Self.Element => Boolean): Option[Self.Position] = + val e = self.endPosition + def loop(p: Self.Position): Option[Self.Position] = + if p `eq` e then None + else if predicate(self.at(p)) then Some(p) + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Self.Element forms Comparable): Option[Self.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Self.Element forms Comparable): Option[Self.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + if self.isEmpty then + None + else + val e = self.endPosition + def loop(p: Self.Position, least: Self.Element): Self.Element = + if p `eq` e then + least + else + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + loop(self.positionAfter(p), y) + val b = self.startPosition + Some(loop(self.positionAfter(b), self.at(b))) + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. 
*/ + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + def loop(i: Self.Position, j: T.Position): Boolean = + if i `eq` self.endPosition then + j `eq` other.endPosition + else if j `eq` other.endPosition then + false + else if self.at(i) `neq` other.at(j)then + false + else + loop(self.positionAfter(i), other.positionAfter(j)) + loop(self.startPosition, other.startPosition) +end extension diff --git a/tests/pos/hylolib/CollectionTests.scala b/tests/pos/hylolib/CollectionTests.scala new file mode 100644 index 000000000000..d884790f64d7 --- /dev/null +++ b/tests/pos/hylolib/CollectionTests.scala @@ -0,0 +1,67 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class CollectionTests extends munit.FunSuite: + + test("isEmpty"): + val empty = AnyCollection(HyArray[Int]()) + assert(empty.isEmpty) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2)) + assert(!nonEmpty.isEmpty) + + test("count"): + val a = AnyCollection(HyArray[Int](1, 2)) + assertEquals(a.count, 2) + + test("isBefore"): + val empty = AnyCollection(HyArray[Int]()) + assert(!empty.isBefore(empty.startPosition, empty.endPosition)) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2)) + val p0 = nonEmpty.startPosition + val p1 = nonEmpty.positionAfter(p0) + val p2 = nonEmpty.positionAfter(p1) + assert(nonEmpty.isBefore(p0, nonEmpty.endPosition)) + assert(nonEmpty.isBefore(p1, nonEmpty.endPosition)) + assert(!nonEmpty.isBefore(p2, nonEmpty.endPosition)) + + test("headAndTail"): + val empty = AnyCollection(HyArray[Int]()) + assertEquals(empty.headAndTail, None) + + val one = AnyCollection(HyArray[Int](1)) + val Some((h0, t0)) = one.headAndTail: @unchecked + assert(h0 eq 1) + assert(t0.isEmpty) + + val two = AnyCollection(HyArray[Int](1, 2)) + val Some((h1, t1)) = two.headAndTail: @unchecked + assertEquals(h1, 1) + assertEquals(t1.count, 1) + + test("reduce"): + val empty = AnyCollection(HyArray[Int]()) + assertEquals(empty.reduce(0)((s, x) => s + x), 0) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3)) + assertEquals(nonEmpty.reduce(0)((s, x) => s + x), 6) + + test("forEach"): + val empty = AnyCollection(HyArray[Int]()) + assert(empty.forEach((e) => false)) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3)) + var s = 0 + assert(nonEmpty.forEach((e) => { s += e; true })) + assertEquals(s, 6) + + s = 0 + assert(!nonEmpty.forEach((e) => { s += e; false })) + assertEquals(s, 1) + + test("elementsEqual"): + val a = HyArray(1, 2) + assert(a.elementsEqual(a)) +end CollectionTests diff --git a/tests/pos/hylolib/CoreTraits.scala b/tests/pos/hylolib/CoreTraits.scala new file mode 100644 index 000000000000..f4b3699b430e --- /dev/null +++ b/tests/pos/hylolib/CoreTraits.scala @@ -0,0 +1,56 @@ +package hylo + +/** A type whose instance can be treated as independent values. + * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value: + type Self + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + def neq(other: Self): Boolean = !self.eq(other) + + /** Hashes the salient parts of `self` into `hasher`. 
*/ + def hashInto(hasher: Hasher): Hasher + + } + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable extends Value { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib/Hasher.scala b/tests/pos/hylolib/Hasher.scala new file mode 100644 index 000000000000..ca45550ed002 --- /dev/null +++ b/tests/pos/hylolib/Hasher.scala @@ -0,0 +1,39 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib/HyArray.scala b/tests/pos/hylolib/HyArray.scala new file mode 100644 index 000000000000..52faf3647bc1 --- /dev/null +++ b/tests/pos/hylolib/HyArray.scala @@ -0,0 +1,202 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element: Value as elementIsCValue]( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. 
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + /** Adds the contents of `source` at the end of the array. */ + def appendContents[C: Collection { type Element = HyArray.this.Element }]( + source: C, assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + source.reduce(result): (r, e) => + r.append(e, assumeUniqueness = true) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. 
+ reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T: Value](elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given [T: Value] => HyArray[T] forms Value: + + extension (self: HyArray[T]) + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher)((h, e) => e.hashInto(h)) + +given [T: Value] => HyArray[T] forms Collection: + + type Element = T + type Position = Int + + extension (self: HyArray[T]) + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + +given [T: {Value, StringConvertible}] => HyArray[T] forms StringConvertible: + extension (self: HyArray[T]) + override def description: String = + val contents = mutable.StringBuilder() + self.forEach: e => + contents ++= e.description + true + s"[${contents.mkString(", ")}]" diff --git a/tests/pos/hylolib/HyArrayTests.scala b/tests/pos/hylolib/HyArrayTests.scala new file mode 100644 index 000000000000..0de65603d0c7 --- /dev/null +++ b/tests/pos/hylolib/HyArrayTests.scala @@ -0,0 +1,17 @@ +import hylo.* +import hylo.given + +class HyArrayTests extends munit.FunSuite: + + test("reserveCapacity"): + var a = HyArray[Int]() + a = a.append(1) + a = a.append(2) + + a = a.reserveCapacity(10) + assert(a.capacity >= 10) + assertEquals(a.count, 2) + assertEquals(a.at(0), 1) + assertEquals(a.at(1), 2) + +end HyArrayTests diff --git a/tests/pos/hylolib/Integers.scala b/tests/pos/hylolib/Integers.scala new file mode 100644 index 000000000000..d493729740a9 --- /dev/null +++ b/tests/pos/hylolib/Integers.scala @@ -0,0 +1,46 @@ +package hylo + +given Boolean forms Value: + + extension (self: Boolean) + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + +given Int forms Value: + + extension (self: Int) + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. 
+ self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + +given Int forms Comparable: + + extension (self: Int) + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + +given Int forms StringConvertible diff --git a/tests/pos/hylolib/IntegersTests.scala b/tests/pos/hylolib/IntegersTests.scala new file mode 100644 index 000000000000..74dedf30d83e --- /dev/null +++ b/tests/pos/hylolib/IntegersTests.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class IntegersTests extends munit.FunSuite: + + test("Int.hashInto"): + val x = Hasher.hash(42) + val y = Hasher.hash(42) + assertEquals(x, y) + + val z = Hasher.hash(1337) + assertNotEquals(x, z) + diff --git a/tests/pos/hylolib/Range.scala b/tests/pos/hylolib/Range.scala new file mode 100644 index 000000000000..b0f50dd55c8c --- /dev/null +++ b/tests/pos/hylolib/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an upper bound. */ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is less than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is less than or equal to `upperBound`. + */ + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib/Slice.scala b/tests/pos/hylolib/Slice.scala new file mode 100644 index 000000000000..c0860fadd35c --- /dev/null +++ b/tests/pos/hylolib/Slice.scala @@ -0,0 +1,63 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base: Collection]( + val base: Base, + val bounds: Range[Base.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Base.Position = + bounds.lowerBound + + def endPosition: Base.Position = + bounds.upperBound + + def positionAfter(p: Base.Position): Base.Position = + base.positionAfter(p) + + def at(p: Base.Position): Base.Element = + base.at(p) + +} + +given [C: Collection] => Slice[C] forms Collection: + + type Element = C.Element + type Position = C.Position + + extension (self: Slice[C]) + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] + // This is actually unsafe. We have: + // self.bounds: Range(Slice[C].Base.Position) + // But the _value_ of Slice[C].Base is not necessarily this given, even + // though it is true that `type Slice[C].Base = C`. There might be multiple + // implementations of `Slice[C] is Collection` that define different `Position` + // types. So we cannot conclude that `Slice[C].Base.Position = this.Position`. 
+ // To make this safe, we'd need some form of coherence, where we ensure that + // there is only one way to implement `Slice is Collection`. + // + // As an alternative, we can make Slice dependent on the original Collection + // _instance_ instead of the original Collection _type_. This design is + // realized by the Slice2 definitions. It works without casts. + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + +given [C: Collection] => C.Slice2 forms Collection: + type Element = C.Element + type Position = C.Position + + extension (self: C.Slice2) + + def startPosition = self.bounds.lowerBound + def endPosition = self.bounds.upperBound + def positionAfter(p: Position) = self.base.positionAfter(p) + def at(p: Position) = self.base.at(p) diff --git a/tests/pos/hylolib/StringConvertible.scala b/tests/pos/hylolib/StringConvertible.scala new file mode 100644 index 000000000000..cf901d9a3313 --- /dev/null +++ b/tests/pos/hylolib/StringConvertible.scala @@ -0,0 +1,9 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible: + type Self + + /** Returns a textual description of `self`. */ + extension (self: Self) + def description: String = self.toString diff --git a/tests/pos/hylolib/Test.scala b/tests/pos/hylolib/Test.scala new file mode 100644 index 000000000000..9e8d6181affd --- /dev/null +++ b/tests/pos/hylolib/Test.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +object munit: + open class FunSuite: + def test(name: String)(op: => Unit): Unit = op + def assertEquals[T](x: T, y: T) = assert(x == y) + def assertNotEquals[T](x: T, y: T) = assert(x != y) + +@main def Test = + CollectionTests() + AnyValueTests() + HyArrayTests() + IntegersTests() + println("done") diff --git a/tests/pos/i10929-new-syntax.scala b/tests/pos/i10929-new-syntax.scala new file mode 100644 index 000000000000..3771c6c80658 --- /dev/null +++ b/tests/pos/i10929-new-syntax.scala @@ -0,0 +1,22 @@ +//> using options -language:experimental.modularity -source future +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple forms TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest forms TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + (f(x.head) *: Rest.map(x.tail)(f)) + +def foo[T: TupleOf[Int]](xs: T): T.Mapped[Int] = T.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok diff --git a/tests/pos/i10929.scala b/tests/pos/i10929.scala new file mode 100644 index 000000000000..e916e4547e59 --- /dev/null +++ b/tests/pos/i10929.scala @@ -0,0 +1,21 @@ +//> using options -language:experimental.modularity -source future +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] =
+ (f(x.head) *: tup.map(x.tail)(f)) + +def foo[T](xs: T)(using tup: T TupleOf Int): tup.Mapped[Int] = tup.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok \ No newline at end of file diff --git a/tests/pos/i13580.scala b/tests/pos/i13580.scala new file mode 100644 index 000000000000..c3c491a19dbe --- /dev/null +++ b/tests/pos/i13580.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +trait IntWidth: + type Out +given IntWidth: + type Out = 155 + +trait IntCandidate: + type Out +given (using tracked val w: IntWidth) => IntCandidate: + type Out = w.Out + +val x = summon[IntCandidate] +val xx = summon[x.Out =:= 155] diff --git a/tests/pos/i15264.scala b/tests/pos/i15264.scala index 05992df61b94..5be8436c12ba 100644 --- a/tests/pos/i15264.scala +++ b/tests/pos/i15264.scala @@ -30,6 +30,7 @@ object repro: // if you import these don't import from 'context' above object qcontext: // base defs, like what you would get from cats + given ga: A[Int] = new B[Int] // added so that we don't get an ambiguity in test2 given gb: B[Int] = new B[Int] given gc: C[Int] = new C[Int] @@ -45,9 +46,9 @@ object test1: // these will work val a = summon[A[Int]] + object test2: import repro.* import repro.qcontext.given - // this one will fail as ambiguous - prios aren't having an effect - val a = summon[A[Q[Int]]] \ No newline at end of file + val a = summon[A[Q[Int]]] diff --git a/tests/pos/i3920.scala b/tests/pos/i3920.scala new file mode 100644 index 000000000000..6cd74187098f --- /dev/null +++ b/tests/pos/i3920.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Ordering { + type T + def compare(t1:T, t2: T): Int +} + +class SetFunctor(tracked val ord: Ordering) { + type Set = List[ord.T] + def empty: Set = Nil + + implicit class helper(s: Set) { + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def member(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + } +} + +object Test { + val orderInt = new Ordering { + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + } + + val IntSet = new SetFunctor(orderInt) + import IntSet.* + + def main(args: Array[String]) = { + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.member(7)) + assert(set.member(8)) + } +} \ No newline at end of file diff --git a/tests/pos/i3964.scala b/tests/pos/i3964.scala new file mode 100644 index 000000000000..42412b910899 --- /dev/null +++ b/tests/pos/i3964.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +object Test3: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) + class Lst8 extends Lst(8) + + val v8a: Vec { val size: 8 } = new Vec8 + val v8b: Vec { val size: 8 } = new Vec(8) {} + + val l8a: Lst { val size: 8 } = new Lst8 + val l8b: Lst { val size: 8 } = new Lst(8) {} + + class VecN(tracked val n: Int) extends Vec(n) + class Vec9 extends VecN(9) + val v9a = VecN(9) + val _: Vec { val size: 9 } = v9a + val v9b = Vec9() + val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/i3964a/Defs_1.scala 
b/tests/pos/i3964a/Defs_1.scala new file mode 100644 index 000000000000..7dcc89f7003e --- /dev/null +++ b/tests/pos/i3964a/Defs_1.scala @@ -0,0 +1,18 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +package coll: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) \ No newline at end of file diff --git a/tests/pos/i3964a/Uses_2.scala b/tests/pos/i3964a/Uses_2.scala new file mode 100644 index 000000000000..9d1b6ebaa58b --- /dev/null +++ b/tests/pos/i3964a/Uses_2.scala @@ -0,0 +1,16 @@ +//> using options -source future -language:experimental.modularity +import coll.* +class Lst8 extends Lst(8) + +val v8a: Vec { val size: 8 } = new Vec8 +val v8b: Vec { val size: 8 } = new Vec(8) {} + +val l8a: Lst { val size: 8 } = new Lst8 +val l8b: Lst { val size: 8 } = new Lst(8) {} + +class VecN(tracked val n: Int) extends Vec(n) +class Vec9 extends VecN(9) +val v9a = VecN(9) +val _: Vec { val size: 9 } = v9a +val v9b = Vec9() +val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/i7045.scala b/tests/pos/i7045.scala deleted file mode 100644 index e683654dd5c3..000000000000 --- a/tests/pos/i7045.scala +++ /dev/null @@ -1,9 +0,0 @@ -trait Bar { type Y } -trait Foo { type X } - -class Test: - given a1(using b: Bar): Foo = new Foo { type X = b.Y } - - given a2(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } - - given a3(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } diff --git a/tests/pos/ord-over-tracked.scala b/tests/pos/ord-over-tracked.scala new file mode 100644 index 000000000000..a9b4aba556e1 --- /dev/null +++ b/tests/pos/ord-over-tracked.scala @@ -0,0 +1,15 @@ +import language.experimental.modularity + +trait Ord[T]: + def lt(x: T, y: T): Boolean + +given Ord[Int] = ??? + +case class D(tracked val x: Int) +given [T <: D]: Ord[T] = (a, b) => a.x < b.x + +def mySort[T: Ord](x: Array[T]): Array[T] = ??? + +def test = + val arr = Array(D(1)) + val arr1 = mySort(arr) // error: no given instance of type Ord[D{val x: (1 : Int)}] \ No newline at end of file diff --git a/tests/pos/overload-disambiguation.scala b/tests/pos/overload-disambiguation.scala new file mode 100644 index 000000000000..58b085758d92 --- /dev/null +++ b/tests/pos/overload-disambiguation.scala @@ -0,0 +1,13 @@ +class A +class B +class C[-T] + +def foo(using A): C[Any] = ??? +def foo(using B): C[Int] = ??? 
+ + +@main def Test = + given A = A() + given B = B() + val x = foo + val _: C[Any] = x diff --git a/tests/pos/parent-refinement.scala b/tests/pos/parent-refinement.scala new file mode 100644 index 000000000000..eaa74228c5d6 --- /dev/null +++ b/tests/pos/parent-refinement.scala @@ -0,0 +1,48 @@ +//> using options -source future -language:experimental.modularity + +class A +class B extends A +class C extends B + +trait Id { type Value } +type IdOf[T] = Id { type Value = T } +trait X { type Value } + +case class Year(value: Int) extends IdOf[Int]: + val x: Value = 2 + +type Between[Lo, Hi] = X { type Value >: Lo <: Hi } + +class Foo() extends IdOf[B], Between[C, A]: + val x: Value = B() + +trait Bar extends IdOf[Int], (X { type Value = String }) + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = "" + +trait Gen: + type T + val x: T + +type IntInst = Gen: + type T = Int + val x: 0 + +trait IntInstTrait extends IntInst + +abstract class IntInstClass extends IntInstTrait, IntInst + +object obj1 extends IntInstTrait: + val x = 0 + +object obj2 extends IntInstClass: + val x = 0 + +def main = + val x: obj1.T = 2 - obj2.x + val y: obj2.T = 2 - obj1.x + + + diff --git a/tests/pos/parsercombinators-arrow.scala b/tests/pos/parsercombinators-arrow.scala new file mode 100644 index 000000000000..774adf12df48 --- /dev/null +++ b/tests/pos/parsercombinators-arrow.scala @@ -0,0 +1,48 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given [C, E] => Apply[C, E] forms Combinator: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + +given [A: Combinator, B: Combinator { type Context = A.Context }] + => Combine[A, B] forms Combinator: + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked diff --git a/tests/pos/parsercombinators-ctx-bounds.scala b/tests/pos/parsercombinators-ctx-bounds.scala new file mode 100644 index 000000000000..d77abea5e539 --- /dev/null +++ b/tests/pos/parsercombinators-ctx-bounds.scala @@ -0,0 +1,49 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: [X] =>> Combinator[X] { type Context = A.Context }] + : Combinator[Combine[A, B]] with + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/parsercombinators-expanded.scala b/tests/pos/parsercombinators-expanded.scala new file mode 100644 index 000000000000..cf8137bfe8eb --- /dev/null +++ b/tests/pos/parsercombinators-expanded.scala @@ -0,0 +1,64 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +object test: + + class apply[C, E] extends Combinator[Apply[C, E]]: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + + def apply[C, E]: apply[C, E] = new apply[C, E] + + class combine[A, B]( + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context} + ) extends Combinator[Combine[A, B]]: + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + + def combine[A, B]( + _f: Combinator[A], + _s: Combinator[B] { type Context = _f.Context} + ) = new combine[A, B](_f, _s) + // cast is needed since the type of new combine[A, B](_f, _s) + // drops the required refinement. 
+ + extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + + @main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val c = combine( + apply[mutable.ListBuffer[Int], Int], + apply[mutable.ListBuffer[Int], Int] + ) + val r = c.parse(m)(stream) // was type mismatch, now OK + val rc: Option[(Int, Int)] = r + } diff --git a/tests/pos/parsercombinators-givens-2.scala b/tests/pos/parsercombinators-givens-2.scala new file mode 100644 index 000000000000..8349d69a30af --- /dev/null +++ b/tests/pos/parsercombinators-givens-2.scala @@ -0,0 +1,52 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B, C](using + f: Combinator[A] { type Context = C }, + s: Combinator[B] { type Context = C } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? + } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // works, but Element type is not resolved correctly +} diff --git a/tests/pos/parsercombinators-givens.scala b/tests/pos/parsercombinators-givens.scala new file mode 100644 index 000000000000..5b5588c93840 --- /dev/null +++ b/tests/pos/parsercombinators-givens.scala @@ -0,0 +1,54 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B](using + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? 
+ } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/parsercombinators-new-syntax.scala b/tests/pos/parsercombinators-new-syntax.scala new file mode 100644 index 000000000000..f6813214b1b4 --- /dev/null +++ b/tests/pos/parsercombinators-new-syntax.scala @@ -0,0 +1,45 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + type Self + type Input + type Result + + extension (self: Self) + /// Parses and returns an element from input `in`. + def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](first: A, second: B) + +given [I, R] => Apply[I, R] forms Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] forms Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for x <- self.first.parse(in); y <- self.second.parse(in) yield (x, y) + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // was error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Input` + val rc: Option[(Int, Int)] = r + diff --git a/tests/pos/parsercombinators-this.scala b/tests/pos/parsercombinators-this.scala new file mode 100644 index 000000000000..70b423985400 --- /dev/null +++ b/tests/pos/parsercombinators-this.scala @@ -0,0 +1,53 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator with { + type Self = Apply[C, E] + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: Combinator { type Context = A.Context }] + : Combinator with + type Self = Combine[A, B] + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? 
+ +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/sets-tc.scala b/tests/pos/sets-tc.scala new file mode 100644 index 000000000000..40b3fc968369 --- /dev/null +++ b/tests/pos/sets-tc.scala @@ -0,0 +1,46 @@ +import language.experimental.modularity + +// First version: higher-kinded self type +object v1: + trait Sets: + type Self[A] + def empty[A]: Self[A] + def union[A](self: Self[A], other: Self[A]): Self[A] + + case class ListSet[A](elems: List[A]) + + given ListSet forms Sets: + def empty[A]: ListSet[A] = ListSet(Nil) + + def union[A](self: ListSet[A], other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S[_]: Sets](xs: List[S[A]]): S[A] = + xs.foldLeft(S.empty)(S.union) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + + // Second version: parameterized type class +object v2: + trait Sets[A]: + type Self + def empty: Self + extension (s: Self) def union (other: Self): Self + + case class ListSet[A](elems: List[A]) + + given [A] => ListSet[A] forms Sets[A]: + def empty: ListSet[A] = ListSet(Nil) + + extension (self: ListSet[A]) def union(other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S: Sets[A]](xs: List[S]): S = + xs.foldLeft(S.empty)(_ `union` _) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + diff --git a/tests/pos/showtest.scala b/tests/pos/showtest.scala new file mode 100644 index 000000000000..9a54e30c958a --- /dev/null +++ b/tests/pos/showtest.scala @@ -0,0 +1,13 @@ +trait Show[T]: + extension (x: T) def show: String + +given showAny[T]: Show[T] = _.toString +given showInt: Show[Int] = x => s"INT $x" + +def print[T: Show](x: T) = println(x.show) + +@main def Test = + println("hello".show) + println(1.show) + print("hello".show) + print(1) diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala new file mode 100644 index 000000000000..82bfa3973d3d --- /dev/null +++ b/tests/pos/typeclass-aggregates.scala @@ -0,0 +1,47 @@ +//> using options -source future -language:experimental.modularity +trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + + trait OrdProxy extends Ord: + export Ord.this.* + +trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait SemiGroupProxy extends SemiGroup: + export SemiGroup.this.* + +trait Monoid extends SemiGroup: + def unit: Self + + trait MonoidProxy extends Monoid: + export Monoid.this.* + +def ordWithMonoid(ord: Ord, monoid: Monoid{ type Self = ord.Self }): Ord & Monoid = + new ord.OrdProxy with monoid.MonoidProxy {} + +trait OrdWithMonoid extends Ord, Monoid + +def ordWithMonoid2(ord: Ord, monoid: Monoid{ type Self = ord.Self }) = //: OrdWithMonoid { type Self = ord.Self} = + new OrdWithMonoid with ord.OrdProxy with monoid.MonoidProxy {} + +given intOrd: (Ord { type Self = Int }) = ??? 
+given intMonoid: (Monoid { type Self = Int }) = ??? + +//given (using ord: Ord, monoid: Monoid{ type Self = ord.Self }): (Ord & Monoid { type Self = ord.Self}) = +// ordWithMonoid2(ord, monoid) + +val x = summon[Ord & Monoid { type Self = Int}] +val y: Int = ??? : x.Self + +// given [A, B](using ord: A forms Ord, monoid: A forms Monoid) => A forms Ord & Monoid = +// new ord.OrdProxy with monoid.MonoidProxy {} + +given [A](using ord: Ord { type Self = A }, monoid: Monoid { type Self = A}): ((Ord & Monoid) { type Self = A}) = + new ord.OrdProxy with monoid.MonoidProxy {} + diff --git a/tests/pos/typeclasses-arrow.scala b/tests/pos/typeclasses-arrow.scala new file mode 100644 index 000000000000..ebf963334db2 --- /dev/null +++ b/tests/pos/typeclasses-arrow.scala @@ -0,0 +1,140 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Int forms Ord as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] forms Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List forms Monad as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] forms Monad as readerMonad: + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T forms Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // Associated function signature; `Self` 
refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep forms Animal: + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses-this.scala b/tests/pos/typeclasses-this.scala new file mode 100644 index 000000000000..f6d3872a8c2e --- /dev/null +++ b/tests/pos/typeclasses-this.scala @@ -0,0 +1,141 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given intOrd: Int forms Ord with + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + +// given [T](using tracked val ev: Ord { type Self = T}): Ord { type Self = List[T] } with + given [T: Ord]: List[T] forms Ord with + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given listMonad: List forms Monad with + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given readerMonad[Ctx]: Reader[Ctx] forms Monad with + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given descending[T: Ord]: T forms Ord with + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + 
maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep forms Animal with + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala index 07fe5a31ce5d..b61edd09cad1 100644 --- a/tests/pos/typeclasses.scala +++ b/tests/pos/typeclasses.scala @@ -1,66 +1,45 @@ -class Common: +//> using options -source future -language:experimental.modularity - // this should go in Predef - infix type at [A <: { type This}, B] = A { type This = B } +class Common: trait Ord: - type This - extension (x: This) - def compareTo(y: This): Int - def < (y: This): Boolean = compareTo(y) < 0 - def > (y: This): Boolean = compareTo(y) > 0 + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 trait SemiGroup: - type This - extension (x: This) def combine(y: This): This + type Self + extension (x: Self) def combine(y: Self): Self trait Monoid extends SemiGroup: - def unit: This + def unit: Self trait Functor: - type This[A] - extension [A](x: This[A]) def map[B](f: A => B): This[B] + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] trait Monad extends Functor: - def pure[A](x: A): This[A] - extension [A](x: This[A]) - def flatMap[B](f: A => This[B]): This[B] + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] def map[B](f: A => B) = x.flatMap(f `andThen` pure) + end Common object Instances extends Common: -/* - instance Int: Ord as intOrd with + given intOrd: (Int forms Ord) with + type Self = Int extension (x: Int) def compareTo(y: Int) = if x < y then -1 else if x > y then +1 else 0 -*/ - given intOrd: Ord with - type This = Int - extension (x: Int) - def compareTo(y: Int) = - if x < y then -1 - else if x > y then +1 - else 0 -/* - instance List[T: Ord]: Ord as listOrd with - extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs1, y :: ys1) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs1.compareTo(ys1) -*/ - // Proposed short syntax: - // given listOrd[T: Ord as ord]: Ord at T with - given listOrd[T](using ord: Ord 
{ type This = T}): Ord with - type This = List[T] + given listOrd[T](using ord: T forms Ord): (List[T] forms Ord) with extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -70,32 +49,18 @@ object Instances extends Common: if (fst != 0) fst else xs1.compareTo(ys1) end listOrd -/* - instance List: Monad as listMonad with + given listMonad: (List forms Monad) with extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = List(x) -*/ - given listMonad: Monad with - type This[A] = List[A] - extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = - xs.flatMap(f) - def pure[A](x: A): List[A] = - List(x) -/* - type Reader[Ctx] = X =>> Ctx => X - instance Reader[Ctx: _]: Monad as readerMonad with - extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = - ctx => f(r(ctx))(ctx) - def pure[A](x: A): Ctx => A = - ctx => x -*/ + type Reader[Ctx] = [X] =>> Ctx => X - given readerMonad[Ctx]: Monad with - type This[X] = Ctx => X + //given [Ctx] => Reader[Ctx] forms Monad as readerMonad: + + given readerMonad[Ctx]: (Reader[Ctx] forms Monad) with extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -110,29 +75,17 @@ object Instances extends Common: def second = xs.tail.head def third = xs.tail.tail.head - //Proposed short syntax: - //extension [M: Monad as m, A](xss: M[M[A]]) - // def flatten: M[A] = - // xs.flatMap(identity) - - extension [M, A](using m: Monad)(xss: m.This[m.This[A]]) - def flatten: m.This[A] = + extension [M, A](using m: Monad)(xss: m.Self[m.Self[A]]) + def flatten: m.Self[A] = xss.flatMap(identity) - // Proposed short syntax: - //def maximum[T: Ord](xs: List[T]: T = - def maximum[T](xs: List[T])(using Ord at T): T = + def maximum[T](xs: List[T])(using T forms Ord): T = xs.reduceLeft((x, y) => if (x < y) y else x) - // Proposed short syntax: - // def descending[T: Ord as asc]: Ord at T = new Ord: - def descending[T](using asc: Ord at T): Ord at T = new Ord: - type This = T + def descending[T](using asc: T forms Ord): T forms Ord = new: extension (x: T) def compareTo(y: T) = asc.compareTo(y)(x) - // Proposed short syntax: - // def minimum[T: Ord](xs: List[T]) = - def minimum[T](xs: List[T])(using Ord at T) = + def minimum[T](xs: List[T])(using T forms Ord) = maximum(xs)(using descending) def test(): Unit = @@ -148,12 +101,12 @@ object Instances extends Common: // wc Scala: 30 115 853 // wc Rust : 57 193 1466 trait Animal: - type This - // Associated function signature; `This` refers to the implementor type. - def apply(name: String): This + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self // Method signatures; these will return a string. - extension (self: This) + extension (self: Self) def name: String def noise: String def talk(): Unit = println(s"$name, $noise") @@ -171,7 +124,7 @@ class Sheep(val name: String): /* instance Sheep: Animal with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" override def talk(): Unit = @@ -179,10 +132,9 @@ instance Sheep: Animal with */ // Implement the `Animal` trait for `Sheep`. 
-given Animal with - type This = Sheep +given (Sheep forms Animal) with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" override def talk(): Unit = diff --git a/tests/pos/unapplied-types.scala b/tests/pos/unapplied-types.scala new file mode 100644 index 000000000000..604e63deb8ad --- /dev/null +++ b/tests/pos/unapplied-types.scala @@ -0,0 +1,7 @@ +trait T { + type L[X] = List[X] + type T1 <: L // was error: takes type parameters + type T2 = L // was error: takes type parameters + type T3 = List // was error: takes type parameters + type T4 <: List // was error: takes type parameters +} diff --git a/tests/run/for-desugar-strawman.scala b/tests/run/for-desugar-strawman.scala new file mode 100644 index 000000000000..a92b19b9150a --- /dev/null +++ b/tests/run/for-desugar-strawman.scala @@ -0,0 +1,96 @@ + +@main def Test = + println: + for + x <- List(1, 2, 3) + y = x + x + if x >= 2 + i <- List.range(0, y) + z = i * i + if z % 2 == 0 + yield + i * x + + println: + val xs = List(1, 2, 3) + xs.flatMapDefined: x => + val y = x + x + xs.applyFilter(x >= 2): + val is = List.range(0, y) + is.mapDefined: i => + val z = i * i + is.applyFilter(z % 2 == 0): + i * x + +extension [A](as: List[A]) + + def applyFilter[B](p: => Boolean)(b: => B) = + if p then Some(b) else None + + def flatMapDefined[B](f: A => Option[IterableOnce[B]]): List[B] = + as.flatMap: x => + f(x).getOrElse(Nil) + + def mapDefined[B](f: A => Option[B]): List[B] = + as.flatMap(f) + +object UNDEFINED + +extension [A](as: Vector[A]) + + def applyFilter[B](p: => Boolean)(b: => B) = + if p then b else UNDEFINED + + def flatMapDefined[B](f: A => IterableOnce[B] | UNDEFINED.type): Vector[B] = + as.flatMap: x => + f(x) match + case UNDEFINED => Nil + case y: IterableOnce[B] => y + + def mapDefined[B](f: A => B | UNDEFINED.type): Vector[B] = + as.flatMap: x => + f(x) match + case UNDEFINED => Nil + case y: B => y :: Nil + +/* +F ::= val x = E; F + x <- E; G +G ::= [] + val x = E; G + if E; G + x <- E; G + +Translation scheme: + +{ for F yield E }c where c = undefined +{ for G yield E }c where c is a reference to the generator preceding the G sequence + +{ for [] yield E }c = E +{ for p = Ep; G yield E }c = val p = Ep; { for G yield E }c +{ for if Ep; G yield E}c = c.applyFilter(Ep)({ for G yield E }c) +{ for p <- Ep; G yield E }c = val c1 = Ep; c1.BIND{ case p => { for G yield E }c1 } (c1 fresh) + + where BIND = flatMapDefined if isGen(G), isFilter(G) + = mapDefined if !isGen(G), isFilter(G) + = flatMap if isGen(G), !isFilter(G) + = map if !isGen(G), !isFilter(G) + +{ for case p <- Ep; G yield E }c = { for $x <- Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E }c +{ for case p = Ep; G yield E }c = { for $x = Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E}c + +isFilter(if E; S) +isFilter(val x = E; S) if isFilter(S) + +isGen(x <- E; S) +isGen(val x = E; S) if isGen(S) +isGen(if E; S) if isGen(S) + +*/ + +val foo = 1 + +def main2 = + foo + ??? + ??? 
match { case _ => 0 } \ No newline at end of file diff --git a/tests/run/given-disambiguation.scala b/tests/run/given-disambiguation.scala new file mode 100644 index 000000000000..daba4d146e7f --- /dev/null +++ b/tests/run/given-disambiguation.scala @@ -0,0 +1,58 @@ +import language.experimental.modularity +import language.future + +trait M: + type Self + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num: + type Self + def zero: Self + +trait A extends M +trait B extends M + +def f[X: {M, A, B}](x: X) = + summon[X forms M] + x.combine(x) + +trait AA: + type XX: {M, A, B} + val x = XX.unit + val A: String = "hello" + +trait AAA: + type X: M +trait BBB: + type X: Num +class CCC[X1: {M, Num}] extends AAA, BBB: + type X = X1 + X.zero + X.unit + +@main def Test = + class C + + given C forms M: + extension (x: Self) def combine (y: Self) = "M" + def unit = C() + + given C forms A: + extension (x: Self) def combine (y: Self) = "A" + def unit = C() + + given C forms B: + extension (x: Self) def combine (y: Self) = "B" + def unit = C() + + assert(f(C()) == "M") + + class CC extends AA: + type XX = C + assert(A.length == 5) + assert(A.toString == "hello") + + CC() + + diff --git a/tests/run/given-triangle.check b/tests/run/given-triangle.check new file mode 100644 index 000000000000..5ba9e6a1e8b9 --- /dev/null +++ b/tests/run/given-triangle.check @@ -0,0 +1,3 @@ +class A +class B +class C diff --git a/tests/run/given-triangle.scala b/tests/run/given-triangle.scala new file mode 100644 index 000000000000..5ddba8df8b7b --- /dev/null +++ b/tests/run/given-triangle.scala @@ -0,0 +1,16 @@ +import language.future + +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +def f(using a: A, b: B, c: C) = + println(a.getClass) + println(b.getClass) + println(c.getClass) + +@main def Test = f diff --git a/tests/run/i15840.scala b/tests/run/i15840.scala new file mode 100644 index 000000000000..0f238e2e7148 --- /dev/null +++ b/tests/run/i15840.scala @@ -0,0 +1,27 @@ +//> using options -language:experimental.modularity -source future + +trait Nat: + type N <: Nat + +class _0 extends Nat: + type N = _0 + +class NatOps[N <: Nat](tracked val n: N): + def toInt(using toIntN: ToInt[n.N]): Int = toIntN() + +// works +def toInt[N <: Nat](n: N)(using toIntN: ToInt[n.N]) = toIntN() + +sealed abstract class ToInt[N <: Nat]: + def apply(): Int + +object ToInt: + given ToInt[_0] { + def apply() = 0 + } + +@main def Test() = + assert(toInt(new _0) == 0) + assert(NatOps[_0](new _0).toInt == 0) + assert: + NatOps(new _0).toInt == 0 // did not work diff --git a/tests/run/i3920.scala b/tests/run/i3920.scala new file mode 100644 index 000000000000..c66fd8908976 --- /dev/null +++ b/tests/run/i3920.scala @@ -0,0 +1,26 @@ +//> using options -source future -language:experimental.modularity +trait Ordering: + type T + def compare(t1:T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) \ No newline at end of file diff --git 
a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index 51fa02d91cfd..14954eddf2ef 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -1,3 +1,5 @@ +import language.`3.5` + case class Show[T](val i: Int) object Show { def apply[T](implicit st: Show[T]): Int = st.i @@ -38,5 +40,5 @@ object Test extends App { assert(Show[Int] == 0) assert(Show[String] == 1) assert(Show[Generic] == 1) // showGen loses against fallback due to longer argument list - assert(Show[Generic2] == 2) // ... but the opaque type intersection trick works. + assert(Show[Generic2] == 1) // ... and the opaque type intersection trick no longer works with new resolution rules. } diff --git a/tests/run/implied-for.scala b/tests/run/implied-for.scala index c7789ce570e4..a55d59e89505 100644 --- a/tests/run/implied-for.scala +++ b/tests/run/implied-for.scala @@ -20,7 +20,7 @@ object Test extends App { val x2: T = t val x3: D[Int] = d - assert(summon[T].isInstanceOf[B]) + assert(summon[T].isInstanceOf[T]) assert(summon[D[Int]].isInstanceOf[D[_]]) } diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index 0822fae6778f..61049de8e43e 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -1,5 +1,6 @@ /* These tests show various mechanisms available for implicit prioritization. */ +import language.`3.5` class E[T](val str: String) // The type for which we infer terms below @@ -72,16 +73,16 @@ def test2a = { } /* If that solution is not applicable, we can define an override by refining the - * result type of the given instance, e.g. like this: + * result type of all lower-priority instances, e.g. like this: */ object Impl3 { - given t1[T]: E[T]("low") + trait LowPriority // A marker trait to indicate a lower priority + given t1[T]: E[T]("low") with LowPriority } object Override { - trait HighestPriority // A marker trait to indicate a higher priority - given over[T]: E[T]("hi") with HighestPriority() + given over[T]: E[T]("hi") with {} } def test3 = { @@ -90,7 +91,7 @@ def test3 = { { import Override.given import Impl3.given - assert(summon[E[String]].str == "hi") // `over` takes priority since its result type is a subtype of t1's. + assert(summon[E[String]].str == "hi", summon[E[String]].str) // `Impl3` takes priority since its result type is a subtype of t1's. 
} } diff --git a/tests/semanticdb/expect/Methods.expect.scala b/tests/semanticdb/expect/Methods.expect.scala index f34c657b2f6d..4ec723ad584e 100644 --- a/tests/semanticdb/expect/Methods.expect.scala +++ b/tests/semanticdb/expect/Methods.expect.scala @@ -15,7 +15,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] { def m6/*<-example::Methods#m6().*/(x/*<-example::Methods#m6().(x)*/: Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+1).*/(x/*<-example::Methods#m6(+1).(x)*/: List/*->example::Methods#List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+2).*/(x/*<-example::Methods#m6(+2).(x)*/: scala.List/*->scala::package.List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ - def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/: Ordering/*->scala::math::Ordering#*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ + def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ def `m8()./*<-example::Methods#`m8().`().*/`() = ???/*->scala::Predef.`???`().*/ class `m9()./*<-example::Methods#`m9().`#*/` def m9/*<-example::Methods#m9().*/(x/*<-example::Methods#m9().(x)*/: `m9().`/*->example::Methods#`m9().`#*/) = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/expect/Synthetic.expect.scala b/tests/semanticdb/expect/Synthetic.expect.scala index a4419aa8bd82..4d797ce2b856 100644 --- a/tests/semanticdb/expect/Synthetic.expect.scala +++ b/tests/semanticdb/expect/Synthetic.expect.scala @@ -30,7 +30,7 @@ class Synthetic/*<-example::Synthetic#*/ { null.asInstanceOf/*->scala::Any#asInstanceOf().*/[Int/*->scala::Int#*/ => Int/*->scala::Int#*/](2) } - class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*//*<-example::Synthetic#J#evidence$1.*/: Manifest/*->scala::Predef.Manifest#*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } + class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*/: /*<-example::Synthetic#J#evidence$1.*/Manifest/*->scala::Predef.Manifest#*//*->example::Synthetic#J#[T]*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } class F/*<-example::Synthetic#F#*/ implicit val ordering/*<-example::Synthetic#ordering.*/: Ordering/*->scala::package.Ordering#*/[F/*->example::Synthetic#F#*/] = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 2120cc633da8..84c3e7c6a110 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2732,8 +2732,8 @@ Occurrences: [16:29..16:32): ??? -> scala/Predef.`???`(). [17:6..17:8): m7 <- example/Methods#m7(). 
[17:9..17:10): U <- example/Methods#m7().[U] -[17:10..17:10): <- example/Methods#m7().(evidence$1) -[17:12..17:20): Ordering -> scala/math/Ordering# +[17:12..17:20): Ordering -> example/Methods#m7().[U] +[17:12..17:12): <- example/Methods#m7().(evidence$1) [17:22..17:23): c <- example/Methods#m7().(c) [17:25..17:32): Methods -> example/Methods# [17:33..17:34): T -> example/Methods#[T] @@ -3533,7 +3533,7 @@ Uri => Synthetic.scala Text => empty Language => Scala Symbols => 52 entries -Occurrences => 136 entries +Occurrences => 137 entries Synthetics => 39 entries Symbols: @@ -3659,8 +3659,9 @@ Occurrences: [32:8..32:9): J <- example/Synthetic#J# [32:9..32:9): <- example/Synthetic#J#``(). [32:10..32:11): T <- example/Synthetic#J#[T] -[32:11..32:11): <- example/Synthetic#J#evidence$1. +[32:13..32:13): <- example/Synthetic#J#evidence$1. [32:13..32:21): Manifest -> scala/Predef.Manifest# +[32:13..32:21): Manifest -> example/Synthetic#J#[T] [32:29..32:32): arr <- example/Synthetic#J#arr. [32:35..32:40): Array -> scala/Array. [32:41..32:46): empty -> scala/Array.empty(). diff --git a/tests/warn/context-bounds-migration.scala b/tests/warn/context-bounds-migration.scala new file mode 100644 index 000000000000..9d824a919b10 --- /dev/null +++ b/tests/warn/context-bounds-migration.scala @@ -0,0 +1,10 @@ + + +class C[T] +def foo[X: C] = () + +given [T]: C[T] = C[T]() + +def Test = + foo(C[Int]()) // warning + foo(using C[Int]()) // ok diff --git a/tests/warn/given-triangle.check b/tests/warn/given-triangle.check new file mode 100644 index 000000000000..69583830c2bc --- /dev/null +++ b/tests/warn/given-triangle.check @@ -0,0 +1,6 @@ +-- Warning: tests/warn/given-triangle.scala:16:18 ---------------------------------------------------------------------- +16 |@main def Test = f // warn + | ^ + | Change in given search preference for A between alternatives (given_A : A) and (given_B : B) + | Previous choice: the second alternative + | New choice : the first alternative diff --git a/tests/warn/given-triangle.scala b/tests/warn/given-triangle.scala new file mode 100644 index 000000000000..bc1a5c774f4f --- /dev/null +++ b/tests/warn/given-triangle.scala @@ -0,0 +1,16 @@ +//> using options -source 3.5-migration + +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +def f(using a: A, b: B, c: C) = + println(a.getClass) + println(b.getClass) + println(c.getClass) + +@main def Test = f // warn