Skip to content

Use Uninitialized instead of Null for relevant vars in dotty #15124

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/CompilationUnit.scala
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ class CompilationUnit protected (val source: SourceFile) {
suspendedAtInliningPhase = true
throw CompilationUnit.SuspendException()

private var myAssignmentSpans: Map[Int, List[Span]] | Null = null
private var myAssignmentSpans: Map[Int, List[Span]] | Uninitialized = initiallyNull

/** A map from (name-) offsets of all local variables in this compilation unit
* that can be tracked for being not null to the list of spans of assignments
Expand Down
4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/ast/Positioned.scala
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src
*/
def checkPos(nonOverlapping: Boolean)(using Context): Unit = try {
import untpd._
var lastPositioned: Positioned | Null = null
var lastPositioned: Positioned | Uninitialized = initiallyNull
var lastSpan = NoSpan
def check(p: Any): Unit = p match {
case p: Positioned =>
Expand Down Expand Up @@ -234,7 +234,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src

object Positioned {
@sharable private var debugId = Int.MinValue
@sharable private var ids: java.util.WeakHashMap[Positioned, Int] | Null = null
@sharable private var ids: java.util.WeakHashMap[Positioned, Int] | Uninitialized = initiallyNull
@sharable private var nextId: Int = 0

def init(using Context): Unit =
Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/ast/Trees.scala
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ object Trees {
trait DefTree[-T >: Untyped] extends DenotingTree[T] {
type ThisTree[-T >: Untyped] <: DefTree[T]

private var myMods: untpd.Modifiers | Null = _
private var myMods: untpd.Modifiers | Uninitialized = _

private[dotc] def rawMods: untpd.Modifiers =
if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN
Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/ast/tpd.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1256,7 +1256,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
trait TreeProvider {
protected def computeRootTrees(using Context): List[Tree]

private var myTrees: List[Tree] | Null = _
private var myTrees: List[Tree] | Uninitialized = _

/** Get trees defined by this provider. Cache them if -Yretain-trees is set. */
def rootTrees(using Context): List[Tree] =
Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import config.Printers.{default, typr}

trait ConstraintRunInfo { self: Run =>
private var maxSize = 0
private var maxConstraint: Constraint | Null = _
private var maxConstraint: Constraint | Uninitialized = _
def recordConstraintSize(c: Constraint, size: Int): Unit =
if (size > maxSize) {
maxSize = size
Expand Down
10 changes: 5 additions & 5 deletions compiler/src/dotty/tools/dotc/core/Contexts.scala
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ object Contexts {
def getFile(name: String): AbstractFile = getFile(name.toTermName)


private var related: SimpleIdentityMap[Phase | SourceFile, Context] | Null = null
private var related: SimpleIdentityMap[Phase | SourceFile, Context] | Uninitialized = initiallyNull

private def lookup(key: Phase | SourceFile): Context | Null =
util.Stats.record("Context.related.lookup")
Expand Down Expand Up @@ -853,7 +853,7 @@ object Contexts {
val initialCtx: Context = new InitialContext(this, settings)

/** The platform, initialized by `initPlatform()`. */
private var _platform: Platform | Null = _
private var _platform: Platform | Uninitialized = _

/** The platform */
def platform: Platform = {
Expand Down Expand Up @@ -911,8 +911,8 @@ object Contexts {
/** A table for hash consing unique named types */
private[core] val uniqueNamedTypes: NamedTypeUniques = NamedTypeUniques()

var emptyTypeBounds: TypeBounds | Null = null
var emptyWildcardBounds: WildcardType | Null = null
var emptyTypeBounds: TypeBounds | Uninitialized = initiallyNull
var emptyWildcardBounds: WildcardType | Uninitialized = initiallyNull

/** Number of findMember calls on stack */
private[core] var findMemberCount: Int = 0
Expand Down Expand Up @@ -1006,7 +1006,7 @@ object Contexts {
// Test that access is single threaded

/** The thread on which `checkSingleThreaded was invoked last */
@sharable private var thread: Thread | Null = null
@sharable private var thread: Thread | Uninitialized = initiallyNull

/** Check that we are on the same thread as before */
def checkSingleThreaded(): Unit =
Expand Down
8 changes: 4 additions & 4 deletions compiler/src/dotty/tools/dotc/core/Names.scala
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ object Names {
override def asTermName: TermName = this

@sharable // because it is only modified in the synchronized block of toTypeName.
private var myTypeName: TypeName | Null = null
private var myTypeName: TypeName | Uninitialized = initiallyNull
// Note: no @volatile needed since type names are immutable and therefore safely published

override def toTypeName: TypeName =
Expand Down Expand Up @@ -225,10 +225,10 @@ object Names {
}

@sharable // because it's just a cache for performance
private var myMangledString: String | Null = null
private var myMangledString: String | Uninitialized = initiallyNull

@sharable // because it's just a cache for performance
private var myMangled: Name | Null = null
private var myMangled: Name | Uninitialized = initiallyNull

protected[Names] def mangle: ThisName

Expand Down Expand Up @@ -259,7 +259,7 @@ object Names {

protected def computeToString: String

@sharable private var myToString: String | Null = null
@sharable private var myToString: String | Uninitialized = initiallyNull

override def toString: String =
if myToString == null then myToString = computeToString
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -565,7 +565,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
i += 1
}

private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Null = _
private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Uninitialized = _

/** The uninstantiated typevars of this constraint */
def uninstVars: collection.Seq[TypeVar] = {
Expand Down
6 changes: 3 additions & 3 deletions compiler/src/dotty/tools/dotc/core/Scopes.scala
Original file line number Diff line number Diff line change
Expand Up @@ -213,14 +213,14 @@ object Scopes {

/** the hash table
*/
private var hashTable: Array[ScopeEntry | Null] | Null = null
private var hashTable: Array[ScopeEntry | Null] | Uninitialized = initiallyNull

/** a cache for all elements, to be used by symbol iterator.
*/
private var elemsCache: List[Symbol] | Null = null
private var elemsCache: List[Symbol] | Uninitialized = initiallyNull

/** The synthesizer to be used, or `null` if no synthesis is done on this scope */
private var synthesize: SymbolSynthesizer | Null = null
private var synthesize: SymbolSynthesizer | Uninitialized = initiallyNull

/** Use specified synthesize for this scope */
def useSynthesizer(s: SymbolSynthesizer): Unit = synthesize = s
Expand Down
16 changes: 8 additions & 8 deletions compiler/src/dotty/tools/dotc/core/SymDenotations.scala
Original file line number Diff line number Diff line change
Expand Up @@ -495,7 +495,7 @@ object SymDenotations {
/** `fullName` where `.' is the separator character */
def fullName(using Context): Name = fullNameSeparated(QualifiedName)

private var myTargetName: Name | Null = null
private var myTargetName: Name | Uninitialized = initiallyNull

private def computeTargetName(targetNameAnnot: Option[Annotation])(using Context): Name =
targetNameAnnot match
Expand Down Expand Up @@ -1701,15 +1701,15 @@ object SymDenotations {

// ----- caches -------------------------------------------------------

private var myTypeParams: List[TypeSymbol] | Null = null
private var myTypeParams: List[TypeSymbol] | Uninitialized = initiallyNull
private var fullNameCache: SimpleIdentityMap[QualifiedNameKind, Name] = SimpleIdentityMap.empty

private var myMemberCache: EqHashMap[Name, PreDenotation] | Null = null
private var myMemberCache: EqHashMap[Name, PreDenotation] | Uninitialized = initiallyNull
private var myMemberCachePeriod: Period = Nowhere

/** A cache from types T to baseType(T, C) */
type BaseTypeMap = EqHashMap[CachedType, Type]
private var myBaseTypeCache: BaseTypeMap | Null = null
private var myBaseTypeCache: BaseTypeMap | Uninitialized = initiallyNull
private var myBaseTypeCachePeriod: Period = Nowhere

private var baseDataCache: BaseData = BaseData.None
Expand Down Expand Up @@ -1838,7 +1838,7 @@ object SymDenotations {

// ------ class-specific operations -----------------------------------

private var myThisType: Type | Null = null
private var myThisType: Type | Uninitialized = initiallyNull

/** The this-type depends on the kind of class:
* - for a package class `p`: ThisType(TypeRef(Noprefix, p))
Expand All @@ -1856,7 +1856,7 @@ object SymDenotations {
ThisType.raw(TypeRef(pre, cls))
}

private var myTypeRef: TypeRef | Null = null
private var myTypeRef: TypeRef | Uninitialized = initiallyNull

override def typeRef(using Context): TypeRef = {
if (myTypeRef == null) myTypeRef = super.typeRef
Expand Down Expand Up @@ -2628,8 +2628,8 @@ object SymDenotations {
def apply(module: TermSymbol, modcls: ClassSymbol): LazyType = this

private var myDecls: Scope = EmptyScope
private var mySourceModule: Symbol | Null = null
private var myModuleClass: Symbol | Null = null
private var mySourceModule: Symbol | Uninitialized = initiallyNull
private var myModuleClass: Symbol | Uninitialized = initiallyNull
private var mySourceModuleFn: Context ?=> Symbol = LazyType.NoSymbolFn
private var myModuleClassFn: Context ?=> Symbol = LazyType.NoSymbolFn

Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/core/Symbols.scala
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ object Symbols {
myCoord = c
}

private var myDefTree: Tree | Null = null
private var myDefTree: Tree | Uninitialized = initiallyNull

/** The tree defining the symbol at pickler time, EmptyTree if none was retained */
def defTree: Tree =
Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/core/TypeComparer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
needsGc = false
if Config.checkTypeComparerReset then checkReset()

private var pendingSubTypes: util.MutableSet[(Type, Type)] | Null = null
private var pendingSubTypes: util.MutableSet[(Type, Type)] | Uninitialized = initiallyNull
private var recCount = 0
private var monitored = false

Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/core/TypeOps.scala
Original file line number Diff line number Diff line change
Expand Up @@ -796,7 +796,7 @@ object TypeOps:
//
// See tests/patmat/i3938.scala
class InferPrefixMap extends TypeMap {
var prefixTVar: Type | Null = null
var prefixTVar: Type | Uninitialized = initiallyNull
def apply(tp: Type): Type = tp match {
case ThisType(tref: TypeRef) if !tref.symbol.isStaticOwner =>
if (tref.symbol.is(Module))
Expand Down
24 changes: 12 additions & 12 deletions compiler/src/dotty/tools/dotc/core/Types.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2073,7 +2073,7 @@ object Types {

/** Implementations of this trait cache the results of `narrow`. */
trait NarrowCached extends Type {
private var myNarrow: TermRef | Null = null
private var myNarrow: TermRef | Uninitialized = initiallyNull
override def narrow(using Context): TermRef = {
if (myNarrow == null) myNarrow = super.narrow
myNarrow.nn
Expand All @@ -2093,9 +2093,9 @@ object Types {

assert(prefix.isValueType || (prefix eq NoPrefix), s"invalid prefix $prefix")

private var myName: Name | Null = null
private var lastDenotation: Denotation | Null = null
private var lastSymbol: Symbol | Null = null
private var myName: Name | Uninitialized = initiallyNull
private var lastDenotation: Denotation | Uninitialized = initiallyNull
private var lastSymbol: Symbol | Uninitialized = initiallyNull
private var checkedPeriod: Period = Nowhere
private var myStableHash: Byte = 0
private var mySignature: Signature = _
Expand Down Expand Up @@ -2881,7 +2881,7 @@ object Types {

// `refFn` can be null only if `computed` is true.
case class LazyRef(private var refFn: (Context => (Type | Null)) | Null) extends UncachedProxyType with ValueType {
private var myRef: Type | Null = null
private var myRef: Type | Uninitialized = initiallyNull
private var computed = false

override def tryNormalize(using Context): Type = ref.tryNormalize
Expand Down Expand Up @@ -3024,7 +3024,7 @@ object Types {

val parent: Type = parentExp(this: @unchecked)

private var myRecThis: RecThis | Null = null
private var myRecThis: RecThis | Uninitialized = initiallyNull

def recThis: RecThis = {
if (myRecThis == null) myRecThis = new RecThisImpl(this)
Expand Down Expand Up @@ -3441,7 +3441,7 @@ object Types {
final def isTypeLambda: Boolean = isInstanceOf[TypeLambda]
final def isHigherKinded: Boolean = isInstanceOf[TypeProxy]

private var myParamRefs: List[ParamRefType] | Null = null
private var myParamRefs: List[ParamRefType] | Uninitialized = initiallyNull

def paramRefs: List[ParamRefType] = {
if myParamRefs == null then
Expand Down Expand Up @@ -4636,7 +4636,7 @@ object Types {
//val id = skid
//assert(id != 10)

private var myRepr: Name | Null = null
private var myRepr: Name | Uninitialized = initiallyNull
def repr(using Context): Name = {
if (myRepr == null) myRepr = SkolemName.fresh()
myRepr.nn
Expand Down Expand Up @@ -4820,7 +4820,7 @@ object Types {
def alternatives(using Context): List[Type] = cases.map(caseType)
def underlying(using Context): Type = bound

private var myReduced: Type | Null = null
private var myReduced: Type | Uninitialized = initiallyNull
private var reductionContext: util.MutableMap[Type, Type] = _

override def tryNormalize(using Context): Type =
Expand Down Expand Up @@ -4920,8 +4920,8 @@ object Types {
decls: Scope,
selfInfo: TypeOrSymbol) extends CachedGroundType with TypeType {

private var selfTypeCache: Type | Null = null
private var appliedRefCache: Type | Null = null
private var selfTypeCache: Type | Uninitialized = initiallyNull
private var appliedRefCache: Type | Uninitialized = initiallyNull

/** The self type of a class is the conjunction of
* - the explicit self type if given (or the info of a given self symbol), and
Expand All @@ -4948,7 +4948,7 @@ object Types {
}

// cached because baseType needs parents
private var parentsCache: List[Type] | Null = null
private var parentsCache: List[Type] | Uninitialized = initiallyNull

override def parents(using Context): List[Type] = {
if (parentsCache == null)
Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/parsing/Scanners.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1555,7 +1555,7 @@ object Scanners {
/** The enclosing region, which is required to exist */
def enclosing: Region = outer.asInstanceOf[Region]

var knownWidth: IndentWidth | Null = null
var knownWidth: IndentWidth | Uninitialized = initiallyNull

/** The indentation width, Zero if not known */
final def indentWidth: IndentWidth =
Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/reporting/Message.scala
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ abstract class Message(val errorId: ErrorMessageID) { self =>
*/
def canExplain: Boolean = explain.nonEmpty

private var myMsg: String | Null = null
private var myMsg: String | Uninitialized = initiallyNull
private var myIsNonSensical: Boolean = false

private def dropNonSensical(msg: String): String =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ import Diagnostic._
*/
class StoreReporter(outer: Reporter | Null = Reporter.NoReporter, fromTyperState: Boolean = false) extends Reporter {

protected var infos: mutable.ListBuffer[Diagnostic] | Null = null
protected var infos: mutable.ListBuffer[Diagnostic] | Uninitialized = initiallyNull

def doReport(dia: Diagnostic)(using Context): Unit = {
typr.println(s">>>> StoredError: ${dia.message}") // !!! DEBUG
Expand Down
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer:
Set(refClass(defn.ObjectClass), volatileRefClass(defn.ObjectClass))
}

private var myRefInfo: RefInfo | Null = null
private var myRefInfo: RefInfo | Uninitialized = initiallyNull
private def refInfo(using Context): RefInfo = {
if (myRefInfo == null) myRefInfo = new RefInfo()
myRefInfo.uncheckedNN
Expand Down
4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/transform/LazyVals.scala
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer {
val containerFlagsMask: FlagSet = Method | Lazy | Accessor | Module

/** A map of lazy values to the fields they should null after initialization. */
private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Null = _
private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Uninitialized = _
private def nullableFor(sym: Symbol)(using Context) = {
// optimisation: value only used once, we can remove the value from the map
val nullables = lazyValNullables.nn.remove(sym)
Expand Down Expand Up @@ -382,7 +382,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer {
val thizClass = Literal(Constant(claz.info))
val helperModule = requiredModule("scala.runtime.LazyVals")
val getOffset = Select(ref(helperModule), lazyNme.RLazyVals.getOffset)
var offsetSymbol: TermSymbol | Null = null
var offsetSymbol: TermSymbol | Uninitialized = initiallyNull
var flag: Tree = EmptyTree
var ord = 0

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ class SpecializeFunctions extends MiniPhase {
val sym = ddef.symbol
val cls = ctx.owner.asClass

var specName: Name | Null = null
var specName: Name | Uninitialized = initiallyNull

def isSpecializable = {
val paramTypes = ddef.termParamss.head.map(_.symbol.info)
Expand Down
4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/transform/Splicing.scala
Original file line number Diff line number Diff line change
Expand Up @@ -190,8 +190,8 @@ class Splicing extends MacroTransform:
private class SpliceTransformer(spliceOwner: Symbol, isCaptured: Symbol => Boolean) extends Transformer:
private var refBindingMap = mutable.Map.empty[Symbol, (Tree, Symbol)]
/** Reference to the `Quotes` instance of the current level 1 splice */
private var quotes: Tree | Null = null // TODO: add to the context
private var healedTypes: PCPCheckAndHeal.QuoteTypeTags | Null = null // TODO: add to the context
private var quotes: Tree | Uninitialized = initiallyNull // TODO: add to the context
private var healedTypes: PCPCheckAndHeal.QuoteTypeTags | Uninitialized = initiallyNull // TODO: add to the context

def transformSplice(tree: tpd.Tree, tpe: Type, holeIdx: Int)(using Context): tpd.Tree =
assert(level == 0)
Expand Down
Loading