From 164eea51df8bc593423389e345532ed37a61d445 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 9 May 2023 11:56:32 +0200 Subject: [PATCH 01/90] Make SIP 54 a standard feature - Drop experimental language import - Change note in docs to say that this relaxation only applies to extension method calls, not extension methods called as normal methods. I tried to also reflect the second point in error messages but it turned out too hard. At the point where we generate the error message we do not know how the method was called and it would be unsystematic to create that side channel. In fact, information flows the other way: When we resolve an extension method name, we buffer the error messages and fix selected AmbiguityErrors. --- compiler/src/dotty/tools/dotc/config/Feature.scala | 1 - compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- docs/_docs/reference/contextual/extension-methods.md | 3 ++- library/src/scala/runtime/stdLibPatches/language.scala | 8 -------- 4 files changed, 3 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index e5ab8f65f55b..419ed5868cbf 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -29,7 +29,6 @@ object Feature: val fewerBraces = experimental("fewerBraces") val saferExceptions = experimental("saferExceptions") val clauseInterleaving = experimental("clauseInterleaving") - val relaxedExtensionImports = experimental("relaxedExtensionImports") val pureFunctions = experimental("pureFunctions") val captureChecking = experimental("captureChecking") val into = experimental("into") diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 7eb8519739c6..da9b1d1a9d80 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -260,7 +260,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer then altImports.uncheckedNN += altImp - if Feature.enabled(Feature.relaxedExtensionImports) && altImports != null && ctx.isImportContext then + if altImports != null && ctx.isImportContext then val curImport = ctx.importInfo.uncheckedNN namedImportRef(curImport) match case altImp: TermRef => diff --git a/docs/_docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md index d98d80caafc5..8b9a3df5b84c 100644 --- a/docs/_docs/reference/contextual/extension-methods.md +++ b/docs/_docs/reference/contextual/extension-methods.md @@ -254,7 +254,8 @@ The following two rewritings are tried in order: not a wildcard import, pick the expansion from that import. Otherwise, report an ambiguous reference error. - **Note**: This relaxation is currently enabled only under the `experimental.relaxedExtensionImports` language import. + **Note**: This relaxation of the import rules applies only if the method `m` is used as an extension method. If it is used as a normal method in prefix form, the usual import rules apply, which means that importing `m` from + multiple places can lead to an ambiguity error. 2. If the first rewriting does not typecheck with expected type `T`, and there is an extension method `m` in some eligible object `o`, the selection is rewritten to `o.m[Ts](e)`. 
An object `o` is _eligible_ if diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 091e75fa06e1..d92495c6f5aa 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -69,14 +69,6 @@ object language: @compileTimeOnly("`clauseInterleaving` can only be used at compile time in import statements") object clauseInterleaving - /** Adds support for relaxed imports of extension methods. - * Extension methods with the same name can be imported from several places. - * - * @see [[http://dotty.epfl.ch/docs/reference/contextual/extension-methods]] - */ - @compileTimeOnly("`relaxedExtensionImports` can only be used at compile time in import statements") - object relaxedExtensionImports - /** Experimental support for pure function type syntax * * @see [[https://dotty.epfl.ch/docs/reference/experimental/purefuns]] From 81b4e5ce0c82b4e953864fd3e7ac5c28c3dc586b Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 9 May 2023 15:39:53 +0200 Subject: [PATCH 02/90] Add error message hint when extension imports are used as normal methods --- .../dotty/tools/dotc/reporting/messages.scala | 12 +++++-- .../src/dotty/tools/dotc/typer/Typer.scala | 3 +- tests/neg/i13558.scala | 31 ------------------- tests/neg/i16920.check | 28 ++++++++--------- tests/neg/i16920.scala | 1 - tests/neg/sip54.check | 17 ++++++++++ tests/neg/sip54.scala | 12 +++++++ tests/pos/i13558.scala | 1 - tests/pos/i16920.scala | 1 - 9 files changed, 55 insertions(+), 51 deletions(-) delete mode 100644 tests/neg/i13558.scala create mode 100644 tests/neg/sip54.check create mode 100644 tests/neg/sip54.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 423c1cdef264..138e51ecbc6d 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -1336,7 +1336,8 @@ class ConstrProxyShadows(proxy: TermRef, shadowed: Type, shadowedIsApply: Boolea |or use a full prefix for ${shadowed.termSymbol.name} if you mean the latter.""" end ConstrProxyShadows -class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context) +class AmbiguousReference( + name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context, isExtension: => Boolean = false)(using Context) extends ReferenceMsg(AmbiguousReferenceID), NoDisambiguation { /** A string which explains how something was bound; Depending on `prec` this is either @@ -1358,10 +1359,17 @@ class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec i"""$howVisible$qualifier in ${whereFound.owner}""" } + def importHint = + if (newPrec == BindingPrec.NamedImport || newPrec == BindingPrec.WildImport) + && prevPrec == newPrec + && isExtension + then i"\n\n Hint: This error may arise if extension method `$name` is called as a normal method." + else "" + def msg(using Context) = i"""|Reference to $name is ambiguous. 
|It is both ${bindingString(newPrec, ctx)} - |and ${bindingString(prevPrec, prevCtx, " subsequently")}""" + |and ${bindingString(prevPrec, prevCtx, " subsequently")}$importHint""" def explain(using Context) = val precedent = diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index da9b1d1a9d80..b1b57d70ef53 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -233,7 +233,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer found else if !scala2pkg && !previous.isError && !found.isError then - fail(AmbiguousReference(name, newPrec, prevPrec, prevCtx)) + fail(AmbiguousReference(name, newPrec, prevPrec, prevCtx, + isExtension = previous.termSymbol.is(ExtensionMethod) && found.termSymbol.is(ExtensionMethod))) previous /** Assemble and check alternatives to an imported reference. This implies: diff --git a/tests/neg/i13558.scala b/tests/neg/i13558.scala deleted file mode 100644 index 1d4e1c506e43..000000000000 --- a/tests/neg/i13558.scala +++ /dev/null @@ -1,31 +0,0 @@ -package testcode - -class A - -class B - -object ExtensionA { - extension (self: A) { - def id = "A" - } -} -object ExtensionB { - extension (self: B) { - def id = "B" - } -} - -object Main { - def main1(args: Array[String]): Unit = { - import ExtensionB._ - import ExtensionA._ - val a = A() - println(a.id) // error - } - def main2(args: Array[String]): Unit = { - import ExtensionA._ - import ExtensionB._ - val a = A() - println(a.id) // error - } -} \ No newline at end of file diff --git a/tests/neg/i16920.check b/tests/neg/i16920.check index 131ba4c6265e..8f8172b5538e 100644 --- a/tests/neg/i16920.check +++ b/tests/neg/i16920.check @@ -1,5 +1,5 @@ --- [E008] Not Found Error: tests/neg/i16920.scala:20:11 ---------------------------------------------------------------- -20 | "five".wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:19:11 ---------------------------------------------------------------- +19 | "five".wow // error | ^^^^^^^^^^ | value wow is not a member of String. | An extension method was tried, but could not be fully constructed: @@ -10,8 +10,8 @@ | | Found: ("five" : String) | Required: Int --- [E008] Not Found Error: tests/neg/i16920.scala:28:6 ----------------------------------------------------------------- -28 | 5.wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:27:6 ----------------------------------------------------------------- +27 | 5.wow // error | ^^^^^ | value wow is not a member of Int. | An extension method was tried, but could not be fully constructed: @@ -22,8 +22,8 @@ | | Found: (5 : Int) | Required: Boolean --- [E008] Not Found Error: tests/neg/i16920.scala:29:11 ---------------------------------------------------------------- -29 | "five".wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:28:11 ---------------------------------------------------------------- +28 | "five".wow // error | ^^^^^^^^^^ | value wow is not a member of String. | An extension method was tried, but could not be fully constructed: @@ -34,8 +34,8 @@ | | Found: ("five" : String) | Required: Boolean --- [E008] Not Found Error: tests/neg/i16920.scala:36:6 ----------------------------------------------------------------- -36 | 5.wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:35:6 ----------------------------------------------------------------- +35 | 5.wow // error | ^^^^^ | value wow is not a member of Int. 
| An extension method was tried, but could not be fully constructed: @@ -48,8 +48,8 @@ | both Three.wow(5) | and Two.wow(5) | are possible expansions of 5.wow --- [E008] Not Found Error: tests/neg/i16920.scala:44:11 ---------------------------------------------------------------- -44 | "five".wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:43:11 ---------------------------------------------------------------- +43 | "five".wow // error | ^^^^^^^^^^ | value wow is not a member of String. | An extension method was tried, but could not be fully constructed: @@ -60,8 +60,8 @@ | | Found: ("five" : String) | Required: Int --- [E008] Not Found Error: tests/neg/i16920.scala:51:11 ---------------------------------------------------------------- -51 | "five".wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:50:11 ---------------------------------------------------------------- +50 | "five".wow // error | ^^^^^^^^^^ | value wow is not a member of String. | An extension method was tried, but could not be fully constructed: @@ -72,8 +72,8 @@ | | Found: ("five" : String) | Required: Int --- [E008] Not Found Error: tests/neg/i16920.scala:58:6 ----------------------------------------------------------------- -58 | 5.wow // error +-- [E008] Not Found Error: tests/neg/i16920.scala:57:6 ----------------------------------------------------------------- +57 | 5.wow // error | ^^^^^ | value wow is not a member of Int. | An extension method was tried, but could not be fully constructed: diff --git a/tests/neg/i16920.scala b/tests/neg/i16920.scala index 38345e811c1f..c4a54046e027 100644 --- a/tests/neg/i16920.scala +++ b/tests/neg/i16920.scala @@ -1,4 +1,3 @@ -import language.experimental.relaxedExtensionImports object One: extension (s: String) diff --git a/tests/neg/sip54.check b/tests/neg/sip54.check new file mode 100644 index 000000000000..d53687f8ba79 --- /dev/null +++ b/tests/neg/sip54.check @@ -0,0 +1,17 @@ +-- [E049] Reference Error: tests/neg/sip54.scala:12:8 ------------------------------------------------------------------ +12 |val _ = meth(foo)() // error // error + | ^^^^ + | Reference to meth is ambiguous. + | It is both imported by import A._ + | and imported subsequently by import B._ + | + | Hint: This error may arise if extension method `meth` is called as a normal method. 
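Note: the hint above concerns SIP 54 resolution. As a minimal sketch of the behaviour (mirroring the sip54.scala test added below; not part of the generated check file), an extension-style call resolves through the receiver type, while the plain prefix form stays ambiguous and triggers the new hint:

```scala
// Sketch only; names mirror tests/neg/sip54.scala from this patch series.
class Foo
class Bar

object A:
  extension (foo: Foo) def meth(): Foo = foo
object B:
  extension (bar: Bar) def meth(): Bar = bar

import A.*
import B.*

@main def demo(): Unit =
  val foo = new Foo
  val ok = foo.meth()     // compiles: the relaxation picks A.meth from the receiver type Foo
  // val bad = meth(foo)() // ambiguous reference: the case the new hint points at
  println(ok)
```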
+ | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/sip54.scala:12:13 ------------------------------------------------------------- +12 |val _ = meth(foo)() // error // error + | ^^^ + | Found: (foo : Foo) + | Required: Bar + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/sip54.scala b/tests/neg/sip54.scala new file mode 100644 index 000000000000..3e092af65d32 --- /dev/null +++ b/tests/neg/sip54.scala @@ -0,0 +1,12 @@ +class Foo +class Bar +object A: + extension (foo: Foo) def meth(): Foo = foo +object B: + extension (bar: Bar) def meth(): Bar = bar + +import A.* +import B.* + +val foo = new Foo +val _ = meth(foo)() // error // error diff --git a/tests/pos/i13558.scala b/tests/pos/i13558.scala index 0c8be379f6a9..6f18b770f467 100644 --- a/tests/pos/i13558.scala +++ b/tests/pos/i13558.scala @@ -1,5 +1,4 @@ package testcode -import language.experimental.relaxedExtensionImports class A diff --git a/tests/pos/i16920.scala b/tests/pos/i16920.scala index dd4f5804a4fd..d52e7e453e7e 100644 --- a/tests/pos/i16920.scala +++ b/tests/pos/i16920.scala @@ -1,4 +1,3 @@ -import language.experimental.relaxedExtensionImports object One: extension (s: String) From e5fd4773f2cb24a40ad946f12fb09da579139ad0 Mon Sep 17 00:00:00 2001 From: Carl Date: Mon, 8 May 2023 21:20:02 +0200 Subject: [PATCH 03/90] Implement -Xlint:private-shadow, type-parameter-shadow Respectively warn about : - a private field or a class parameter that shadows a superclass field - a local type parameter that shadows a type already in the scope Fixes : #17612 and #17613 --- compiler/src/dotty/tools/dotc/Compiler.scala | 2 +- .../tools/dotc/config/ScalaSettings.scala | 26 +- .../tools/dotc/transform/CheckShadowing.scala | 314 ++++++++++++++++++ .../fatal-warnings/i17612a.check | 32 ++ .../fatal-warnings/i17612a.scala | 42 +++ .../fatal-warnings/i17613a.check | 28 ++ .../fatal-warnings/i17613a.scala | 23 ++ .../fatal-warnings/i17613b.check | 44 +++ .../fatal-warnings/i17613b/i17613b.scala | 33 ++ .../fatal-warnings/i17613b/importTry.scala | 5 + 10 files changed, 547 insertions(+), 2 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala create mode 100644 tests/neg-custom-args/fatal-warnings/i17612a.check create mode 100644 tests/neg-custom-args/fatal-warnings/i17612a.scala create mode 100644 tests/neg-custom-args/fatal-warnings/i17613a.check create mode 100644 tests/neg-custom-args/fatal-warnings/i17613a.scala create mode 100644 tests/neg-custom-args/fatal-warnings/i17613b.check create mode 100644 tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala create mode 100644 tests/neg-custom-args/fatal-warnings/i17613b/importTry.scala diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index a6118732d4ae..743aca5bf90a 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -35,7 +35,7 @@ class Compiler { protected def frontendPhases: List[List[Phase]] = List(new Parser) :: // Compiler frontend: scanner, parser List(new TyperPhase) :: // Compiler frontend: namer, typer - List(new CheckUnused.PostTyper) :: // Check for unused elements + List(new CheckUnused.PostTyper, new CheckShadowing) :: // Check for unused and shadowing elements List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks List(new 
semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 0ddde23dc39f..c144364f68ba 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -9,6 +9,7 @@ import dotty.tools.dotc.config.SourceVersion import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.rewrites.Rewrites import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory} +import Setting.ChoiceWithHelp import scala.util.chaining._ @@ -155,7 +156,6 @@ private sealed trait VerboseSettings: */ private sealed trait WarningSettings: self: SettingGroup => - import Setting.ChoiceWithHelp val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) @@ -306,6 +306,30 @@ private sealed trait XSettings: } val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") + + val Xlint: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( + name = "-Xlint", + helpArg = "advanced warning", + descr = "Enable or disable specific `lint` warnings", + choices = List( + ChoiceWithHelp("nowarn", ""), + ChoiceWithHelp("all", ""), + ChoiceWithHelp("private-shadow", "Warn if a private field or class parameter shadows a superclass field"), + ChoiceWithHelp("type-parameter-shadow", "Warn when a type parameter shadows a type already in the scope"), + ), + default = Nil + ) + + object XlintHas: + def isChoiceSet(s: String)(using Context) = Xlint.value.pipe(us => us.contains(s)) + def allOr(s: String)(using Context) = Xlint.value.pipe(us => us.contains("all") || us.contains(s)) + def nowarn(using Context) = allOr("nowarn") + + def privateShadow(using Context) = + allOr("private-shadow") + def typeParameterShadow(using Context) = + allOr("type-parameter-shadow") + end XSettings /** -Y "Forking" as in forked tongue or "Private" settings */ diff --git a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala new file mode 100644 index 000000000000..1c575fdc89a1 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala @@ -0,0 +1,314 @@ +package dotty.tools.dotc.transform + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.Trees.EmptyTree +import dotty.tools.dotc.transform.MegaPhase +import dotty.tools.dotc.transform.MegaPhase.MiniPhase +import dotty.tools.dotc.report +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.util.{Property, SrcPos} +import dotty.tools.dotc.core.Symbols.ClassSymbol +import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.core.Symbols.Symbol +import dotty.tools.dotc.core.Flags.EmptyFlags +import dotty.tools.dotc.ast.tpd.TreeTraverser +import dotty.tools.dotc.core.Types.watchList +import dotty.tools.dotc.core.Types.NoType +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Types +import dotty.tools.dotc.semanticdb.TypeOps +import dotty.tools.dotc.cc.boxedCaptureSet +import dotty.tools.dotc.core.Symbols.NoSymbol +import dotty.tools.dotc.transform.SymUtils.isParamOrAccessor +import scala.collection.mutable +import 
dotty.tools.dotc.core.Scopes.Scope +import scala.collection.immutable.HashMap +import dotty.tools.dotc.core.Symbols +import dotty.tools.dotc.typer.ImportInfo +import dotty.tools.dotc.ast.untpd.ImportSelector +import dotty.tools.dotc.core.StdNames.nme +import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.core.Denotations.SingleDenotation +import dotty.tools.dotc.ast.Trees.Ident +import dotty.tools.dotc.core.Names.TypeName +import dotty.tools.dotc.core.Names.TermName +import dotty.tools.dotc.core.Mode.Type +import dotty.tools.dotc.core.Names.SimpleName + +class CheckShadowing extends MiniPhase: + import CheckShadowing.* + import ShadowingData.* + + private val _key = Property.Key[ShadowingData] + + private def shadowingDataApply[U](f: ShadowingData => U)(using Context): Context = + ctx.property(_key).foreach(f) + ctx + + override def phaseName: String = CheckShadowing.name + + override def description: String = CheckShadowing.description + + override def isRunnable(using Context): Boolean = + super.isRunnable && + ctx.settings.Xlint.value.nonEmpty && + !ctx.isJava + + // Setup before the traversal + override def prepareForUnit(tree: tpd.Tree)(using Context): Context = + val data = ShadowingData() + val fresh = ctx.fresh.setProperty(_key, data) + shadowingDataApply(sd => sd.registerRootImports())(using fresh) + + // Reporting on traversal's end + override def transformUnit(tree: tpd.Tree)(using Context): tpd.Tree = + shadowingDataApply(sd => + reportShadowing(sd.getShadowingResult) + ) + tree + + // MiniPhase traversal : + + override def prepareForPackageDef(tree: tpd.PackageDef)(using Context): Context = + shadowingDataApply(sd => sd.inNewScope()) + ctx + + override def prepareForTemplate(tree: tpd.Template)(using Context): Context = + shadowingDataApply(sd => sd.inNewScope()) + ctx + + override def prepareForBlock(tree: tpd.Block)(using Context): Context = + shadowingDataApply(sd => sd.inNewScope()) + ctx + + override def prepareForOther(tree: tpd.Tree)(using Context): Context = + importTraverser(tree.symbol).traverse(tree) + ctx + + override def prepareForValDef(tree: tpd.ValDef)(using Context): Context = + shadowingDataApply(sd => + sd.registerPrivateShadows(tree) + ) + + override def prepareForTypeDef(tree: tpd.TypeDef)(using Context): Context = + if tree.symbol.isAliasType then // if alias, the parent is the current symbol + nestedTypeTraverser(tree.symbol).traverse(tree.rhs) + if tree.symbol.is(Param) then // if param, the parent is up + val owner = tree.symbol.owner + val parent = if (owner.isConstructor) then owner.owner else owner + nestedTypeTraverser(parent).traverse(tree.rhs)(using ctx.outer) + shadowingDataApply(sd => sd.registerCandidate(parent, tree)) + else + ctx + + + override def transformPackageDef(tree: tpd.PackageDef)(using Context): tpd.Tree = + shadowingDataApply(sd => sd.outOfScope()) + tree + + override def transformBlock(tree: tpd.Block)(using Context): tpd.Tree = + shadowingDataApply(sd => sd.outOfScope()) + tree + + override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = + shadowingDataApply(sd => sd.outOfScope()) + tree + + override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = + if tree.symbol.is(Param) && !tree.symbol.owner.isConstructor then // Do not register for constructors the work is done for the Class owned equivalent TypeDef + shadowingDataApply(sd => sd.computeTypeParamShadowsFor(tree.symbol.owner)(using ctx.outer)) + if tree.symbol.isAliasType then // No need to start outer here, because the TypeDef 
reached here it's already the parent + shadowingDataApply(sd => sd.computeTypeParamShadowsFor(tree.symbol)(using ctx)) + tree + + // Helpers : + + private def reportShadowing(res: ShadowingData.ShadowResult)(using Context): Unit = + res.warnings.sortBy(w => (w.pos.line, w.pos.startPos.column))(using Ordering[(Int, Int)]).foreach { s => + s match + case PrivateShadowWarning(pos, shadow, shadowed) => + report.warning(s"${shadow.showLocated} shadows field ${shadowed.name} inherited from ${shadowed.owner}", pos) + case TypeParamShadowWarning(pos, shadow, parent, shadowed) => + if shadowed.exists then + report.warning(s"Type parameter ${shadow.name} for $parent shadows the type defined by ${shadowed.showLocated}", pos) + else + report.warning(s"Type parameter ${shadow.name} for $parent shadows an explicitly renamed type : ${shadow.name}", pos) + } + + private def nestedTypeTraverser(parent: Symbol) = new TreeTraverser: + import tpd._ + + override def traverse(tree: tpd.Tree)(using Context): Unit = + tree match + case t:tpd.TypeDef => + val newCtx = shadowingDataApply(sd => + sd.registerCandidate(parent, t) + ) + traverseChildren(tree)(using newCtx) + case _ => + traverseChildren(tree) + end traverse + end nestedTypeTraverser + + // To reach the imports during a miniphase traversal + private def importTraverser(parent: Symbol) = new TreeTraverser: + import tpd._ + + override def traverse(tree: tpd.Tree)(using Context): Unit = + tree match + case t:tpd.Import => + shadowingDataApply(sd => sd.registerImport(t)) + traverseChildren(tree) + case _ => + traverseChildren(tree) + +end CheckShadowing + + +object CheckShadowing: + + val name = "checkShadowing" + val description = "check for elements shadowing other elements in scope" + + private class ShadowingData: + import dotty.tools.dotc.transform.CheckShadowing.ShadowingData._ + import collection.mutable.{Set => MutSet, Map => MutMap, Stack => MutStack} + + private val rootImports = MutSet[SingleDenotation]() + private val explicitsImports = MutStack[MutSet[tpd.Import]]() + private val renamedImports = MutStack[MutMap[SimpleName, Name]]() // original name -> renamed name + + private val typeParamCandidates = MutMap[Symbol, Seq[tpd.TypeDef]]().withDefaultValue(Seq()) + private val shadowedTypeDefs = MutSet[TypeParamShadowWarning]() + + private val shadowedPrivateDefs = MutSet[PrivateShadowWarning]() + + def inNewScope()(using Context) = + explicitsImports.push(MutSet()) + renamedImports.push(MutMap()) + + def outOfScope()(using Context) = + explicitsImports.pop() + renamedImports.pop() + + /** Register the Root imports (at once per compilation unit)*/ + def registerRootImports()(using Context) = + ctx.definitions.rootImportTypes.foreach(rimp => println()) + val langPackageName = ctx.definitions.JavaLangPackageVal.name.toSimpleName // excludes lang package + rootImports.addAll(ctx.definitions.rootImportTypes.withFilter(_.name.toSimpleName != langPackageName).flatMap(_.typeMembers)) + + /* Register an import encountered in the current scope **/ + def registerImport(imp: tpd.Import)(using Context) = + val renamedImps = imp.selectors.collect(sel => { sel.renamed match + case Ident(rename) => + (sel.name.toSimpleName, rename) + }).toMap + explicitsImports.top += imp + renamedImports.top.addAll(renamedImps) + + /** Register a potential type definition which could shadows a Type already defined */ + def registerCandidate(parent: Symbol, typeDef: tpd.TypeDef) = + val actual = typeParamCandidates.getOrElseUpdate(parent, Seq()) + 
typeParamCandidates.update(parent, actual.+:(typeDef)) + + /** Compute if there is some TypeParam shadowing and register if it is the case*/ + def computeTypeParamShadowsFor(parent: Symbol)(using Context): Unit = + typeParamCandidates(parent).foreach(typeDef => { + val sym = typeDef.symbol + val shadowedType = + lookForRootShadowedType(sym) + .orElse(lookForImportedShadowedType(sym)) + .orElse(lookForUnitShadowedType(sym)) + shadowedType.foreach(shadowed => + if !renamedImports.exists(_.contains(shadowed.name.toSimpleName)) then + shadowedTypeDefs += TypeParamShadowWarning(typeDef.srcPos, typeDef.symbol, parent, shadowed) + ) + }) + + private def lookForRootShadowedType(symbol: Symbol)(using Context): Option[Symbol] = + rootImports.find(p => p.name.toSimpleName == symbol.name.toSimpleName).map(_.symbol) + + private def lookForImportedShadowedType(symbol: Symbol)(using Context): Option[Symbol] = + explicitsImports + .flatMap(_.flatMap(imp => symbol.isInImport(imp))) + .headOption + + private def lookForUnitShadowedType(symbol: Symbol)(using Context): Option[Symbol] = + if !ctx.owner.exists then + None + else + val declarationScope = ctx.effectiveScope + val res = declarationScope.lookup(symbol.name) + res match + case s: Symbol if s.isType => Some(s) + case _ => lookForUnitShadowedType(symbol)(using ctx.outer) + + /** Register if the valDef is a private declaration that shadows an inherited field */ + def registerPrivateShadows(valDef: tpd.ValDef)(using Context): Unit = + lookForShadowedField(valDef.symbol).foreach(shadowedField => + shadowedPrivateDefs += PrivateShadowWarning(valDef.startPos, valDef.symbol, shadowedField) + ) + + private def lookForShadowedField(symDecl: Symbol)(using Context): Option[Symbol] = + if symDecl.isPrivate then + val symDeclType = symDecl.info + val bClasses = symDecl.owner.info.baseClasses + bClasses match + case _ :: inherited => + inherited + .map(classSymbol => symDecl.denot.matchingDecl(classSymbol, symDeclType)) + .find(sym => sym.name == symDecl.name) + case Nil => + None + else + None + + /** Get the shadowing analysis's result */ + def getShadowingResult(using Context): ShadowResult = + + val privateShadowWarnings: List[ShadowWarning] = + if ctx.settings.XlintHas.privateShadow then + shadowedPrivateDefs.toList + else + Nil + val typeParamShadowWarnings: List[ShadowWarning] = + if ctx.settings.XlintHas.typeParameterShadow then + shadowedTypeDefs.toList + else + Nil + ShadowResult(privateShadowWarnings ++ typeParamShadowWarnings) + + extension (sym: Symbol) + /** Given an import and accessibility, return the import's symbol that matches import<->this symbol */ + private def isInImport(imp: tpd.Import)(using Context): Option[Symbol] = + val tpd.Import(qual, sels) = imp + val simpleSelections = qual.tpe.member(sym.name).alternatives + val typeSelections = sels.flatMap(n => qual.tpe.member(n.name.toTypeName).alternatives) + + sels.find(is => is.rename.toSimpleName == sym.name.toSimpleName).map(_.symbol) + .orElse(typeSelections.map(_.symbol).find(sd => sd.name == sym.name)) + .orElse(simpleSelections.map(_.symbol).find(sd => sd.name == sym.name)) + + end ShadowingData + + private object ShadowingData: + sealed abstract class ShadowWarning(val pos: SrcPos, val shadow: Symbol, val shadowed: Symbol) + + case class PrivateShadowWarning( + override val pos: SrcPos, + override val shadow: Symbol, + override val shadowed: Symbol + ) extends ShadowWarning(pos, shadow, shadowed) + + case class TypeParamShadowWarning( + override val pos: SrcPos, + override val shadow: 
Symbol, + val shadowParent: Symbol, + override val shadowed: Symbol, + ) extends ShadowWarning(pos, shadow, shadowed) + + /** A container for the results of the shadow elements analysis */ + case class ShadowResult(warnings: List[ShadowWarning]) + +end CheckShadowing \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i17612a.check b/tests/neg-custom-args/fatal-warnings/i17612a.check new file mode 100644 index 000000000000..fad897b7c5f8 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17612a.check @@ -0,0 +1,32 @@ +-- Error: tests/neg-custom-args/fatal-warnings/i17612a.scala:18:15 ----------------------------------------------------- +18 | class Derived(x : Int, y: Int, z2: Int) extends Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y + | ^ + | value x in class Derived shadows field x inherited from class Base +-- Error: tests/neg-custom-args/fatal-warnings/i17612a.scala:18:24 ----------------------------------------------------- +18 | class Derived(x : Int, y: Int, z2: Int) extends Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y + | ^ + | value y in class Derived shadows field y inherited from class Base +-- Error: tests/neg-custom-args/fatal-warnings/i17612a.scala:20:2 ------------------------------------------------------ +20 | private val shadowed2 = 2 + 2 // error (In Scala 2 we cannot do that got the warning) + | ^ + | value shadowed2 in class Derived shadows field shadowed2 inherited from class Base +-- Error: tests/neg-custom-args/fatal-warnings/i17612a.scala:21:2 ------------------------------------------------------ +21 | private[this] val shadowed3 = 3 + 3 // error + | ^ + | value shadowed3 in class Derived shadows field shadowed3 inherited from class Base +-- Error: tests/neg-custom-args/fatal-warnings/i17612a.scala:23:2 ------------------------------------------------------ +23 | private val shadowed5 = 5 + 5 // error + | ^ + | value shadowed5 in class Derived shadows field shadowed5 inherited from class Base +-- Error: tests/neg-custom-args/fatal-warnings/i17612a.scala:34:20 ----------------------------------------------------- +34 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, y, z) // error // error // error + | ^ + | value x in class UnderDerived shadows field x inherited from class Base +-- Error: tests/neg-custom-args/fatal-warnings/i17612a.scala:34:28 ----------------------------------------------------- +34 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, y, z) // error // error // error + | ^ + | value y in class UnderDerived shadows field y inherited from class Base +-- Error: tests/neg-custom-args/fatal-warnings/i17612a.scala:34:36 ----------------------------------------------------- +34 | class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, y, z) // error // error // error + | ^ + | value z in class UnderDerived shadows field z inherited from class Base diff --git a/tests/neg-custom-args/fatal-warnings/i17612a.scala b/tests/neg-custom-args/fatal-warnings/i17612a.scala new file mode 100644 index 000000000000..0fb6306b96cb --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17612a.scala @@ -0,0 +1,42 @@ +// scalac: -Xlint:private-shadow + +object i17612a: + class Base(var x: Int, val y: Int, var z: Int): + var shadowed2 = 2 + val shadowed3 = 3 + val shadowed4 = 4 + protected var shadowed5 = 5 + //var shadowed6 = 6 + + val notShadowed = -1 + private val 
notShadowed2 = -2 + //val fatalOverride = 0 + + def increment(): Unit = + x = x + 1 + + class Derived(x : Int, y: Int, z2: Int) extends Base(x, y + 1, z2): // error // error / for x, y translated to private[this] x field & shadowing var Base.x, Base.y + private def hello() = 4 + private val shadowed2 = 2 + 2 // error (In Scala 2 we cannot do that got the warning) + private[this] val shadowed3 = 3 + 3 // error + //private[Derived] val fatalOverride = 0 // value fatalOverride of type Int has weaker access privileges; it should be public + private val shadowed5 = 5 + 5 // error + private val notShadowed2 = -4 + //protected var shadowed6 = 6 + 6 // variable shadowed6 of type Int has weaker access privileges; it should be public + + def inFunctionScope() = + val notShadowed = -2 // OK + -2 + + override def toString = + s"x : ${x.toString}, y : ${y.toString}" + + class UnderDerived(x: Int, y: Int, z: Int) extends Derived(x, y, z) // error // error // error + + def main(args: Array[String]) = + val derived = new Derived(1, 1, 1) + println(derived.toString) // yields x: '1', as expected + derived.increment() + println(derived.toString) // still x: '1', probably unexpected, for y it never prints the super value, less surprising + println(derived.shadowed2) + println(derived.shadowed3) \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i17613a.check b/tests/neg-custom-args/fatal-warnings/i17613a.check new file mode 100644 index 000000000000..b0aeb85101a1 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17613a.check @@ -0,0 +1,28 @@ +-- Error: tests/neg-custom-args/fatal-warnings/i17613a.scala:8:13 ------------------------------------------------------ +8 | def foobar[D](in: D) = in.toString // error method parameter shadows some other type + | ^ + | Type parameter D for method foobar shadows the type defined by trait D in class B +-- Error: tests/neg-custom-args/fatal-warnings/i17613a.scala:9:13 ------------------------------------------------------ +9 | type MySeq[D] = Seq[D] // error type member's parameter shadows some other type + | ^ + | Type parameter D for type MySeq shadows the type defined by trait D in class B +-- Error: tests/neg-custom-args/fatal-warnings/i17613a.scala:11:12 ----------------------------------------------------- +11 | class Foo[T](t: T): // error class parameter shadows some other type + | ^ + | Type parameter T for class Foo shadows the type defined by type T in class B +-- Error: tests/neg-custom-args/fatal-warnings/i17613a.scala:12:11 ----------------------------------------------------- +12 | def bar[T](w: T) = w.toString // error a type parameter shadows another type parameter + | ^ + | Type parameter T for method bar shadows the type defined by type T in class Foo +-- Error: tests/neg-custom-args/fatal-warnings/i17613a.scala:15:12 ----------------------------------------------------- +15 | class C[M[List[_]]] // error + | ^^^^^^^ + | Type parameter List for class C shadows the type defined by type List in package scala +-- Error: tests/neg-custom-args/fatal-warnings/i17613a.scala:16:11 ----------------------------------------------------- +16 | type E[M[List[_]]] = Int // error + | ^^^^^^^ + | Type parameter List for type E shadows the type defined by type List in package scala +-- Error: tests/neg-custom-args/fatal-warnings/i17613a.scala:17:14 ----------------------------------------------------- +17 | def foo[N[M[List[_]]]] = ??? 
// error + | ^^^^^^^ + | Type parameter List for method foo shadows the type defined by type List in package scala diff --git a/tests/neg-custom-args/fatal-warnings/i17613a.scala b/tests/neg-custom-args/fatal-warnings/i17613a.scala new file mode 100644 index 000000000000..4639bd4b5053 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17613a.scala @@ -0,0 +1,23 @@ +// scalac: -Xlint:type-parameter-shadow + +object i17613a: + class B: + type T = Int + trait D + + def foobar[D](in: D) = in.toString // error method parameter shadows some other type + type MySeq[D] = Seq[D] // error type member's parameter shadows some other type + + class Foo[T](t: T): // error class parameter shadows some other type + def bar[T](w: T) = w.toString // error a type parameter shadows another type parameter + + // even deeply nested... + class C[M[List[_]]] // error + type E[M[List[_]]] = Int // error + def foo[N[M[List[_]]]] = ??? // error + + // ...but not between type parameters in the same list + class F[A, M[N[A]]] + type G[A, M[L[A]]] = Int + def bar[A, N[M[L[A]]]] = ??? + def main(args: Array[String]) = println("Test for type parameter shadow") diff --git a/tests/neg-custom-args/fatal-warnings/i17613b.check b/tests/neg-custom-args/fatal-warnings/i17613b.check new file mode 100644 index 000000000000..ed8ed45e42eb --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17613b.check @@ -0,0 +1,44 @@ +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:9:13 ---------------------------------------------- +9 | def foobar[ImTrait](in: D) = in.toString // error + | ^^^^^^^ + | Type parameter ImTrait for method foobar shadows the type defined by trait ImTrait in object importTry +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:10:13 --------------------------------------------- +10 | type MySeq[ImTrait] = Seq[D] // error + | ^^^^^^^ + | Type parameter ImTrait for type MySeq shadows the type defined by trait ImTrait in object importTry +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:12:14 --------------------------------------------- +12 | def foobar2[ImClass](in: D) = in.toString // error + | ^^^^^^^ + | Type parameter ImClass for method foobar2 shadows the type defined by class ImClass in object importTry +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:13:14 --------------------------------------------- +13 | type MySeq2[ImClass] = Seq[D] // error + | ^^^^^^^ + | Type parameter ImClass for type MySeq2 shadows the type defined by class ImClass in object importTry +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:15:12 --------------------------------------------- +15 | class Foo[T](t: T): // error class parameter shadows some other type + | ^ + | Type parameter T for class Foo shadows the type defined by type T in class B +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:19:15 --------------------------------------------- +19 | def intType[List1](x: T) = x.toString() // error + | ^^^^^ + | Type parameter List1 for method intType shadows an explicitly renamed type : List1 +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:23:12 --------------------------------------------- +23 | class C[M[List[_]]] // error List not renamed here + | ^^^^^^^ + | Type parameter List for class C shadows the type defined by type List in package scala +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:24:11 --------------------------------------------- +24 | type 
E[M[Int[_]]] = Int // error + | ^^^^^^ + | Type parameter Int for type E shadows the type defined by class Int in package scala +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:26:14 --------------------------------------------- +26 | def foo[N[M[List[_]]]] = // error + | ^^^^^^^ + | Type parameter List for method foo shadows the type defined by type List in package scala +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:29:11 --------------------------------------------- +29 | type Z[ImClassR] = Int // error + | ^^^^^^^^ + | Type parameter ImClassR for type Z shadows an explicitly renamed type : ImClassR +-- Error: tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala:30:18 --------------------------------------------- +30 | class InnerCl[ImClassR] // error + | ^^^^^^^^ + | Type parameter ImClassR for class InnerCl shadows an explicitly renamed type : ImClassR diff --git a/tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala b/tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala new file mode 100644 index 000000000000..d2c1f334dd31 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17613b/i17613b.scala @@ -0,0 +1,33 @@ +// scalac: -Xlint:type-parameter-shadow + +object i17613b: + import importTry._ + class B: + type T = Int + trait D + + def foobar[ImTrait](in: D) = in.toString // error + type MySeq[ImTrait] = Seq[D] // error + + def foobar2[ImClass](in: D) = in.toString // error + type MySeq2[ImClass] = Seq[D] // error + + class Foo[T](t: T): // error class parameter shadows some other type + import scala.collection.immutable.{List => List1} + def bar[List](w: T) = w.toString // no warning due to the explicit import renaming + + def intType[List1](x: T) = x.toString() // error + + type Y[List] = Int // no warning + + class C[M[List[_]]] // error List not renamed here + type E[M[Int[_]]] = Int // error + + def foo[N[M[List[_]]]] = // error + import importTry.{ImClass => ImClassR} + def inner[ImClass] = // no warning + type Z[ImClassR] = Int // error + class InnerCl[ImClassR] // error + 5 + + def main(args: Array[String]) = println("Test for type parameter shadow") diff --git a/tests/neg-custom-args/fatal-warnings/i17613b/importTry.scala b/tests/neg-custom-args/fatal-warnings/i17613b/importTry.scala new file mode 100644 index 000000000000..879f40ace356 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17613b/importTry.scala @@ -0,0 +1,5 @@ +object importTry: + + trait ImTrait + + class ImClass \ No newline at end of file From 1f15b297b3d4579a75b55c1c2c2bce5558013d1e Mon Sep 17 00:00:00 2001 From: Carl Date: Wed, 7 Jun 2023 04:03:20 +0200 Subject: [PATCH 04/90] Move out miniphase --- compiler/src/dotty/tools/dotc/Compiler.scala | 3 +- .../tools/dotc/transform/CheckShadowing.scala | 28 +++++++++---------- 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 743aca5bf90a..5444bc3f41b6 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -35,7 +35,8 @@ class Compiler { protected def frontendPhases: List[List[Phase]] = List(new Parser) :: // Compiler frontend: scanner, parser List(new TyperPhase) :: // Compiler frontend: namer, typer - List(new CheckUnused.PostTyper, new CheckShadowing) :: // Check for unused and shadowing elements + List(new CheckUnused.PostTyper) :: // Check for unused elements + List(new CheckShadowing) :: // Check for shadowing elements 
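Note: as a minimal sketch of what the two new lints report (condensed from the i17612a.scala and i17613a.scala tests added earlier in this series; the scalac invocation and file name in the comment are illustrative, and this snippet is separate from the surrounding diff):

```scala
// Sketch only; compile with: scalac -Xlint:private-shadow,type-parameter-shadow Demo.scala
object Demo:
  class Base(var x: Int, val y: Int)
  class Derived(x: Int, y: Int) extends Base(x, y + 1) // private-shadow: x and y shadow the fields inherited from Base

  class B:
    type T = Int
    class Foo[T](t: T):              // type-parameter-shadow: T shadows type T defined in class B
      def bar[T](w: T) = w.toString  // type-parameter-shadow: T shadows Foo's own type parameter T
```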
List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files diff --git a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala index 1c575fdc89a1..90834bf5441e 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala @@ -158,8 +158,8 @@ class CheckShadowing extends MiniPhase: override def traverse(tree: tpd.Tree)(using Context): Unit = tree match case t:tpd.Import => - shadowingDataApply(sd => sd.registerImport(t)) - traverseChildren(tree) + val newCtx = shadowingDataApply(sd => sd.registerImport(t)) + traverseChildren(tree)(using newCtx) case _ => traverseChildren(tree) @@ -180,9 +180,9 @@ object CheckShadowing: private val renamedImports = MutStack[MutMap[SimpleName, Name]]() // original name -> renamed name private val typeParamCandidates = MutMap[Symbol, Seq[tpd.TypeDef]]().withDefaultValue(Seq()) - private val shadowedTypeDefs = MutSet[TypeParamShadowWarning]() + private val typeParamShadowWarnings = MutSet[TypeParamShadowWarning]() - private val shadowedPrivateDefs = MutSet[PrivateShadowWarning]() + private val privateShadowWarnings = MutSet[PrivateShadowWarning]() def inNewScope()(using Context) = explicitsImports.push(MutSet()) @@ -194,7 +194,6 @@ object CheckShadowing: /** Register the Root imports (at once per compilation unit)*/ def registerRootImports()(using Context) = - ctx.definitions.rootImportTypes.foreach(rimp => println()) val langPackageName = ctx.definitions.JavaLangPackageVal.name.toSimpleName // excludes lang package rootImports.addAll(ctx.definitions.rootImportTypes.withFilter(_.name.toSimpleName != langPackageName).flatMap(_.typeMembers)) @@ -222,7 +221,7 @@ object CheckShadowing: .orElse(lookForUnitShadowedType(sym)) shadowedType.foreach(shadowed => if !renamedImports.exists(_.contains(shadowed.name.toSimpleName)) then - shadowedTypeDefs += TypeParamShadowWarning(typeDef.srcPos, typeDef.symbol, parent, shadowed) + typeParamShadowWarnings += TypeParamShadowWarning(typeDef.srcPos, typeDef.symbol, parent, shadowed) ) }) @@ -247,7 +246,7 @@ object CheckShadowing: /** Register if the valDef is a private declaration that shadows an inherited field */ def registerPrivateShadows(valDef: tpd.ValDef)(using Context): Unit = lookForShadowedField(valDef.symbol).foreach(shadowedField => - shadowedPrivateDefs += PrivateShadowWarning(valDef.startPos, valDef.symbol, shadowedField) + privateShadowWarnings += PrivateShadowWarning(valDef.startPos, valDef.symbol, shadowedField) ) private def lookForShadowedField(symDecl: Symbol)(using Context): Option[Symbol] = @@ -266,18 +265,17 @@ object CheckShadowing: /** Get the shadowing analysis's result */ def getShadowingResult(using Context): ShadowResult = - - val privateShadowWarnings: List[ShadowWarning] = + val privateWarnings: List[ShadowWarning] = if ctx.settings.XlintHas.privateShadow then - shadowedPrivateDefs.toList + privateShadowWarnings.toList else Nil - val typeParamShadowWarnings: List[ShadowWarning] = + val typeParamWarnings: List[ShadowWarning] = if ctx.settings.XlintHas.typeParameterShadow then - shadowedTypeDefs.toList + typeParamShadowWarnings.toList else Nil - ShadowResult(privateShadowWarnings ++ typeParamShadowWarnings) + ShadowResult(privateWarnings ++ typeParamWarnings) extension (sym: Symbol) 
/** Given an import and accessibility, return the import's symbol that matches import<->this symbol */ @@ -285,8 +283,8 @@ object CheckShadowing: val tpd.Import(qual, sels) = imp val simpleSelections = qual.tpe.member(sym.name).alternatives val typeSelections = sels.flatMap(n => qual.tpe.member(n.name.toTypeName).alternatives) - - sels.find(is => is.rename.toSimpleName == sym.name.toSimpleName).map(_.symbol) + sels + .find(is => is.rename.toSimpleName == sym.name.toSimpleName).map(_.symbol) .orElse(typeSelections.map(_.symbol).find(sd => sd.name == sym.name)) .orElse(simpleSelections.map(_.symbol).find(sd => sd.name == sym.name)) From 175d4f354ed848ed4fa7b6f63d150441667d0d5e Mon Sep 17 00:00:00 2001 From: Carl Date: Tue, 20 Jun 2023 01:41:06 +0200 Subject: [PATCH 05/90] Fix Exception in CheckUnused isOverriden() helper --- compiler/src/dotty/tools/dotc/Compiler.scala | 3 +-- .../dotty/tools/dotc/config/ScalaSettings.scala | 7 ++----- .../dotty/tools/dotc/transform/CheckUnused.scala | 16 ++++++++++++++-- .../neg-custom-args/fatal-warnings/i16639a.scala | 4 ++-- 4 files changed, 19 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 5444bc3f41b6..6727fc4e91c0 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -35,8 +35,7 @@ class Compiler { protected def frontendPhases: List[List[Phase]] = List(new Parser) :: // Compiler frontend: scanner, parser List(new TyperPhase) :: // Compiler frontend: namer, typer - List(new CheckUnused.PostTyper) :: // Check for unused elements - List(new CheckShadowing) :: // Check for shadowing elements + List(new CheckShadowing, new CheckUnused.PostTyper) :: // Check for unused elements // Check for shadowing elements List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index c144364f68ba..0922ed0d2d22 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -312,7 +312,6 @@ private sealed trait XSettings: helpArg = "advanced warning", descr = "Enable or disable specific `lint` warnings", choices = List( - ChoiceWithHelp("nowarn", ""), ChoiceWithHelp("all", ""), ChoiceWithHelp("private-shadow", "Warn if a private field or class parameter shadows a superclass field"), ChoiceWithHelp("type-parameter-shadow", "Warn when a type parameter shadows a type already in the scope"), @@ -321,10 +320,8 @@ private sealed trait XSettings: ) object XlintHas: - def isChoiceSet(s: String)(using Context) = Xlint.value.pipe(us => us.contains(s)) - def allOr(s: String)(using Context) = Xlint.value.pipe(us => us.contains("all") || us.contains(s)) - def nowarn(using Context) = allOr("nowarn") - + def allOr(s: String)(using Context) = + Xlint.value.pipe(us => us.contains("all") || us.contains(s)) def privateShadow(using Context) = allOr("private-shadow") def typeParameterShadow(using Context) = diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index bd521c8679d0..572734ccd809 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -633,6 +633,19 @@ object CheckUnused: imp.expr.tpe.member(sel.name.toTypeName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit)) ) + /** Returns some inherited symbol with the same type and name as the given "symDecl" */ + private def lookForInheritedDecl(symDecl: Symbol)(using Context): Option[Symbol] = + val symDeclType = symDecl.info + val bClasses = symDecl.owner.info.baseClasses + bClasses match + case _ :: inherited => + inherited + .map(classSymbol => symDecl.denot.matchingDecl(classSymbol, symDeclType)) + .find(sym => sym.name == symDecl.name) + case Nil => + None + + extension (tree: ImportSelector) def boundTpe: Type = tree.bound match { case untpd.TypedSplice(tree1) => tree1.tpe @@ -705,8 +718,7 @@ object CheckUnused: /** A function is overriden. Either has `override flags` or parent has a matching member (type and name) */ private def isOverriden(using Context): Boolean = - sym.is(Flags.Override) || - (sym.exists && sym.owner.thisType.parents.exists(p => sym.matchingMember(p).exists)) + sym.is(Flags.Override) || lookForInheritedDecl(sym).isDefined end extension diff --git a/tests/neg-custom-args/fatal-warnings/i16639a.scala b/tests/neg-custom-args/fatal-warnings/i16639a.scala index c62910b7f566..89f8bfef1b17 100644 --- a/tests/neg-custom-args/fatal-warnings/i16639a.scala +++ b/tests/neg-custom-args/fatal-warnings/i16639a.scala @@ -26,11 +26,11 @@ trait Bing trait Accessors { private var v1: Int = 0 // error warn private var v2: Int = 0 // error warn, never set - private var v3: Int = 0 // warn, never got /Dotty: no warn even if not used + private var v3: Int = 0 private var v4: Int = 0 // no warn private[this] var v5 = 0 // error warn, never set - private[this] var v6 = 0 // warn, never got /Dotty: no warn even if not used + private[this] var v6 = 0 private[this] var v7 = 0 // no warn def bippy(): Int = { From 07f65819ac23d618f5f363b9f0fe28f11bd3c84f Mon Sep 17 00:00:00 2001 From: Carl Date: Tue, 20 Jun 2023 11:16:27 +0200 Subject: [PATCH 06/90] Warn instead of fail for invalid -Xlint args --- compiler/src/dotty/tools/dotc/Compiler.scala | 2 +- .../dotty/tools/dotc/config/ScalaSettings.scala | 2 +- .../src/dotty/tools/dotc/config/Settings.scala | 16 ++++++++++++++-- .../tools/dotc/transform/CheckShadowing.scala | 4 ++-- .../dotty/tools/dotc/transform/CheckUnused.scala | 4 ++-- 5 files changed, 20 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 6727fc4e91c0..ff701727c4ad 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -35,7 +35,7 @@ class Compiler { protected def frontendPhases: List[List[Phase]] = List(new Parser) :: // Compiler frontend: scanner, parser List(new TyperPhase) :: // Compiler frontend: namer, typer - List(new CheckShadowing, new CheckUnused.PostTyper) :: // Check for unused elements // Check for shadowing elements + List(new CheckUnused.PostTyper, new CheckShadowing) :: // Check for unused elements and shadowing elements List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 0922ed0d2d22..4675e448b75a 100644 --- 
a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -307,7 +307,7 @@ private sealed trait XSettings: val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") - val Xlint: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( + val Xlint: Setting[List[ChoiceWithHelp[String]]] = UncompleteMultiChoiceHelpSetting( name = "-Xlint", helpArg = "advanced warning", descr = "Enable or disable specific `lint` warnings", diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 34e5582e8a91..d992f5bdf2ee 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -62,6 +62,7 @@ object Settings: prefix: String = "", aliases: List[String] = Nil, depends: List[(Setting[?], Any)] = Nil, + ignoreInvalidArgs: Boolean = false, propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { private var changed: Boolean = false @@ -104,8 +105,16 @@ object Settings: def fail(msg: String, args: List[String]) = ArgsSummary(sstate, args, errors :+ msg, warnings) + def warn(msg: String, args: List[String]) = + ArgsSummary(sstate, args, errors, warnings :+ msg) + def missingArg = - fail(s"missing argument for option $name", args) + val msg = s"missing argument for option $name" + if ignoreInvalidArgs then warn(msg + ", the tag was ignored", args) else fail(msg, args) + + def invalidChoices(invalid: List[String]) = + val msg = s"invalid choice(s) for $name: ${invalid.mkString(",")}" + if ignoreInvalidArgs then warn(msg + ", the tag was ignored", args) else fail(msg, args) def setBoolean(argValue: String, args: List[String]) = if argValue.equalsIgnoreCase("true") || argValue.isEmpty then update(true, args) @@ -144,7 +153,7 @@ object Settings: choices match case Some(valid) => strings.filterNot(valid.contains) match case Nil => update(strings, args) - case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) + case invalid => invalidChoices(invalid) case _ => update(strings, args) case (StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => setString(argRest, args) @@ -287,6 +296,9 @@ object Settings: def MultiChoiceHelpSetting(name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def UncompleteMultiChoiceHelpSetting(name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases, ignoreInvalidArgs = true)) + def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = publish(Setting(name, descr, default, aliases = aliases)) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala index 90834bf5441e..26539bfb9b88 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala @@ -83,7 +83,7 @@ class CheckShadowing extends 
MiniPhase: ctx override def prepareForOther(tree: tpd.Tree)(using Context): Context = - importTraverser(tree.symbol).traverse(tree) + importTraverser.traverse(tree) ctx override def prepareForValDef(tree: tpd.ValDef)(using Context): Context = @@ -152,7 +152,7 @@ class CheckShadowing extends MiniPhase: end nestedTypeTraverser // To reach the imports during a miniphase traversal - private def importTraverser(parent: Symbol) = new TreeTraverser: + private def importTraverser = new TreeTraverser: import tpd._ override def traverse(tree: tpd.Tree)(using Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 572734ccd809..aa58a9a05495 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -291,9 +291,9 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke case UnusedSymbol(t, _, WarnTypes.PatVars) => report.warning(s"unused pattern variable", t) case UnusedSymbol(t, _, WarnTypes.UnsetLocals) => - report.warning(s"unset local variable", t) + report.warning(s"unset local variable, consider using an immutable val instead", t) case UnusedSymbol(t, _, WarnTypes.UnsetPrivates) => - report.warning(s"unset private variable", t) + report.warning(s"unset private variable, consider using an immutable val instead", t) } end CheckUnused From 297183cba133ee98de3cb8aa37cabc9db2d05034 Mon Sep 17 00:00:00 2001 From: Carl Date: Tue, 20 Jun 2023 18:09:51 +0200 Subject: [PATCH 07/90] Lint MegaPhase after ExtractSemanticDB --- compiler/src/dotty/tools/dotc/Compiler.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index ff701727c4ad..1383017b0e2a 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -35,10 +35,10 @@ class Compiler { protected def frontendPhases: List[List[Phase]] = List(new Parser) :: // Compiler frontend: scanner, parser List(new TyperPhase) :: // Compiler frontend: namer, typer - List(new CheckUnused.PostTyper, new CheckShadowing) :: // Check for unused elements and shadowing elements List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files + List(new CheckUnused.PostTyper, new CheckShadowing) :: // Check for unused elements and shadowing elements List(new PostTyper) :: // Additional checks and cleanups after type checking List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks From 1572968bfe9963255c8240bec95aa0a726e65778 Mon Sep 17 00:00:00 2001 From: Ondrej Lhotak Date: Fri, 14 Jul 2023 16:51:02 -0400 Subject: [PATCH 08/90] fix #11967: flow typing nullability in pattern matches --- .../dotty/tools/dotc/typer/Nullables.scala | 10 ++++++ .../src/dotty/tools/dotc/typer/Typer.scala | 14 ++++++-- tests/explicit-nulls/pos/flow-match.scala | 32 +++++++++++++++++++ 3 files changed, 54 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index 9104418d406f..5be8e0aa3060 100644 --- 
a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -190,6 +190,16 @@ object Nullables: // TODO: Add constant pattern if the constant type is not nullable case _ => false + def matchesNull(cdef: CaseDef)(using Context): Boolean = + cdef.guard.isEmpty && patMatchesNull(cdef.pat) + + private def patMatchesNull(pat: Tree)(using Context): Boolean = pat match + case Literal(Constant(null)) => true + case Bind(_, pat) => patMatchesNull(pat) + case Alternative(trees) => trees.exists(patMatchesNull) + case _ if isVarPattern(pat) => true + case _ => false + extension (infos: List[NotNullInfo]) /** Do the current not-null infos imply that `ref` is not null? diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index cb5051ea34ad..0cb1361618ef 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1843,12 +1843,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Special typing of Match tree when the expected type is a MatchType, * and the patterns of the Match tree and the MatchType correspond. */ - def typedDependentMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: MatchType)(using Context): Tree = { + def typedDependentMatchFinish(tree: untpd.Match, sel: Tree, wideSelType0: Type, cases: List[untpd.CaseDef], pt: MatchType)(using Context): Tree = { var caseCtx = ctx + var wideSelType = wideSelType0 + var alreadyStripped = false val cases1 = tree.cases.zip(pt.cases) .map { case (cas, tpe) => val case1 = typedCase(cas, sel, wideSelType, tpe)(using caseCtx) caseCtx = Nullables.afterPatternContext(sel, case1.pat) + if !alreadyStripped && Nullables.matchesNull(case1) then + wideSelType = wideSelType.stripNull + alreadyStripped = true case1 } .asInstanceOf[List[CaseDef]] @@ -1862,10 +1867,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer assignType(cpy.Match(tree)(sel, cases1), sel, cases1) } - def typedCases(cases: List[untpd.CaseDef], sel: Tree, wideSelType: Type, pt: Type)(using Context): List[CaseDef] = + def typedCases(cases: List[untpd.CaseDef], sel: Tree, wideSelType0: Type, pt: Type)(using Context): List[CaseDef] = var caseCtx = ctx + var wideSelType = wideSelType0 + var alreadyStripped = false cases.mapconserve { cas => val case1 = typedCase(cas, sel, wideSelType, pt)(using caseCtx) + if !alreadyStripped && Nullables.matchesNull(case1) then + wideSelType = wideSelType.stripNull + alreadyStripped = true caseCtx = Nullables.afterPatternContext(sel, case1.pat) case1 } diff --git a/tests/explicit-nulls/pos/flow-match.scala b/tests/explicit-nulls/pos/flow-match.scala index 260068b3ac3f..2ed746be81b5 100644 --- a/tests/explicit-nulls/pos/flow-match.scala +++ b/tests/explicit-nulls/pos/flow-match.scala @@ -12,4 +12,36 @@ object MatchTest { // after the null case, s becomes non-nullable case _ => s } + + def f(s: String | Null): String = s match { + case null => "other" + case s2 => s2 + case s3 => s3 + } + + class Foo + + def f2(s: String | Null): String = s match { + case n @ null => "other" + case s2 => s2 + case s3 => s3 + } + + def f3(s: String | Null): String = s match { + case null | "foo" => "other" + case s2 => s2 + case s3 => s3 + } + + def f4(s: String | Null): String = s match { + case _ => "other" + case s2 => s2 + case s3 => s3 + } + + def f5(s: String | Null): String = s match { + case x => "other" + case s2 => s2 + case 
s3 => s3 + } } From 91438892b2551234c55e4c02bfc542dc56ed54de Mon Sep 17 00:00:00 2001 From: Ondrej Lhotak Date: Sat, 15 Jul 2023 17:03:01 -0400 Subject: [PATCH 09/90] address review comments --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- tests/explicit-nulls/neg/flow-match.scala | 15 +++++++++++++++ tests/explicit-nulls/pos/flow-match.scala | 11 +++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 tests/explicit-nulls/neg/flow-match.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 0cb1361618ef..44b9844916bd 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1873,10 +1873,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var alreadyStripped = false cases.mapconserve { cas => val case1 = typedCase(cas, sel, wideSelType, pt)(using caseCtx) + caseCtx = Nullables.afterPatternContext(sel, case1.pat) if !alreadyStripped && Nullables.matchesNull(case1) then wideSelType = wideSelType.stripNull alreadyStripped = true - caseCtx = Nullables.afterPatternContext(sel, case1.pat) case1 } diff --git a/tests/explicit-nulls/neg/flow-match.scala b/tests/explicit-nulls/neg/flow-match.scala new file mode 100644 index 000000000000..e385758261cd --- /dev/null +++ b/tests/explicit-nulls/neg/flow-match.scala @@ -0,0 +1,15 @@ +// Test flow-typing when NotNullInfos are from cases + +object MatchTest { + def f6(s: String | Null): String = s match { + case s2 => s2 // error + case null => "other" // error + case s3 => s3 + } + + def f7(s: String | Null): String = s match { + case null => "other" + case null => "other" // error + case s3 => s3 + } +} diff --git a/tests/explicit-nulls/pos/flow-match.scala b/tests/explicit-nulls/pos/flow-match.scala index 2ed746be81b5..57e2c12b3c68 100644 --- a/tests/explicit-nulls/pos/flow-match.scala +++ b/tests/explicit-nulls/pos/flow-match.scala @@ -44,4 +44,15 @@ object MatchTest { case s2 => s2 case s3 => s3 } + + def f6(s: String | Null): String = s match { + case s3: String => s3 + case null => "other" + case s4 => s4 + } + + def f7(s: String | Null): String = s match { + case s2 => s2.nn + case s3 => s3 + } } From f351ec4c255841031c7d4099cc23c602b4d3289c Mon Sep 17 00:00:00 2001 From: Ondrej Lhotak Date: Thu, 13 Jul 2023 21:07:51 -0400 Subject: [PATCH 10/90] allow nullability flow typing even in presence of pattern match Nullability flow typing is conservatively disabled for mutable variables to which a write occurs nested inside a Tree other than some known ones, such as If and WhileDo. This is to prevent flow-sensitive reasoning for variables that are captured and written to in a closure. Pattern matches do not create a closure. This change enables nullability flow typing even for mutable variables that are written to inside a pattern match. 
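
As an illustration (a minimal sketch, not part of the diff below; it assumes -Yexplicit-nulls is enabled and uses made-up names), code like the following is intended to be accepted after this change, because the writes to `x` inside the match cases no longer remove `x` from the set of flow-typed candidates:

    def greet(flag: Boolean): String = {
      var x: String | Null = null
      flag match {
        case true  => x = "yes"
        case false => x = "no"
      }
      // In the else branch `x` is flow-typed to `String`, so no `.nn` is needed.
      if (x == null) "unknown" else x
    }

Before this change, the assignments inside the `case` bodies were treated like writes nested in an arbitrary tree (as if `x` might be captured by a closure), which disabled flow typing for `x` and made the final `else x` fail to typecheck.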
--- compiler/src/dotty/tools/dotc/typer/Nullables.scala | 2 +- tests/explicit-nulls/pos/match-flow-typing.scala | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 tests/explicit-nulls/pos/match-flow-typing.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index 9104418d406f..f4748eb03849 100644 --- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -445,7 +445,7 @@ object Nullables: else candidates -= name case None => traverseChildren(tree) - case _: (If | WhileDo | Typed) => + case _: (If | WhileDo | Typed | Match | CaseDef) => traverseChildren(tree) // assignments to candidate variables are OK here ... case _ => reachable = Set.empty // ... but not here diff --git a/tests/explicit-nulls/pos/match-flow-typing.scala b/tests/explicit-nulls/pos/match-flow-typing.scala new file mode 100644 index 000000000000..cda8b25da5ff --- /dev/null +++ b/tests/explicit-nulls/pos/match-flow-typing.scala @@ -0,0 +1,8 @@ +def m(): String = { + var x: String|Null = "foo" + 1 match { + case 1 => x = x + } + if(x == null) "foo" + else x +} From 74f6851f0f7db62f1fcdb9e2362a6c2b5944aa85 Mon Sep 17 00:00:00 2001 From: Ondrej Lhotak Date: Mon, 24 Jul 2023 15:33:19 -0400 Subject: [PATCH 11/90] additionally include writes under try/catch/finally --- compiler/src/dotty/tools/dotc/typer/Nullables.scala | 2 +- tests/explicit-nulls/pos/match-flow-typing.scala | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index f4748eb03849..68e3c0f8ccd6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -445,7 +445,7 @@ object Nullables: else candidates -= name case None => traverseChildren(tree) - case _: (If | WhileDo | Typed | Match | CaseDef) => + case _: (If | WhileDo | Typed | Match | CaseDef | untpd.ParsedTry) => traverseChildren(tree) // assignments to candidate variables are OK here ... case _ => reachable = Set.empty // ... 
but not here diff --git a/tests/explicit-nulls/pos/match-flow-typing.scala b/tests/explicit-nulls/pos/match-flow-typing.scala index cda8b25da5ff..200af36a73e0 100644 --- a/tests/explicit-nulls/pos/match-flow-typing.scala +++ b/tests/explicit-nulls/pos/match-flow-typing.scala @@ -6,3 +6,16 @@ def m(): String = { if(x == null) "foo" else x } + +def m2(): String = { + var x: String|Null = "foo" + try { + x = x + } catch { + case e => x = x + } finally { + x = x + } + if(x == null) "foo" + else x +} From b18b7442d1eb01230d1e7de2622950e0c1491b0a Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 2 Aug 2023 16:47:24 +0200 Subject: [PATCH 12/90] faster class dependency cache --- .../tools/dotc/sbt/ExtractDependencies.scala | 78 ++++++++++++------- .../dotty/tools/dotc/typer/Synthesizer.scala | 1 - 2 files changed, 51 insertions(+), 28 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 01a3362c659a..d7d3678a3298 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -26,6 +26,8 @@ import xsbti.UseScope import xsbti.api.DependencyContext import xsbti.api.DependencyContext._ +import scala.jdk.CollectionConverters.* + import scala.collection.{Set, mutable} @@ -74,7 +76,11 @@ class ExtractDependencies extends Phase { collector.traverse(unit.tpdTree) if (ctx.settings.YdumpSbtInc.value) { - val deps = rec.classDependencies.map(_.toString).toArray[Object] + val deps = rec.classDependencies.flatMap((k,vs) => + vs.iterator.flatMap((to, depCtxs) => + depCtxs.asScala.map(depCtx => s"ClassDependency($k, $to, $depCtx)") + ) + ).toArray[Object] val names = rec.usedNames.map { case (clazz, names) => s"$clazz: $names" }.toArray[Object] Arrays.sort(deps) Arrays.sort(names) @@ -265,7 +271,7 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. // Avoid cycles by remembering both the types (testcase: // tests/run/enum-values.scala) and the symbols of named types (testcase: // tests/pos-java-interop/i13575) we've seen before. - val seen = new mutable.HashSet[Symbol | Type] + val seen = new util.HashSet[Symbol | Type](64) def traverse(tp: Type): Unit = if (!seen.contains(tp)) { seen += tp tp match { @@ -306,7 +312,15 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. } } -case class ClassDependency(fromClass: Symbol, toClass: Symbol, context: DependencyContext) +class ClassDepsInClass: + private val _classes = util.EqHashMap[Symbol, EnumSet[DependencyContext]]() + + def addDependency(fromClass: Symbol, context: DependencyContext): Unit = + val set = _classes.getOrElseUpdate(fromClass, EnumSet.noneOf(classOf[DependencyContext])) + set.add(context) + + def iterator: Iterator[(Symbol, EnumSet[DependencyContext])] = + _classes.iterator /** Record dependencies using `addUsedName`/`addClassDependency` and inform Zinc using `sendToZinc()`. * @@ -355,10 +369,9 @@ class DependencyRecorder { * safely. 
*/ def addUsedRawName(name: Name, includeSealedChildren: Boolean = false)(using Context): Unit = { - val fromClass = resolveDependencySource + val fromClass = resolveDependencyFromClass if (fromClass.exists) { - val usedName = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass) - usedName.update(name, includeSealedChildren) + lastUsedCache.update(name, includeSealedChildren) } } @@ -373,9 +386,9 @@ class DependencyRecorder { * of the associated value, see the documentation of parameter `includeSealedChildren` * of `addUsedRawName`. */ - private val _names = new mutable.HashMap[Name, DefaultScopes.type | PatMatScopes.type] + private val _names = new util.HashMap[Name, DefaultScopes.type | PatMatScopes.type] - def names: collection.Map[Name, EnumSet[UseScope]] = _names + def iterator: Iterator[(Name, EnumSet[UseScope])] = _names.iterator private[DependencyRecorder] def update(name: Name, includeSealedChildren: Boolean): Unit = { if (includeSealedChildren) @@ -386,7 +399,7 @@ class DependencyRecorder { override def toString(): String = { val builder = new StringBuilder - names.foreach { case (name, scopes) => + iterator.foreach { (name, scopes) => builder.append(name.mangledString) builder.append(" in [") scopes.forEach(scope => builder.append(scope.toString)) @@ -398,17 +411,17 @@ class DependencyRecorder { } - private val _classDependencies = new mutable.HashSet[ClassDependency] + private val _classDependencies = new mutable.HashMap[Symbol, ClassDepsInClass] - def classDependencies: Set[ClassDependency] = _classDependencies + def classDependencies: collection.Map[Symbol, ClassDepsInClass] = _classDependencies /** Record a dependency to the class `to` in a given `context` * from the current non-local enclosing class. */ def addClassDependency(toClass: Symbol, context: DependencyContext)(using Context): Unit = - val fromClass = resolveDependencySource + val fromClass = resolveDependencyFromClass if (fromClass.exists) - _classDependencies += ClassDependency(fromClass, toClass, context) + lastDepCache.addDependency(toClass, context) private val _usedNames = new mutable.HashMap[Symbol, UsedNamesInClass] @@ -418,11 +431,13 @@ class DependencyRecorder { usedNames.foreach: case (clazz, usedNames) => val className = classNameAsString(clazz) - usedNames.names.foreach: - case (usedName, scopes) => - cb.usedName(className, usedName.toString, scopes) + usedNames.iterator.foreach: (usedName, scopes) => + cb.usedName(className, usedName.toString, scopes) val siblingClassfiles = new mutable.HashMap[PlainFile, Path] - classDependencies.foreach(recordClassDependency(cb, _, siblingClassfiles)) + for (fromClass, partialDependencies) <- _classDependencies do + for (toClass, deps) <- partialDependencies.iterator do + for dep <- deps.asScala do + recordClassDependency(cb, fromClass, toClass, dep, siblingClassfiles) clear() /** Clear all state. */ @@ -431,15 +446,17 @@ class DependencyRecorder { _classDependencies.clear() lastOwner = NoSymbol lastDepSource = NoSymbol + lastDepCache = null + lastUsedCache = null _responsibleForImports = NoSymbol /** Handles dependency on given symbol by trying to figure out if represents a term * that is coming from either source code (not necessarily compiled in this compilation * run) or from class file and calls respective callback method. 
*/ - private def recordClassDependency(cb: interfaces.IncrementalCallback, dep: ClassDependency, - siblingClassfiles: mutable.Map[PlainFile, Path])(using Context): Unit = { - val fromClassName = classNameAsString(dep.fromClass) + private def recordClassDependency(cb: interfaces.IncrementalCallback, fromClass: Symbol, toClass: Symbol, + depCtx: DependencyContext, siblingClassfiles: mutable.Map[PlainFile, Path])(using Context): Unit = { + val fromClassName = classNameAsString(fromClass) val sourceFile = ctx.compilationUnit.source /**For a `.tasty` file, constructs a sibling class to the `jpath`. @@ -465,13 +482,13 @@ class DependencyRecorder { }) def binaryDependency(path: Path, binaryClassName: String) = - cb.binaryDependency(path, binaryClassName, fromClassName, sourceFile, dep.context) + cb.binaryDependency(path, binaryClassName, fromClassName, sourceFile, depCtx) - val depClass = dep.toClass + val depClass = toClass val depFile = depClass.associatedFile if depFile != null then { // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) - def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance + def allowLocal = depCtx == DependencyByInheritance || depCtx == LocalDependencyByInheritance val isTasty = depFile.hasTastyExtension def processExternalDependency() = { @@ -485,7 +502,7 @@ class DependencyRecorder { case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem binaryDependency(if isTasty then cachedSiblingClass(pf) else pf.jpath, binaryClassName) case _ => - internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", dep.fromClass.srcPos) + internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", fromClass.srcPos) } } @@ -495,23 +512,30 @@ class DependencyRecorder { // We cannot ignore dependencies coming from the same source file because // the dependency info needs to propagate. See source-dependencies/trait-trait-211. val toClassName = classNameAsString(depClass) - cb.classDependency(toClassName, fromClassName, dep.context) + cb.classDependency(toClassName, fromClassName, depCtx) } } private var lastOwner: Symbol = _ private var lastDepSource: Symbol = _ + private var lastDepCache: ClassDepsInClass | Null = _ + private var lastUsedCache: UsedNamesInClass | Null = _ /** The source of the dependency according to `nonLocalEnclosingClass` * if it exists, otherwise fall back to `responsibleForImports`. * * This is backed by a cache which is invalidated when `ctx.owner` changes. 
*/ - private def resolveDependencySource(using Context): Symbol = { + private def resolveDependencyFromClass(using Context): Symbol = { + import dotty.tools.uncheckedNN if (lastOwner != ctx.owner) { lastOwner = ctx.owner val source = nonLocalEnclosingClass - lastDepSource = if (source.is(PackageClass)) responsibleForImports else source + val fromClass = if (source.is(PackageClass)) responsibleForImports else source + if lastDepSource != fromClass then + lastDepSource = fromClass + lastDepCache = _classDependencies.getOrElseUpdate(fromClass, new ClassDepsInClass) + lastUsedCache = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass) } lastDepSource diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index cbb13a841946..35bb36b003f9 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -20,7 +20,6 @@ import annotation.{tailrec, constructorOnly} import ast.tpd._ import Synthesizer._ import sbt.ExtractDependencies.* -import sbt.ClassDependency import xsbti.api.DependencyContext._ /** Synthesize terms for special classes */ From d20c6246504fb4ea5595821533dbfc43ac483775 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 3 Aug 2023 13:46:32 +0200 Subject: [PATCH 13/90] add EqHashSet --- .../src/dotty/tools/dotc/util/EqHashSet.scala | 136 +++++++++++++ .../tools/dotc/util/GenericHashSet.scala | 191 ++++++++++++++++++ .../src/dotty/tools/dotc/util/HashSet.scala | 76 +------ .../dotty/tools/dotc/util/MutableSet.scala | 7 + 4 files changed, 343 insertions(+), 67 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/util/EqHashSet.scala create mode 100644 compiler/src/dotty/tools/dotc/util/GenericHashSet.scala diff --git a/compiler/src/dotty/tools/dotc/util/EqHashSet.scala b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala new file mode 100644 index 000000000000..42aee97ce79c --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala @@ -0,0 +1,136 @@ +package dotty.tools.dotc.util + +import dotty.tools.uncheckedNN + +object EqHashSet: + + def from[T](xs: IterableOnce[T]): EqHashSet[T] = + val set = new EqHashSet[T]() + set ++= xs + set + +/** A hash set that allows some privileged protected access to its internals + * @param initialCapacity Indicates the initial number of slots in the hash table. + * The actual number of slots is always a power of 2, so the + * initial size of the table will be the smallest power of two + * that is equal or greater than the given `initialCapacity`. + * Minimum value is 4. +* @param capacityMultiple The minimum multiple of capacity relative to used elements. + * The hash table will be re-sized once the number of elements + * multiplied by capacityMultiple exceeds the current size of the hash table. + * However, a table of size up to DenseLimit will be re-sized only + * once the number of elements reaches the table's size. 
+ */ +class EqHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashSet[T](initialCapacity, capacityMultiple) { + import GenericHashSet.DenseLimit + + /** System's identity hashcode left shifted by 1 */ + final def hash(key: T): Int = + System.identityHashCode(key) << 1 + + /** reference equality */ + final def isEqual(x: T, y: T): Boolean = x.asInstanceOf[AnyRef] eq y.asInstanceOf[AnyRef] + + /** Turn hashcode `x` into a table index */ + private def index(x: Int): Int = x & (table.length - 1) + + private def firstIndex(x: T) = if isDense then 0 else index(hash(x)) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 1) + + private def entryAt(idx: Int): T | Null = table(idx).asInstanceOf[T | Null] + private def setEntry(idx: Int, x: T) = table(idx) = x.asInstanceOf[AnyRef | Null] + + override def lookup(x: T): T | Null = + Stats.record(statsItem("lookup")) + var idx = firstIndex(x) + var e: T | Null = entryAt(idx) + while e != null do + if isEqual(e.uncheckedNN, x) then return e + idx = nextIndex(idx) + e = entryAt(idx) + null + + /** Add entry at `x` at index `idx` */ + private def addEntryAt(idx: Int, x: T): T = + Stats.record(statsItem("addEntryAt")) + setEntry(idx, x) + used += 1 + if used > limit then growTable() + x + + /** attempts to put `x` in the Set, if it was not entered before, return true, else return false. */ + override def add(x: T): Boolean = + Stats.record(statsItem("enter")) + var idx = firstIndex(x) + var e: T | Null = entryAt(idx) + while e != null do + if isEqual(e.uncheckedNN, x) then return false // already entered + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, x) + true // first entry + + override def put(x: T): T = + Stats.record(statsItem("put")) + var idx = firstIndex(x) + var e: T | Null = entryAt(idx) + while e != null do + // TODO: remove uncheckedNN when explicit-nulls is enabled for regule compiling + if isEqual(e.uncheckedNN, x) then return e.uncheckedNN + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, x) + + override def +=(x: T): Unit = put(x) + + override def remove(x: T): Boolean = + Stats.record(statsItem("remove")) + var idx = firstIndex(x) + var e: T | Null = entryAt(idx) + while e != null do + if isEqual(e.uncheckedNN, x) then + var hole = idx + while + idx = nextIndex(idx) + e = entryAt(idx) + e != null + do + val eidx = index(hash(e.uncheckedNN)) + if isDense + || index(eidx - (hole + 1)) > index(idx - (hole + 1)) + // entry `e` at `idx` can move unless `index(hash(e))` is in + // the (ring-)interval [hole + 1 .. 
idx] + then + setEntry(hole, e.uncheckedNN) + hole = idx + table(hole) = null + used -= 1 + return true + idx = nextIndex(idx) + e = entryAt(idx) + false + + override def -=(x: T): Unit = + remove(x) + + private def addOld(x: T) = + Stats.record(statsItem("re-enter")) + var idx = firstIndex(x) + var e = entryAt(idx) + while e != null do + idx = nextIndex(idx) + e = entryAt(idx) + setEntry(idx, x) + + override def copyFrom(oldTable: Array[AnyRef | Null]): Unit = + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val e: T | Null = oldTable(idx).asInstanceOf[T | Null] + if e != null then addOld(e.uncheckedNN) + idx += 1 +} diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala new file mode 100644 index 000000000000..704298e55fb7 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala @@ -0,0 +1,191 @@ +package dotty.tools.dotc.util + +import dotty.tools.uncheckedNN + +object GenericHashSet: + + /** The number of elements up to which dense packing is used. + * If the number of elements reaches `DenseLimit` a hash table is used instead + */ + inline val DenseLimit = 8 + +/** A hash set that allows some privileged protected access to its internals + * @param initialCapacity Indicates the initial number of slots in the hash table. + * The actual number of slots is always a power of 2, so the + * initial size of the table will be the smallest power of two + * that is equal or greater than the given `initialCapacity`. + * Minimum value is 4. +* @param capacityMultiple The minimum multiple of capacity relative to used elements. + * The hash table will be re-sized once the number of elements + * multiplied by capacityMultiple exceeds the current size of the hash table. + * However, a table of size up to DenseLimit will be re-sized only + * once the number of elements reaches the table's size. 
+ */ +abstract class GenericHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] { + import GenericHashSet.DenseLimit + + protected var used: Int = _ + protected var limit: Int = _ + protected var table: Array[AnyRef | Null] = _ + + clear() + + private def allocate(capacity: Int) = + table = new Array[AnyRef | Null](capacity) + limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple + + private def roundToPower(n: Int) = + if n < 4 then 4 + else if Integer.bitCount(n) == 1 then n + else 1 << (32 - Integer.numberOfLeadingZeros(n)) + + def clear(resetToInitial: Boolean): Unit = + used = 0 + if resetToInitial then allocate(roundToPower(initialCapacity)) + else java.util.Arrays.fill(table, null) + + /** The number of elements in the set */ + def size: Int = used + + protected def isDense = limit < DenseLimit + + /** Hashcode, by default a processed `x.hashCode`, can be overridden */ + protected def hash(key: T): Int + + /** Hashcode, by default `equals`, can be overridden */ + protected def isEqual(x: T, y: T): Boolean + + /** Turn hashcode `x` into a table index */ + private def index(x: Int): Int = x & (table.length - 1) + + protected def currentTable: Array[AnyRef | Null] = table + + private def firstIndex(x: T) = if isDense then 0 else index(hash(x)) + private def nextIndex(idx: Int) = + Stats.record(statsItem("miss")) + index(idx + 1) + + private def entryAt(idx: Int): T | Null = table(idx).asInstanceOf[T | Null] + private def setEntry(idx: Int, x: T) = table(idx) = x.asInstanceOf[AnyRef | Null] + + def lookup(x: T): T | Null = + Stats.record(statsItem("lookup")) + var idx = firstIndex(x) + var e: T | Null = entryAt(idx) + while e != null do + if isEqual(e.uncheckedNN, x) then return e + idx = nextIndex(idx) + e = entryAt(idx) + null + + /** Add entry at `x` at index `idx` */ + private def addEntryAt(idx: Int, x: T): T = + Stats.record(statsItem("addEntryAt")) + setEntry(idx, x) + used += 1 + if used > limit then growTable() + x + + /** attempts to put `x` in the Set, if it was not entered before, return true, else return false. */ + override def add(x: T): Boolean = + Stats.record(statsItem("enter")) + var idx = firstIndex(x) + var e: T | Null = entryAt(idx) + while e != null do + if isEqual(e.uncheckedNN, x) then return false // already entered + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, x) + true // first entry + + def put(x: T): T = + Stats.record(statsItem("put")) + var idx = firstIndex(x) + var e: T | Null = entryAt(idx) + while e != null do + // TODO: remove uncheckedNN when explicit-nulls is enabled for regule compiling + if isEqual(e.uncheckedNN, x) then return e.uncheckedNN + idx = nextIndex(idx) + e = entryAt(idx) + addEntryAt(idx, x) + + def +=(x: T): Unit = put(x) + + def remove(x: T): Boolean = + Stats.record(statsItem("remove")) + var idx = firstIndex(x) + var e: T | Null = entryAt(idx) + while e != null do + if isEqual(e.uncheckedNN, x) then + var hole = idx + while + idx = nextIndex(idx) + e = entryAt(idx) + e != null + do + val eidx = index(hash(e.uncheckedNN)) + if isDense + || index(eidx - (hole + 1)) > index(idx - (hole + 1)) + // entry `e` at `idx` can move unless `index(hash(e))` is in + // the (ring-)interval [hole + 1 .. 
idx] + then + setEntry(hole, e.uncheckedNN) + hole = idx + table(hole) = null + used -= 1 + return true + idx = nextIndex(idx) + e = entryAt(idx) + false + + def -=(x: T): Unit = + remove(x) + + private def addOld(x: T) = + Stats.record(statsItem("re-enter")) + var idx = firstIndex(x) + var e = entryAt(idx) + while e != null do + idx = nextIndex(idx) + e = entryAt(idx) + setEntry(idx, x) + + def copyFrom(oldTable: Array[AnyRef | Null]): Unit = + if isDense then + Array.copy(oldTable, 0, table, 0, oldTable.length) + else + var idx = 0 + while idx < oldTable.length do + val e: T | Null = oldTable(idx).asInstanceOf[T | Null] + if e != null then addOld(e.uncheckedNN) + idx += 1 + + protected def growTable(): Unit = + val oldTable = table + val newLength = + if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple) + else table.length * 2 + allocate(newLength) + copyFrom(oldTable) + + abstract class EntryIterator extends Iterator[T]: + def entry(idx: Int): T | Null + private var idx = 0 + def hasNext = + while idx < table.length && table(idx) == null do idx += 1 + idx < table.length + def next() = + require(hasNext) + try entry(idx).uncheckedNN finally idx += 1 + + def iterator: Iterator[T] = new EntryIterator(): + def entry(idx: Int) = entryAt(idx) + + override def toString: String = + iterator.mkString("HashSet(", ", ", ")") + + protected def statsItem(op: String) = + val prefix = if isDense then "HashSet(dense)." else "HashSet." + val suffix = getClass.getSimpleName + s"$prefix$op $suffix" +} diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index a6e1532c804f..e8cabd13a097 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -4,11 +4,6 @@ import dotty.tools.uncheckedNN object HashSet: - /** The number of elements up to which dense packing is used. - * If the number of elements reaches `DenseLimit` a hash table is used instead - */ - inline val DenseLimit = 8 - def from[T](xs: IterableOnce[T]): HashSet[T] = val set = new HashSet[T]() set ++= xs @@ -26,33 +21,8 @@ object HashSet: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. 
*/ -class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] { - import HashSet.DenseLimit - - private var used: Int = _ - private var limit: Int = _ - private var table: Array[AnyRef | Null] = _ - - clear() - - private def allocate(capacity: Int) = - table = new Array[AnyRef | Null](capacity) - limit = if capacity <= DenseLimit then capacity - 1 else capacity / capacityMultiple - - private def roundToPower(n: Int) = - if n < 4 then 4 - else if Integer.bitCount(n) == 1 then n - else 1 << (32 - Integer.numberOfLeadingZeros(n)) - - def clear(resetToInitial: Boolean): Unit = - used = 0 - if resetToInitial then allocate(roundToPower(initialCapacity)) - else java.util.Arrays.fill(table, null) - - /** The number of elements in the set */ - def size: Int = used - - protected def isDense = limit < DenseLimit +class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends GenericHashSet[T](initialCapacity, capacityMultiple) { + import GenericHashSet.DenseLimit /** Hashcode, by default a processed `x.hashCode`, can be overridden */ protected def hash(key: T): Int = @@ -68,8 +38,6 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu /** Turn hashcode `x` into a table index */ protected def index(x: Int): Int = x & (table.length - 1) - protected def currentTable: Array[AnyRef | Null] = table - protected def firstIndex(x: T) = if isDense then 0 else index(hash(x)) protected def nextIndex(idx: Int) = Stats.record(statsItem("miss")) @@ -78,7 +46,7 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu protected def entryAt(idx: Int): T | Null = table(idx).asInstanceOf[T | Null] protected def setEntry(idx: Int, x: T) = table(idx) = x.asInstanceOf[AnyRef | Null] - def lookup(x: T): T | Null = + override def lookup(x: T): T | Null = Stats.record(statsItem("lookup")) var idx = firstIndex(x) var e: T | Null = entryAt(idx) @@ -96,7 +64,7 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu if used > limit then growTable() x - def put(x: T): T = + override def put(x: T): T = Stats.record(statsItem("put")) var idx = firstIndex(x) var e: T | Null = entryAt(idx) @@ -107,9 +75,9 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu e = entryAt(idx) addEntryAt(idx, x) - def +=(x: T): Unit = put(x) + override def +=(x: T): Unit = put(x) - def remove(x: T): Boolean = + override def remove(x: T): Boolean = Stats.record(statsItem("remove")) var idx = firstIndex(x) var e: T | Null = entryAt(idx) @@ -136,7 +104,7 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu e = entryAt(idx) false - def -=(x: T): Unit = + override def -=(x: T): Unit = remove(x) private def addOld(x: T) = @@ -148,7 +116,7 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu e = entryAt(idx) setEntry(idx, x) - def copyFrom(oldTable: Array[AnyRef | Null]): Unit = + override def copyFrom(oldTable: Array[AnyRef | Null]): Unit = if isDense then Array.copy(oldTable, 0, table, 0, oldTable.length) else @@ -158,32 +126,6 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu if e != null then addOld(e.uncheckedNN) idx += 1 - protected def growTable(): Unit = - val oldTable = table - val newLength = - if oldTable.length == DenseLimit then DenseLimit * 2 * roundToPower(capacityMultiple) - else table.length * 2 - allocate(newLength) - copyFrom(oldTable) - - abstract class EntryIterator extends 
Iterator[T]: - def entry(idx: Int): T | Null - private var idx = 0 - def hasNext = - while idx < table.length && table(idx) == null do idx += 1 - idx < table.length - def next() = - require(hasNext) - try entry(idx).uncheckedNN finally idx += 1 - - def iterator: Iterator[T] = new EntryIterator(): + override def iterator: Iterator[T] = new EntryIterator(): def entry(idx: Int) = entryAt(idx) - - override def toString: String = - iterator.mkString("HashSet(", ", ", ")") - - protected def statsItem(op: String) = - val prefix = if isDense then "HashSet(dense)." else "HashSet." - val suffix = getClass.getSimpleName - s"$prefix$op $suffix" } diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala index 9529262fa5ec..05fd57a50e71 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -7,6 +7,13 @@ abstract class MutableSet[T] extends ReadOnlySet[T]: /** Add element `x` to the set */ def +=(x: T): Unit + /** attempts to put `x` in the Set, if it was not entered before, return true, else return false. + * Overridden in GenericHashSet. + */ + def add(x: T): Boolean = + if lookup(x) == null then { this += x; true } + else false + /** Like `+=` but return existing element equal to `x` of it exists, * `x` itself otherwise. */ From 3702fe9a32b3fb6651155d21326ca66e1040a88c Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 3 Aug 2023 13:46:55 +0200 Subject: [PATCH 14/90] use EqHashSet in extractDependencies --- .../src/dotty/tools/dotc/sbt/ExtractDependencies.scala | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index d7d3678a3298..65203bc8cc7f 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -271,14 +271,12 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. // Avoid cycles by remembering both the types (testcase: // tests/run/enum-values.scala) and the symbols of named types (testcase: // tests/pos-java-interop/i13575) we've seen before. - val seen = new util.HashSet[Symbol | Type](64) - def traverse(tp: Type): Unit = if (!seen.contains(tp)) { - seen += tp + val seen = new util.EqHashSet[Symbol | Type](128) // 64 still needs to grow often for scala3-compiler + def traverse(tp: Type): Unit = if seen.add(tp) then { tp match { case tp: NamedType => val sym = tp.symbol - if !seen.contains(sym) && !sym.is(Package) then - seen += sym + if !sym.is(Package) && seen.add(sym) then addDependency(sym) if !sym.isClass then traverse(tp.info) traverse(tp.prefix) From 992f200ace402412946d855b320ef9e969b7e0a7 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 3 Aug 2023 14:26:05 +0200 Subject: [PATCH 15/90] use scratch type dependencies set --- .../tools/dotc/sbt/ExtractDependencies.scala | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 65203bc8cc7f..b3162a309a40 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -232,6 +232,13 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. 
throw ex } + /**Reused EqHashSet, safe to use as each TypeDependencyTraverser is used atomically + * Avoid cycles by remembering both the types (testcase: + * tests/run/enum-values.scala) and the symbols of named types (testcase: + * tests/pos-java-interop/i13575) we've seen before. + */ + private val scratchSeen = new util.EqHashSet[Symbol | Type](128) + /** Traverse a used type and record all the dependencies we need to keep track * of for incremental recompilation. * @@ -268,15 +275,13 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. private abstract class TypeDependencyTraverser(using Context) extends TypeTraverser() { protected def addDependency(symbol: Symbol): Unit - // Avoid cycles by remembering both the types (testcase: - // tests/run/enum-values.scala) and the symbols of named types (testcase: - // tests/pos-java-interop/i13575) we've seen before. - val seen = new util.EqHashSet[Symbol | Type](128) // 64 still needs to grow often for scala3-compiler - def traverse(tp: Type): Unit = if seen.add(tp) then { + scratchSeen.clear(resetToInitial = false) + + def traverse(tp: Type): Unit = if scratchSeen.add(tp) then { tp match { case tp: NamedType => val sym = tp.symbol - if !sym.is(Package) && seen.add(sym) then + if !sym.is(Package) && scratchSeen.add(sym) then addDependency(sym) if !sym.isClass then traverse(tp.info) traverse(tp.prefix) From cdd353c3c7218fc8203ed56f1eca7d512ba7a3f8 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 4 Aug 2023 23:27:41 +0200 Subject: [PATCH 16/90] optimise getOrElseUpdate --- .../src/dotty/tools/dotc/util/EqHashMap.scala | 16 +++++++++++++++ .../tools/dotc/util/GenericHashMap.scala | 20 +++++++++++++------ .../src/dotty/tools/dotc/util/HashMap.scala | 16 +++++++++++++++ 3 files changed, 46 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/EqHashMap.scala b/compiler/src/dotty/tools/dotc/util/EqHashMap.scala index ea049acba02b..25d9fb2907b8 100644 --- a/compiler/src/dotty/tools/dotc/util/EqHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/EqHashMap.scala @@ -58,6 +58,22 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): used += 1 if used > limit then growTable() + override def getOrElseUpdate(key: Key, value: => Value): Value = + // created by blending lookup and update, avoid having to recompute hash and probe + Stats.record(statsItem("lookup-or-update")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + val v = value + setKey(idx, key) + setValue(idx, v) + used += 1 + if used > limit then growTable() + v + private def addOld(key: Key, value: Value): Unit = Stats.record(statsItem("re-enter")) var idx = firstIndex(key) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index a21a4af37038..6d013717ec52 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -129,12 +129,20 @@ abstract class GenericHashMap[Key, Value] null def getOrElseUpdate(key: Key, value: => Value): Value = - var v: Value | Null = lookup(key) - if v == null then - val v1 = value - v = v1 - update(key, v1) - v.uncheckedNN + // created by blending lookup and update, avoid having to recompute hash and probe + Stats.record(statsItem("lookup-or-update")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if 
isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + val v = value + setKey(idx, key) + setValue(idx, v) + used += 1 + if used > limit then growTable() + v private def addOld(key: Key, value: Value): Unit = Stats.record(statsItem("re-enter")) diff --git a/compiler/src/dotty/tools/dotc/util/HashMap.scala b/compiler/src/dotty/tools/dotc/util/HashMap.scala index aaae781c310a..eec3a604b5e2 100644 --- a/compiler/src/dotty/tools/dotc/util/HashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/HashMap.scala @@ -63,6 +63,22 @@ extends GenericHashMap[Key, Value](initialCapacity, capacityMultiple): used += 1 if used > limit then growTable() + override def getOrElseUpdate(key: Key, value: => Value): Value = + // created by blending lookup and update, avoid having to recompute hash and probe + Stats.record(statsItem("lookup-or-update")) + var idx = firstIndex(key) + var k = keyAt(idx) + while k != null do + if isEqual(k, key) then return valueAt(idx) + idx = nextIndex(idx) + k = keyAt(idx) + val v = value + setKey(idx, key) + setValue(idx, v) + used += 1 + if used > limit then growTable() + v + private def addOld(key: Key, value: Value): Unit = Stats.record(statsItem("re-enter")) var idx = firstIndex(key) From 4bffbc5e35d791c70542070cf9f576ea12b2743e Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 15 Aug 2023 16:36:27 +0200 Subject: [PATCH 17/90] merge usednames and classdeps caches --- .../tools/dotc/sbt/ExtractDependencies.scala | 96 +++++++++---------- 1 file changed, 47 insertions(+), 49 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index b3162a309a40..e3b5f375f585 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -76,12 +76,8 @@ class ExtractDependencies extends Phase { collector.traverse(unit.tpdTree) if (ctx.settings.YdumpSbtInc.value) { - val deps = rec.classDependencies.flatMap((k,vs) => - vs.iterator.flatMap((to, depCtxs) => - depCtxs.asScala.map(depCtx => s"ClassDependency($k, $to, $depCtx)") - ) - ).toArray[Object] - val names = rec.usedNames.map { case (clazz, names) => s"$clazz: $names" }.toArray[Object] + val deps = rec.foundDeps.map { case (clazz, found) => s"$clazz: ${found.classesString}" }.toArray[Object] + val names = rec.foundDeps.map { case (clazz, found) => s"$clazz: ${found.namesString}" }.toArray[Object] Arrays.sort(deps) Arrays.sort(names) @@ -168,7 +164,7 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. /** Traverse the tree of a source file and record the dependencies and used names which - * can be retrieved using `dependencies` and`usedNames`. + * can be retrieved using `foundDeps`. */ override def traverse(tree: Tree)(using Context): Unit = try { tree match { @@ -315,16 +311,6 @@ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd. } } -class ClassDepsInClass: - private val _classes = util.EqHashMap[Symbol, EnumSet[DependencyContext]]() - - def addDependency(fromClass: Symbol, context: DependencyContext): Unit = - val set = _classes.getOrElseUpdate(fromClass, EnumSet.noneOf(classOf[DependencyContext])) - set.add(context) - - def iterator: Iterator[(Symbol, EnumSet[DependencyContext])] = - _classes.iterator - /** Record dependencies using `addUsedName`/`addClassDependency` and inform Zinc using `sendToZinc()`. 
* * Note: As an alternative design choice, we could directly call the appropriate @@ -336,10 +322,10 @@ class ClassDepsInClass: class DependencyRecorder { import ExtractDependencies.* - /** A map from a non-local class to the names it uses, this does not include + /** A map from a non-local class to the names and classes it uses, this does not include * names which are only defined and not referenced. */ - def usedNames: collection.Map[Symbol, UsedNamesInClass] = _usedNames + def foundDeps: collection.Map[Symbol, FoundDepsInClass] = _foundDeps /** Record a reference to the name of `sym` from the current non-local * enclosing class. @@ -374,7 +360,7 @@ class DependencyRecorder { def addUsedRawName(name: Name, includeSealedChildren: Boolean = false)(using Context): Unit = { val fromClass = resolveDependencyFromClass if (fromClass.exists) { - lastUsedCache.update(name, includeSealedChildren) + lastFoundCache.recordName(name, includeSealedChildren) } } @@ -383,26 +369,36 @@ class DependencyRecorder { private val DefaultScopes = EnumSet.of(UseScope.Default) private val PatMatScopes = EnumSet.of(UseScope.Default, UseScope.PatMatTarget) - /** An object that maintain the set of used names from within a class */ - final class UsedNamesInClass { + /** An object that maintain the set of used names and class dependencies from within a class */ + final class FoundDepsInClass { /** Each key corresponds to a name used in the class. To understand the meaning * of the associated value, see the documentation of parameter `includeSealedChildren` * of `addUsedRawName`. */ private val _names = new util.HashMap[Name, DefaultScopes.type | PatMatScopes.type] - def iterator: Iterator[(Name, EnumSet[UseScope])] = _names.iterator + /** Each key corresponds to a class dependency used in the class. + */ + private val _classes = util.EqHashMap[Symbol, EnumSet[DependencyContext]]() + + def addDependency(fromClass: Symbol, context: DependencyContext): Unit = + val set = _classes.getOrElseUpdate(fromClass, EnumSet.noneOf(classOf[DependencyContext])) + set.add(context) + + def classes: Iterator[(Symbol, EnumSet[DependencyContext])] = _classes.iterator - private[DependencyRecorder] def update(name: Name, includeSealedChildren: Boolean): Unit = { + def names: Iterator[(Name, EnumSet[UseScope])] = _names.iterator + + private[DependencyRecorder] def recordName(name: Name, includeSealedChildren: Boolean): Unit = { if (includeSealedChildren) _names(name) = PatMatScopes else _names.getOrElseUpdate(name, DefaultScopes) } - override def toString(): String = { + def namesString: String = { val builder = new StringBuilder - iterator.foreach { (name, scopes) => + names.foreach { case (name, scopes) => builder.append(name.mangledString) builder.append(" in [") scopes.forEach(scope => builder.append(scope.toString)) @@ -411,12 +407,19 @@ class DependencyRecorder { } builder.toString() } - } - - private val _classDependencies = new mutable.HashMap[Symbol, ClassDepsInClass] - - def classDependencies: collection.Map[Symbol, ClassDepsInClass] = _classDependencies + def classesString: String = { + val builder = new StringBuilder + classes.foreach { case (clazz, scopes) => + builder.append(clazz.toString) + builder.append(" in [") + scopes.forEach(scope => builder.append(scope.toString)) + builder.append("]") + builder.append(", ") + } + builder.toString() + } + } /** Record a dependency to the class `to` in a given `context` * from the current non-local enclosing class. 
@@ -424,33 +427,30 @@ class DependencyRecorder { def addClassDependency(toClass: Symbol, context: DependencyContext)(using Context): Unit = val fromClass = resolveDependencyFromClass if (fromClass.exists) - lastDepCache.addDependency(toClass, context) + lastFoundCache.addDependency(toClass, context) - private val _usedNames = new mutable.HashMap[Symbol, UsedNamesInClass] + private val _foundDeps = new mutable.HashMap[Symbol, FoundDepsInClass] /** Send the collected dependency information to Zinc and clear the local caches. */ def sendToZinc()(using Context): Unit = ctx.withIncCallback: cb => - usedNames.foreach: - case (clazz, usedNames) => + val siblingClassfiles = new mutable.HashMap[PlainFile, Path] + foundDeps.foreach: + case (clazz, foundDeps) => val className = classNameAsString(clazz) - usedNames.iterator.foreach: (usedName, scopes) => + foundDeps.names.foreach: (usedName, scopes) => cb.usedName(className, usedName.toString, scopes) - val siblingClassfiles = new mutable.HashMap[PlainFile, Path] - for (fromClass, partialDependencies) <- _classDependencies do - for (toClass, deps) <- partialDependencies.iterator do - for dep <- deps.asScala do - recordClassDependency(cb, fromClass, toClass, dep, siblingClassfiles) + for (toClass, deps) <- foundDeps.classes do + for dep <- deps.asScala do + recordClassDependency(cb, clazz, toClass, dep, siblingClassfiles) clear() /** Clear all state. */ def clear(): Unit = - _usedNames.clear() - _classDependencies.clear() + _foundDeps.clear() lastOwner = NoSymbol lastDepSource = NoSymbol - lastDepCache = null - lastUsedCache = null + lastFoundCache = null _responsibleForImports = NoSymbol /** Handles dependency on given symbol by trying to figure out if represents a term @@ -521,8 +521,7 @@ class DependencyRecorder { private var lastOwner: Symbol = _ private var lastDepSource: Symbol = _ - private var lastDepCache: ClassDepsInClass | Null = _ - private var lastUsedCache: UsedNamesInClass | Null = _ + private var lastFoundCache: FoundDepsInClass | Null = _ /** The source of the dependency according to `nonLocalEnclosingClass` * if it exists, otherwise fall back to `responsibleForImports`. 
@@ -537,8 +536,7 @@ class DependencyRecorder { val fromClass = if (source.is(PackageClass)) responsibleForImports else source if lastDepSource != fromClass then lastDepSource = fromClass - lastDepCache = _classDependencies.getOrElseUpdate(fromClass, new ClassDepsInClass) - lastUsedCache = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass) + lastFoundCache = _foundDeps.getOrElseUpdate(fromClass, new FoundDepsInClass) } lastDepSource From 04fb40db4f6400956a16e77a3cb21755eb920a3c Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 15 Aug 2023 17:16:11 +0200 Subject: [PATCH 18/90] main cache is util.HashMap --- .../src/dotty/tools/dotc/sbt/ExtractDependencies.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index e3b5f375f585..bd3ab4e3ae0f 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -76,8 +76,8 @@ class ExtractDependencies extends Phase { collector.traverse(unit.tpdTree) if (ctx.settings.YdumpSbtInc.value) { - val deps = rec.foundDeps.map { case (clazz, found) => s"$clazz: ${found.classesString}" }.toArray[Object] - val names = rec.foundDeps.map { case (clazz, found) => s"$clazz: ${found.namesString}" }.toArray[Object] + val deps = rec.foundDeps.iterator.map { case (clazz, found) => s"$clazz: ${found.classesString}" }.toArray[Object] + val names = rec.foundDeps.iterator.map { case (clazz, found) => s"$clazz: ${found.namesString}" }.toArray[Object] Arrays.sort(deps) Arrays.sort(names) @@ -325,7 +325,7 @@ class DependencyRecorder { /** A map from a non-local class to the names and classes it uses, this does not include * names which are only defined and not referenced. */ - def foundDeps: collection.Map[Symbol, FoundDepsInClass] = _foundDeps + def foundDeps: util.ReadOnlyMap[Symbol, FoundDepsInClass] = _foundDeps /** Record a reference to the name of `sym` from the current non-local * enclosing class. @@ -429,13 +429,13 @@ class DependencyRecorder { if (fromClass.exists) lastFoundCache.addDependency(toClass, context) - private val _foundDeps = new mutable.HashMap[Symbol, FoundDepsInClass] + private val _foundDeps = new util.EqHashMap[Symbol, FoundDepsInClass] /** Send the collected dependency information to Zinc and clear the local caches. 
*/ def sendToZinc()(using Context): Unit = ctx.withIncCallback: cb => val siblingClassfiles = new mutable.HashMap[PlainFile, Path] - foundDeps.foreach: + _foundDeps.iterator.foreach: case (clazz, foundDeps) => val className = classNameAsString(clazz) foundDeps.names.foreach: (usedName, scopes) => From a813f2f4ef31d52fd0691ab0a1e58fe714644fa1 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 16 Aug 2023 14:30:02 +0200 Subject: [PATCH 19/90] add unit tests for maps and sets --- .../src/dotty/tools/dotc/util/EqHashSet.scala | 30 ---- .../src/dotty/tools/dotc/util/HashSet.scala | 42 ++---- .../dotty/tools/dotc/util/EqHashMapTest.scala | 115 +++++++++++++++ .../dotty/tools/dotc/util/EqHashSetTest.scala | 119 +++++++++++++++ .../dotty/tools/dotc/util/HashMapTest.scala | 137 ++++++++++++++++++ .../dotty/tools/dotc/util/HashSetTest.scala | 117 +++++++++++++++ 6 files changed, 498 insertions(+), 62 deletions(-) create mode 100644 compiler/test/dotty/tools/dotc/util/EqHashMapTest.scala create mode 100644 compiler/test/dotty/tools/dotc/util/EqHashSetTest.scala create mode 100644 compiler/test/dotty/tools/dotc/util/HashMapTest.scala create mode 100644 compiler/test/dotty/tools/dotc/util/HashSetTest.scala diff --git a/compiler/src/dotty/tools/dotc/util/EqHashSet.scala b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala index 42aee97ce79c..44a050ae2bf8 100644 --- a/compiler/src/dotty/tools/dotc/util/EqHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala @@ -85,36 +85,6 @@ class EqHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends override def +=(x: T): Unit = put(x) - override def remove(x: T): Boolean = - Stats.record(statsItem("remove")) - var idx = firstIndex(x) - var e: T | Null = entryAt(idx) - while e != null do - if isEqual(e.uncheckedNN, x) then - var hole = idx - while - idx = nextIndex(idx) - e = entryAt(idx) - e != null - do - val eidx = index(hash(e.uncheckedNN)) - if isDense - || index(eidx - (hole + 1)) > index(idx - (hole + 1)) - // entry `e` at `idx` can move unless `index(hash(e))` is in - // the (ring-)interval [hole + 1 .. 
idx] - then - setEntry(hole, e.uncheckedNN) - hole = idx - table(hole) = null - used -= 1 - return true - idx = nextIndex(idx) - e = entryAt(idx) - false - - override def -=(x: T): Unit = - remove(x) - private def addOld(x: T) = Stats.record(statsItem("re-enter")) var idx = firstIndex(x) diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index e8cabd13a097..3a973793d542 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -64,48 +64,29 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Ge if used > limit then growTable() x - override def put(x: T): T = - Stats.record(statsItem("put")) + override def add(x: T): Boolean = + Stats.record(statsItem("enter")) var idx = firstIndex(x) var e: T | Null = entryAt(idx) while e != null do - // TODO: remove uncheckedNN when explicit-nulls is enabled for regule compiling - if isEqual(e.uncheckedNN, x) then return e.uncheckedNN + if isEqual(e.uncheckedNN, x) then return false // already entered idx = nextIndex(idx) e = entryAt(idx) addEntryAt(idx, x) + true // first entry - override def +=(x: T): Unit = put(x) - - override def remove(x: T): Boolean = - Stats.record(statsItem("remove")) + override def put(x: T): T = + Stats.record(statsItem("put")) var idx = firstIndex(x) var e: T | Null = entryAt(idx) while e != null do - if isEqual(e.uncheckedNN, x) then - var hole = idx - while - idx = nextIndex(idx) - e = entryAt(idx) - e != null - do - val eidx = index(hash(e.uncheckedNN)) - if isDense - || index(eidx - (hole + 1)) > index(idx - (hole + 1)) - // entry `e` at `idx` can move unless `index(hash(e))` is in - // the (ring-)interval [hole + 1 .. idx] - then - setEntry(hole, e.uncheckedNN) - hole = idx - table(hole) = null - used -= 1 - return true + // TODO: remove uncheckedNN when explicit-nulls is enabled for regule compiling + if isEqual(e.uncheckedNN, x) then return e.uncheckedNN idx = nextIndex(idx) e = entryAt(idx) - false + addEntryAt(idx, x) - override def -=(x: T): Unit = - remove(x) + override def +=(x: T): Unit = put(x) private def addOld(x: T) = Stats.record(statsItem("re-enter")) @@ -125,7 +106,4 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Ge val e: T | Null = oldTable(idx).asInstanceOf[T | Null] if e != null then addOld(e.uncheckedNN) idx += 1 - - override def iterator: Iterator[T] = new EntryIterator(): - def entry(idx: Int) = entryAt(idx) } diff --git a/compiler/test/dotty/tools/dotc/util/EqHashMapTest.scala b/compiler/test/dotty/tools/dotc/util/EqHashMapTest.scala new file mode 100644 index 000000000000..561dabb555a9 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/util/EqHashMapTest.scala @@ -0,0 +1,115 @@ +package dotty.tools.dotc.util + +import org.junit.Test +import org.junit.Assert.* + +class EqHashMapTest: + + var counter = 0 + + // basic identity hash, and reference equality, but with a counter for ordering + class Id: + val count = { counter += 1; counter } + + val id1, id2, id3 = Id() + + given Ordering[Id] = Ordering.by(_.count) + + @Test + def invariant: Unit = + assert((id1 ne id2) && (id1 ne id3) && (id2 ne id3)) + + @Test + def newEmpty: Unit = + val m = EqHashMap[Id, Int]() + assert(m.size == 0) + assert(m.iterator.toList == Nil) + + @Test + def update: Unit = + val m = EqHashMap[Id, Int]() + assert(m.size == 0 && !m.contains(id1)) + m.update(id1, 1) + assert(m.size == 1 && m(id1) == 1) + m.update(id1, 2) // replace value + 
assert(m.size == 1 && m(id1) == 2) + m.update(id3, 3) // new key + assert(m.size == 2 && m(id1) == 2 && m(id3) == 3) + + @Test + def getOrElseUpdate: Unit = + val m = EqHashMap[Id, Int]() + // add id1 + assert(m.size == 0 && !m.contains(id1)) + val added = m.getOrElseUpdate(id1, 1) + assert(added == 1 && m.size == 1 && m(id1) == 1) + // try add id1 again + val addedAgain = m.getOrElseUpdate(id1, 23) + assert(addedAgain != 23 && m.size == 1 && m(id1) == 1) // no change + + private def fullMap() = + val m = EqHashMap[Id, Int]() + m.update(id1, 1) + m.update(id2, 2) + m + + @Test + def remove: Unit = + val m = fullMap() + // remove id2 + m.remove(id2) + assert(m.size == 1) + assert(m.contains(id1) && !m.contains(id2)) + // remove id1 + m -= id1 + assert(m.size == 0) + assert(!m.contains(id1) && !m.contains(id2)) + + @Test + def lookup: Unit = + val m = fullMap() + assert(m.lookup(id1) == 1) + assert(m.lookup(id2) == 2) + assert(m.lookup(id3) == null) + + @Test + def iterator: Unit = + val m = fullMap() + assert(m.iterator.toList.sorted == List(id1 -> 1,id2 -> 2)) + + @Test + def clear: Unit = + locally: + val s1 = fullMap() + s1.clear() + assert(s1.size == 0) + locally: + val s2 = fullMap() + s2.clear(resetToInitial = false) + assert(s2.size == 0) + + // basic structural equality and hash code + class I32(val x: Int): + override def hashCode(): Int = x + override def equals(that: Any): Boolean = that match + case that: I32 => this.x == that.x + case _ => false + + /** the hash set is based on reference equality, i.e. does not use universal equality */ + @Test + def referenceEquality: Unit = + val i1, i2 = I32(1) // different instances + + assert(i1.equals(i2)) // structural equality + assert(i1 ne i2) // reference inequality + + val m = locally: + val m = EqHashMap[I32, Int]() + m(i1) = 23 + m(i2) = 29 + m + + assert(m.size == 2 && m(i1) == 23 && m(i2) == 29) + assert(m.keysIterator.toSet == Set(i1)) // scala.Set delegates to universal equality + end referenceEquality + diff --git a/compiler/test/dotty/tools/dotc/util/EqHashSetTest.scala b/compiler/test/dotty/tools/dotc/util/EqHashSetTest.scala new file mode 100644 index 000000000000..1c1ffe0b7931 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/util/EqHashSetTest.scala @@ -0,0 +1,119 @@ +package dotty.tools.dotc.util + +import org.junit.Test +import org.junit.Assert.* + +class EqHashSetTest: + + var counter = 0 + + // basic identity hash, and reference equality, but with a counter for ordering + class Id: + val count = { counter += 1; counter } + + val id1, id2, id3 = Id() + + given Ordering[Id] = Ordering.by(_.count) + + @Test + def invariant: Unit = + assert((id1 ne id2) && (id1 ne id3) && (id2 ne id3)) + + @Test + def newEmpty: Unit = + val s = EqHashSet[Id]() + assert(s.size == 0) + assert(s.iterator.toList == Nil) + + @Test + def put: Unit = + val s = EqHashSet[Id]() + // put id1 + assert(s.size == 0 && !s.contains(id1)) + s += id1 + assert(s.size == 1 && s.contains(id1)) + // put id2 + assert(!s.contains(id2)) + s.put(id2) + assert(s.size == 2 && s.contains(id1) && s.contains(id2)) + // put id3 + s ++= List(id3) + assert(s.size == 3 && s.contains(id1) && s.contains(id2) && s.contains(id3)) + + @Test + def add: Unit = + val s = EqHashSet[Id]() + // add id1 + assert(s.size == 0 && !s.contains(id1)) + val added = s.add(id1) + assert(added && s.size == 1 && s.contains(id1)) + // try add id1 again + val addedAgain = s.add(id1) + assert(!addedAgain && s.size == 1 && s.contains(id1)) // no change + + @Test + def construct: Unit = + val s = 
EqHashSet.from(List(id1,id2,id3)) + assert(s.size == 3) + assert(s.contains(id1) && s.contains(id2) && s.contains(id3)) + + @Test + def remove: Unit = + val s = EqHashSet.from(List(id1,id2,id3)) + // remove id2 + s.remove(id2) + assert(s.size == 2) + assert(s.contains(id1) && !s.contains(id2) && s.contains(id3)) + // remove id1 + s -= id1 + assert(s.size == 1) + assert(!s.contains(id1) && !s.contains(id2) && s.contains(id3)) + // remove id3 + s --= List(id3) + assert(s.size == 0) + assert(!s.contains(id1) && !s.contains(id2) && !s.contains(id3)) + + @Test + def lookup: Unit = + val s = EqHashSet.from(List(id1, id2)) + assert(s.lookup(id1) eq id1) + assert(s.lookup(id2) eq id2) + assert(s.lookup(id3) eq null) + + @Test + def iterator: Unit = + val s = EqHashSet.from(List(id1,id2,id3)) + assert(s.iterator.toList.sorted == List(id1,id2,id3)) + + @Test + def clear: Unit = + locally: + val s1 = EqHashSet.from(List(id1,id2,id3)) + s1.clear() + assert(s1.size == 0) + locally: + val s2 = EqHashSet.from(List(id1,id2,id3)) + s2.clear(resetToInitial = false) + assert(s2.size == 0) + + // basic structural equality and hash code + class I32(val x: Int): + override def hashCode(): Int = x + override def equals(that: Any): Boolean = that match + case that: I32 => this.x == that.x + case _ => false + + /** the hash map is based on reference equality, i.e. does not use universal equality */ + @Test + def referenceEquality: Unit = + val i1, i2 = I32(1) // different instances + + assert(i1.equals(i2)) // structural equality + assert(i1 ne i2) // reference inequality + + val s = EqHashSet.from(List(i1,i2)) + + assert(s.size == 2 && s.contains(i1) && s.contains(i2)) + assert(s.iterator.toSet == Set(i1)) // scala.Set delegates to universal equality + end referenceEquality + diff --git a/compiler/test/dotty/tools/dotc/util/HashMapTest.scala b/compiler/test/dotty/tools/dotc/util/HashMapTest.scala new file mode 100644 index 000000000000..97bf8446756c --- /dev/null +++ b/compiler/test/dotty/tools/dotc/util/HashMapTest.scala @@ -0,0 +1,137 @@ +package dotty.tools.dotc.util + +import org.junit.Test +import org.junit.Assert.* + +class HashMapTest: + + var counter = 0 + + // structural hash and equality, but with a counter for ordering + class Id(val count: Int = { counter += 1; counter }): + override def hashCode(): Int = count + override def equals(that: Any): Boolean = that match + case that: Id => this.count == that.count + case _ => false + def makeCopy: Id = new Id(count) + + val id1, id2, id3 = Id() + + given Ordering[Id] = Ordering.by(_.count) + + @Test + def invariant: Unit = + assert((id1 ne id2) && (id1 ne id3) && (id2 ne id3)) + assert(id1 != id2 && id1 != id3 && id2 != id3) + + @Test + def newEmpty: Unit = + val m = HashMap[Id, Int]() + assert(m.size == 0) + assert(m.iterator.toList == Nil) + + @Test + def update: Unit = + val m = HashMap[Id, Int]() + assert(m.size == 0 && !m.contains(id1)) + m.update(id1, 1) + assert(m.size == 1 && m(id1) == 1) + m.update(id1, 2) // replace value + assert(m.size == 1 && m(id1) == 2) + m.update(id3, 3) // new key + assert(m.size == 2 && m(id1) == 2 && m(id3) == 3) + + @Test + def getOrElseUpdate: Unit = + val m = HashMap[Id, Int]() + // add id1 + assert(m.size == 0 && !m.contains(id1)) + val added = m.getOrElseUpdate(id1, 1) + assert(added == 1 && m.size == 1 && m(id1) == 1) + // try add id1 again + val addedAgain = m.getOrElseUpdate(id1, 23) + assert(addedAgain != 23 && m.size == 1 && m(id1) == 1) // no change + + class StatefulHash: + var hashCount = 0 + override def 
hashCode(): Int = { hashCount += 1; super.hashCode() } + + @Test + def getOrElseUpdate_hashesAtMostOnce: Unit = + locally: + val sh1 = StatefulHash() + val m = HashMap[StatefulHash, Int]() // will be a dense map with default size + val added = m.getOrElseUpdate(sh1, 1) + assert(sh1.hashCount == 0) // no hashing at all for dense maps + locally: + val sh1 = StatefulHash() + val m = HashMap[StatefulHash, Int](64) // not dense + val added = m.getOrElseUpdate(sh1, 1) + assert(sh1.hashCount == 1) // would be 2 if for example getOrElseUpdate was implemented as lookup + update + + private def fullMap() = + val m = HashMap[Id, Int]() + m.update(id1, 1) + m.update(id2, 2) + m + + @Test + def remove: Unit = + val m = fullMap() + // remove id2 + m.remove(id2) + assert(m.size == 1) + assert(m.contains(id1) && !m.contains(id2)) + // remove id1 + m -= id1 + assert(m.size == 0) + assert(!m.contains(id1) && !m.contains(id2)) + + @Test + def lookup: Unit = + val m = fullMap() + assert(m.lookup(id1) == 1) + assert(m.lookup(id2) == 2) + assert(m.lookup(id3) == null) + + @Test + def iterator: Unit = + val m = fullMap() + assert(m.iterator.toList.sorted == List(id1 -> 1,id2 -> 2)) + + @Test + def clear: Unit = + locally: + val s1 = fullMap() + s1.clear() + assert(s1.size == 0) + locally: + val s2 = fullMap() + s2.clear(resetToInitial = false) + assert(s2.size == 0) + + // basic structural equality and hash code + class I32(val x: Int): + override def hashCode(): Int = x + override def equals(that: Any): Boolean = that match + case that: I32 => this.x == that.x + case _ => false + + /** the hash map is based on universal equality, i.e. does not use reference equality */ + @Test + def universalEquality: Unit = + val id2_2 = id2.makeCopy + + assert(id2.equals(id2_2)) // structural equality + assert(id2 ne id2_2) // reference inequality + + val m = locally: + val m = HashMap[Id, Int]() + m(id2) = 23 + m(id2_2) = 29 + m + + assert(m.size == 1 && m(id2) == 29 && m(id2_2) == 29) + assert(m.keysIterator.toList.head eq id2) // does not replace id2 with id2_2 + end universalEquality + diff --git a/compiler/test/dotty/tools/dotc/util/HashSetTest.scala b/compiler/test/dotty/tools/dotc/util/HashSetTest.scala new file mode 100644 index 000000000000..2089be508a4c --- /dev/null +++ b/compiler/test/dotty/tools/dotc/util/HashSetTest.scala @@ -0,0 +1,117 @@ +package dotty.tools.dotc.util + +import org.junit.Test +import org.junit.Assert.* + +class HashSetTest: + + var counter = 0 + + // structural hash and equality, with a counter for ordering + class Id(val count: Int = { counter += 1; counter }): + override def hashCode: Int = count + override def equals(that: Any): Boolean = that match + case that: Id => this.count == that.count + case _ => false + def makeCopy: Id = new Id(count) + + val id1, id2, id3 = Id() + + given Ordering[Id] = Ordering.by(_.count) + + @Test + def invariant: Unit = + assert((id1 ne id2) && (id1 ne id3) && (id2 ne id3)) + assert(id1 != id2 && id1 != id3 && id2 != id3) + + @Test + def newEmpty: Unit = + val s = HashSet[Id]() + assert(s.size == 0) + assert(s.iterator.toList == Nil) + + @Test + def put: Unit = + val s = HashSet[Id]() + // put id1 + assert(s.size == 0 && !s.contains(id1)) + s += id1 + assert(s.size == 1 && s.contains(id1)) + // put id2 + assert(!s.contains(id2)) + s.put(id2) + assert(s.size == 2 && s.contains(id1) && s.contains(id2)) + // put id3 + s ++= List(id3) + assert(s.size == 3 && s.contains(id1) && s.contains(id2) && s.contains(id3)) + + @Test + def add: Unit = + val s = HashSet[Id]() 
+ // add id1 + assert(s.size == 0 && !s.contains(id1)) + val added = s.add(id1) + assert(added && s.size == 1 && s.contains(id1)) + // try add id1 again + val addedAgain = s.add(id1) + assert(!addedAgain && s.size == 1 && s.contains(id1)) // no change + + @Test + def construct: Unit = + val s = HashSet.from(List(id1,id2,id3)) + assert(s.size == 3) + assert(s.contains(id1) && s.contains(id2) && s.contains(id3)) + + @Test + def remove: Unit = + val s = HashSet.from(List(id1,id2,id3)) + // remove id2 + s.remove(id2) + assert(s.size == 2) + assert(s.contains(id1) && !s.contains(id2) && s.contains(id3)) + // remove id1 + s -= id1 + assert(s.size == 1) + assert(!s.contains(id1) && !s.contains(id2) && s.contains(id3)) + // remove id3 + s --= List(id3) + assert(s.size == 0) + assert(!s.contains(id1) && !s.contains(id2) && !s.contains(id3)) + + @Test + def lookup: Unit = + val s = HashSet.from(List(id1, id2)) + assert(s.lookup(id1) eq id1) + assert(s.lookup(id2) eq id2) + assert(s.lookup(id3) eq null) + + @Test + def iterator: Unit = + val s = HashSet.from(List(id1,id2,id3)) + assert(s.iterator.toList.sorted == List(id1,id2,id3)) + + @Test + def clear: Unit = + locally: + val s1 = HashSet.from(List(id1,id2,id3)) + s1.clear() + assert(s1.size == 0) + locally: + val s2 = HashSet.from(List(id1,id2,id3)) + s2.clear(resetToInitial = false) + assert(s2.size == 0) + + /** the hash set is based on universal equality, i.e. does not use reference equality */ + @Test + def universalEquality: Unit = + val id2_2 = id2.makeCopy + + assert(id2.equals(id2_2)) // structural equality + assert(id2 ne id2_2) // reference inequality + + val s = HashSet.from(List(id2,id2_2)) + + assert(s.size == 1 && s.contains(id2) && s.contains(id2_2)) + assert(s.iterator.toList == List(id2)) // single element + end universalEquality + From 59d69dbda21de11777488e71c9f00e74c727a07d Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 30 Aug 2023 09:47:47 +0200 Subject: [PATCH 20/90] Stabilize Quotes `defn.PolyFunction` --- library/src/scala/quoted/Quotes.scala | 1 - project/MiMaFilters.scala | 4 ++-- tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala | 1 - 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 15df4a08e1f0..63c7cd3790db 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -4337,7 +4337,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def FunctionClass(arity: Int, isContextual: Boolean): Symbol /** The `scala.PolyFunction` built-in trait. */ - @experimental def PolyFunctionClass: Symbol /** Function-like object that maps arity to symbols for classes `scala.TupleX`. 
diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index fc7b1e29bafc..fcecf9fc53a9 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -6,8 +6,8 @@ object MiMaFilters { // New API in 3.4.X ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefTypeTest"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefMethods"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#defnModule.FunctionClass") - // New API in 3.4.X + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#defnModule.FunctionClass"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#defnModule.PolyFunctionClass"), ) val TastyCore: Seq[ProblemFilter] = Seq( ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.EXPLICITtpt"), diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index d30a58954bc4..3471b47fab03 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -89,7 +89,6 @@ val experimentalDefinitionInLibrary = Set( "scala.quoted.Quotes.reflectModule.MethodTypeMethods.hasErasedParams", "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.erasedArgs", "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.hasErasedArgs", - "scala.quoted.Quotes.reflectModule.defnModule.PolyFunctionClass", // New feature: reverse method on Tuple "scala.Tuple.reverse", From 08376a592f1b33cf7ff1712040d1e651dfd1cf06 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Wed, 6 Sep 2023 13:49:13 +0200 Subject: [PATCH 21/90] Reject capturing function type in polymorphic functions --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 1 + tests/neg-custom-args/captures/i18518.scala | 5 +++++ 2 files changed, 6 insertions(+) create mode 100644 tests/neg-custom-args/captures/i18518.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 40c90bc25e3c..0dfbc8b150c5 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1419,6 +1419,7 @@ object Parsers { private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) case Block(Nil, tree1) => getFunction(tree1) + case Function(_, _: CapturesAndResult) => None case t: Function => Some(t) case _ => None } diff --git a/tests/neg-custom-args/captures/i18518.scala b/tests/neg-custom-args/captures/i18518.scala new file mode 100644 index 000000000000..61e63a54141c --- /dev/null +++ b/tests/neg-custom-args/captures/i18518.scala @@ -0,0 +1,5 @@ +import language.experimental.captureChecking +type Foo1 = [R] -> (x: Unit) ->{} Unit // error +type Foo2 = [R] -> (x: Unit) ->{cap} Unit // error +type Foo3 = (c: Int^) -> [R] -> (x: Unit) ->{c} Unit // error +type Foo4 = (c: Int^) -> [R] -> (x0: Unit) -> (x: Unit) ->{c} Unit From bb1f028a0c6361098389b7e83f490ca5fd294272 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Wed, 6 Sep 2023 15:28:59 +0200 Subject: [PATCH 22/90] Document the special case in `getFunction` --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala 
b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 0dfbc8b150c5..5ba66efae628 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1419,7 +1419,11 @@ object Parsers { private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) case Block(Nil, tree1) => getFunction(tree1) - case Function(_, _: CapturesAndResult) => None + case Function(_, _: CapturesAndResult) => + // A function tree like this will be desugared + // into a capturing type in the typer, + // so None is returned. + None case t: Function => Some(t) case _ => None } From fd97de5d677c41a2a68e560dc0dd32d4cbb235b7 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 7 Sep 2023 18:38:17 +0100 Subject: [PATCH 23/90] Use the unwidened type when casting structural calls So if the call is to a stable val, the call will have a stable type. --- .../src/dotty/tools/dotc/typer/Dynamic.scala | 4 ++-- tests/pos/i18263.orig.scala | 16 ++++++++++++++++ tests/pos/i18263.scala | 15 +++++++++++++++ 3 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i18263.orig.scala create mode 100644 tests/pos/i18263.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 717966923708..1c829567058a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -235,14 +235,14 @@ trait Dynamic { if ValueClasses.isDerivedValueClass(tpe.classSymbol) && qual.tpe <:< defn.ReflectSelectableTypeRef then val genericUnderlying = ValueClasses.valueClassUnbox(tpe.classSymbol.asClass) val underlying = tpe.select(genericUnderlying).widen.resultType - New(tpe, tree.cast(underlying) :: Nil) + New(tpe.widen, tree.cast(underlying) :: Nil) else tree maybeBoxed.cast(tpe) fun.tpe.widen match { case tpe: ValueType => - structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(tpe) + structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(fun.tpe) case tpe: MethodType => def isDependentMethod(tpe: Type): Boolean = tpe match { diff --git a/tests/pos/i18263.orig.scala b/tests/pos/i18263.orig.scala new file mode 100644 index 000000000000..68b000580f08 --- /dev/null +++ b/tests/pos/i18263.orig.scala @@ -0,0 +1,16 @@ +sealed trait Scope +sealed trait Domain extends Scope +object Domain extends Domain + +trait Baz[T] +def baz(using ck: Scope): Baz[ck.type] = ??? 
+ +class Foo extends scala.reflect.Selectable: + type TScope = Domain + final protected given TScope = Domain + +object ID: + val internal1 = new Foo: + val ii = new Foo: + val x = baz + val z = internal1.ii.x //error diff --git a/tests/pos/i18263.scala b/tests/pos/i18263.scala new file mode 100644 index 000000000000..4fe79999afe7 --- /dev/null +++ b/tests/pos/i18263.scala @@ -0,0 +1,15 @@ +final class Bar +final class Inv[T] +class Foo extends scala.reflect.Selectable: + type Boo = Bar + final given boo1: Boo = new Bar + +class Test: + def mkInv(using bar: Bar): Inv[bar.type] = new Inv() + + def test: Unit = + val foo1 /* : Foo { val foo2: { z1 => Foo { val inv1: Inv[(z1.boo1 : z1.Boo)] }}} */ = new Foo: + val foo2 /* : { z1 => Foo { val inv1: Inv[(z1.boo1 : z1.Boo)] }} */ = new Foo: + val inv1 /* : Inv[( boo1 : Boo)] */ = mkInv /* (this.boo1) */ + val inv2 = foo1.foo2.inv1 // error + () From f1758a61d46d8cd8a474fc50ca3b5c918967ac9a Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 8 Sep 2023 13:36:16 +0100 Subject: [PATCH 24/90] Add a .widenExpr call --- compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 7 +------ compiler/src/dotty/tools/dotc/typer/Dynamic.scala | 2 +- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 6659818b333e..fa8e3d0850a7 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -969,16 +969,11 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => !tree.symbol.exists && tree.isTerm && hasRefinement(tree.qualifier.tpe) - def loop(tree: Tree): Boolean = tree match - case TypeApply(fun, _) => - loop(fun) - case Apply(fun, _) => - loop(fun) + funPart(tree) match case tree: Select => isStructuralTermSelect(tree) case _ => false - loop(tree) } /** Return a pair consisting of (supercall, rest) diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 1c829567058a..51734e1a5d4b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -242,7 +242,7 @@ trait Dynamic { fun.tpe.widen match { case tpe: ValueType => - structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(fun.tpe) + structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(fun.tpe.widenExpr) case tpe: MethodType => def isDependentMethod(tpe: Type): Boolean = tpe match { From dc1fc60ca033e85f637febda77b38d09ce8c4658 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 9 Sep 2023 12:26:22 +0200 Subject: [PATCH 25/90] Change global enabling scheme for cc The aim is to have an efficient test whether a phase or denot transformer should be run. 
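For orientation, a minimal sketch of the resulting gating pattern (editorial
addition; excerpted and lightly adapted from the diffs below, it assumes the
compiler-internal Phase, Context and Feature APIs and is not standalone code):

    // From Phases.scala: two different gates.
    //   isEnabled  - queried once when phases are assembled; if false, the phase is dropped
    //   isRunnable - queried before every run; if false, the phase is skipped for that run
    //
    // A capture-checking phase stays in the pipeline but guards each run with the
    // cheap per-run flag kept in Run:
    class CheckCaptures extends Recheck, SymTransformer:
      override def isRunnable(using Context) =
        super.isRunnable && Feature.ccEnabledSomewhere

The flag starts out as the value of the -language:experimental.captureChecking
setting and is flipped to true as soon as any compilation unit of the run
carries a captureChecking language import.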
--- compiler/src/dotty/tools/dotc/Run.scala | 6 +++--- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 5 +++-- .../src/dotty/tools/dotc/config/Feature.scala | 6 +++--- compiler/src/dotty/tools/dotc/core/Phases.scala | 15 +++++++++++---- 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 9aaf12da3dcc..438561d15ada 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -168,10 +168,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint */ var pureFunsImportEncountered = false - /** Will be set to true if any of the compiled compilation units contains - * a captureChecking language import. + /** Will be set to true if experimental.captureChecking is enabled + * or any of the compiled compilation units contains a captureChecking language import. */ - var ccImportEncountered = false + var ccEnabledSomewhere = Feature.enabledBySetting(Feature.captureChecking)(using ictx) private var myEnrichedErrorMessage = false diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index d5bd8522ca92..4c7de4a176f7 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -29,7 +29,7 @@ object CheckCaptures: class Pre extends PreRecheck, SymTransformer: - override def isEnabled(using Context) = true + override def isRunnable(using Context) = super.isRunnable && Feature.ccEnabledSomewhere /** - Reset `private` flags of parameter accessors so that we can refine them * in Setup if they have non-empty capture sets. @@ -190,7 +190,8 @@ class CheckCaptures extends Recheck, SymTransformer: import CheckCaptures.* def phaseName: String = "cc" - override def isEnabled(using Context) = true + + override def isRunnable(using Context) = super.isRunnable && Feature.ccEnabledSomewhere def newRechecker()(using Context) = CaptureChecker(ctx) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 5bcc139326f9..b64d7016e913 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -103,8 +103,8 @@ object Feature: /** Is captureChecking enabled for any of the currently compiled compilation units? */ def ccEnabledSomewhere(using Context) = - enabledBySetting(captureChecking) - || ctx.run != null && ctx.run.nn.ccImportEncountered + if ctx.run != null then ctx.run.nn.ccEnabledSomewhere + else enabledBySetting(captureChecking) def sourceVersionSetting(using Context): SourceVersion = SourceVersion.valueOf(ctx.settings.source.value) @@ -174,7 +174,7 @@ object Feature: true else if fullFeatureName == captureChecking then ctx.compilationUnit.needsCaptureChecking = true - if ctx.run != null then ctx.run.nn.ccImportEncountered = true + if ctx.run != null then ctx.run.nn.ccEnabledSomewhere = true true else false diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 3fc7238cdd82..e62299ab8b76 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -299,6 +299,14 @@ object Phases { */ def phaseName: String + /** This property is queried when phases are first assembled. + * If it is false, the phase will be dropped from the set of phases to traverse. 
+ */ + def isEnabled(using Context): Boolean = true + + /** This property is queried before a phase is run. + * If it is false, the phase is skipped. + */ def isRunnable(using Context): Boolean = !ctx.reporter.hasErrors // TODO: This might test an unintended condition. @@ -306,6 +314,9 @@ object Phases { // run one calls `errorsReported`, not `hasErrors`. // But maybe changing this would prevent useful phases from running? + /** True for all phases except NoPhase */ + def exists: Boolean = true + /** If set, allow missing or superfluous arguments in applications * and type applications. */ @@ -360,10 +371,6 @@ object Phases { /** Can this transform change the base types of a type? */ def changesBaseTypes: Boolean = changesParents - def isEnabled(using Context): Boolean = true - - def exists: Boolean = true - def initContext(ctx: FreshContext): Unit = () /** A hook that allows to transform the usual context passed to the function From 38cd627b9b7d4f4cfcc556cc2046a730a255c877 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 9 Sep 2023 12:53:26 +0200 Subject: [PATCH 26/90] A more robust scheme for resetting denotations after Recheck The new scheme works also for arbitrary denotation changes in PreRecheck. Furthermore, recheck denot transformers are not run after Recheck has ended. This means that effectivly only symbols touched by the Rechecker are transformed and reset again afterwards. --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 15 ++++++---- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- .../tools/dotc/transform/PreRecheck.scala | 2 ++ .../dotty/tools/dotc/transform/Recheck.scala | 30 +++++++++---------- 4 files changed, 26 insertions(+), 23 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 4c7de4a176f7..6538cc1545df 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -34,14 +34,17 @@ object CheckCaptures: /** - Reset `private` flags of parameter accessors so that we can refine them * in Setup if they have non-empty capture sets. * - Special handling of some symbols defined for case classes. + * Enabled only until recheck is finished, and provided some compilation unit + * is CC-enabled. 
*/ def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then - sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate) - else if Synthetics.needsTransform(sym) then - Synthetics.transform(sym, toCC = true) - else - sym + if !pastRecheck && Feature.ccEnabledSomewhere then + if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then + sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate) + else if Synthetics.needsTransform(sym) then + Synthetics.transform(sym, toCC = true) + else sym + else sym end Pre enum EnvKind: diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index adaa7219d68b..e18c5e559aba 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -306,7 +306,7 @@ extends tpd.TreeTraverser: /** Update info of `sym` for CheckCaptures phase only */ private def updateInfo(sym: Symbol, info: Type)(using Context) = - sym.updateInfoBetween(preRecheckPhase, thisPhase, info, newOwnerFor(sym)) + sym.updateInfo(preRecheckPhase, info, newOwnerFor(sym)) sym.namedType match case ref: CaptureRef => ref.invalidateCaches() case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/PreRecheck.scala b/compiler/src/dotty/tools/dotc/transform/PreRecheck.scala index db9e28d7aad7..ba60d3b97adc 100644 --- a/compiler/src/dotty/tools/dotc/transform/PreRecheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/PreRecheck.scala @@ -14,6 +14,8 @@ abstract class PreRecheck extends Phase, DenotTransformer: override def changesBaseTypes: Boolean = true + var pastRecheck = false + def run(using Context): Unit = () override def isCheckable = false diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 2456e4011367..d8db460b06a8 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -48,26 +48,19 @@ object Recheck: extension (sym: Symbol) - /** Update symbol's info to newInfo from prevPhase.next to lastPhase. + /** Update symbol's info to newInfo after `prevPhase`. * Also update owner to newOwnerOrNull if it is not null. - * Reset to previous info and owner for phases after lastPhase. + * The update is valid until after Recheck. After that the symbol's denotation + * is reset to what it was before PreRecheck. 
*/ - def updateInfoBetween(prevPhase: DenotTransformer, lastPhase: DenotTransformer, newInfo: Type, newOwnerOrNull: Symbol | Null = null)(using Context): Unit = + def updateInfo(prevPhase: DenotTransformer, newInfo: Type, newOwnerOrNull: Symbol | Null = null)(using Context): Unit = val newOwner = if newOwnerOrNull == null then sym.owner else newOwnerOrNull if (sym.info ne newInfo) || (sym.owner ne newOwner) then val flags = sym.flags - sym.copySymDenotation( - initFlags = - if flags.isAllOf(ResetPrivateParamAccessor) - then flags &~ ResetPrivate | Private - else flags - ).installAfter(lastPhase) // reset sym.copySymDenotation( owner = newOwner, info = newInfo, - initFlags = - if newInfo.isInstanceOf[LazyType] then flags &~ Touched - else flags + initFlags = if newInfo.isInstanceOf[LazyType] then flags &~ Touched else flags ).installAfter(prevPhase) /** Does symbol have a new denotation valid from phase.next that is different @@ -158,16 +151,20 @@ abstract class Recheck extends Phase, SymTransformer: // One failing test is pos/i583a.scala /** Change any `ResetPrivate` flags back to `Private` */ - def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if sym.isAllOf(Recheck.ResetPrivateParamAccessor) then - sym.copySymDenotation(initFlags = sym.flags &~ Recheck.ResetPrivate | Private) - else sym + def transformSym(symd: SymDenotation)(using Context): SymDenotation = + val sym = symd.symbol + if sym.isUpdatedAfter(preRecheckPhase) then atPhase(preRecheckPhase)(sym.denot) + else symd def run(using Context): Unit = val rechecker = newRechecker() rechecker.checkUnit(ctx.compilationUnit) rechecker.reset() + override def runOn(units: List[CompilationUnit])(using runCtx: Context): List[CompilationUnit] = + try super.runOn(units) + finally preRecheckPhase.pastRecheck = true + def newRechecker()(using Context): Rechecker /** The typechecker pass */ @@ -197,6 +194,7 @@ abstract class Recheck extends Phase, SymTransformer: def reset()(using Context): Unit = for (ref, mbr) <- prevSelDenots.iterator do ref.withDenot(mbr) + preRecheckPhase /** Constant-folded rechecked type `tp` of tree `tree` */ protected def constFold(tree: Tree, tp: Type)(using Context): Type = From c325428b1e93f07c9a0f7b7ca19eb957a65da600 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 10 Sep 2023 13:09:00 +0200 Subject: [PATCH 27/90] Simplify Synthetics.transform No backward mapping is necessary anymore. 
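(Editorial context: the backward direction became redundant with the previous
commit, which resets after Recheck any denotation that was changed after
preRecheckPhase. Sketch of that reset, copied from Recheck.transformSym above,
which is why Synthetics.transform now only maps symbols forward to their
capture-checked signatures:)

    def transformSym(symd: SymDenotation)(using Context): SymDenotation =
      val sym = symd.symbol
      if sym.isUpdatedAfter(preRecheckPhase) then atPhase(preRecheckPhase)(sym.denot)
      else symd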
# Conflicts: # compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 8 +- .../src/dotty/tools/dotc/cc/Synthetics.scala | 79 +++++-------------- 2 files changed, 21 insertions(+), 66 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 6538cc1545df..09f55dc35583 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -42,7 +42,7 @@ object CheckCaptures: if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate) else if Synthetics.needsTransform(sym) then - Synthetics.transform(sym, toCC = true) + Synthetics.transform(sym) else sym else sym end Pre @@ -203,11 +203,7 @@ class CheckCaptures extends Recheck, SymTransformer: override def run(using Context): Unit = if Feature.ccEnabled then super.run - - override def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if Synthetics.needsTransform(sym) then Synthetics.transform(sym, toCC = false) - else super.transformSym(sym) - + override def printingContext(ctx: Context) = ctx.withProperty(ccStateKey, Some(new CCState)) class CaptureChecker(ictx: Context) extends Rechecker(ictx): diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index 1e7c8d641238..1509fd838265 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -61,7 +61,7 @@ object Synthetics: * @param sym The method to transform @pre needsTransform(sym) must hold. * @param toCC Whether to transform the type to capture checking or back. */ - def transform(sym: SymDenotation, toCC: Boolean)(using Context): SymDenotation = + def transform(sym: SymDenotation)(using Context): SymDenotation = /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. * An apply method in a case class like this: @@ -92,19 +92,7 @@ object Synthetics: case _ => info - /** Drop capture dependencies from the type of `apply` or `copy` method of a case class */ - def dropCaptureDeps(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = dropCaptureDeps(tp.resType)) - case CapturingType(parent, _) => - dropCaptureDeps(parent) - case RefinedType(parent, _, _) => - dropCaptureDeps(parent) - case _ => - tp - /** Add capture information to the type of the default getter of a case class copy method - * if toCC = true, or remove the added info again if toCC = false. 
*/ def transformDefaultGetterCaptures(info: Type, owner: Symbol, idx: Int)(using Context): Type = info match case info: MethodOrPoly => @@ -112,11 +100,10 @@ object Synthetics: case info: ExprType => info.derivedExprType(transformDefaultGetterCaptures(info.resType, owner, idx)) case EventuallyCapturingType(parent, _) => - if toCC then transformDefaultGetterCaptures(parent, owner, idx) - else parent + transformDefaultGetterCaptures(parent, owner, idx) case info @ AnnotatedType(parent, annot) => info.derivedAnnotatedType(transformDefaultGetterCaptures(parent, owner, idx), annot) - case _ if toCC && idx < owner.asClass.paramGetters.length => + case _ if idx < owner.asClass.paramGetters.length => val param = owner.asClass.paramGetters(idx) val pinfo = param.info atPhase(ctx.phase.next) { @@ -126,32 +113,19 @@ object Synthetics: case _ => info - /** Augment an unapply of type `(x: C): D` to `(x: C^{cap}): D^{x}` if toCC is true, - * or remove the added capture sets again if toCC = false. - */ + /** Augment an unapply of type `(x: C): D` to `(x: C^{cap}): D^{x}` */ def transformUnapplyCaptures(info: Type)(using Context): Type = info match case info: MethodType => - if toCC then - val paramInfo :: Nil = info.paramInfos: @unchecked - val newParamInfo = CapturingType(paramInfo, CaptureSet.universal) - val trackedParam = info.paramRefs.head - def newResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = newResult(tp.resType)) - case _ => - CapturingType(tp, CaptureSet(trackedParam)) - info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = newResult(info.resType)) - .showing(i"augment unapply type $info to $result", capt) - else info.paramInfos match - case CapturingType(oldParamInfo, _) :: Nil => - def oldResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = oldResult(tp.resType)) - case CapturingType(tp, _) => - tp - info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + val paramInfo :: Nil = info.paramInfos: @unchecked + val newParamInfo = CapturingType(paramInfo, CaptureSet.universal) + val trackedParam = info.paramRefs.head + def newResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = newResult(tp.resType)) case _ => - info + CapturingType(tp, CaptureSet(trackedParam)) + info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = newResult(info.resType)) + .showing(i"augment unapply type $info to $result", capt) case info: PolyType => info.derivedLambdaType(resType = transformUnapplyCaptures(info.resType)) @@ -159,16 +133,9 @@ object Synthetics: val (pt: PolyType) = symd.info: @unchecked val (mt: MethodType) = pt.resType: @unchecked val (enclThis: ThisType) = symd.owner.thisType: @unchecked - val mt1 = - if toCC then - MethodType(mt.paramNames)( - mt1 => mt.paramInfos.map(_.capturing(CaptureSet.universal)), - mt1 => CapturingType(mt.resType, CaptureSet(enclThis, mt1.paramRefs.head))) - else - MethodType(mt.paramNames)( - mt1 => mt.paramInfos.map(_.stripCapturing), - mt1 => mt.resType.stripCapturing) - pt.derivedLambdaType(resType = mt1) + pt.derivedLambdaType(resType = MethodType(mt.paramNames)( + mt1 => mt.paramInfos.map(_.capturing(CaptureSet.universal)), + mt1 => CapturingType(mt.resType, CaptureSet(enclThis, mt1.paramRefs.head)))) def transformCurriedTupledCaptures(symd: SymDenotation) = val (et: ExprType) = symd.info: @unchecked @@ -179,18 +146,10 @@ object Synthetics: defn.FunctionOf(args, mapFinalResult(res, f), 
isContextual) else f(tp) - val resType1 = - if toCC then - mapFinalResult(et.resType, CapturingType(_, CaptureSet(enclThis))) - else - et.resType.stripCapturing - ExprType(resType1) + ExprType(mapFinalResult(et.resType, CapturingType(_, CaptureSet(enclThis)))) def transformCompareCaptures = - if toCC then - MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType) - else - defn.methOfAnyRef(defn.BooleanType) + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType) sym.copySymDenotation(info = sym.name match case DefaultGetterName(nme.copy, n) => @@ -198,7 +157,7 @@ object Synthetics: case nme.unapply => transformUnapplyCaptures(sym.info) case nme.apply | nme.copy => - if toCC then addCaptureDeps(sym.info) else dropCaptureDeps(sym.info) + addCaptureDeps(sym.info) case nme.andThen | nme.compose => transformComposeCaptures(sym) case nme.curried | nme.tupled => From 8bfc293138f814afbeab3be99016bc94bf6470b3 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 11 Sep 2023 16:30:35 +0200 Subject: [PATCH 28/90] Make Array.apply an intrinsic --- .../src/dotty/tools/dotc/transform/init/Objects.scala | 8 ++++++-- tests/init-global/neg/mutable-array.scala | 8 ++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 tests/init-global/neg/mutable-array.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 3d01b6a93b08..c29fa3fb2c81 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -616,7 +616,7 @@ object Objects: * @param superType The type of the super in a super call. NoType for non-super calls. * @param needResolve Whether the target of the call needs resolution? */ - def call(value: Value, meth: Symbol, args: List[ArgInfo], receiver: Type, superType: Type, needResolve: Boolean = true): Contextual[Value] = log("call " + meth.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { + def call(value: Value, meth: Symbol, args: List[ArgInfo], receiver: Type, superType: Type, needResolve: Boolean = true): Contextual[Value] = log("call " + meth.show + ", this = " + value.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { value match case Cold => report.warning("Using cold alias. 
Calling trace:\n" + Trace.show, Trace.position) @@ -658,7 +658,11 @@ object Objects: resolve(ref.klass, meth) if target.isOneOf(Flags.Method) then - if target.hasSource then + if target.owner == defn.ArrayModuleClass && target.name == nme.apply then + val arr = OfArray(State.currentObject, summon[Regions.Data]) + Heap.write(arr.addr, args.map(_.value).join) + arr + else if target.hasSource then val cls = target.owner.enclosingClass.asClass val ddef = target.defTree.asInstanceOf[DefDef] val meth = ddef.symbol diff --git a/tests/init-global/neg/mutable-array.scala b/tests/init-global/neg/mutable-array.scala new file mode 100644 index 000000000000..3cbc23a5e127 --- /dev/null +++ b/tests/init-global/neg/mutable-array.scala @@ -0,0 +1,8 @@ +object A: + class Box(var value: Int) + val box: Box = new Box(0) + +object B: + val boxes: Array[A.Box] = Array(A.box) + val box: A.Box = boxes(0) + val x: Int = box.value // error From 1e5f66111d6199343031c835029f9374b673cbd3 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 11 Sep 2023 16:49:13 +0200 Subject: [PATCH 29/90] Rename Heap.write to Heap.writeUnion --- .../tools/dotc/transform/init/Objects.scala | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index c29fa3fb2c81..0c116cc29d5c 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -470,7 +470,7 @@ object Objects: /** Store the heap as a mutable field to avoid threading it through the program. */ class MutableData(private[Heap] var heap: Data): - private[Heap] def update(addr: Addr, value: Value): Unit = + private[Heap] def writeUnion(addr: Addr, value: Value): Unit = heap.get(addr) match case None => heap = heap.updated(addr, value) @@ -489,8 +489,8 @@ object Objects: def read(addr: Addr)(using mutable: MutableData): Value = mutable.heap(addr) - def write(addr: Addr, value: Value)(using mutable: MutableData): Unit = - mutable.update(addr, value) + def writeUnion(addr: Addr, value: Value)(using mutable: MutableData): Unit = + mutable.writeUnion(addr, value) def localVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol): Addr = LocalVarAddr(regions, sym, owner) @@ -639,7 +639,7 @@ object Objects: if arr.addr.owner != State.currentObject then errorMutateOtherStaticObject(State.currentObject, arr.addr.owner) else - Heap.write(arr.addr, args.tail.head.value) + Heap.writeUnion(arr.addr, args.tail.head.value) Bottom else // Array.length is OK @@ -660,7 +660,7 @@ object Objects: if target.isOneOf(Flags.Method) then if target.owner == defn.ArrayModuleClass && target.name == nme.apply then val arr = OfArray(State.currentObject, summon[Regions.Data]) - Heap.write(arr.addr, args.map(_.value).join) + Heap.writeUnion(arr.addr, args.map(_.value).join) arr else if target.hasSource then val cls = target.owner.enclosingClass.asClass @@ -846,7 +846,7 @@ object Objects: if addr.owner != State.currentObject then errorMutateOtherStaticObject(State.currentObject, addr.owner) else - Heap.write(addr, rhs) + Heap.writeUnion(addr, rhs) else report.warning("Mutating a field before its initialization: " + field.show + ". 
Calling trace:\n" + Trace.show, Trace.position) end match @@ -871,7 +871,7 @@ object Objects: case outer: (Ref | Cold.type | Bottom.type) => if klass == defn.ArrayClass then val arr = OfArray(State.currentObject, summon[Regions.Data]) - Heap.write(arr.addr, Bottom) + Heap.writeUnion(arr.addr, Bottom) arr else // Widen the outer to finitize the domain. Arguments already widened in `evalArgs`. @@ -907,7 +907,7 @@ object Objects: if sym.is(Flags.Mutable) then val addr = Heap.localVarAddr(summon[Regions.Data], sym, State.currentObject) Env.setLocalVar(sym, addr) - Heap.write(addr, value) + Heap.writeUnion(addr, value) else Env.setLocalVal(sym, value) } @@ -968,8 +968,8 @@ object Objects: * @param value The value of the rhs of the assignment. */ def writeLocal(thisV: ThisValue, sym: Symbol, value: Value): Contextual[Value] = log("write local " + sym.show + " with " + value.show, printer, (_: Value).show) { - assert(sym.is(Flags.Mutable), "Writing to immutable variable " + sym.show) + Env.resolveEnv(sym.enclosingMethod, thisV, summon[Env.Data]) match case Some(thisV -> env) => given Env.Data = env @@ -978,7 +978,7 @@ object Objects: if addr.owner != State.currentObject then errorMutateOtherStaticObject(State.currentObject, addr.owner) else - Heap.write(addr, value) + Heap.writeUnion(addr, value) case _ => report.warning("[Internal error] Variable not found " + sym.show + "\nenv = " + env.show + ". Calling trace:\n" + Trace.show, Trace.position) @@ -1541,7 +1541,7 @@ object Objects: if acc.is(Flags.Mutable) then val addr = Heap.fieldVarAddr(summon[Regions.Data], acc, State.currentObject) thisV.initVar(acc, addr) - Heap.write(addr, value) + Heap.writeUnion(addr, value) else thisV.initVal(acc, value) printer.println(acc.show + " initialized with " + value) @@ -1636,7 +1636,7 @@ object Objects: if sym.is(Flags.Mutable) then val addr = Heap.fieldVarAddr(summon[Regions.Data], sym, State.currentObject) thisV.initVar(sym, addr) - Heap.write(addr, res) + Heap.writeUnion(addr, res) else thisV.initVal(sym, res) From c16fb4e13c9fbddce0d7541f7c566955a6f65023 Mon Sep 17 00:00:00 2001 From: LydiaSkuse <31440012+LydiaSkuse@users.noreply.github.com> Date: Fri, 15 Sep 2023 15:45:32 +0200 Subject: [PATCH 30/90] use directives instead of scalac options in tests --- compiler/test-resources/repl/i13208.scala | 2 +- compiler/test-resources/repl/rewrite-messages | 2 +- compiler/test/dotty/tools/repl/ReplTest.scala | 2 +- compiler/test/dotty/tools/utils.scala | 31 +++++++++++-------- .../macro/pos/t8013/inpervolated_2.scala | 4 +-- tests/pos-macros/i18409.scala | 2 +- 6 files changed, 23 insertions(+), 20 deletions(-) diff --git a/compiler/test-resources/repl/i13208.scala b/compiler/test-resources/repl/i13208.scala index ce4fcf0d9ed8..61ace43c732d 100644 --- a/compiler/test-resources/repl/i13208.scala +++ b/compiler/test-resources/repl/i13208.scala @@ -1,4 +1,4 @@ -// scalac: -source:future -deprecation +//> using options -source:future -deprecation scala> type M[X] = X match { case Int => String case _ => Int } scala> type N[X] = X match { case List[_] => Int } 1 warning found diff --git a/compiler/test-resources/repl/rewrite-messages b/compiler/test-resources/repl/rewrite-messages index eee2fe034c43..a63a72195019 100644 --- a/compiler/test-resources/repl/rewrite-messages +++ b/compiler/test-resources/repl/rewrite-messages @@ -1,4 +1,4 @@ -// scalac: -source:future-migration -deprecation -Werror +//> using options -source:future-migration -deprecation -Werror scala> import scala.util._ -- Error: 
---------------------------------------------------------------------- 1 | import scala.util._ diff --git a/compiler/test/dotty/tools/repl/ReplTest.scala b/compiler/test/dotty/tools/repl/ReplTest.scala index 34cad747fde6..8fbf635c9a17 100644 --- a/compiler/test/dotty/tools/repl/ReplTest.scala +++ b/compiler/test/dotty/tools/repl/ReplTest.scala @@ -69,7 +69,7 @@ extends ReplDriver(options, new PrintStream(out, true, StandardCharsets.UTF_8.na val expectedOutput = lines.filter(nonBlank) val actualOutput = { - val opts = toolArgsFor(ToolName.Scalac)(lines.take(1)) + val opts = toolArgsFor(ToolName.Scalac, scriptFile.map(_.toString))(lines.take(1)) val (optsLine, inputLines) = if opts.isEmpty then ("", lines) else (lines.head, lines.drop(1)) resetToInitial(opts) diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala index 8c154a38850d..8161631acb44 100644 --- a/compiler/test/dotty/tools/utils.scala +++ b/compiler/test/dotty/tools/utils.scala @@ -65,7 +65,7 @@ type ToolArgs = Map[ToolName, List[String]] */ def toolArgsFor(files: List[JPath], charset: Charset = UTF_8): ToolArgs = files.foldLeft(Map.empty[ToolName, List[String]]) { (res, path) => - val toolargs = toolArgsParse(resource(Files.lines(path, charset))(_.limit(10).toScala(List))) + val toolargs = toolArgsParse(resource(Files.lines(path, charset))(_.limit(10).toScala(List)), Some(path.toString)) toolargs.foldLeft(res) { case (acc, (tool, args)) => val name = ToolName.named(tool) @@ -74,31 +74,36 @@ def toolArgsFor(files: List[JPath], charset: Charset = UTF_8): ToolArgs = } } -def toolArgsFor(tool: ToolName)(lines: List[String]): List[String] = - toolArgsParse(lines).collectFirst { case (name, args) if tool eq ToolName.named(name) => CommandLineParser.tokenize(args) }.getOrElse(Nil) +def toolArgsFor(tool: ToolName, filename: Option[String])(lines: List[String]): List[String] = + toolArgsParse(lines, filename).collectFirst { case (name, args) if tool eq ToolName.named(name) => CommandLineParser.tokenize(args) }.getOrElse(Nil) -// scalac: arg1 arg2, with alternative opening, optional space, alt names, text that is not */ up to end. +// scalajs: arg1 arg2, with alternative opening, optional space, alt names, text that is not */ up to end. // groups are (name, args) +// note: ideally we would replace everything that requires this to use directive syntax, however scalajs: --skip has no directive equivalent yet. private val toolArg = raw"(?://|/\*| \*) ?(?i:(${ToolName.values.mkString("|")})):((?:[^*]|\*(?!/))*)".r.unanchored private val directiveOptionsArg = raw"//> using options (.*)".r.unanchored // Inspect the lines for compiler options of the form -// `// scalac: args`, `/* scalac: args`, ` * scalac: args`. +// `//> using options args`, `// scalajs: args`, `/* scalajs: args`, ` * scalajs: args` etc. // If args string ends in close comment, stop at the `*` `/`. // Returns all the matches by the regex. -def toolArgsParse(lines: List[String]): List[(String,String)] = - lines.flatMap { case toolArg(name, args) => List((name, args)) case _ => Nil } ++ +def toolArgsParse(lines: List[String], filename: Option[String]): List[(String,String)] = + lines.flatMap { + case toolArg("scalac", _) => sys.error(s"`// scalac: args` not supported. 
Please use `//> using options args`${filename.fold("")(f => s" in file $f")}") + case toolArg(name, args) => List((name, args)) + case _ => Nil + } ++ lines.flatMap { case directiveOptionsArg(args) => List(("scalac", args)) case _ => Nil } import org.junit.Test import org.junit.Assert._ class ToolArgsTest: - @Test def `missing toolarg is absent`: Unit = assertEquals(Nil, toolArgsParse(List(""))) - @Test def `toolarg is present`: Unit = assertEquals(("test", " -hey") :: Nil, toolArgsParse("// test: -hey" :: Nil)) - @Test def `tool is present`: Unit = assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test)("// test: -hey" :: Nil)) - @Test def `missing tool is absent`: Unit = assertEquals(Nil, toolArgsFor(ToolName.Javac)("// test: -hey" :: Nil)) + @Test def `missing toolarg is absent`: Unit = assertEquals(Nil, toolArgsParse(List(""), None)) + @Test def `toolarg is present`: Unit = assertEquals(("test", " -hey") :: Nil, toolArgsParse("// test: -hey" :: Nil, None)) + @Test def `tool is present`: Unit = assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test, None)("// test: -hey" :: Nil)) + @Test def `missing tool is absent`: Unit = assertEquals(Nil, toolArgsFor(ToolName.Javac, None)("// test: -hey" :: Nil)) @Test def `multitool is present`: Unit = - assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) - assertEquals("-d" :: "/tmp" :: Nil, toolArgsFor(ToolName.Javac)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) + assertEquals("-hey" :: Nil, toolArgsFor(ToolName.Test, None)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) + assertEquals("-d" :: "/tmp" :: Nil, toolArgsFor(ToolName.Javac, None)("// test: -hey" :: "// javac: -d /tmp" :: Nil)) end ToolArgsTest diff --git a/tests/disabled/macro/pos/t8013/inpervolated_2.scala b/tests/disabled/macro/pos/t8013/inpervolated_2.scala index 90e571b42c8c..cbe5139cef5a 100644 --- a/tests/disabled/macro/pos/t8013/inpervolated_2.scala +++ b/tests/disabled/macro/pos/t8013/inpervolated_2.scala @@ -1,6 +1,4 @@ -/* - * scalac: -Xfatal-warnings -Xlint - */ +//> using options -Xfatal-warnings -Xlint package t8013 // unsuspecting user of perverse macro diff --git a/tests/pos-macros/i18409.scala b/tests/pos-macros/i18409.scala index e1dd8cef674d..800e192b81bb 100644 --- a/tests/pos-macros/i18409.scala +++ b/tests/pos-macros/i18409.scala @@ -1,4 +1,4 @@ -// scalac: -Werror -Wunused:all +//> using options -Werror -Wunused:all import scala.quoted.* From 8e6986176157f6320341a55a74a4ceff6840147a Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 14 Sep 2023 11:36:57 +0200 Subject: [PATCH 31/90] Print TypeError stacktraces on -Ydebug-error or -Ydebug-type-error --- compiler/src/dotty/tools/dotc/core/TypeErrors.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 1dcd2301b1a7..87e4ba923e58 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -22,7 +22,10 @@ abstract class TypeError(using creationContext: Context) extends Exception(""): * This is expensive and only useful for debugging purposes. 
*/ def computeStackTrace: Boolean = - ctx.debug || (cyclicErrors != noPrinter && this.isInstanceOf[CyclicReference] && !(ctx.mode is Mode.CheckCyclic)) + ctx.debug + || (cyclicErrors != noPrinter && this.isInstanceOf[CyclicReference] && !(ctx.mode is Mode.CheckCyclic)) + || ctx.settings.YdebugTypeError.value + || ctx.settings.YdebugError.value override def fillInStackTrace(): Throwable = if computeStackTrace then super.fillInStackTrace().nn From 56eb2d8291a95b15d49cc4099c696f409a7273f3 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 18 Sep 2023 22:09:13 +0200 Subject: [PATCH 32/90] Update compiler/src/dotty/tools/dotc/transform/Recheck.scala --- compiler/src/dotty/tools/dotc/transform/Recheck.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index d8db460b06a8..ae5f65f15123 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -194,7 +194,6 @@ abstract class Recheck extends Phase, SymTransformer: def reset()(using Context): Unit = for (ref, mbr) <- prevSelDenots.iterator do ref.withDenot(mbr) - preRecheckPhase /** Constant-folded rechecked type `tp` of tree `tree` */ protected def constFold(tree: Tree, tp: Type)(using Context): Type = From 5d4078b9641b41ff3bad640373c4dc2b4a1f3de5 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 30 Aug 2023 10:02:47 +0200 Subject: [PATCH 33/90] Stabilize quotes `Flags.AbsOverride` --- library/src/scala/quoted/Quotes.scala | 4 ++-- project/MiMaFilters.scala | 1 + tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala | 1 - 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 15df4a08e1f0..d785a68ce2e1 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -4399,9 +4399,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is this an abstract override method? * * This corresponds to a definition declared as "abstract override def" in the source. - * See https://stackoverflow.com/questions/23645172/why-is-abstract-override-required-not-override-alone-in-subtrait for examples. + * See https://stackoverflow.com/questions/23645172/why-is-abstract-override-required-not-override-alone-in-subtrait for examples. */ - @experimental def AbsOverride: Flags + def AbsOverride: Flags /** Is this generated by Scala compiler. * Corresponds to ACC_SYNTHETIC in the JVM. 
diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index fc7b1e29bafc..0a180a899cc3 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -4,6 +4,7 @@ import com.typesafe.tools.mima.core._ object MiMaFilters { val Library: Seq[ProblemFilter] = Seq( // New API in 3.4.X + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#FlagsModule.AbsOverride"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefTypeTest"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefMethods"), ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#defnModule.FunctionClass") diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index d30a58954bc4..e9d6ab49f22a 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -68,7 +68,6 @@ val experimentalDefinitionInLibrary = Set( "scala.annotation.init$.region", //// New APIs: Quotes - "scala.quoted.Quotes.reflectModule.FlagsModule.AbsOverride", // Can be stabilized in 3.4.0 (unsure) or later "scala.quoted.Quotes.reflectModule.CompilationInfoModule.XmacroSettings", "scala.quoted.Quotes.reflectModule.FlagsModule.JavaAnnotation", From ab9f88219c1901be22243d084843c322cfb079a7 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Mon, 21 Aug 2023 14:46:12 +0200 Subject: [PATCH 34/90] Add default arguments to `derivedRefinedType` --- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 4 ++-- .../dotty/tools/dotc/core/ConstraintHandling.scala | 2 +- .../src/dotty/tools/dotc/core/TypeComparer.scala | 2 +- compiler/src/dotty/tools/dotc/core/Types.scala | 12 +++++++----- .../dotc/core/unpickleScala2/Scala2Unpickler.scala | 4 ++-- compiler/src/dotty/tools/dotc/typer/Checking.scala | 2 +- .../src/dotty/tools/dotc/typer/Inferencing.scala | 2 +- .../src/dotty/tools/dotc/typer/Synthesizer.scala | 4 ++-- 8 files changed, 17 insertions(+), 15 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index d5bd8522ca92..2ca907639d95 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -369,7 +369,7 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => val t1 = t match case t @ defn.RefinedFunctionOf(rinfo: MethodType) => - t.derivedRefinedType(t.parent, t.refinedName, this(rinfo)) + t.derivedRefinedType(refinedInfo = this(rinfo)) case _ => mapOver(t) if variance > 0 then t1 @@ -948,7 +948,7 @@ class CheckCaptures extends Recheck, SymTransformer: adaptTypeFun(actual, rinfo.resType, expected, covariant, insertBox, ares1 => val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) - val actual1 = actual.derivedRefinedType(actual.parent, actual.refinedName, rinfo1) + val actual1 = actual.derivedRefinedType(refinedInfo = rinfo1) actual1 ) case _ => diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 7d2f51381d5a..c7ceada9884b 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -689,7 +689,7 @@ trait ConstraintHandling { case tp: AndType => 
tp.derivedAndType(tp.tp1.hardenUnions, tp.tp2.hardenUnions) case tp: RefinedType => - tp.derivedRefinedType(tp.parent.hardenUnions, tp.refinedName, tp.refinedInfo) + tp.derivedRefinedType(parent = tp.parent.hardenUnions) case tp: RecType => tp.rebind(tp.parent.hardenUnions) case tp: HKTypeLambda => diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 6f85de6f14c0..8df809dc9ee6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1824,7 +1824,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private def fixRecs(anchor: SingletonType, tp: Type): Type = { def fix(tp: Type): Type = tp.stripTypeVar match { case tp: RecType => fix(tp.parent).substRecThis(tp, anchor) - case tp @ RefinedType(parent, rname, rinfo) => tp.derivedRefinedType(fix(parent), rname, rinfo) + case tp: RefinedType => tp.derivedRefinedType(parent = fix(tp.parent)) case tp: TypeParamRef => fixOrElse(bounds(tp).hi, tp) case tp: TypeProxy => fixOrElse(tp.superType, tp) case tp: AndType => tp.derivedAndType(fix(tp.tp1), fix(tp.tp2)) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index f2c4fdecb834..8a725d8a0754 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1360,7 +1360,7 @@ object Types { case tp: AndType => tp.derivedAndType(tp.tp1.widenUnionWithoutNull, tp.tp2.widenUnionWithoutNull) case tp: RefinedType => - tp.derivedRefinedType(tp.parent.widenUnion, tp.refinedName, tp.refinedInfo) + tp.derivedRefinedType(parent = tp.parent.widenUnion) case tp: RecType => tp.rebind(tp.parent.widenUnion) case tp: HKTypeLambda => @@ -3226,7 +3226,9 @@ object Types { def checkInst(using Context): this.type = this // debug hook - def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(using Context): Type = + final def derivedRefinedType + (parent: Type = this.parent, refinedName: Name = this.refinedName, refinedInfo: Type = this.refinedInfo) + (using Context): Type = if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this else RefinedType(parent, refinedName, refinedInfo) @@ -4130,7 +4132,7 @@ object Types { case tp @ AppliedType(tycon, args) if defn.isFunctionNType(tp) => wrapConvertible(tp.derivedAppliedType(tycon, args.init :+ addInto(args.last))) case tp @ defn.RefinedFunctionOf(rinfo) => - wrapConvertible(tp.derivedRefinedType(tp.parent, tp.refinedName, addInto(rinfo))) + wrapConvertible(tp.derivedRefinedType(refinedInfo = addInto(rinfo))) case tp: MethodOrPoly => tp.derivedLambdaType(resType = addInto(tp.resType)) case ExprType(resType) => @@ -5631,8 +5633,8 @@ object Types { else hi case (arg, _) => arg tp.derivedAppliedType(tycon, args1) - case tp @ RefinedType(parent, name, info) => - tp.derivedRefinedType(approxWildcardArgs(parent), name, info) + case tp: RefinedType => + tp.derivedRefinedType(approxWildcardArgs(tp.parent)) case _ => tp approxWildcardArgs(tp) diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 4db5158c0e7f..481768fb4dbf 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -734,8 +734,8 @@ class 
Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val info1 = info.symbol.info assert(info1.derivesFrom(defn.SingletonClass)) RefinedType(parent1, name, info1.mapReduceAnd(removeSingleton)(_ & _)) - case info => - tp.derivedRefinedType(parent1, name, info) + case _ => + tp.derivedRefinedType(parent = parent1) } case tp @ AppliedType(tycon, args) => val tycon1 = tycon.safeDealias diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 1ea24187a185..51cf019a2f85 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -1092,7 +1092,7 @@ trait Checking { case tp @ AppliedType(tycon, args) => tp.derivedAppliedType(tycon, args.mapConserve(checkGoodBounds)) case tp: RefinedType => - tp.derivedRefinedType(tp.parent, tp.refinedName, checkGoodBounds(tp.refinedInfo)) + tp.derivedRefinedType(refinedInfo = checkGoodBounds(tp.refinedInfo)) case _ => tp } diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 184c8675e678..618a6cec1a3e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -537,7 +537,7 @@ object Inferencing { } if tparams.isEmpty then tp else tp.derivedAppliedType(tycon, args1) case tp: AndOrType => tp.derivedAndOrType(captureWildcards(tp.tp1), captureWildcards(tp.tp2)) - case tp: RefinedType => tp.derivedRefinedType(captureWildcards(tp.parent), tp.refinedName, tp.refinedInfo) + case tp: RefinedType => tp.derivedRefinedType(parent = captureWildcards(tp.parent)) case tp: RecType => tp.derivedRecType(captureWildcards(tp.parent)) case tp: LazyRef => captureWildcards(tp.ref) case tp: AnnotatedType => tp.derivedAnnotatedType(captureWildcards(tp.parent), tp.annot) diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index c9b81a7d4769..d492b38c16c8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -743,8 +743,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): def recur(handlers: SpecialHandlers): TreeWithErrors = handlers match case (cls, handler) :: rest => def baseWithRefinements(tp: Type): Type = tp.dealias match - case tp @ RefinedType(parent, rname, rinfo) => - tp.derivedRefinedType(baseWithRefinements(parent), rname, rinfo) + case tp: RefinedType => + tp.derivedRefinedType(parent = baseWithRefinements(tp.parent)) case _ => tp.baseType(cls) val base = baseWithRefinements(formal) From d71ca06419a69b38b7ae0e932e47bc13577a8200 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 20 Jun 2023 13:03:42 +0200 Subject: [PATCH 35/90] Intrinsify `constValueTuple` and `summonAll` The new implementation instantiates the TupleN/TupleXXL classes directly. This avoids the expensive construction of tuples using `*:`. 
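For illustration only — a usage sketch, not code from this patch; the
element types and the standard `Ordering` givens below are just assumed
examples:

    import scala.compiletime.{constValueTuple, summonAll}

    val labels: ("name", "age") = constValueTuple[("name", "age")]
    // now inlined to (roughly) Tuple2.apply["name", "age"]("name", "age")

    val ords = summonAll[(Ordering[Int], Ordering[String])]
    // now inlined to Tuple2.apply(<summoned Ordering[Int]>, <summoned Ordering[String]>)

For arities above 22 the inliner emits a `TupleXXL.apply(...)` call
instead. In both cases this replaces the right-nested chain of `*:`
that the previous recursive inline matches in `scala.compiletime`
produced.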
Fixes #15988 --- compiler/src/dotty/tools/dotc/ast/tpd.scala | 19 +++++ .../dotty/tools/dotc/core/Definitions.scala | 6 +- .../dotty/tools/dotc/inlines/Inliner.scala | 4 +- .../dotty/tools/dotc/inlines/Inlines.scala | 69 ++++++++++++++----- library/src/scala/compiletime/package.scala | 19 ++--- tests/neg/17211.check | 20 +++--- tests/neg/i14177a.scala | 2 +- tests/run/i15988a.scala | 6 ++ tests/run/i15988b.scala | 21 ++++++ 9 files changed, 119 insertions(+), 47 deletions(-) create mode 100644 tests/run/i15988a.scala create mode 100644 tests/run/i15988b.scala diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index a80fcc59a806..f57065eb9b15 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -1520,6 +1520,25 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } } + /** Creates the tuple containing the elemets */ + def tupleTree(elems: List[Tree])(using Context): Tree = { + val arity = elems.length + if arity == 0 then + ref(defn.EmptyTupleModule) + else if arity <= Definitions.MaxTupleArity then + // TupleN[elem1Tpe, ...](elem1, ...) + ref(defn.TupleType(arity).nn.typeSymbol.companionModule) + .select(nme.apply) + .appliedToTypes(elems.map(_.tpe.widenIfUnstable)) + .appliedToArgs(elems) + else + // TupleXXL.apply(elems*) // TODO add and use Tuple.apply(elems*) ? + ref(defn.TupleXXLModule) + .select(nme.apply) + .appliedToVarargs(elems.map(_.asInstance(defn.ObjectType)), TypeTree(defn.ObjectType)) + .asInstance(defn.tupleType(elems.map(elem => elem.tpe.widenIfUnstable))) + } + /** Creates the tuple type tree representation of the type trees in `ts` */ def tupleTypeTree(elems: List[Tree])(using Context): Tree = { val arity = elems.length diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 22a49a760e57..d5dd6b07b465 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -243,8 +243,10 @@ class Definitions { @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") @tu lazy val Compiletime_constValueOpt: Symbol = CompiletimePackageClass.requiredMethod("constValueOpt") + @tu lazy val Compiletime_constValueTuple: Symbol = CompiletimePackageClass.requiredMethod("constValueTuple") @tu lazy val Compiletime_summonFrom : Symbol = CompiletimePackageClass.requiredMethod("summonFrom") - @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") + @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") + @tu lazy val Compiletime_summonAll : Symbol = CompiletimePackageClass.requiredMethod("summonAll") @tu lazy val CompiletimeTestingPackage: Symbol = requiredPackage("scala.compiletime.testing") @tu lazy val CompiletimeTesting_typeChecks: Symbol = CompiletimeTestingPackage.requiredMethod("typeChecks") @tu lazy val CompiletimeTesting_typeCheckErrors: Symbol = CompiletimeTestingPackage.requiredMethod("typeCheckErrors") @@ -932,6 +934,8 @@ class Definitions { @tu lazy val TupleTypeRef: TypeRef = requiredClassRef("scala.Tuple") def TupleClass(using Context): ClassSymbol = TupleTypeRef.symbol.asClass @tu lazy val Tuple_cons: Symbol = TupleClass.requiredMethod("*:") + @tu lazy val TupleModule: Symbol = 
requiredModule("scala.Tuple") + @tu lazy val EmptyTupleClass: Symbol = requiredClass("scala.EmptyTuple") @tu lazy val EmptyTupleModule: Symbol = requiredModule("scala.EmptyTuple") @tu lazy val NonEmptyTupleTypeRef: TypeRef = requiredClassRef("scala.NonEmptyTuple") def NonEmptyTupleClass(using Context): ClassSymbol = NonEmptyTupleTypeRef.symbol.asClass diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 1fb6e785dd43..ffe52dbe2e25 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -497,8 +497,8 @@ class Inliner(val call: tpd.Tree)(using Context): // assertAllPositioned(tree) // debug tree.changeOwner(originalOwner, ctx.owner) - def tryConstValue: Tree = - TypeComparer.constValue(callTypeArgs.head.tpe) match { + def tryConstValue(tpe: Type): Tree = + TypeComparer.constValue(tpe) match { case Some(c) => Literal(c).withSpan(call.span) case _ => EmptyTree } diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 4e0d0565c4a8..ffc820d59c71 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -413,36 +413,67 @@ object Inlines: return Intrinsics.codeOf(arg, call.srcPos) case _ => - // Special handling of `constValue[T]`, `constValueOpt[T], and summonInline[T]` + // Special handling of `constValue[T]`, `constValueOpt[T]`, `constValueTuple[T]`, and `summonInline[T]` if callTypeArgs.length == 1 then - if (inlinedMethod == defn.Compiletime_constValue) { - val constVal = tryConstValue + + def constValueOrError(tpe: Type): Tree = + val constVal = tryConstValue(tpe) if constVal.isEmpty then - val msg = NotConstant("cannot take constValue", callTypeArgs.head.tpe) - return ref(defn.Predef_undefined).withSpan(call.span).withType(ErrorType(msg)) + val msg = NotConstant("cannot take constValue", tpe) + ref(defn.Predef_undefined).withSpan(callTypeArgs.head.span).withType(ErrorType(msg)) else - return constVal + constVal + + def searchImplicitOrError(tpe: Type): Tree = + val evTyper = new Typer(ctx.nestingLevel + 1) + val evCtx = ctx.fresh.setTyper(evTyper) + inContext(evCtx) { + val evidence = evTyper.inferImplicitArg(tpe, callTypeArgs.head.span) + evidence.tpe match + case fail: Implicits.SearchFailureType => + errorTree(call, evTyper.missingArgMsg(evidence, tpe, "")) + case _ => + evidence + } + + def unrollTupleTypes(tpe: Type): Option[List[Type]] = tpe.dealias match + case AppliedType(tycon, args) if defn.isTupleClass(tycon.typeSymbol) => + Some(args) + case AppliedType(tycon, head :: tail :: Nil) if tycon.isRef(defn.PairClass) => + unrollTupleTypes(tail).map(head :: _) + case tpe: TermRef if tpe.symbol == defn.EmptyTupleModule => + Some(Nil) + case _ => + None + + if (inlinedMethod == defn.Compiletime_constValue) { + return constValueOrError(callTypeArgs.head.tpe) } else if (inlinedMethod == defn.Compiletime_constValueOpt) { - val constVal = tryConstValue + val constVal = tryConstValue(callTypeArgs.head.tpe) return ( if (constVal.isEmpty) ref(defn.NoneModule.termRef) else New(defn.SomeClass.typeRef.appliedTo(constVal.tpe), constVal :: Nil) ) } + else if (inlinedMethod == defn.Compiletime_constValueTuple) { + unrollTupleTypes(callTypeArgs.head.tpe) match + case Some(types) => + val constants = types.map(constValueOrError) + return Typed(tpd.tupleTree(constants), TypeTree(callTypeArgs.head.tpe)).withSpan(call.span) + case _ 
=> + return errorTree(call, em"Tuple element types must be known at compile time") + } else if (inlinedMethod == defn.Compiletime_summonInline) { - def searchImplicit(tpt: Tree) = - val evTyper = new Typer(ctx.nestingLevel + 1) - val evCtx = ctx.fresh.setTyper(evTyper) - inContext(evCtx) { - val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span) - evidence.tpe match - case fail: Implicits.SearchFailureType => - errorTree(call, evTyper.missingArgMsg(evidence, tpt.tpe, "")) - case _ => - evidence - } - return searchImplicit(callTypeArgs.head) + return searchImplicitOrError(callTypeArgs.head.tpe) + } + else if (inlinedMethod == defn.Compiletime_summonAll) { + unrollTupleTypes(callTypeArgs.head.tpe) match + case Some(types) => + val implicits = types.map(searchImplicitOrError) + return Typed(tpd.tupleTree(implicits), TypeTree(callTypeArgs.head.tpe)).withSpan(call.span) + case _ => + return errorTree(call, em"Tuple element types must be known at compile time") } end if diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index ff00b83bcb79..3eca997554a0 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -117,13 +117,9 @@ transparent inline def constValue[T]: T = * `(constValue[X1], ..., constValue[Xn])`. */ inline def constValueTuple[T <: Tuple]: T = - val res = - inline erasedValue[T] match - case _: EmptyTuple => EmptyTuple - case _: (t *: ts) => constValue[t] *: constValueTuple[ts] - end match - res.asInstanceOf[T] -end constValueTuple + // implemented in dotty.tools.dotc.typer.Inliner + error("Compiler bug: `constValueTuple` was not evaluated by the compiler") + /** Summons first given matching one of the listed cases. E.g. in * @@ -168,13 +164,8 @@ transparent inline def summonInline[T]: T = * @return the given values typed as elements of the tuple */ inline def summonAll[T <: Tuple]: T = - val res = - inline erasedValue[T] match - case _: EmptyTuple => EmptyTuple - case _: (t *: ts) => summonInline[t] *: summonAll[ts] - end match - res.asInstanceOf[T] -end summonAll + // implemented in dotty.tools.dotc.typer.Inliner + error("Compiler bug: `summonAll` was not evaluated by the compiler") /** Assertion that an argument is by-name. Used for nullability checking. */ def byName[T](x: => T): T = x diff --git a/tests/neg/17211.check b/tests/neg/17211.check index 3c2f10a61957..be7086e3b3eb 100644 --- a/tests/neg/17211.check +++ b/tests/neg/17211.check @@ -1,14 +1,14 @@ --- [E182] Type Error: tests/neg/17211.scala:14:12 ---------------------------------------------------------------------- +-- [E182] Type Error: tests/neg/17211.scala:14:13 ---------------------------------------------------------------------- 14 | constValue[IsInt[Foo.Foo]] // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | IsInt[Foo.Foo] is not a constant type; cannot take constValue + | ^^^^^^^^^^^^^^ + | IsInt[Foo.Foo] is not a constant type; cannot take constValue | - | Note: a match type could not be fully reduced: + | Note: a match type could not be fully reduced: | - | trying to reduce IsInt[Foo.Foo] - | failed since selector Foo.Foo - | does not match case Int => (true : Boolean) - | and cannot be shown to be disjoint from it either. - | Therefore, reduction cannot advance to the remaining case + | trying to reduce IsInt[Foo.Foo] + | failed since selector Foo.Foo + | does not match case Int => (true : Boolean) + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case | - | case _ => (false : Boolean) + | case _ => (false : Boolean) diff --git a/tests/neg/i14177a.scala b/tests/neg/i14177a.scala index 3031271c369b..237eaacb3b66 100644 --- a/tests/neg/i14177a.scala +++ b/tests/neg/i14177a.scala @@ -3,4 +3,4 @@ import scala.compiletime.* trait C[A] inline given [Tup <: Tuple]: C[Tup] with - val cs = summonAll[Tuple.Map[Tup, C]] // error cannot reduce inline match with + val cs = summonAll[Tuple.Map[Tup, C]] // error: Tuple element types must be known at compile time diff --git a/tests/run/i15988a.scala b/tests/run/i15988a.scala new file mode 100644 index 000000000000..dba5008fd950 --- /dev/null +++ b/tests/run/i15988a.scala @@ -0,0 +1,6 @@ +import scala.compiletime.constValueTuple + +@main def Test: Unit = + assert(constValueTuple[EmptyTuple] == EmptyTuple) + assert(constValueTuple[("foo", 5, 3.14, "bar", false)] == ("foo", 5, 3.14, "bar", false)) + assert(constValueTuple[(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23)] == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23)) diff --git a/tests/run/i15988b.scala b/tests/run/i15988b.scala new file mode 100644 index 000000000000..4b7764d94a18 --- /dev/null +++ b/tests/run/i15988b.scala @@ -0,0 +1,21 @@ +import scala.compiletime.summonAll + +@main def Test: Unit = + assert(summonAll[EmptyTuple] == EmptyTuple) + assert(summonAll[(5, 5, 5)] == (5, 5, 5)) + assert( + summonAll[( + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + )] == ( + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, + )) + +given 5 = 5 From c39d260042332803259563110603521ed6381db6 Mon Sep 17 00:00:00 2001 From: Lorenzo Gabriele Date: Tue, 19 Sep 2023 15:32:37 +0200 Subject: [PATCH 36/90] Avoid flattening `List` of `List`s --- .../tools/dotc/transform/CheckUnused.scala | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 0eeec0f3cbec..eb3c37622e0d 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -561,11 +561,19 @@ object CheckUnused: else Nil val warnings = - List(sortedImp, sortedLocalDefs, sortedExplicitParams, sortedImplicitParams, - sortedPrivateDefs, sortedPatVars, unsetLocalDefs, unsetPrivateDefs).flatten.sortBy { s => - val pos = s.pos.sourcePos - (pos.line, pos.column) - } + val unsorted = + sortedImp ::: + sortedLocalDefs ::: + sortedExplicitParams ::: + sortedImplicitParams ::: + sortedPrivateDefs ::: + sortedPatVars ::: + unsetLocalDefs ::: + unsetPrivateDefs + unsorted.sortBy { s => + val pos = s.pos.sourcePos + (pos.line, pos.column) + } UnusedResult(warnings.toSet) end getUnused //============================ HELPERS ==================================== From 0c04c2e5206e8e5ade2cc8c31efb394803e62e28 Mon Sep 17 00:00:00 2001 From: Lorenzo Gabriele Date: Tue, 19 Sep 2023 15:44:14 +0200 Subject: [PATCH 37/90] Remove redundant `.toList` call on a `List` Co-authored-by: Nicolas Stucki --- compiler/src/dotty/tools/MainGenericRunner.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala index 6f4366a00b77..0a5a1b191a44 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ 
b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -148,7 +148,7 @@ object MainGenericRunner { case (o @ javaOption(striped)) :: tail => processArgs(tail, settings.withJavaArgs(striped).withScalaArgs(o)) case (o @ scalaOption(_*)) :: tail => - val remainingArgs = (CommandLineParser.expandArg(o) ++ tail).toList + val remainingArgs = CommandLineParser.expandArg(o) ++ tail processArgs(remainingArgs, settings) case (o @ colorOption(_*)) :: tail => processArgs(tail, settings.withScalaArgs(o)) From c3377d4900cba307d291fc56e39187431fc9aa7a Mon Sep 17 00:00:00 2001 From: Lorenzo Gabriele Date: Tue, 19 Sep 2023 15:48:18 +0200 Subject: [PATCH 38/90] Avoid creating a List to initialize fields --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 22a49a760e57..73e731e89676 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -2131,8 +2131,10 @@ class Definitions { this.initCtx = ctx if (!isInitialized) { // force initialization of every symbol that is synthesized or hijacked by the compiler - val forced = - syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass + syntheticCoreClasses + syntheticCoreMethods + JavaEnumClass + // end force initialization isInitialized = true } addSyntheticSymbolsComments From fe4a6855efb1e59e050f3ebf00cfd91a32675eec Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Fri, 15 Sep 2023 10:48:14 +0200 Subject: [PATCH 39/90] Add missing span to extension method select --- .../dotty/tools/dotc/inlines/Inliner.scala | 2 +- .../dotty/tools/dotc/typer/Applications.scala | 2 +- .../tools/dotc/typer/ErrorReporting.scala | 3 +- .../dotty/tools/dotc/typer/Implicits.scala | 12 +++--- .../tools/dotc/typer/ImportSuggestions.scala | 2 +- .../dotty/tools/dotc/typer/ProtoTypes.scala | 39 ++++++++++--------- .../src/dotty/tools/dotc/typer/Typer.scala | 10 ++--- .../tests/definition/PcDefinitionSuite.scala | 10 +++++ .../highlight/DocumentHighlightSuite.scala | 31 +++++++++++++++ .../tools/pc/tests/hover/HoverTypeSuite.scala | 12 ++++++ .../semanticdb/expect/Extension.expect.scala | 9 +++++ tests/semanticdb/expect/Extension.scala | 9 +++++ tests/semanticdb/metac.expect | 26 +++++++++++-- 13 files changed, 130 insertions(+), 37 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 1fb6e785dd43..fe32a6754984 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -771,7 +771,7 @@ class Inliner(val call: tpd.Tree)(using Context): override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { val locked = ctx.typerState.ownedVars - val qual1 = typed(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) + val qual1 = typed(tree.qualifier, shallowSelectionProto(tree.name, pt, this, tree.nameSpan)) val resNoReduce = untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt) val reducedProjection = reducer.reduceProjection(resNoReduce) if reducedProjection.isType then diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 921e3ca86fe4..9501e51aeb6f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ 
b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -976,7 +976,7 @@ trait Applications extends Compatibility { val resultType = if !originalResultType.isRef(defn.ObjectClass) then originalResultType else AvoidWildcardsMap()(proto.resultType.deepenProtoTrans) match - case SelectionProto(nme.asInstanceOf_, PolyProto(_, resTp), _, _) => resTp + case SelectionProto(nme.asInstanceOf_, PolyProto(_, resTp), _, _, _) => resTp case resTp if isFullyDefined(resTp, ForceDegree.all) => resTp case _ => defn.ObjectType val methType = MethodType(proto.typedArgs().map(_.tpe.widen), resultType) diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index ab252bb1b206..368ce7523846 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -8,6 +8,7 @@ import Types._, ProtoTypes._, Contexts._, Decorators._, Denotations._, Symbols._ import Implicits._, Flags._, Constants.Constant import Trees._ import NameOps._ +import util.Spans.NoSpan import util.SrcPos import config.Feature import reporting._ @@ -275,7 +276,7 @@ object ErrorReporting { else val add = suggestImports( ViewProto(qualType.widen, - SelectionProto(tree.name, WildcardType, NoViewsAllowed, privateOK = false))) + SelectionProto(tree.name, WildcardType, NoViewsAllowed, privateOK = false, NoSpan))) if add.isEmpty then "" else ", but could be made available as an extension method." ++ add end selectErrorAddendum diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 3e2faee56d1b..04aca960845e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -76,7 +76,7 @@ object Implicits: * method with the selecting name? False otherwise. 
*/ def hasExtMethod(tp: Type, expected: Type)(using Context) = expected match - case selProto @ SelectionProto(selName: TermName, _, _, _) => + case selProto @ SelectionProto(selName: TermName, _, _, _, _) => tp.memberBasedOnFlags(selName, required = ExtensionMethod).exists case _ => false @@ -454,7 +454,7 @@ object Implicits: def clarify(tp: Type)(using Context): Type = tp final protected def qualify(using Context): String = expectedType match { - case SelectionProto(name, mproto, _, _) if !argument.isEmpty => + case SelectionProto(name, mproto, _, _, _) if !argument.isEmpty => i"provide an extension method `$name` on ${argument.tpe}" case NoType => if (argument.isEmpty) i"match expected type" @@ -866,8 +866,8 @@ trait Implicits: NoMatchingImplicitsFailure else { def adjust(to: Type) = to.stripTypeVar.widenExpr match { - case SelectionProto(name, memberProto, compat, true) => - SelectionProto(name, memberProto, compat, privateOK = false) + case SelectionProto(name, memberProto, compat, true, nameSpan) => + SelectionProto(name, memberProto, compat, privateOK = false, nameSpan) case tp => tp } @@ -1161,10 +1161,10 @@ trait Implicits: pt, locked) } pt match - case selProto @ SelectionProto(selName: TermName, mbrType, _, _) => + case selProto @ SelectionProto(selName: TermName, mbrType, _, _, nameSpan) => def tryExtension(using Context) = - extMethodApply(untpd.Select(untpdGenerated, selName), argument, mbrType) + extMethodApply(untpd.Select(untpdGenerated, selName).withSpan(nameSpan), argument, mbrType) def tryConversionForSelection(using Context) = val converted = tryConversion diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 70addd442100..a21a94aab271 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -237,7 +237,7 @@ trait ImportSuggestions: // don't suggest things that are imported by default def extensionImports = pt match - case ViewProto(argType, SelectionProto(name: TermName, _, _, _)) => + case ViewProto(argType, SelectionProto(name: TermName, _, _, _, _)) => roots.flatMap(extensionMethod(_, name, argType)) case _ => Nil diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 7303124b0cd4..051c75522003 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -21,6 +21,7 @@ import dotty.tools.dotc.core.Flags.Transparent import dotty.tools.dotc.config.{ Feature, SourceVersion } import scala.annotation.internal.sharable +import dotty.tools.dotc.util.Spans.{NoSpan, Span} object ProtoTypes { @@ -180,7 +181,7 @@ object ProtoTypes { * * [ ].name: proto */ - abstract case class SelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) + abstract case class SelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean, nameSpan: Span) extends CachedProxyType with ProtoType with ValueTypeOrProto { /** Is the set of members of this type unknown, in the sense that we @@ -243,9 +244,9 @@ object ProtoTypes { def underlying(using Context): Type = WildcardType - def derivedSelectionProto(name: Name, memberProto: Type, compat: Compatibility)(using Context): SelectionProto = - if ((name eq this.name) && (memberProto eq this.memberProto) && (compat eq this.compat)) this - else SelectionProto(name, memberProto, compat, 
privateOK) + def derivedSelectionProto(name: Name, memberProto: Type, compat: Compatibility, nameSpan: Span)(using Context): SelectionProto = + if ((name eq this.name) && (memberProto eq this.memberProto) && (compat eq this.compat) && (nameSpan == this.nameSpan)) this + else SelectionProto(name, memberProto, compat, privateOK, nameSpan) override def isErroneous(using Context): Boolean = memberProto.isErroneous @@ -253,14 +254,14 @@ object ProtoTypes { override def unusableForInference(using Context): Boolean = memberProto.unusableForInference - def map(tm: TypeMap)(using Context): SelectionProto = derivedSelectionProto(name, tm(memberProto), compat) + def map(tm: TypeMap)(using Context): SelectionProto = derivedSelectionProto(name, tm(memberProto), compat, nameSpan) def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = ta(x, memberProto) override def deepenProto(using Context): SelectionProto = - derivedSelectionProto(name, memberProto.deepenProto, compat) + derivedSelectionProto(name, memberProto.deepenProto, compat, nameSpan) override def deepenProtoTrans(using Context): SelectionProto = - derivedSelectionProto(name, memberProto.deepenProtoTrans, compat) + derivedSelectionProto(name, memberProto.deepenProtoTrans, compat, nameSpan) override def computeHash(bs: Hashable.Binders): Int = { val delta = (if (compat eq NoViewsAllowed) 1 else 0) | (if (privateOK) 2 else 0) @@ -281,12 +282,12 @@ object ProtoTypes { } } - class CachedSelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) - extends SelectionProto(name, memberProto, compat, privateOK) + class CachedSelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean, nameSpan: Span) + extends SelectionProto(name, memberProto, compat, privateOK, nameSpan) object SelectionProto { - def apply(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean)(using Context): SelectionProto = { - val selproto = new CachedSelectionProto(name, memberProto, compat, privateOK) + def apply(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean, nameSpan: Span)(using Context): SelectionProto = { + val selproto = new CachedSelectionProto(name, memberProto, compat, privateOK, nameSpan) if (compat eq NoViewsAllowed) unique(selproto) else selproto } } @@ -294,11 +295,11 @@ object ProtoTypes { /** Create a selection proto-type, but only one level deep; * treat constructors specially */ - def shallowSelectionProto(name: Name, tp: Type, typer: Typer)(using Context): TermType = + def shallowSelectionProto(name: Name, tp: Type, typer: Typer, nameSpan: Span)(using Context): TermType = if (name.isConstructorName) WildcardType else tp match - case tp: UnapplyFunProto => new UnapplySelectionProto(name) - case tp => SelectionProto(name, IgnoredProto(tp), typer, privateOK = true) + case tp: UnapplyFunProto => new UnapplySelectionProto(name, nameSpan) + case tp => SelectionProto(name, IgnoredProto(tp), typer, privateOK = true, nameSpan) /** A prototype for expressions [] that are in some unspecified selection operation * @@ -308,12 +309,12 @@ object ProtoTypes { * operation is further selection. In this case, the expression need not be a value. 
* @see checkValue */ - @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true) + @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) - @sharable object SingletonTypeProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true) + @sharable object SingletonTypeProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) /** A prototype for selections in pattern constructors */ - class UnapplySelectionProto(name: Name) extends SelectionProto(name, WildcardType, NoViewsAllowed, true) + class UnapplySelectionProto(name: Name, nameSpan: Span) extends SelectionProto(name, WildcardType, NoViewsAllowed, true, nameSpan) trait ApplyingProto extends ProtoType // common trait of ViewProto and FunProto trait FunOrPolyProto extends ProtoType: // common trait of PolyProto and FunProto @@ -612,7 +613,7 @@ object ProtoTypes { def isMatchedBy(tp: Type, keepConstraint: Boolean)(using Context): Boolean = ctx.typer.isApplicableType(tp, argType :: Nil, resultType) || { resType match { - case selProto @ SelectionProto(selName: TermName, mbrType, _, _) => + case selProto @ SelectionProto(selName: TermName, mbrType, _, _, _) => ctx.typer.hasExtensionMethodNamed(tp, selName, argType, mbrType) //.reporting(i"has ext $tp $name $argType $mbrType: $result") case _ => @@ -934,7 +935,7 @@ object ProtoTypes { } approxOr case tp: SelectionProto => - tp.derivedSelectionProto(tp.name, wildApprox(tp.memberProto, theMap, seen, internal), NoViewsAllowed) + tp.derivedSelectionProto(tp.name, wildApprox(tp.memberProto, theMap, seen, internal), NoViewsAllowed, tp.nameSpan) case tp: ViewProto => tp.derivedViewProto( wildApprox(tp.argType, theMap, seen, internal), diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 6ae7e96c40aa..c2ca4218eaf0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -760,7 +760,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer record("typedSelect") def typeSelectOnTerm(using Context): Tree = - val qual = typedExpr(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) + val qual = typedExpr(tree.qualifier, shallowSelectionProto(tree.name, pt, this, tree.nameSpan)) typedSelect(tree, pt, qual).withSpan(tree.span).computeNullable() def javaSelectOnType(qual: Tree)(using Context) = @@ -790,7 +790,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tryAlternatively(typeSelectOnTerm)(fallBack) if (tree.qualifier.isType) { - val qual1 = typedType(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) + val qual1 = typedType(tree.qualifier, shallowSelectionProto(tree.name, pt, this, tree.nameSpan)) assignType(cpy.Select(tree)(qual1, tree.name), qual1) } else if (ctx.isJava && tree.name.isTypeName) @@ -3513,7 +3513,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer then Some(adapt(tree, pt, locked)) else - val selProto = SelectionProto(name, pt, NoViewsAllowed, privateOK = false) + val selProto = SelectionProto(name, pt, NoViewsAllowed, privateOK = false, tree.nameSpan) if selProto.isMatchedBy(qual.tpe) || tree.hasAttachment(InsertedImplicitOnQualifier) then None else @@ -3538,7 +3538,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer (tree: untpd.Select, pt: Type, mbrProto: Type, qual: Tree, locked: TypeVars, compat: Compatibility, inSelect: 
Boolean) (using Context): Tree = - def selectionProto = SelectionProto(tree.name, mbrProto, compat, privateOK = inSelect) + def selectionProto = SelectionProto(tree.name, mbrProto, compat, privateOK = inSelect, tree.nameSpan) def tryExtension(using Context): Tree = val altImports = new mutable.ListBuffer[TermRef]() @@ -3968,7 +3968,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * function prototype `(...)R`. Otherwise `pt`. */ def ptWithoutRedundantApply: Type = pt.revealIgnored match - case SelectionProto(nme.apply, mpt, _, _) => + case SelectionProto(nme.apply, mpt, _, _, _) => mpt.revealIgnored match case fpt: FunProto => fpt case _ => pt diff --git a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala index 4869cdf0fa3b..2b49d2db3f08 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala @@ -401,3 +401,13 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: | |""".stripMargin ) + + @Test def `implicit-extension` = + check( + """|class MyIntOut(val value: Int) + |object MyIntOut: + | extension (i: MyIntOut) def <> = i.value % 2 == 1 + | + |val a = MyIntOut(1).un@@even + |""".stripMargin, + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/highlight/DocumentHighlightSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/highlight/DocumentHighlightSuite.scala index 6ed5d6c636e3..0ed40c6c537f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/highlight/DocumentHighlightSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/highlight/DocumentHighlightSuite.scala @@ -1143,3 +1143,34 @@ class DocumentHighlightSuite extends BaseDocumentHighlightSuite: | case MySome[<>](value: <>) extends MyOption[Int] |""".stripMargin, ) + + @Test def `implicit-extension` = + check( + """|class MyIntOut(val value: Int) + |object MyIntOut: + | extension (i: MyIntOut) def <> = i.value % 2 == 1 + | + |val a = MyIntOut(1) + |val m = a.<> + |""".stripMargin, + ) + + @Test def `implicit-extension-2` = + check( + """|class MyIntOut(val value: Int) + |object MyIntOut: + | extension (i: MyIntOut) def <>(u: Int) = i.value % 2 == 1 + | + |val a = MyIntOut(1).<>(3) + |""".stripMargin, + ) + + @Test def `implicit-extension-infix` = + check( + """|class MyIntOut(val value: Int) + |object MyIntOut: + | extension (i: MyIntOut) def <<++>>(u: Int) = i.value + u + | + |val a = MyIntOut(1) <<+@@+>> 3 + |""".stripMargin, + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTypeSuite.scala index e47eb48068d2..2157aa891bae 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTypeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTypeSuite.scala @@ -355,3 +355,15 @@ class HoverTypeSuite extends BaseHoverSuite: """|val ddd: Int |""".stripMargin.hover, ) + + @Test def `infix-extension` = + check( + """|class MyIntOut(val value: Int) + |object MyIntOut: + | extension (i: MyIntOut) def uneven = i.value % 2 == 1 + | + |val a = MyIntOut(1).un@@even + |""".stripMargin, + """|extension (i: MyIntOut) def uneven: Boolean + |""".stripMargin.hover, + ) diff --git a/tests/semanticdb/expect/Extension.expect.scala b/tests/semanticdb/expect/Extension.expect.scala index b40e965d4885..f6f76b17b698 
100644 --- a/tests/semanticdb/expect/Extension.expect.scala +++ b/tests/semanticdb/expect/Extension.expect.scala @@ -16,3 +16,12 @@ extension (s/*<-ext::Extension$package.readInto().(s)*/: String/*->scala::Predef trait Functor/*<-ext::Functor#*/[F/*<-ext::Functor#[F]*/[_]]: extension [T/*<-ext::Functor#map().[T]*/](t/*<-ext::Functor#map().(t)*/: F/*->ext::Functor#[F]*/[T/*->ext::Functor#map().[T]*/]) def map/*<-ext::Functor#map().*/[U/*<-ext::Functor#map().[U]*/](f/*<-ext::Functor#map().(f)*/: T/*->ext::Functor#map().[T]*/ => U/*->ext::Functor#map().[U]*/): F/*->ext::Functor#[F]*/[U/*->ext::Functor#map().[U]*/] + +opaque type Deck/*<-ext::Extension$package.Deck#*/ = Long/*->scala::Long#*/ +object Deck/*<-ext::Extension$package.Deck.*/: + extension (data/*<-ext::Extension$package.Deck.fooSize().(data)*/: Deck/*->ext::Extension$package.Deck#*/) + def fooSize/*<-ext::Extension$package.Deck.fooSize().*/: Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ + +object DeckUsage/*<-ext::DeckUsage.*/: + val deck/*<-ext::DeckUsage.deck.*/: Deck/*->ext::Extension$package.Deck#*/ = ???/*->scala::Predef.`???`().*/ + deck/*->ext::DeckUsage.deck.*/.fooSize/*->ext::Extension$package.Deck.fooSize().*/ diff --git a/tests/semanticdb/expect/Extension.scala b/tests/semanticdb/expect/Extension.scala index c204b1ff7fcc..76a012e4b758 100644 --- a/tests/semanticdb/expect/Extension.scala +++ b/tests/semanticdb/expect/Extension.scala @@ -16,3 +16,12 @@ extension (s: String) trait Functor[F[_]]: extension [T](t: F[T]) def map[U](f: T => U): F[U] + +opaque type Deck = Long +object Deck: + extension (data: Deck) + def fooSize: Int = ??? + +object DeckUsage: + val deck: Deck = ??? + deck.fooSize diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 00727163fda4..d8aa1e39abef 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -1450,12 +1450,18 @@ Schema => SemanticDB v4 Uri => Extension.scala Text => empty Language => Scala -Symbols => 26 entries -Occurrences => 52 entries +Symbols => 32 entries +Occurrences => 66 entries Synthetics => 1 entries Symbols: -ext/Extension$package. => final package object ext extends Object { self: ext.type => +6 decls } +ext/DeckUsage. => final object DeckUsage extends Object { self: DeckUsage.type => +2 decls } +ext/DeckUsage.deck. => val method deck Deck +ext/Extension$package. => final package object ext extends Object { self: ext.type { opaque type Deck } => +9 decls } +ext/Extension$package.Deck# => opaque type Deck +ext/Extension$package.Deck. => final object Deck extends Object { self: Deck.type => +2 decls } +ext/Extension$package.Deck.fooSize(). => method fooSize (param data: Deck): Int +ext/Extension$package.Deck.fooSize().(data) => param data: Deck ext/Extension$package.`#*#`(). => method #*# (param s: String)(param i: Int): Tuple2[String, Int] ext/Extension$package.`#*#`().(i) => param i: Int ext/Extension$package.`#*#`().(s) => param s: String @@ -1535,6 +1541,20 @@ Occurrences: [17:44..17:45): U -> ext/Functor#map().[U] [17:48..17:49): F -> ext/Functor#[F] [17:50..17:51): U -> ext/Functor#map().[U] +[19:12..19:16): Deck <- ext/Extension$package.Deck# +[19:19..19:23): Long -> scala/Long# +[20:7..20:11): Deck <- ext/Extension$package.Deck. +[21:13..21:17): data <- ext/Extension$package.Deck.fooSize().(data) +[21:19..21:23): Deck -> ext/Extension$package.Deck# +[22:8..22:15): fooSize <- ext/Extension$package.Deck.fooSize(). +[22:17..22:20): Int -> scala/Int# +[22:23..22:26): ??? -> scala/Predef.`???`(). 
+[24:7..24:16): DeckUsage <- ext/DeckUsage. +[25:6..25:10): deck <- ext/DeckUsage.deck. +[25:12..25:16): Deck -> ext/Extension$package.Deck# +[25:19..25:22): ??? -> scala/Predef.`???`(). +[26:2..26:6): deck -> ext/DeckUsage.deck. +[26:7..26:14): fooSize -> ext/Extension$package.Deck.fooSize(). Synthetics: [14:46..14:61):summon[Read[T]] => *(x$2) From e2b88905335d1f43449d2b3c35f66fe3321b5d7c Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 20 Sep 2023 09:51:09 +0200 Subject: [PATCH 40/90] Stabilize SIP-53 SIP-53: https://docs.scala-lang.org/sips/quote-pattern-type-variable-syntax.html --- .../src/dotty/tools/dotc/typer/QuotesAndSplices.scala | 10 ---------- tests/neg/sip-53-exprimental-a.scala | 11 ----------- tests/neg/sip-53-exprimental-b.scala | 10 ---------- 3 files changed, 31 deletions(-) delete mode 100644 tests/neg/sip-53-exprimental-a.scala delete mode 100644 tests/neg/sip-53-exprimental-b.scala diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index 28afccd1ca43..bda2c25c26b8 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -172,10 +172,6 @@ trait QuotesAndSplices { getQuotedPatternTypeVariable(tree.name.asTypeName) match case Some(typeSym) => - checkExperimentalFeature( - "support for multiple references to the same type (without backticks) in quoted type patterns (SIP-53)", - tree.srcPos, - "\n\nSIP-53: https://docs.scala-lang.org/sips/quote-pattern-type-variable-syntax.html") warnOnInferredBounds(typeSym) ref(typeSym) case None => @@ -223,12 +219,6 @@ trait QuotesAndSplices { case LambdaTypeTree(_, body: TypeBoundsTree) => // ok case _ => report.error("Quote type variable definition cannot be an alias", tdef.srcPos) - if quoted.isType && untpdTypeVariables.nonEmpty then - checkExperimentalFeature( - "explicit type variable declarations quoted type patterns (SIP-53)", - untpdTypeVariables.head.srcPos, - "\n\nSIP-53: https://docs.scala-lang.org/sips/quote-pattern-type-variable-syntax.html") - if ctx.mode.is(Mode.InPatternAlternative) then for tpVar <- untpdTypeVariables do report.error(IllegalVariableInPatternAlternative(tpVar.name), tpVar.srcPos) diff --git a/tests/neg/sip-53-exprimental-a.scala b/tests/neg/sip-53-exprimental-a.scala deleted file mode 100644 index 1b2f5a24d8a9..000000000000 --- a/tests/neg/sip-53-exprimental-a.scala +++ /dev/null @@ -1,11 +0,0 @@ -//> using options -Yno-experimental - -import scala.quoted.* - -def foo(using Quotes): Unit = - (??? : Type[?]) match - case '[ (t, t, t) ] => // error // error - '{ ??? : Any } match - case '{ type u; $x: u } => // error - case '{ type u; ($ls: List[u]).map($f: u => Int) } => // error // error - diff --git a/tests/neg/sip-53-exprimental-b.scala b/tests/neg/sip-53-exprimental-b.scala deleted file mode 100644 index 0fb391c9cb5f..000000000000 --- a/tests/neg/sip-53-exprimental-b.scala +++ /dev/null @@ -1,10 +0,0 @@ -//> using options -Yno-experimental - -import scala.quoted.* - -def empty[K <: AnyKind : Type](using Quotes): Type[?] 
= - Type.of[K] match - case '[type t; `t`] => Type.of[t] // error - case '[type f[X]; `f`] => Type.of[f] // error - case '[type f[X <: Int, Y]; `f`] => Type.of[f] // error - case '[type k <: AnyKind; `k` ] => Type.of[k] // error From 240e95ae148189c46ce990d0d71db968f7cffe63 Mon Sep 17 00:00:00 2001 From: Lorenzo Gabriele Date: Wed, 20 Sep 2023 12:01:46 +0200 Subject: [PATCH 41/90] Add back `ScalaValueClasses()` initialization The call to `apply` perform side effects that need to be executed --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 73e731e89676..646496dcb4df 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -2133,6 +2133,7 @@ class Definitions { // force initialization of every symbol that is synthesized or hijacked by the compiler syntheticCoreClasses syntheticCoreMethods + ScalaValueClasses() JavaEnumClass // end force initialization isInitialized = true From ddc4eb85817979c4691edcb8cbaff8a55c965945 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 20 Sep 2023 12:28:20 +0200 Subject: [PATCH 42/90] Fix bug when restoring denotations in Recheck. Need to copy the denotation, since denotations come with next pointers which would get scrambled otherwise. The bug was observed when compiling stdlib under new capture checking implementation. # Conflicts: # compiler/src/dotty/tools/dotc/transform/Recheck.scala --- compiler/src/dotty/tools/dotc/transform/Recheck.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index ae5f65f15123..306ca2b0eb9c 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -153,7 +153,8 @@ abstract class Recheck extends Phase, SymTransformer: /** Change any `ResetPrivate` flags back to `Private` */ def transformSym(symd: SymDenotation)(using Context): SymDenotation = val sym = symd.symbol - if sym.isUpdatedAfter(preRecheckPhase) then atPhase(preRecheckPhase)(sym.denot) + if sym.isUpdatedAfter(preRecheckPhase) + then atPhase(preRecheckPhase)(sym.denot.copySymDenotation()) else symd def run(using Context): Unit = From f67d50b8fb08459be306e114819bb4eafc8d3db1 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 7 Sep 2023 10:49:04 +0200 Subject: [PATCH 43/90] Scala 2.13.12 --- community-build/community-projects/stdLib213 | 2 +- project/Build.scala | 6 +++--- project/MiMaFilters.scala | 3 +++ 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/community-build/community-projects/stdLib213 b/community-build/community-projects/stdLib213 index 6243e902928c..88383e58f82c 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit 6243e902928c344fb0e82e21120bb257f08a2af2 +Subproject commit 88383e58f82cd728afc9316081c2350489c39943 diff --git a/project/Build.scala b/project/Build.scala index ee1ed242e1b2..0766ae707d7f 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -123,8 +123,8 @@ object Build { * scala-library. 
*/ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.10" - case Bootstrapped => "2.13.10" + case NonBootstrapped => "2.13.12" + case Bootstrapped => "2.13.12" } /** Version of the scala-library for which we will generate TASTy. @@ -134,7 +134,7 @@ object Build { * We can use nightly versions to tests the future compatibility in development. * Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang */ - val stdlibBootstrappedVersion = "2.13.12-bin-364ee69" + val stdlibBootstrappedVersion = "2.13.12" val dottyOrganization = "org.scala-lang" val dottyGithubUrl = "https://github.com/lampepfl/dotty" diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index fc7b1e29bafc..8937b30084ef 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -90,6 +90,9 @@ object MiMaFilters { "scala.collection.IterableOnceOps#Maximized.this", // New in 2.13.11: private inner class "scala.util.Properties.", "scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5", + // New in 2.13.12 -- can be removed once scala/scala#10549 lands in 2.13.13 + // and we take the upgrade here + "scala.collection.immutable.MapNodeRemoveAllSetNodeIterator.next", ).map(ProblemFilters.exclude[DirectMissingMethodProblem]) } ) From b66c2cd5db72335f5f01af1a7718bc68a8c98d46 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 20 Sep 2023 16:37:06 +0200 Subject: [PATCH 44/90] update a test case (languageFeature.higherKinds was deprecated) --- .../test/dotty/tools/pc/tests/completion/CompletionSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 6ff955afc94a..313013c34de1 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -852,8 +852,8 @@ class CompletionSuite extends BaseCompletionSuite: """|dynamics scala.languageFeature |existentials scala.languageFeature |experimental scala.languageFeature - |higherKinds scala.languageFeature |implicitConversions scala.languageFeature + |postfixOps scala.languageFeature |""".stripMargin, topLines = Some(5) ) From 9e80e941f4f5314466837cef5fa2e190636b713b Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Wed, 20 Sep 2023 19:40:36 +0200 Subject: [PATCH 45/90] Address review: Better names --- .../tools/dotc/transform/init/Objects.scala | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 0c116cc29d5c..836218f302bc 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -470,7 +470,7 @@ object Objects: /** Store the heap as a mutable field to avoid threading it through the program. 
*/ class MutableData(private[Heap] var heap: Data): - private[Heap] def writeUnion(addr: Addr, value: Value): Unit = + private[Heap] def writeJoin(addr: Addr, value: Value): Unit = heap.get(addr) match case None => heap = heap.updated(addr, value) @@ -479,7 +479,7 @@ object Objects: val value2 = value.join(current) if value2 != current then heap = heap.updated(addr, value2) - + end MutableData def empty(): MutableData = new MutableData(Map.empty) @@ -489,8 +489,8 @@ object Objects: def read(addr: Addr)(using mutable: MutableData): Value = mutable.heap(addr) - def writeUnion(addr: Addr, value: Value)(using mutable: MutableData): Unit = - mutable.writeUnion(addr, value) + def writeJoin(addr: Addr, value: Value)(using mutable: MutableData): Unit = + mutable.writeJoin(addr, value) def localVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol): Addr = LocalVarAddr(regions, sym, owner) @@ -639,7 +639,7 @@ object Objects: if arr.addr.owner != State.currentObject then errorMutateOtherStaticObject(State.currentObject, arr.addr.owner) else - Heap.writeUnion(arr.addr, args.tail.head.value) + Heap.writeJoin(arr.addr, args.tail.head.value) Bottom else // Array.length is OK @@ -660,7 +660,7 @@ object Objects: if target.isOneOf(Flags.Method) then if target.owner == defn.ArrayModuleClass && target.name == nme.apply then val arr = OfArray(State.currentObject, summon[Regions.Data]) - Heap.writeUnion(arr.addr, args.map(_.value).join) + Heap.writeJoin(arr.addr, args.map(_.value).join) arr else if target.hasSource then val cls = target.owner.enclosingClass.asClass @@ -846,7 +846,7 @@ object Objects: if addr.owner != State.currentObject then errorMutateOtherStaticObject(State.currentObject, addr.owner) else - Heap.writeUnion(addr, rhs) + Heap.writeJoin(addr, rhs) else report.warning("Mutating a field before its initialization: " + field.show + ". Calling trace:\n" + Trace.show, Trace.position) end match @@ -871,7 +871,7 @@ object Objects: case outer: (Ref | Cold.type | Bottom.type) => if klass == defn.ArrayClass then val arr = OfArray(State.currentObject, summon[Regions.Data]) - Heap.writeUnion(arr.addr, Bottom) + Heap.writeJoin(arr.addr, Bottom) arr else // Widen the outer to finitize the domain. Arguments already widened in `evalArgs`. @@ -907,7 +907,7 @@ object Objects: if sym.is(Flags.Mutable) then val addr = Heap.localVarAddr(summon[Regions.Data], sym, State.currentObject) Env.setLocalVar(sym, addr) - Heap.writeUnion(addr, value) + Heap.writeJoin(addr, value) else Env.setLocalVal(sym, value) } @@ -978,7 +978,7 @@ object Objects: if addr.owner != State.currentObject then errorMutateOtherStaticObject(State.currentObject, addr.owner) else - Heap.writeUnion(addr, value) + Heap.writeJoin(addr, value) case _ => report.warning("[Internal error] Variable not found " + sym.show + "\nenv = " + env.show + ". 
Calling trace:\n" + Trace.show, Trace.position) @@ -1541,7 +1541,7 @@ object Objects: if acc.is(Flags.Mutable) then val addr = Heap.fieldVarAddr(summon[Regions.Data], acc, State.currentObject) thisV.initVar(acc, addr) - Heap.writeUnion(addr, value) + Heap.writeJoin(addr, value) else thisV.initVal(acc, value) printer.println(acc.show + " initialized with " + value) @@ -1636,7 +1636,7 @@ object Objects: if sym.is(Flags.Mutable) then val addr = Heap.fieldVarAddr(summon[Regions.Data], sym, State.currentObject) thisV.initVar(sym, addr) - Heap.writeUnion(addr, res) + Heap.writeJoin(addr, res) else thisV.initVal(sym, res) From 5df5855ac5099363583e43751e16cd1808880c78 Mon Sep 17 00:00:00 2001 From: Jakub Ciesluk <323892@uwr.edu.pl> Date: Thu, 21 Sep 2023 10:50:44 +0200 Subject: [PATCH 46/90] bugfix: Incorrect semanticdb span on Selectable Semanticdb range on selectDynamic in `foo.bar` previously contained `.bar` instead of `bar` --- compiler/src/dotty/tools/dotc/ast/Trees.scala | 2 +- .../dotc/semanticdb/ExtractSemanticDB.scala | 11 +-- tests/semanticdb/expect/Givens.expect.scala | 6 +- .../expect/ImplicitConversion.expect.scala | 6 +- .../expect/StructuralTypes.expect.scala | 22 +++++ tests/semanticdb/expect/StructuralTypes.scala | 22 +++++ tests/semanticdb/metac.expect | 87 ++++++++++++++++--- 7 files changed, 132 insertions(+), 24 deletions(-) create mode 100644 tests/semanticdb/expect/StructuralTypes.expect.scala create mode 100644 tests/semanticdb/expect/StructuralTypes.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index c13eede4d590..d3cb5865f4da 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -455,7 +455,7 @@ object Trees { val point = span.point if name.toTermName == nme.ERROR then Span(point) - else if qualifier.span.start > span.start then // right associative + else if qualifier.span.start > span.point then // right associative val realName = name.stripModuleClassSuffix.lastPart Span(span.start, span.start + realName.length, point) else diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 5cff105cbaa4..e364bf15ca13 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -392,15 +392,12 @@ class ExtractSemanticDB extends Phase: }).toMap end findGetters - private def selectSpan(tree: Select) = + private def selectSpan(tree: Select)(using Context) = val end = tree.span.end val limit = tree.qualifier.span.end - val start = - if limit < end then - val len = tree.name.toString.length - if tree.source.content()(end - 1) == '`' then end - len - 2 else end - len - else limit - Span(start max limit, end) + if limit < end then + tree.nameSpan + else Span(limit, end) extension (span: Span) private def hasLength: Boolean = span.exists && !span.isZeroExtent diff --git a/tests/semanticdb/expect/Givens.expect.scala b/tests/semanticdb/expect/Givens.expect.scala index 8ee82a419435..8cd1ee287096 100644 --- a/tests/semanticdb/expect/Givens.expect.scala +++ b/tests/semanticdb/expect/Givens.expect.scala @@ -4,11 +4,11 @@ package b object Givens/*<-a::b::Givens.*/: extension [A/*<-a::b::Givens.sayHello().[A]*/](any/*<-a::b::Givens.sayHello().(any)*/: A/*->a::b::Givens.sayHello().[A]*/) - def sayHello/*<-a::b::Givens.sayHello().*/ = s"Hello, I am 
$any/*->a::b::Givens.sayHello().(any)*/"/*->scala::StringContext#s().*/ + def sayHello/*<-a::b::Givens.sayHello().*/ = s/*->scala::StringContext#s().*/"Hello, I am $any/*->a::b::Givens.sayHello().(any)*/" extension [B/*<-a::b::Givens.sayGoodbye().[B]*//*<-a::b::Givens.saySoLong().[B]*/](any/*<-a::b::Givens.sayGoodbye().(any)*//*<-a::b::Givens.saySoLong().(any)*/: B/*->a::b::Givens.sayGoodbye().[B]*//*->a::b::Givens.saySoLong().[B]*/) - def sayGoodbye/*<-a::b::Givens.sayGoodbye().*/ = s"Goodbye, from $any/*->a::b::Givens.sayGoodbye().(any)*/"/*->scala::StringContext#s().*/ - def saySoLong/*<-a::b::Givens.saySoLong().*/ = s"So Long, from $any/*->a::b::Givens.saySoLong().(any)*/"/*->scala::StringContext#s().*/ + def sayGoodbye/*<-a::b::Givens.sayGoodbye().*/ = s/*->scala::StringContext#s().*/"Goodbye, from $any/*->a::b::Givens.sayGoodbye().(any)*/" + def saySoLong/*<-a::b::Givens.saySoLong().*/ = s/*->scala::StringContext#s().*/"So Long, from $any/*->a::b::Givens.saySoLong().(any)*/" val hello1/*<-a::b::Givens.hello1.*/ = 1.sayHello/*->a::b::Givens.sayHello().*/ val goodbye1/*<-a::b::Givens.goodbye1.*/ = 1.sayGoodbye/*->a::b::Givens.sayGoodbye().*/ diff --git a/tests/semanticdb/expect/ImplicitConversion.expect.scala b/tests/semanticdb/expect/ImplicitConversion.expect.scala index 635ffb4d94c9..7c1708ee7617 100644 --- a/tests/semanticdb/expect/ImplicitConversion.expect.scala +++ b/tests/semanticdb/expect/ImplicitConversion.expect.scala @@ -21,10 +21,10 @@ class ImplicitConversion/*<-example::ImplicitConversion#*/ { val x/*<-example::ImplicitConversion#x.*/: Int/*->scala::Int#*/ = message/*->example::ImplicitConversion#message.*/ // interpolators - s"Hello $message/*->example::ImplicitConversion#message.*/ $number/*->example::ImplicitConversion#number.*/"/*->scala::StringContext#s().*/ - s"""Hello + s/*->scala::StringContext#s().*/"Hello $message/*->example::ImplicitConversion#message.*/ $number/*->example::ImplicitConversion#number.*/" + s/*->scala::StringContext#s().*/"""Hello |$message/*->example::ImplicitConversion#message.*/ - |$number/*->example::ImplicitConversion#number.*/"""/*->scala::StringContext#s().*/.stripMargin/*->scala::collection::StringOps#stripMargin(+1).*/ + |$number/*->example::ImplicitConversion#number.*/""".stripMargin/*->scala::collection::StringOps#stripMargin(+1).*/ val a/*<-example::ImplicitConversion#a.*/: Int/*->scala::Int#*/ = char/*->example::ImplicitConversion#char.*/ val b/*<-example::ImplicitConversion#b.*/: Long/*->scala::Long#*/ = char/*->example::ImplicitConversion#char.*/ diff --git a/tests/semanticdb/expect/StructuralTypes.expect.scala b/tests/semanticdb/expect/StructuralTypes.expect.scala new file mode 100644 index 000000000000..96c7181d6f10 --- /dev/null +++ b/tests/semanticdb/expect/StructuralTypes.expect.scala @@ -0,0 +1,22 @@ +package example + +import reflect.Selectable/*->scala::reflect::Selectable.*/.reflectiveSelectable/*->scala::reflect::Selectable.reflectiveSelectable().*/ + +object StructuralTypes/*<-example::StructuralTypes.*/: + type User/*<-example::StructuralTypes.User#*/ = { + def name/*<-local0*/: String/*->scala::Predef.String#*/ + def age/*<-local1*/: Int/*->scala::Int#*/ + def foo/*<-local3*/(x/*<-local2*/: Int/*->scala::Int#*/): Int/*->scala::Int#*/ + } + + val user/*<-example::StructuralTypes.user.*/ = null.asInstanceOf/*->scala::Any#asInstanceOf().*/[User/*->example::StructuralTypes.User#*/] + user/*->example::StructuralTypes.user.*/.name/*->scala::reflect::Selectable#selectDynamic().*/ + 
user/*->example::StructuralTypes.user.*/.age/*->scala::reflect::Selectable#selectDynamic().*/ + val fooBar/*<-example::StructuralTypes.fooBar.*/ = user/*->example::StructuralTypes.user.*/ foo/*->scala::reflect::Selectable#applyDynamic().*/ 123 + + val V/*<-example::StructuralTypes.V.*/: Object/*->java::lang::Object#*/ { + def scalameta/*<-local4*/: String/*->scala::Predef.String#*/ + } = /*<-local6*/new: + def scalameta/*<-local5*/ = "4.0" + V/*->example::StructuralTypes.V.*/.scalameta/*->scala::reflect::Selectable#selectDynamic().*/ +end StructuralTypes/*->example::StructuralTypes.*/ \ No newline at end of file diff --git a/tests/semanticdb/expect/StructuralTypes.scala b/tests/semanticdb/expect/StructuralTypes.scala new file mode 100644 index 000000000000..5d10dbe67224 --- /dev/null +++ b/tests/semanticdb/expect/StructuralTypes.scala @@ -0,0 +1,22 @@ +package example + +import reflect.Selectable.reflectiveSelectable + +object StructuralTypes: + type User = { + def name: String + def age: Int + def foo(x: Int): Int + } + + val user = null.asInstanceOf[User] + user.name + user.age + val fooBar = user foo 123 + + val V: Object { + def scalameta: String + } = new: + def scalameta = "4.0" + V.scalameta +end StructuralTypes \ No newline at end of file diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 00727163fda4..8f875a050fd9 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -193,21 +193,21 @@ Occurrences: [27:6..27:9): s1x <- advanced/Test.s1x. [27:12..27:13): s -> advanced/Test.s. [27:14..27:16): s1 -> advanced/Structural#s1(). -[27:16..27:18): .x -> scala/reflect/Selectable#selectDynamic(). +[27:17..27:18): x -> scala/reflect/Selectable#selectDynamic(). [28:6..28:8): s2 <- advanced/Test.s2. [28:11..28:12): s -> advanced/Test.s. [28:13..28:15): s2 -> advanced/Structural#s2(). [29:6..29:9): s2x <- advanced/Test.s2x. [29:12..29:13): s -> advanced/Test.s. [29:14..29:16): s2 -> advanced/Structural#s2(). -[29:16..29:18): .x -> scala/reflect/Selectable#selectDynamic(). +[29:17..29:18): x -> scala/reflect/Selectable#selectDynamic(). [30:6..30:8): s3 <- advanced/Test.s3. [30:11..30:12): s -> advanced/Test.s. [30:13..30:15): s3 -> advanced/Structural#s3(). [31:6..31:9): s3x <- advanced/Test.s3x. [31:12..31:13): s -> advanced/Test.s. [31:14..31:16): s3 -> advanced/Structural#s3(). -[31:16..31:18): .m -> scala/reflect/Selectable#applyDynamic(). +[31:17..31:18): m -> scala/reflect/Selectable#applyDynamic(). [31:19..31:22): ??? -> scala/Predef.`???`(). [33:6..33:7): e <- advanced/Test.e. [33:14..33:23): Wildcards -> advanced/Wildcards# @@ -242,7 +242,7 @@ Occurrences: [47:11..47:14): foo -> advanced/Test.foo. [47:15..47:16): A -> local17 [47:19..47:22): foo -> advanced/Test.foo. -[47:22..47:24): .a -> scala/reflect/Selectable#selectDynamic(). +[47:23..47:24): a -> scala/reflect/Selectable#selectDynamic(). [52:6..52:13): HKClass <- advanced/HKClass# [52:13..52:13): <- advanced/HKClass#``(). [52:14..52:15): F <- advanced/HKClass#[F] @@ -1684,8 +1684,8 @@ Occurrences: [5:16..5:19): any <- a/b/Givens.sayHello().(any) [5:21..5:22): A -> a/b/Givens.sayHello().[A] [6:8..6:16): sayHello <- a/b/Givens.sayHello(). +[6:19..6:20): s -> scala/StringContext#s(). [6:34..6:37): any -> a/b/Givens.sayHello().(any) -[6:37..6:38): " -> scala/StringContext#s(). 
[8:13..8:14): B <- a/b/Givens.sayGoodbye().[B] [8:13..8:14): B <- a/b/Givens.saySoLong().[B] [8:16..8:19): any <- a/b/Givens.sayGoodbye().(any) @@ -1693,11 +1693,11 @@ Occurrences: [8:21..8:22): B -> a/b/Givens.sayGoodbye().[B] [8:21..8:22): B -> a/b/Givens.saySoLong().[B] [9:8..9:18): sayGoodbye <- a/b/Givens.sayGoodbye(). +[9:21..9:22): s -> scala/StringContext#s(). [9:38..9:41): any -> a/b/Givens.sayGoodbye().(any) -[9:41..9:42): " -> scala/StringContext#s(). [10:8..10:17): saySoLong <- a/b/Givens.saySoLong(). +[10:20..10:21): s -> scala/StringContext#s(). [10:37..10:40): any -> a/b/Givens.saySoLong().(any) -[10:40..10:41): " -> scala/StringContext#s(). [12:6..12:12): hello1 <- a/b/Givens.hello1. [12:17..12:25): sayHello -> a/b/Givens.sayHello(). [13:6..13:14): goodbye1 <- a/b/Givens.goodbye1. @@ -1816,12 +1816,12 @@ Occurrences: [20:6..20:7): x <- example/ImplicitConversion#x. [20:9..20:12): Int -> scala/Int# [20:15..20:22): message -> example/ImplicitConversion#message. +[23:2..23:3): s -> scala/StringContext#s(). [23:11..23:18): message -> example/ImplicitConversion#message. [23:20..23:26): number -> example/ImplicitConversion#number. -[23:26..23:27): " -> scala/StringContext#s(). +[24:2..24:3): s -> scala/StringContext#s(). [25:7..25:14): message -> example/ImplicitConversion#message. [26:7..26:13): number -> example/ImplicitConversion#number. -[26:15..26:16): " -> scala/StringContext#s(). [26:17..26:28): stripMargin -> scala/collection/StringOps#stripMargin(+1). [28:6..28:7): a <- example/ImplicitConversion#a. [28:9..28:12): Int -> scala/Int# @@ -3326,6 +3326,73 @@ Occurrences: [13:17..13:17): <- selfs/C6#``(). [13:27..13:28): B -> selfs/B# +expect/StructuralTypes.scala +---------------------------- + +Summary: +Schema => SemanticDB v4 +Uri => StructuralTypes.scala +Text => empty +Language => Scala +Symbols => 12 entries +Occurrences => 33 entries +Synthetics => 4 entries + +Symbols: +example/StructuralTypes. => final object StructuralTypes extends Object { self: StructuralTypes.type => +5 decls } +example/StructuralTypes.User# => type User = Object { abstract method foo (param x: Int): Int; abstract method age => Int; abstract method name => String } +example/StructuralTypes.V. => val method V Object { abstract method scalameta => String } +example/StructuralTypes.fooBar. => val method fooBar Int +example/StructuralTypes.user. => val method user User +local0 => abstract method name => String +local1 => abstract method age => Int +local2 => param x: Int +local3 => abstract method foo (param x: Int): Int +local4 => abstract method scalameta => String +local5 => method scalameta => String +local6 => final class $anon extends Object { self: $anon => +2 decls } + +Occurrences: +[0:8..0:15): example <- example/ +[2:7..2:14): reflect -> scala/reflect/ +[2:15..2:25): Selectable -> scala/reflect/Selectable. +[2:26..2:46): reflectiveSelectable -> scala/reflect/Selectable.reflectiveSelectable(). +[4:7..4:22): StructuralTypes <- example/StructuralTypes. +[5:7..5:11): User <- example/StructuralTypes.User# +[6:8..6:12): name <- local0 +[6:14..6:20): String -> scala/Predef.String# +[7:8..7:11): age <- local1 +[7:13..7:16): Int -> scala/Int# +[8:8..8:11): foo <- local3 +[8:12..8:13): x <- local2 +[8:15..8:18): Int -> scala/Int# +[8:21..8:24): Int -> scala/Int# +[11:6..11:10): user <- example/StructuralTypes.user. +[11:18..11:30): asInstanceOf -> scala/Any#asInstanceOf(). +[11:31..11:35): User -> example/StructuralTypes.User# +[12:2..12:6): user -> example/StructuralTypes.user. 
+[12:7..12:11): name -> scala/reflect/Selectable#selectDynamic(). +[13:2..13:6): user -> example/StructuralTypes.user. +[13:7..13:10): age -> scala/reflect/Selectable#selectDynamic(). +[14:6..14:12): fooBar <- example/StructuralTypes.fooBar. +[14:15..14:19): user -> example/StructuralTypes.user. +[14:20..14:23): foo -> scala/reflect/Selectable#applyDynamic(). +[16:6..16:7): V <- example/StructuralTypes.V. +[16:9..16:15): Object -> java/lang/Object# +[17:8..17:17): scalameta <- local4 +[17:19..17:25): String -> scala/Predef.String# +[18:6..18:6): <- local6 +[19:8..19:17): scalameta <- local5 +[20:2..20:3): V -> example/StructuralTypes.V. +[20:4..20:13): scalameta -> scala/reflect/Selectable#selectDynamic(). +[21:4..21:19): StructuralTypes -> example/StructuralTypes. + +Synthetics: +[12:2..12:6):user => reflectiveSelectable(*) +[13:2..13:6):user => reflectiveSelectable(*) +[14:15..14:19):user => reflectiveSelectable(*) +[20:2..20:3):V => reflectiveSelectable(*) + expect/Synthetic.scala ---------------------- @@ -3924,7 +3991,7 @@ Occurrences: [48:18..48:19): v -> example/ValUsages.v. [48:20..48:23): yim -> example/Vals#yim(). [49:2..49:3): v -> example/ValUsages.v. -[49:3..49:18): .explicitSetter -> example/Vals#`explicitSetter_=`(). +[49:4..49:18): explicitSetter -> example/Vals#`explicitSetter_=`(). expect/Vararg.scala ------------------- From a7d1e3332dbee3588fa1da1f2538d0be8d82b7fe Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 21 Sep 2023 16:48:13 +0200 Subject: [PATCH 47/90] remove implementation details from enum spec --- docs/_spec/05-classes-and-objects.md | 157 +++++++++++---------------- 1 file changed, 62 insertions(+), 95 deletions(-) diff --git a/docs/_spec/05-classes-and-objects.md b/docs/_spec/05-classes-and-objects.md index d777ebfe73e8..cc8d97704a50 100644 --- a/docs/_spec/05-classes-and-objects.md +++ b/docs/_spec/05-classes-and-objects.md @@ -904,7 +904,7 @@ Very much like a concrete class definition, an object definition may still conta ```ebnf TmplDef ::= ‘enum’ EnumDef -EnumDef ::= id ClassConstr [‘extends’ [ConstrApps]] EnumBody +EnumDef ::= id ClassConstr [‘extends’ ConstrApps] EnumBody EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ EnumStat ::= TemplateStat | {Annotation [nl]} {Modifier} EnumCase @@ -925,18 +925,15 @@ First, some terminology and notational conventions: - We use `<...>` for syntactic constructs that in some circumstances might be empty. For instance, `` represents one or more parameter lists `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` or nothing at all. - Enum classes fall into two categories: - - _parameterized_ enum classes have at least one of the following: - - a type parameter section, denoted as `[´\mathit{tps}\,´]`; - - one or more (possibly empty) parameter sections, denoted as `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`. - - _unparameterized_ enum classes have no type parameter sections and no parameter sections. + - _parameterized_ enum classes have at least one or more (possibly empty) term parameter clauses, denoted as `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`. + - _unparameterized_ enum classes have no term parameter clauses, but may optionally have a type parameter clause, denoted as `[´\mathit{tps}\,´]`. - Enum cases fall into three categories: - - - _Class cases_ are those cases that are parameterized, either with a type parameter section `[´\mathit{tps}\,´]` or with one or more (possibly empty) parameter sections `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`. 
- - _Simple cases_ are cases of an unparameterized enum that have neither parameters nor an extends clause or body. + - _Class enum cases_ are those cases that possibly have a type parameter clause `[´\mathit{tps}\,´]`, and necessarily have one or more (possibly empty) parameter clauses `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`. + - _Simple enum cases_ are those cases that have no parameter clauses and no extends clause. That is, they consist of a name only. - - _Value cases_ are all cases that do not have a parameter section but that do have a (possibly generated) `extends` clause and/or a body. + - _Value enum cases_ are those cases that have no parameter clauses but that do have a (possibly generated) `extends` clause. -- Simple cases and value cases are collectively called _singleton cases_. +- Simple enum cases and value enum cases are collectively called _singleton enum cases_. ###### Example @@ -970,13 +967,11 @@ enum Option[+T]: ### Lowering of Enum Definitions ###### Summary -An enum class is represented as a `sealed` class that extends the `scala.reflect.Enum` trait. +An enum class is represented as a `sealed abstract` class that extends the `scala.reflect.Enum` trait. Enum cases are represented as follows: -- a class case is mapped to a `case class`, -- a singleton case is mapped to a `val` definition, where - - Simple cases all share a single implementation class. - - Value cases will each be implemented by a unique class. +- a class enum case is mapped to a `case class` member of enum class' companion object, +- a singleton enum case is mapped to a `val` member of the enum class' companion object, implemented by a local class definition which may be shared between cases. ###### Precise rules The `scala.reflect.Enum` trait defines a single public method, `ordinal`: @@ -989,106 +984,119 @@ transparent trait Enum extends Any, Product, Serializable: ``` There are nine desugaring rules. Rule (1) desugars enum definitions. -Rules (2) and (3) desugar simple cases. -Rules (4) to (6) define `extends` clauses for cases that are missing them. -Rules (7) to (9) define how such cases with `extends` clauses map into `case class`es or `val`s. +Rule (2) desugars cases of comma-separated names to simple enum cases. +Rules (3) to (7) desugar inferrable details of enum cases. +Rules (8) and (9) define how fully-desugared enum cases map into `case class`es or `val`s. +Explicit `extends` clauses must be provided in the following cases, where rules (2) to (6) do not apply: +- any enum case of a parameterized enum, +- any singleton enum case of an unparameterized enum with non-variant type parameters, +- any class enum case of an enum with type parameters, where the case also has type parameters. 1. An `enum` definition ```scala - enum ´E´ ... { } + enum ´E´ { } ``` expands to a `sealed abstract` class that extends the `scala.reflect.Enum` trait and an associated companion object that contains the defined cases, expanded according to rules (2 - 8). The enum class starts with a compiler-generated import that imports the names `` of all cases so that they can be used without prefix in the class. ```scala - sealed abstract class ´E´ ... extends with scala.reflect.Enum { - import ´E´.{ } - + sealed abstract class ´E´ + extends with scala.reflect.Enum { + import ´E´.{ } + } object ´E´ { } ``` -2. A singleton case consisting of a comma-separated list of enum names +2. 
A simple enum case consisting of a comma-separated list of names ```scala case ´C_1´, ..., ´C_n´ ``` - expands to + expands to the following simple enum cases ```scala case ´C_1´; ...; case ´C_n´ ``` Any modifiers or annotations on the original case extend to all expanded cases. - This result is then further rewritten by either (3 or 4). +

This result is then further rewritten by either (3 or 4).

-3. A singleton case without an extends clause +3. A simple enum case `´C´` of an unparameterized enum `´E´` without type parameters ```scala case ´C´ ``` - of an unparameterized enum `´E´` expands to the following simple enum case in `´E´`'s companion object: + expands to the following value enum case: ```scala - val ´C´ = $new(n, "C") + case ´C´ extends ´E´ ``` - Here, `$new` is a private method that creates an instance of ´E´ (see below). + This result is then further rewritten with rule (8). -4. A singleton case without an extends clause +4. A simple enum case `´C´` of an unparameterized enum `´E´[´\mathit{tps}´]` with type parameters ```scala case ´C´ ``` - of an enum `´E´` with type parameters + where `´\mathit{tps}´` are of the following form ```scala ´\mathit{v}_1´ ´T_1´ >: ´L_1´ <: ´U_1´ , ... , ´\mathit{v}_n´ ´T_n´ >: ´L_n´ <: ´U_n´ (n > 0) ``` - where each of the variances `´\mathit{v}_i´` is either `'+'` or `'-'`, expands to the following value enum case: + and where each of the variances `´\mathit{v}_i´` is either `'+'` or `'-'`, expands to the following value enum case: ```scala case ´C´ extends ´E´[´B_1´, ..., ´B_n´] ``` where `´B_i´` is `´L_i´` if `´\mathit{v}_i´ = '+'` and `´U_i´` if `´\mathit{v}_i´ = '-'`. - This result is then further rewritten with rule (8). - **NOTE:** It is not permitted for enums with non-variant type parameters to have singleton cases without an extends clause. +

This result is then further rewritten with rule (8).

-5. A class case without an extends clause +5. A class enum case with type parameters, but without an extends clause ```scala - case ´C´ + case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) ``` - of an enum `´E´` that does not take type parameters expands to + of an unparameterized enum `´E´` without type parameters expands to ```scala - case ´C´ extends ´E´ + case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´E´ ``` This result is then further rewritten with rule (9). -6. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case with neither type parameters nor an extends clause +6. A class enum case without type parameters or an extends clause ```scala - case ´C´ + case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) ``` - expands to + of an unparameterized enum `´E´[´\mathit{tps}´]` with type parameters expands to ```scala - case ´C´[´\mathit{tps}´] extends ´E´[´\mathit{tps}´] + case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´E´[´\mathit{tps}´] ``` - This result is then further rewritten with rule (9). - For class cases that have type parameters themselves, an extends clause needs to be given explicitly. - + This result is then further rewritten with rule (7). -7. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case without type parameters but with an extends clause +7. A class enum case without type parameters, but has an extends clause ```scala - case ´C´ extends + case ´C´(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ``` - expands to + of an enum `´E´[´\mathit{tps}´]` with type parameters expands to ```scala - case ´C´[´\mathit{tps}´] extends + case ´C´[´\mathit{tps}´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ``` - provided at least one of the parameters `´\mathit{tps}´` is mentioned in a parameter type in `` or in a type argument in ``. + provided at least one of the parameters `´\mathit{tps}´` is mentioned in a parameter type in `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` or in a type argument in ``. +

+ This result is then further rewritten with rule (9). -8. A value case +8. A singleton enum case ```scala case ´C´ extends ``` expands to the following `val` definition in `´E´`'s companion object: ```scala - val ´C´ = new { ; def ordinal = ´\mathit{n}´ } + val ´C´ = $factory(_$ordinal = ´\mathit{n}´, $name = "C") ``` where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0. + `$factory` is a placeholder that expands its arguments into an expression that produces something equivalent to + a new instance of the following (possibly shared) anonymous class: + ```scala + new { + def ordinal: Int = _$ordinal + override def toString: String = $name + } + ``` The anonymous class also implements the abstract `Product` methods that it inherits from `Enum`. +

**NOTE:** It is an error if a value case refers to a type parameter of `´E´` in a type argument within ``. -9. A class case +9. A class enum case ```scala case ´C´ extends ``` @@ -1099,6 +1107,7 @@ Rules (7) to (9) define how such cases with `extends` clauses map into `case cla } ``` where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0. +

**NOTE:** It is an error if a class case refers to a type parameter of `´E´` in a parameter type in `` or `` or in a type argument of ``, unless that parameter is already a type parameter of the case, i.e. the parameter name is defined in ``. ###### Superclass of an enum case @@ -1131,34 +1140,6 @@ private def $new(_$ordinal: Int, $name: String) = override def toString = $name ``` - -###### Example - -Consider the more complex enumeration `Color`, consisting of value enum cases: -```scala -enum Color(val rgb: Int): - case Red extends Color(0xFF0000) - case Green extends Color(0x00FF00) - case Blue extends Color(0x0000FF) -``` - -The three value cases will expand as follows in the companion of `Color`: - -```scala -val Red = new Color(0xFF0000): - def ordinal: Int = 0 - override def productPrefix: String = "Red" - override def toString: String = "Red" -val Green = new Color(0x00FF00): - def ordinal: Int = 1 - override def productPrefix: String = "Green" - override def toString: String = "Green" -val Blue = new Color(0x0000FF): - def ordinal: Int = 2 - override def productPrefix: String = "Blue" - override def toString: String = "Blue" -``` - ### Widening of enum cases post-construction The compiler-generated `apply` and `copy` methods of an class enum case ```scala @@ -1176,20 +1157,6 @@ An enum `´E´` (possibly generic) that defines one or more singleton cases, and It returns the singleton case value whose identifier is `name`. - A method `values` which returns an `Array[´E'´]` of all singleton case values defined by `E`, in the order of their definitions. -### Factory method for simple enum cases - -If an enum `´E´` contains at least one simple case, its companion object will define in addition: - - - A private method `$new` which defines a new simple case value with given ordinal number and name. - This method can be thought as being defined as follows. - - ```scala - private def $new(_$ordinal: Int, $name: String): ´E´ with runtime.EnumValue - ``` - - `$new` returns a new instance of an anonymous class which implements the abstract `Product` methods that it inherits from `Enum`. - - if `´E´` inherits from `java.lang.Enum` the anonymous class does not override the `ordinal` or `toString` methods, as these are final in `java.lang.Enum`. - Additionally `productPrefix` will delegate to `this.name`. - ### Translation of Java-compatible enums A Java-compatible enum is an enum that extends `java.lang.Enum`. 
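As a rough, self-contained sketch of how the revised rules compose for a small unparameterized enum (the `$factory` helper in the spec text is only a placeholder, the exact shape of the shared local class is an implementation detail, and the `@main` name below is made up for the example):

```scala
// Rule (2) splits the comma-separated case into three simple cases, rule (3)
// gives each an `extends Color` clause, and rule (8) lowers each one to a
// `val` in the companion object backed by an anonymous subclass of Color.
enum Color:
  case Red, Green, Blue

@main def enumLoweringSketch(): Unit =
  // Observable consequences of that lowering:
  assert(Color.Red.ordinal == 0 && Color.Blue.ordinal == 2)               // ordinals follow definition order
  assert(Color.valueOf("Green") eq Color.Green)                           // `valueOf` is added for singleton-only enums
  assert(Color.values.toList == List(Color.Red, Color.Green, Color.Blue)) // as is `values`
  assert(Color.Blue.toString == "Blue")                                   // `toString` is the case name
```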
From f11c717a96fcb5a9859691a41799188bdc6b0f64 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Fri, 22 Sep 2023 09:06:30 +0200 Subject: [PATCH 48/90] Allow stdlib-bootstrapped/run to clone paths rooted in scala/scala --- project/Build.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 0766ae707d7f..bf8afdbb750c 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1087,8 +1087,9 @@ object Build { case Seq(cmd @ ("clone" | "overwrite"), files*) => log.info("Cloning scala-library sources: " + files.mkString(", ")) for (file <- files) { - val referenceStdlibPaths = reference / file - val destination = srcDir / file + val fileRootedAtInLibraryFolder = file.stripPrefix("src/library/") + val referenceStdlibPaths = reference / fileRootedAtInLibraryFolder + val destination = srcDir / fileRootedAtInLibraryFolder if (!referenceStdlibPaths.exists) { log.error("Not found " + referenceStdlibPaths) } else if (destination.exists && cmd == "clone") { From 0dc02d2b5539e4eefd98a35c130d412e757cc3bb Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Fri, 22 Sep 2023 09:07:54 +0200 Subject: [PATCH 49/90] Categorize Scala 2 stdlib TASTy MiMa failures --- project/TastyMiMaFilters.scala | 46 +++++++++++++++++++++++++++------- 1 file changed, 37 insertions(+), 9 deletions(-) diff --git a/project/TastyMiMaFilters.scala b/project/TastyMiMaFilters.scala index c4aeb0325ff1..7468b1891f5c 100644 --- a/project/TastyMiMaFilters.scala +++ b/project/TastyMiMaFilters.scala @@ -14,9 +14,6 @@ object TastyMiMaFilters { // Tested in stdlib-bootstrapped/test/scala/collection/UnrolledBufferTest.scala ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.mutable.UnrolledBuffer.Unrolled.$default$4"), - // Problem? Very complicated signature - ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.generic.IsMap.mapOpsIsMap"), - // Probably OK: Overriding java method (`public abstract Object underlying();` with `def underlying: Object`) // Calls to the underlying seem to link correctly. // Tested in stdlib-bootstrapped/test/Main.scala @@ -24,16 +21,44 @@ object TastyMiMaFilters { ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.math.ScalaNumericConversions.underlying"), // Problem: super accessors - ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.*.superscala$*$*$$*"), // The member scala.collection.mutable.Cloneable.superscala$collection$mutable$Cloneable$$clone was concrete or did not exist but is abstract in current version + // In Scala 3 these accessors are added in the `postyper` phase. + // In Scala 2 these accessors are added in the `superaccessors` phase after typer. + // Are these accessors in the Scala 2 pickles? If so, it implies that TASTy Query/MiMa is ignoring them in Scala 2 but not Scala 3. 
+ // Otherwise, if these are not in the Scala 2 pickles, we might need to remove them when compiling with -Yscala2-stdlib + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.IndexedSeqOps.superscala$collection$immutable$IndexedSeqOps$$slice"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.StrictOptimizedSeqOps.superscala$collection$immutable$StrictOptimizedSeqOps$$sorted"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.immutable.IndexedSeq.superscala$collection$immutable$IndexedSeq$$*"/* sameElements, canEqual */), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedSetOps.superscala$collection$SortedSetOps$$*"/* min, max */), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedSet.superscala$collection$SortedSet$$equals"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.LinearSeqOps.superscala$collection$LinearSeqOps$$sameElements"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SortedMap.superscala$collection$SortedMap$$equals"), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.SeqOps.superscala$collection$SeqOps$$*"/* concat, sizeCompare */), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.BitSetOps.superscala$collection$BitSetOps$$*"/* min, intersect, concat, diff, max */), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.mutable.Cloneable.superscala$collection$mutable$Cloneable$$clone"), // The member scala.collection.mutable.Cloneable.superscala$collection$mutable$Cloneable$$clone was concrete or did not exist but is abstract in current version + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.util.control.NoStackTrace.superscala$util$control$NoStackTrace$$fillInStackTrace"), - // Problem: `private[scala] var` in case class + // TASTy-MiMa bug (probably OK): `private[scala] var` in case class + // This is probably because we can only access the next field from the scala library. 
ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.immutable.::.next$access$1"), - // Problem Missing setter for `protected var` - ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.*_="), + // Probably OK: Problem Missing setter for `protected var` + // All the classes that contain these `protected var`s are private in `collection` or `convert` + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.index_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.myCurrent_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.maxLength_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.BinaryTreeStepperBase.stack_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.ChampStepperBase.maxSize_="), // The member scala.collection.convert.impl.ChampStepperBase.maxSize_= with signature (scala.Int):scala.Unit was concrete or did not exist but is abstract in current version + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.IndexedStepperBase.iN_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.IndexedStepperBase.i0_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.InOrderStepperBase.iN_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.InOrderStepperBase.i0_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.TableStepperBase.i0_="), + ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.TableStepperBase.maxLength_="), // Problem: ??? - ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator"), + // Member is defined and has explicit result type + // https://github.com/scala/scala/blob/2.13.x/src/library/scala/collection/convert/JavaCollectionWrappers.scala#L66-L71 + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator"), // The member scala.collection.convert.JavaCollectionWrappers.IterableWrapperTrait.iterator with signature ():scala.collection.convert.JavaCollectionWrappers.IteratorWrapper does not have a correspondant in current version // TASTy-MiMa bugs ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.appendedAll"), @@ -41,7 +66,10 @@ object TastyMiMaFilters { ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.prependedAll"), ProblemMatcher.make(ProblemKind.InternalError, "scala.concurrent.duration.package.*"), - // Problems introduced in 2.13.11 + // Problem? 
Very complicated signature + ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.generic.IsMap.mapOpsIsMap"), // The symbol scala.collection.generic.IsMap.mapOpsIsMap has an incompatible type in current version: before: [CC0 <: ([X, Y] =>> scala.collection.MapOps[X, Y, ([X, Y] =>> scala.collection.Iterable[scala.Tuple2[X, Y]]), CC0[X, Y]]), K0, V0](((scala.collection.generic.IsMap[CC0[K0, V0]] { type V = V0 }) { type C = CC0[.this.K, .this.V] }) { type K = K0 }); after: [CC0 >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.collection.MapOps[X, Y, IsMap$.this.Tupled[([A] =>> scala.collection.Iterable[A])]#Ap, CC0[X, Y]]), K0, V0]{ 726875885 => (((scala.collection.generic.IsMap[CC0[K0, V0]] { type K = K0 }) { type V = V0 }) { type C = CC0[726875885.K, 726875885.V] }) } + + // Problems introduced in 2.13.11: Implicit classes with complex signatures ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFromLowPriority1.buildFromSortedSetOps"), // The symbol scala.collection.BuildFromLowPriority1.buildFromSortedSetOps has an incompatible type in current version: before: [CC <: ([X] =>> (scala.collection.SortedSet[X] & scala.collection.SortedSetOps[X, CC, ?])), A0, A](evidence$3: scala.package.Ordering[A])scala.collection.BuildFrom[(CC[A0] & scala.collection.SortedSet[A0]), A, (CC[A] & scala.collection.SortedSet[A])]; after: [CC >: ([X] =>> scala.Nothing) <: ([X] =>> scala.&[scala.collection.SortedSet[X], scala.collection.SortedSetOps[X, CC, ?]]), A0, A](evidence$3: scala.package.Ordering[A])scala.collection.BuildFrom[scala.&[CC[A0], scala.collection.SortedSet[A0]], A, scala.&[CC[A], scala.collection.SortedSet[A]]] ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFrom.buildFromMapOps"), // The symbol scala.collection.BuildFrom.buildFromMapOps has an incompatible type in current version: before: [CC <: ([X, Y] =>> (scala.collection.Map[X, Y] & scala.collection.MapOps[X, Y, CC, ?])), K0, V0, K, V]scala.collection.BuildFrom[(CC[K0, V0] & scala.collection.Map[K0, V0]), scala.Tuple2[K, V], (CC[K, V] & scala.collection.Map[K, V])]; after: [CC >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.&[scala.collection.Map[X, Y], scala.collection.MapOps[X, Y, CC, ?]]), K0, V0, K, V]scala.collection.BuildFrom[scala.&[CC[K0, V0], scala.collection.Map[K0, V0]], scala.Tuple2[K, V], scala.&[CC[K, V], scala.collection.Map[K, V]]] ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFrom.buildFromSortedMapOps"), // The symbol scala.collection.BuildFrom.buildFromSortedMapOps has an incompatible type in current version: before: [CC <: ([X, Y] =>> (scala.collection.SortedMap[X, Y] & scala.collection.SortedMapOps[X, Y, CC, ?])), K0, V0, K, V](evidence$1: scala.package.Ordering[K])scala.collection.BuildFrom[(CC[K0, V0] & scala.collection.SortedMap[K0, V0]), scala.Tuple2[K, V], (CC[K, V] & scala.collection.SortedMap[K, V])]; after: [CC >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.&[scala.collection.SortedMap[X, Y], scala.collection.SortedMapOps[X, Y, CC, ?]]), K0, V0, K, V](evidence$1: scala.package.Ordering[K])scala.collection.BuildFrom[scala.&[CC[K0, V0], scala.collection.SortedMap[K0, V0]], scala.Tuple2[K, V], scala.&[CC[K, V], scala.collection.SortedMap[K, V]]] From 57a0f174762e6009e46c77ba309d33b5301d499d Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 22 Sep 2023 11:56:50 +0200 Subject: [PATCH 50/90] address review comments --- docs/_spec/05-classes-and-objects.md | 6 +++--- 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/docs/_spec/05-classes-and-objects.md b/docs/_spec/05-classes-and-objects.md index cc8d97704a50..e1d4ace3d81f 100644 --- a/docs/_spec/05-classes-and-objects.md +++ b/docs/_spec/05-classes-and-objects.md @@ -970,8 +970,8 @@ enum Option[+T]: An enum class is represented as a `sealed abstract` class that extends the `scala.reflect.Enum` trait. Enum cases are represented as follows: -- a class enum case is mapped to a `case class` member of enum class' companion object, -- a singleton enum case is mapped to a `val` member of the enum class' companion object, implemented by a local class definition which may be shared between cases. +- a class enum case is mapped to a `case class` member of the enum class' companion object, +- a singleton enum case is mapped to a `val` member of the enum class' companion object, implemented by a local class definition. Whether that local class is shared with other singleton cases, and which ones, is left as an implementation detail. ###### Precise rules The `scala.reflect.Enum` trait defines a single public method, `ordinal`: @@ -994,7 +994,7 @@ Explicit `extends` clauses must be provided in the following cases, where rules 1. An `enum` definition ```scala - enum ´E´ { } + enum ´E´ extends { } ``` expands to a `sealed abstract` class that extends the `scala.reflect.Enum` trait and an associated companion object that contains the defined cases, expanded according to rules (2 - 8). The enum class starts with a compiler-generated import that imports the names `` of all cases so that they can be used without prefix in the class. From a2bbef0f2a6514982492da01ef20e3ed7ce32436 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 22 Sep 2023 13:09:59 +0200 Subject: [PATCH 51/90] [spec] some fixes to tuples --- docs/_spec/03-types.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/_spec/03-types.md b/docs/_spec/03-types.md index 1cf58b789182..407a69b8c8c5 100644 --- a/docs/_spec/03-types.md +++ b/docs/_spec/03-types.md @@ -239,13 +239,13 @@ SimpleType1 ::= ... | ‘(’ TypesOrWildcards ‘)’ ``` -A _tuple type_ ´(T_1, ..., T_n)´ where ´n \geq 2´ is sugar for the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`, which is itself a series of nested infix types which are sugar for `*:[´T_1´, *:[´T_2´, ... *[´T_n´, scala.EmptyTuple]]]`. +A _tuple type_ ´(T_1, ..., T_n)´ where ´n \geq 2´ is sugar for the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`, which is itself a series of nested infix types which are sugar for `*:[´T_1´, *:[´T_2´, ... *:[´T_n´, scala.EmptyTuple]]]`. The ´T_i´ can be wildcard type arguments. Notes: - `(´T_1´)` is the type ´T_1´, and not `´T_1´ *: scala.EmptyTuple` (´T_1´ cannot be a wildcard type argument in that case). -- `()` is not a valid type (not even `scala.EmptyTuple`). +- `()` is not a valid type (i.e. it is not desugared to `scala.EmptyTuple`). 
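As a quick illustrative check of the equivalences listed in these notes, assuming a recent Scala 3 compiler (the object and value names are made up for the example):

```scala
object TupleTypeSugarCheck:
  // `(Int, String)` is sugar for the nested infix form ending in `scala.EmptyTuple`...
  val ev1 = summon[(Int, String) =:= (Int *: String *: EmptyTuple)]
  // ...which is itself sugar for the fully prefix form:
  val ev2 = summon[(Int, String) =:= *:[Int, *:[String, EmptyTuple]]]
  // `(T_1)` is just `T_1`, not a one-element tuple:
  val ev3 = summon[(Int) =:= Int]
  // There is no `()` type; the empty tuple type must be written explicitly:
  val empty: EmptyTuple = EmptyTuple
```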
### Concrete Refined Types From 2000f08a806c8b650e851e1d9b1bdc6749906633 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Fri, 22 Sep 2023 16:36:55 +0200 Subject: [PATCH 52/90] Set reference version to 3.3.1 --- project/Build.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 0766ae707d7f..349e94aed3e6 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -83,7 +83,7 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.3.1-RC7" + val referenceVersion = "3.3.1" val baseVersion = "3.4.0-RC1" @@ -101,7 +101,7 @@ object Build { * set to 3.1.3. If it is going to be 3.1.0, it must be set to the latest * 3.0.x release. */ - val previousDottyVersion = "3.3.0" + val previousDottyVersion = "3.3.1" object CompatMode { final val BinaryCompatible = 0 From 8113165545320d44ecc7b96ebf11a6d97a3b7a4e Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 22 Sep 2023 16:52:30 +0200 Subject: [PATCH 53/90] address review comments --- compiler/src/dotty/tools/dotc/util/EqHashSet.scala | 2 +- compiler/src/dotty/tools/dotc/util/GenericHashSet.scala | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/EqHashSet.scala b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala index 44a050ae2bf8..d584441fd00a 100644 --- a/compiler/src/dotty/tools/dotc/util/EqHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/EqHashSet.scala @@ -15,7 +15,7 @@ object EqHashSet: * initial size of the table will be the smallest power of two * that is equal or greater than the given `initialCapacity`. * Minimum value is 4. -* @param capacityMultiple The minimum multiple of capacity relative to used elements. + * @param capacityMultiple The minimum multiple of capacity relative to used elements. * The hash table will be re-sized once the number of elements * multiplied by capacityMultiple exceeds the current size of the hash table. * However, a table of size up to DenseLimit will be re-sized only diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala index 704298e55fb7..7abe40a8e13d 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala @@ -36,8 +36,7 @@ abstract class GenericHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int private def roundToPower(n: Int) = if n < 4 then 4 - else if Integer.bitCount(n) == 1 then n - else 1 << (32 - Integer.numberOfLeadingZeros(n)) + else 1 << (32 - Integer.numberOfLeadingZeros(n - 1)) def clear(resetToInitial: Boolean): Unit = used = 0 @@ -49,10 +48,10 @@ abstract class GenericHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int protected def isDense = limit < DenseLimit - /** Hashcode, by default a processed `x.hashCode`, can be overridden */ + /** Hashcode, to be implemented in subclass */ protected def hash(key: T): Int - /** Hashcode, by default `equals`, can be overridden */ + /** Equality, to be implemented in subclass */ protected def isEqual(x: T, y: T): Boolean /** Turn hashcode `x` into a table index */ From a49eeae43cefc3e3b91749975e2457b41fd65f4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Wed, 30 Aug 2023 17:31:42 +0200 Subject: [PATCH 54/90] Use 3.4 as language version number in the spec. 
--- docs/_spec/README.md | 6 +++--- docs/_spec/_config.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/_spec/README.md b/docs/_spec/README.md index b9eba413f8a2..f8a59e86896c 100644 --- a/docs/_spec/README.md +++ b/docs/_spec/README.md @@ -1,6 +1,6 @@ # WIP Scala 3 Language Specification -**This is still a work in progress, and should *not* be regarded as a source of truth.** +**This is still a work in progress. There are still Scala 3 features missing, as well as some areas that have not been updated since 2.13 yet.** First of all, the language specification is meant to be correct, precise and clear. @@ -25,8 +25,8 @@ To preview locally, run the following commands in the docs/_spec subfolder: env UID="$(id -u)" GID="$(id -g)" docker-compose up ``` -and open http://0.0.0.0:4000/files/archive/spec/2.13/ to view the spec. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`. -