From 3459704697d578951ad13d4c9593bc17017fb25a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 27 Jan 2024 14:51:50 -0800 Subject: [PATCH 001/827] Add binary lits to syntax summaries --- docs/_docs/internals/syntax.md | 3 ++- docs/_docs/reference/syntax.md | 3 ++- docs/_spec/13-syntax-summary.md | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index aa8cd15f00a0..ac20fb262914 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -57,9 +57,10 @@ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; -integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit [{binaryDigit | ‘_’} binaryDigit] floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index bf2c27d57863..65d1a24fdb85 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -60,9 +60,10 @@ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; -integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit [{binaryDigit | ‘_’} binaryDigit] floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] diff --git a/docs/_spec/13-syntax-summary.md b/docs/_spec/13-syntax-summary.md index 2dc971fc9840..f02b2210bb1d 100644 --- a/docs/_spec/13-syntax-summary.md +++ b/docs/_spec/13-syntax-summary.md @@ -49,9 +49,10 @@ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; -integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit [{binaryDigit | ‘_’} binaryDigit] floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] From 76542f83e95e7bd5d23e4d1cc7af6745521e9ab9 Mon Sep 17 00:00:00 2001 From: OlegYch Date: Mon, 15 Apr 2024 17:29:38 +0300 Subject: [PATCH 002/827] Regression: fix compilation performance on Windows by caching isDirectory calls Fixes #19924 backport to start-3.4.1 --- compiler/src/dotty/tools/io/PlainFile.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index acef191d3072..933157e997ac 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -13,7 +13,7 @@ import java.nio.file.{InvalidPathException, Paths} /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { - override def isDirectory: Boolean = true + override val isDirectory: Boolean = true override def 
iterator(): Iterator[PlainFile] = givenPath.list.filter(_.exists).map(new PlainFile(_)) override def delete(): Unit = givenPath.deleteRecursively() } @@ -78,7 +78,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { } /** Is this abstract file a directory? */ - def isDirectory: Boolean = givenPath.isDirectory + val isDirectory: Boolean = givenPath.isDirectory /** Returns the time that this abstract file was last modified. */ def lastModified: Long = givenPath.lastModified.toMillis From 52c5c14402f1ab4ed9b75f20240e297095a68633 Mon Sep 17 00:00:00 2001 From: OlegYch Date: Tue, 16 Apr 2024 16:00:31 +0300 Subject: [PATCH 003/827] Remove mutable methods which might potentially break due to introduced caching --- compiler/src/dotty/tools/io/AbstractFile.scala | 6 ------ compiler/src/dotty/tools/io/NoAbstractFile.scala | 2 -- compiler/src/dotty/tools/io/PlainFile.scala | 9 --------- compiler/src/dotty/tools/io/VirtualDirectory.scala | 6 ------ compiler/src/dotty/tools/io/VirtualFile.scala | 6 ------ compiler/src/dotty/tools/io/ZipArchive.scala | 2 -- 6 files changed, 31 deletions(-) diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index 233b1ca8fb62..ee72297c2a4f 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -136,12 +136,6 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Does this abstract file represent something which can contain classfiles? */ def isClassContainer: Boolean = isDirectory || (jpath != null && ext.isJarOrZip) - /** Create a file on disk, if one does not exist already. */ - def create(): Unit - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit - /** Is this abstract file a directory? */ def isDirectory: Boolean diff --git a/compiler/src/dotty/tools/io/NoAbstractFile.scala b/compiler/src/dotty/tools/io/NoAbstractFile.scala index 13c2c6851d2b..bef045e290a5 100644 --- a/compiler/src/dotty/tools/io/NoAbstractFile.scala +++ b/compiler/src/dotty/tools/io/NoAbstractFile.scala @@ -17,8 +17,6 @@ import java.io.InputStream object NoAbstractFile extends AbstractFile { def absolute: AbstractFile = this def container: AbstractFile = this - def create(): Unit = ??? - def delete(): Unit = ??? def jpath: JPath = null def input: InputStream = null def isDirectory: Boolean = false diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index 933157e997ac..f0562fbfcdde 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -15,7 +15,6 @@ import java.nio.file.{InvalidPathException, Paths} class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { override val isDirectory: Boolean = true override def iterator(): Iterator[PlainFile] = givenPath.list.filter(_.exists).map(new PlainFile(_)) - override def delete(): Unit = givenPath.deleteRecursively() } /** This class implements an abstract file backed by a File. @@ -113,14 +112,6 @@ class PlainFile(val givenPath: Path) extends AbstractFile { null } - /** Does this abstract file denote an existing file? */ - def create(): Unit = if (!exists) givenPath.createFile() - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = - if (givenPath.isFile) givenPath.delete() - else if (givenPath.isDirectory) givenPath.toDirectory.deleteRecursively() - /** Returns a plain file with the given name. 
It does not * check that it exists. */ diff --git a/compiler/src/dotty/tools/io/VirtualDirectory.scala b/compiler/src/dotty/tools/io/VirtualDirectory.scala index 157f63a2ac1a..949f2d0e61dd 100644 --- a/compiler/src/dotty/tools/io/VirtualDirectory.scala +++ b/compiler/src/dotty/tools/io/VirtualDirectory.scala @@ -34,12 +34,6 @@ extends AbstractFile { override def input: InputStream = sys.error("directories cannot be read") override def output: OutputStream = sys.error("directories cannot be written") - /** Does this abstract file denote an existing file? */ - def create(): Unit = { unsupported() } - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = { unsupported() } - /** Returns an abstract file with the given name. It does not * check that it exists. */ diff --git a/compiler/src/dotty/tools/io/VirtualFile.scala b/compiler/src/dotty/tools/io/VirtualFile.scala index 9d290a9b0e6a..6fb9859503f2 100644 --- a/compiler/src/dotty/tools/io/VirtualFile.scala +++ b/compiler/src/dotty/tools/io/VirtualFile.scala @@ -82,12 +82,6 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF Iterator.empty } - /** Does this abstract file denote an existing file? */ - def create(): Unit = unsupported() - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = unsupported() - /** * Returns the abstract file in this abstract directory with the * specified name. If there is no such file, returns null. The diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index 9af935690ffc..a23bde8faaed 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -61,8 +61,6 @@ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) ex def isDirectory: Boolean = true def lookupName(name: String, directory: Boolean): AbstractFile = unsupported() def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() - def create(): Unit = unsupported() - def delete(): Unit = unsupported() def output: OutputStream = unsupported() def container: AbstractFile = unsupported() def absolute: AbstractFile = unsupported() From 29706467e87aa5283a526f011d5e0bb485757d98 Mon Sep 17 00:00:00 2001 From: OlegYch Date: Wed, 17 Apr 2024 14:20:37 +0300 Subject: [PATCH 004/827] Update compiler/src/dotty/tools/io/PlainFile.scala Co-authored-by: Nicolas Stucki --- compiler/src/dotty/tools/io/PlainFile.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index f0562fbfcdde..a6a39d9ff3eb 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -77,7 +77,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { } /** Is this abstract file a directory? */ - val isDirectory: Boolean = givenPath.isDirectory + val isDirectory: Boolean = givenPath.isDirectory // cached for performance on Windows /** Returns the time that this abstract file was last modified. 
*/ def lastModified: Long = givenPath.lastModified.toMillis From 19a453d2a4701153f2d9ff38ca8a6e652738e8b6 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 6 Mar 2024 21:01:56 +0100 Subject: [PATCH 005/827] simple scripts to run non-bootstrapped compiler after 'sbt buildQuick' --- .gitignore | 2 ++ bin/commonQ | 6 ++++++ bin/scalaQ | 6 ++++++ bin/scalacQ | 6 ++++++ docs/_docs/contributing/getting-started.md | 6 ++++++ project/Build.scala | 7 +++++++ 6 files changed, 33 insertions(+) create mode 100755 bin/commonQ create mode 100755 bin/scalaQ create mode 100755 bin/scalacQ diff --git a/.gitignore b/.gitignore index 0fc39ecbae5b..7ee4342439be 100644 --- a/.gitignore +++ b/.gitignore @@ -64,6 +64,8 @@ testlogs/ local/ compiler/test/debug/Gen.jar +/bin/.cp + before-pickling.txt after-pickling.txt bench/compile.txt diff --git a/bin/commonQ b/bin/commonQ new file mode 100755 index 000000000000..a25d52db3d90 --- /dev/null +++ b/bin/commonQ @@ -0,0 +1,6 @@ +cp=$(cat $ROOT/bin/.cp) 2> /dev/null + +if [[ "$cp" == "" ]]; then + echo "run 'sbt buildQuick' first" + exit 1 +fi diff --git a/bin/scalaQ b/bin/scalaQ new file mode 100755 index 000000000000..c14a2f0372ff --- /dev/null +++ b/bin/scalaQ @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." +. $ROOT/bin/commonQ + +java -cp $cp dotty.tools.MainGenericRunner -usejavacp "$@" diff --git a/bin/scalacQ b/bin/scalacQ new file mode 100755 index 000000000000..f3dafba9fe27 --- /dev/null +++ b/bin/scalacQ @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." +. $ROOT/bin/commonQ + +java -cp $cp dotty.tools.MainGenericCompiler -usejavacp "$@" diff --git a/docs/_docs/contributing/getting-started.md b/docs/_docs/contributing/getting-started.md index 071cbeb0c0a2..b6e3e4fac00a 100644 --- a/docs/_docs/contributing/getting-started.md +++ b/docs/_docs/contributing/getting-started.md @@ -81,6 +81,12 @@ $ scalac tests/pos/HelloWorld.scala $ scala HelloWorld ``` +Note that the `scalac` and `scala` scripts have slow roundtrip times when working on the compiler codebase: whenever +any source file changes they invoke `sbt dist/pack` first. + +As an alternative, run the `buildQuick` task in sbt. It builds the compiler and writes its classpath to the `bin/.cp` +file, which enables the `scalacQ` and `scalaQ` scripts in the `bin/` folder. + ## Starting a REPL ```bash diff --git a/project/Build.scala b/project/Build.scala index cfffda810f75..e43ac0550d1a 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -205,6 +205,8 @@ object Build { val repl = taskKey[Unit]("spawns a repl with the correct classpath") + val buildQuick = taskKey[Unit]("builds the compiler and writes the classpath to bin/.cp to enable the bin/scalacQ and bin/scalaQ scripts") + // Compiles the documentation and static site val genDocs = inputKey[Unit]("run scaladoc to generate static documentation site") @@ -2136,6 +2138,11 @@ object Build { // default. addCommandAlias("publishLocal", "scala3-bootstrapped/publishLocal"), repl := (`scala3-compiler-bootstrapped` / repl).value, + buildQuick := { + val _ = (`scala3-compiler` / Compile / compile).value + val cp = (`scala3-compiler` / Compile / fullClasspath).value.map(_.data.getAbsolutePath).mkString(File.pathSeparator) + IO.write(baseDirectory.value / "bin" / ".cp", cp) + }, (Compile / console) := (Compile / console).dependsOn(Def.task { import _root_.scala.io.AnsiColor._ val msg = "`console` uses the reference Scala version. 
Use `repl` instead." From 6fd2734e7fc8f473342c526c8e0616e2418f3935 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 11 Apr 2024 23:16:17 +0200 Subject: [PATCH 006/827] tryCompiletimeConstantFold in disjointnessBoundary Fixes #20166 --- .../dotty/tools/dotc/core/TypeComparer.scala | 2 ++ tests/pos/i20166.scala | 24 +++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 tests/pos/i20166.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index cee1ec7fffa8..0890d5889fa7 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2902,6 +2902,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp case tp: ConstantType => tp + case tp: AppliedType if tp.tryCompiletimeConstantFold.exists => + tp.tryCompiletimeConstantFold case tp: HKTypeLambda => tp case tp: ParamRef => diff --git a/tests/pos/i20166.scala b/tests/pos/i20166.scala new file mode 100644 index 000000000000..38b7556a82f4 --- /dev/null +++ b/tests/pos/i20166.scala @@ -0,0 +1,24 @@ +import scala.compiletime.ops.int.* + +// NOTE ops.int.S is documented as equivalent to MyS + +type MyS[X] = X match + case 0 => 1 + case 1 => 2 + case 2 => 3 + +type M[I <: Int] = 4 match + case 1 - 1 => "0" + case MyS[I] => "2" + case S[I] => "2" // Not provablyDisjoint before changes + case 2 + I => "3" + case I + 3 => "4" + +val _: M[1] = "4" + + +type M2[I <: Int, P] = I match + case P => "b" + case _ => "c" + +val _: M2[5, 2 + 3] = "b" From c544e4128931bba8650bd61d2d29131f75426ccb Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 May 2024 13:23:31 +0200 Subject: [PATCH 007/827] Harden GADT constraint handling to survive illegal F-bounds --- compiler/src/dotty/tools/dotc/core/GadtConstraint.scala | 3 ++- tests/neg/i20317.scala | 3 +++ tests/neg/i20317a.scala | 5 +++++ 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 tests/neg/i20317.scala create mode 100644 tests/neg/i20317a.scala diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 1cbfabc08958..5a8938602523 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -193,7 +193,8 @@ sealed trait GadtState { case i => pt.paramRefs(i) case tp => tp } - + if !param.info.exists then + throw TypeError(em"illegal recursive reference involving $param") val tb = param.info.bounds tb.derivedTypeBounds( lo = substDependentSyms(tb.lo, isUpper = false), diff --git a/tests/neg/i20317.scala b/tests/neg/i20317.scala new file mode 100644 index 000000000000..e9d8599b9fc1 --- /dev/null +++ b/tests/neg/i20317.scala @@ -0,0 +1,3 @@ +type Foo[A] = A + +def foo[A <: Foo[A]]: Unit = () // error // error diff --git a/tests/neg/i20317a.scala b/tests/neg/i20317a.scala new file mode 100644 index 000000000000..d7b8b66eb80e --- /dev/null +++ b/tests/neg/i20317a.scala @@ -0,0 +1,5 @@ +type SemigroupStructural[A] = + A & { def combine(a: A): A } +def combineAll[A <: SemigroupStructural[A]]( + i: A, l: List[A] +): A = l.foldLeft(i)(_.combine(_)) // error From 5e408bdf2e9c6b326398d27e6f682187e8db708f Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 12:48:45 +0200 Subject: [PATCH 008/827] Add test to best effort blacklist --- compiler/test/dotc/neg-best-effort-pickling.blacklist | 1 + 1 file changed, 1 insertion(+) diff --git 
a/compiler/test/dotc/neg-best-effort-pickling.blacklist b/compiler/test/dotc/neg-best-effort-pickling.blacklist index ff02be107a8a..1c8421b44539 100644 --- a/compiler/test/dotc/neg-best-effort-pickling.blacklist +++ b/compiler/test/dotc/neg-best-effort-pickling.blacklist @@ -13,6 +13,7 @@ curried-dependent-ift.scala i17121.scala illegal-match-types.scala i13780-1.scala +i20317a.scala # semantic db generation fails in the first compilation i1642.scala From 34f17b753ad8dc5fcc038d592a8fc1c748ec62b4 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 7 Jan 2024 13:22:06 +0100 Subject: [PATCH 009/827] New modularity language import --- .../src/scala/runtime/stdLibPatches/language.scala | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index b2bd4b791423..b6d256b240f9 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -96,7 +96,18 @@ object language: * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] */ @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") - object namedTuples + object namedTuples + + /** Experimental support for new features for better modularity, including + * - better tracking of dependencies through classes + * - better usability of context bounds + * - better syntax and conventions for type classes + * - ability to merge exported types in intersections + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/modularity]] + */ + @compileTimeOnly("`modularity` can only be used at compile time in import statements") + object modularity /** Was needed to add support for relaxed imports of extension methods. * The language import is no longer needed as this is now a standard feature since SIP was accepted.
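A minimal usage sketch (assumed, not taken from the patch itself): the new flag is enabled like any other experimental language feature, either with an import or with the equivalent compiler option that the tests added later in this series use.

```scala
// Enable the experimental feature set in source (sketch).
import scala.language.experimental.modularity

// The tests added later in this series enable it per file with the
// equivalent compiler options instead:
//   //> using options -source future -language:experimental.modularity
```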
From 31c9e8a850e3f40dd797dc9e3669dcadb020586d Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 18 Nov 2023 15:10:34 +0100 Subject: [PATCH 010/827] Allow vals in using clauses of givens --- .../dotty/tools/dotc/parsing/Parsers.scala | 25 +++++++++++++------ 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 60b2a2b1d3cf..8d5c50d6d608 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -62,7 +62,7 @@ object Parsers { case ExtensionFollow // extension clause, following extension parameter def isClass = // owner is a class - this == Class || this == CaseClass + this == Class || this == CaseClass || this == Given def takesOnlyUsingClauses = // only using clauses allowed for this owner this == Given || this == ExtensionFollow def acceptsVariance = @@ -3372,7 +3372,7 @@ object Parsers { val isAbstractOwner = paramOwner == ParamOwner.Type || paramOwner == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param - if paramOwner == ParamOwner.Class || paramOwner == ParamOwner.CaseClass then + if paramOwner.isClass then mods |= PrivateLocal if isIdent(nme.raw.PLUS) && checkVarianceOK() then mods |= Covariant @@ -4100,6 +4100,14 @@ object Parsers { val nameStart = in.offset val name = if isIdent && followingIsGivenSig() then ident() else EmptyTermName + // TODO Change syntax description + def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = + paramss.nestedMap: param => + if !param.mods.isAllOf(PrivateLocal) then + syntaxError(em"method parameter ${param.name} may not be `a val`", param.span) + param.withMods(param.mods &~ (AccessFlags | ParamAccessor | Mutable) | Param) + .asInstanceOf[List[ParamClause]] + val gdef = val tparams = typeParamClauseOpt(ParamOwner.Given) newLineOpt() @@ -4121,16 +4129,17 @@ object Parsers { mods1 |= Lazy ValDef(name, parents.head, subExpr()) else - DefDef(name, joinParams(tparams, vparamss), parents.head, subExpr()) + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) else if (isStatSep || isStatSeqEnd) && parentsIsType then if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") - DefDef(name, joinParams(tparams, vparamss), parents.head, EmptyTree) + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else - val tparams1 = tparams.map(tparam => tparam.withMods(tparam.mods | PrivateLocal)) - val vparamss1 = vparamss.map(_.map(vparam => - vparam.withMods(vparam.mods &~ Param | ParamAccessor | Protected))) - val constr = makeConstructor(tparams1, vparamss1) + val vparamss1 = vparamss.nestedMap: vparam => + if vparam.mods.is(Private) + then vparam.withMods(vparam.mods &~ PrivateLocal | Protected) + else vparam + val constr = makeConstructor(tparams, vparamss1) val templ = if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) else withTemplate(constr, parents) From 84655ca3409c3ec2c1645b0c8f56ff7d17cc304d Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 15 Dec 2023 16:16:58 +0100 Subject: [PATCH 011/827] A relaxation concerning exported type aliases The rules for export forwarders are changed as follows. Previously, all export forwarders were declared `final`. Now, only term members are declared `final`. Type aliases left aside. 
This makes it possible to export the same type member into several traits and then mix these traits in the same class. `typeclass-aggregates.scala` shows why this is essential to be able to combine multiple givens with type members. The change does not lose safety since different type aliases would in any case lead to uninstantiatable classes. --- .../src/dotty/tools/dotc/config/Feature.scala | 1 + .../src/dotty/tools/dotc/core/Flags.scala | 2 - .../src/dotty/tools/dotc/typer/Namer.scala | 6 ++- .../reference/other-new-features/export.md | 16 +++++-- tests/neg/i0248-inherit-refined.check | 12 +++++ tests/pos/typeclass-aggregates.scala | 47 +++++++++++++++++++ 6 files changed, 77 insertions(+), 7 deletions(-) create mode 100644 tests/neg/i0248-inherit-refined.check create mode 100644 tests/pos/typeclass-aggregates.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 1fe9cae936c9..d2bfdcb550dc 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -34,6 +34,7 @@ object Feature: val captureChecking = experimental("captureChecking") val into = experimental("into") val namedTuples = experimental("namedTuples") + val modularity = experimental("modularity") def experimentalAutoEnableFeatures(using Context): List[TermName] = defn.languageExperimentalFeatures diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 8110bc769d4f..98c57a96a5c0 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -543,8 +543,6 @@ object Flags { /** Flags retained in type export forwarders */ val RetainedExportTypeFlags = Infix - val MandatoryExportTypeFlags = Exported | Final - /** Flags that apply only to classes */ val ClassOnlyFlags = Sealed | Open | Abstract.toTypeFlags diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 72ca6a35bf4b..d2121ede2a67 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -26,7 +26,7 @@ import Nullables.* import transform.ValueClasses.* import TypeErasure.erasure import reporting.* -import config.Feature.sourceVersion +import config.Feature.{sourceVersion, modularity} import config.SourceVersion.* import scala.compiletime.uninitialized @@ -1203,7 +1203,9 @@ class Namer { typer: Typer => target = target.etaExpand newSymbol( cls, forwarderName, - MandatoryExportTypeFlags | (sym.flags & RetainedExportTypeFlags), + Exported + | (sym.flags & RetainedExportTypeFlags) + | (if Feature.enabled(modularity) then EmptyFlags else Final), TypeAlias(target), coord = span) // Note: This will always create unparameterzied aliases. 
So even if the original type is diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md index 98e9a7d3d711..e21d369b6b5e 100644 --- a/docs/_docs/reference/other-new-features/export.md +++ b/docs/_docs/reference/other-new-features/export.md @@ -37,7 +37,12 @@ final def print(bits: BitMap): Unit = printUnit.print(bits) final type PrinterType = printUnit.PrinterType ``` -They can be accessed inside `Copier` as well as from outside: +With the experimental `modularity` language import, only exported methods and values are final, whereas the generated `PrinterType` would be a simple type alias +```scala + type PrinterType = printUnit.PrinterType +``` + +These aliases can be accessed inside `Copier` as well as from outside: ```scala val copier = new Copier @@ -90,12 +95,17 @@ export O.* ``` Export aliases copy the type and value parameters of the members they refer to. -Export aliases are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: +Export aliases of term members are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: - - Export aliases cannot be overridden, since they are final. + - Export aliases of methods or fields cannot be overridden, since they are final. - Export aliases cannot override concrete members in base classes, since they are not marked `override`. - However, export aliases can implement deferred members of base classes. + - Export type aliases are normally also final, except when the experimental + language import `modularity` is present. The general + rules for type aliases ensure in any case that if there are several type aliases in a class, + they must agree on their right hand sides, or the class could not be instantiated. + So dropping the `final` for export type aliases is safe. 
Export aliases for public value definitions that are accessed without referring to private values in the qualifier path diff --git a/tests/neg/i0248-inherit-refined.check b/tests/neg/i0248-inherit-refined.check new file mode 100644 index 000000000000..4e14c3c6f14b --- /dev/null +++ b/tests/neg/i0248-inherit-refined.check @@ -0,0 +1,12 @@ +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:8:18 ------------------------------------------------------- +8 | class C extends Y // error + | ^ + | test.A & test.B is not a class type + | + | longer explanation available when compiling with `-explain` +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:10:18 ------------------------------------------------------ +10 | class D extends Z // error + | ^ + | test.A | test.B is not a class type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala new file mode 100644 index 000000000000..77b0f1a9f04a --- /dev/null +++ b/tests/pos/typeclass-aggregates.scala @@ -0,0 +1,47 @@ +//> using options -source future -language:experimental.modularity +trait Ord: + type This + extension (x: This) + def compareTo(y: This): Int + def < (y: This): Boolean = compareTo(y) < 0 + def > (y: This): Boolean = compareTo(y) > 0 + + trait OrdProxy extends Ord: + export Ord.this.* + +trait SemiGroup: + type This + extension (x: This) def combine(y: This): This + + trait SemiGroupProxy extends SemiGroup: + export SemiGroup.this.* + +trait Monoid extends SemiGroup: + def unit: This + + trait MonoidProxy extends Monoid: + export Monoid.this.* + +def ordWithMonoid(ord: Ord, monoid: Monoid{ type This = ord.This }): Ord & Monoid = + new ord.OrdProxy with monoid.MonoidProxy {} + +trait OrdWithMonoid extends Ord, Monoid + +def ordWithMonoid2(ord: Ord, monoid: Monoid{ type This = ord.This }) = //: OrdWithMonoid { type This = ord.This} = + new OrdWithMonoid with ord.OrdProxy with monoid.MonoidProxy {} + +given intOrd: Ord { type This = Int } = ??? +given intMonoid: Monoid { type This = Int } = ??? + +//given (using ord: Ord, monoid: Monoid{ type This = ord.This }): (Ord & Monoid { type This = ord.This}) = +// ordWithMonoid2(ord, monoid) + +val x = summon[Ord & Monoid { type This = Int}] +val y: Int = ??? : x.This + +// given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = +// new ord.OrdProxy with monoid.MonoidProxy {} + +given [A](using ord: Ord { type This = A }, monoid: Monoid { type This = A}): (Ord & Monoid) { type This = A} = + new ord.OrdProxy with monoid.MonoidProxy {} + From 48944142182932b0bb1f97d7261d6033aa96888a Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 13 Dec 2023 10:54:15 +0100 Subject: [PATCH 012/827] Allow class parents to be refined types. Refinements of a class parent are added as synthetic members to the inheriting class. 
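For illustration, a condensed sketch based on the new test `tests/pos/parent-refinement.scala` added by this patch (it needs `-language:experimental.modularity`):

```scala
//> using options -source future -language:experimental.modularity

trait Id { type Value }
type IdOf[T] = Id { type Value = T }

// The parent IdOf[Int] dealiases to the refined type Id { type Value = Int }.
// The refinement { type Value = Int } is split off the parent and entered as a
// synthetic member of Year, so Value is known to be Int inside the class body.
case class Year(value: Int) extends IdOf[Int]:
  val x: Value = 2
```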
--- .../src/dotty/tools/dotc/core/NamerOps.scala | 21 +++++ .../tools/dotc/core/tasty/TreeUnpickler.scala | 2 +- .../tools/dotc/transform/init/Util.scala | 1 + .../src/dotty/tools/dotc/typer/Namer.scala | 37 +++++++-- .../src/dotty/tools/dotc/typer/Typer.scala | 30 +++++-- tests/neg/i0248-inherit-refined.scala | 6 +- tests/neg/parent-refinement-access.check | 7 ++ tests/neg/parent-refinement-access.scala | 6 ++ tests/neg/parent-refinement.check | 29 ++++++- tests/neg/parent-refinement.scala | 20 ++++- tests/pos/parent-refinement.scala | 48 +++++++++++ tests/pos/typeclasses.scala | 79 ++++--------------- 12 files changed, 200 insertions(+), 86 deletions(-) create mode 100644 tests/neg/parent-refinement-access.check create mode 100644 tests/neg/parent-refinement-access.scala create mode 100644 tests/pos/parent-refinement.scala diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 75a135826785..8d096913e285 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -5,6 +5,7 @@ package core import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme import TypeApplications.EtaExpansion +import collection.mutable /** Operations that are shared between Namer and TreeUnpickler */ object NamerOps: @@ -18,6 +19,26 @@ object NamerOps: case TypeSymbols(tparams) :: _ => ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)) case _ => ctor.owner.typeRef + /** Split dependent class refinements off parent type. Add them to `refinements`, + * unless it is null. + */ + extension (tp: Type) + def separateRefinements(cls: ClassSymbol, refinements: mutable.LinkedHashMap[Name, Type] | Null)(using Context): Type = + tp match + case RefinedType(tp1, rname, rinfo) => + try tp1.separateRefinements(cls, refinements) + finally + if refinements != null then + refinements(rname) = refinements.get(rname) match + case Some(tp) => tp & rinfo + case None => rinfo + case tp @ AnnotatedType(tp1, ann) => + tp.derivedAnnotatedType(tp1.separateRefinements(cls, refinements), ann) + case tp: RecType => + tp.parent.substRecThis(tp, cls.thisType).separateRefinements(cls, refinements) + case tp => + tp + /** If isConstructor, make sure it has at least one non-implicit parameter list * This is done by adding a () in front of a leading old style implicit parameter, * or by adding a () as last -- or only -- parameter list if the constructor has diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 04d19f2f8821..f6fa9faf0114 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1074,7 +1074,7 @@ class TreeUnpickler(reader: TastyReader, } val parentReader = fork val parents = readParents(withArgs = false)(using parentCtx) - val parentTypes = parents.map(_.tpe.dealias) + val parentTypes = parents.map(_.tpe.dealiasKeepAnnots.separateRefinements(cls, null)) if cls.is(JavaDefined) && parentTypes.exists(_.derivesFrom(defn.JavaAnnotationClass)) then cls.setFlag(JavaAnnotation) val self = diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala index 756fd1a0a8e7..e11d0e1e21a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -20,6 +20,7 @@ object Util: def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match case tref: TypeRef => tref + case RefinedType(parent, _, _) => typeRefOf(parent) case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index d2121ede2a67..530423fd2613 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -55,11 +55,12 @@ class Namer { typer: Typer => import untpd.* - val TypedAhead : Property.Key[tpd.Tree] = new Property.Key - val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key - val ExportForwarders: Property.Key[List[tpd.MemberDef]] = new Property.Key - val SymOfTree : Property.Key[Symbol] = new Property.Key - val AttachedDeriver : Property.Key[Deriver] = new Property.Key + val TypedAhead : Property.Key[tpd.Tree] = new Property.Key + val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key + val ExportForwarders : Property.Key[List[tpd.MemberDef]] = new Property.Key + val ParentRefinements: Property.Key[List[Symbol]] = new Property.Key + val SymOfTree : Property.Key[Symbol] = new Property.Key + val AttachedDeriver : Property.Key[Deriver] = new Property.Key // was `val Deriver`, but that gave shadowing problems with constructor proxies /** A partial map from unexpanded member and pattern defs and to their expansions. @@ -1515,6 +1516,7 @@ class Namer { typer: Typer => /** The type signature of a ClassDef with given symbol */ override def completeInCreationContext(denot: SymDenotation): Unit = { val parents = impl.parents + val parentRefinements = new mutable.LinkedHashMap[Name, Type] /* The type of a parent constructor. Types constructor arguments * only if parent type contains uninstantiated type parameters. @@ -1569,8 +1571,13 @@ class Namer { typer: Typer => val ptype = parentType(parent)(using completerCtx.superCallContext).dealiasKeepAnnots if (cls.isRefinementClass) ptype else { - val pt = checkClassType(ptype, parent.srcPos, - traitReq = parent ne parents.head, stablePrefixReq = !isJava) + val pt = checkClassType( + if Feature.enabled(modularity) + then ptype.separateRefinements(cls, parentRefinements) + else ptype, + parent.srcPos, + traitReq = parent ne parents.head, + stablePrefixReq = !isJava) if (pt.derivesFrom(cls)) { val addendum = parent match { case Select(qual: Super, _) if Feature.migrateTo3 => @@ -1597,6 +1604,21 @@ class Namer { typer: Typer => } } + /** Enter all parent refinements as public class members, unless a definition + * with the same name already exists in the class. + */ + def enterParentRefinementSyms(refinements: List[(Name, Type)]) = + val refinedSyms = mutable.ListBuffer[Symbol]() + for (name, tp) <- refinements do + if decls.lookupEntry(name) == null then + val flags = tp match + case tp: MethodOrPoly => Method | Synthetic | Deferred + case _ => Synthetic | Deferred + refinedSyms += newSymbol(cls, name, flags, tp, coord = original.rhs.span.startPos).entered + if refinedSyms.nonEmpty then + typr.println(i"parent refinement symbols: ${refinedSyms.toList}") + original.pushAttachment(ParentRefinements, refinedSyms.toList) + /** If `parents` contains references to traits that have supertraits with implicit parameters * add those supertraits in linearization order unless they are already covered by other * parent types. 
For instance, in @@ -1667,6 +1689,7 @@ class Namer { typer: Typer => cls.invalidateMemberCaches() // we might have checked for a member when parents were not known yet. cls.setNoInitsFlags(parentsKind(parents), untpd.bodyKind(rest)) cls.setStableConstructor() + enterParentRefinementSyms(parentRefinements.toList) processExports(using localCtx) defn.patchStdLibClass(cls) addConstructorProxies(cls) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 46982cf1406d..c5b6faf455f7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,8 +40,7 @@ import annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature -import config.Feature.{sourceVersion, migrateTo3} +import config.Feature, Feature.{sourceVersion, migrateTo3, modularity} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -1004,10 +1003,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tp.exists && !tp.typeSymbol.is(Final) && (!tp.isTopType || tp.isAnyRef) // Object is the only toplevel class that can be instantiated - if (templ1.parents.isEmpty && - isFullyDefined(pt, ForceDegree.flipBottom) && - isSkolemFree(pt) && - isEligible(pt.underlyingClassRef(refinementOK = false))) + if templ1.parents.isEmpty + && isFullyDefined(pt, ForceDegree.flipBottom) + && isSkolemFree(pt) + && isEligible(pt.underlyingClassRef(refinementOK = Feature.enabled(modularity))) + then templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) for case parent: RefTree <- templ1.parents do typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) @@ -2871,6 +2871,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } + /** Add all parent refinement symbols as declarations to this class */ + def addParentRefinements(body: List[Tree])(using Context): List[Tree] = + cdef.getAttachment(ParentRefinements) match + case Some(refinedSyms) => + val refinements = refinedSyms.map: sym => + ( if sym.isType then TypeDef(sym.asType) + else if sym.is(Method) then DefDef(sym.asTerm) + else ValDef(sym.asTerm) + ).withSpan(impl.span.startPos) + body ++ refinements + case None => + body + ensureCorrectSuperClass() completeAnnotations(cdef, cls) val constr1 = typed(constr).asInstanceOf[DefDef] @@ -2891,7 +2904,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cdef.withType(UnspecifiedErrorType) else { val dummy = localDummy(cls, impl) - val body1 = addAccessorDefs(cls, typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1) + val body1 = + addParentRefinements( + addAccessorDefs(cls, + typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1)) checkNoDoubleDeclaration(cls) val impl1 = cpy.Template(impl)(constr1, parents1, Nil, self1, body1) diff --git a/tests/neg/i0248-inherit-refined.scala b/tests/neg/i0248-inherit-refined.scala index 97b6f5cdab73..f7cd6375afc9 100644 --- a/tests/neg/i0248-inherit-refined.scala +++ b/tests/neg/i0248-inherit-refined.scala @@ -1,10 +1,12 @@ +//> using options -source future -language:experimental.modularity + object test { class A { type T } type X = A { type T = Int } - class B extends X // error + class B extends X // was error, now OK type Y = A & B class C extends Y // error type Z = A | B class D extends Z // error - abstract class E extends ({ val x: Int }) // error + abstract class E extends ({ 
val x: Int }) // was error, now OK } diff --git a/tests/neg/parent-refinement-access.check b/tests/neg/parent-refinement-access.check new file mode 100644 index 000000000000..5cde9d51558f --- /dev/null +++ b/tests/neg/parent-refinement-access.check @@ -0,0 +1,7 @@ +-- [E164] Declaration Error: tests/neg/parent-refinement-access.scala:6:6 ---------------------------------------------- +6 |trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error + | ^ + | error overriding value x in trait Year2 of type Int; + | value x in trait Gen of type Any has weaker access privileges; it should be public + | (Note that value x in trait Year2 of type Int is abstract, + | and is therefore overridden by concrete value x in trait Gen of type Any) diff --git a/tests/neg/parent-refinement-access.scala b/tests/neg/parent-refinement-access.scala new file mode 100644 index 000000000000..57d45f4fb201 --- /dev/null +++ b/tests/neg/parent-refinement-access.scala @@ -0,0 +1,6 @@ +//> using options -source future -language:experimental.modularity + +trait Gen: + private[Gen] val x: Any = () + +trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error diff --git a/tests/neg/parent-refinement.check b/tests/neg/parent-refinement.check index 550430bd35a7..cf9a57bc7821 100644 --- a/tests/neg/parent-refinement.check +++ b/tests/neg/parent-refinement.check @@ -1,4 +1,25 @@ --- Error: tests/neg/parent-refinement.scala:5:2 ------------------------------------------------------------------------ -5 | with Ordered[Year] { // error - | ^^^^ - | end of toplevel definition expected but 'with' found +-- Error: tests/neg/parent-refinement.scala:11:6 ----------------------------------------------------------------------- +11 |class Bar extends IdOf[Int], (X { type Value = String }) // error + | ^^^ + |class Bar cannot be instantiated since it has a member Value with possibly conflicting bounds Int | String <: ... 
<: Int & String +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:15:17 ------------------------------------------------- +15 | val x: Value = 0 // error + | ^ + | Found: (0 : Int) + | Required: Baz.this.Value + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:21:6 -------------------------------------------------- +21 | foo(2) // error + | ^ + | Found: (2 : Int) + | Required: Boolean + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:17:22 ------------------------------------------------- +17 |val x: IdOf[Int] = Baz() // error + | ^^^^^ + | Found: Baz + | Required: IdOf[Int] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/parent-refinement.scala b/tests/neg/parent-refinement.scala index ca2b88a75fd8..868747faba57 100644 --- a/tests/neg/parent-refinement.scala +++ b/tests/neg/parent-refinement.scala @@ -1,7 +1,21 @@ +//> using options -source future -language:experimental.modularity trait Id { type Value } +trait X { type Value } +type IdOf[T] = Id { type Value = T } + case class Year(value: Int) extends AnyVal - with Id { type Value = Int } - with Ordered[Year] { // error + with (Id { type Value = Int }) + with Ordered[Year] + +class Bar extends IdOf[Int], (X { type Value = String }) // error + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = 0 // error + +val x: IdOf[Int] = Baz() // error -} \ No newline at end of file +object Clash extends ({ def foo(x: Int): Int }): + def foo(x: Boolean): Int = 1 + foo(2) // error diff --git a/tests/pos/parent-refinement.scala b/tests/pos/parent-refinement.scala new file mode 100644 index 000000000000..eaa74228c5d6 --- /dev/null +++ b/tests/pos/parent-refinement.scala @@ -0,0 +1,48 @@ +//> using options -source future -language:experimental.modularity + +class A +class B extends A +class C extends B + +trait Id { type Value } +type IdOf[T] = Id { type Value = T } +trait X { type Value } + +case class Year(value: Int) extends IdOf[Int]: + val x: Value = 2 + +type Between[Lo, Hi] = X { type Value >: Lo <: Hi } + +class Foo() extends IdOf[B], Between[C, A]: + val x: Value = B() + +trait Bar extends IdOf[Int], (X { type Value = String }) + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = "" + +trait Gen: + type T + val x: T + +type IntInst = Gen: + type T = Int + val x: 0 + +trait IntInstTrait extends IntInst + +abstract class IntInstClass extends IntInstTrait, IntInst + +object obj1 extends IntInstTrait: + val x = 0 + +object obj2 extends IntInstClass: + val x = 0 + +def main = + val x: obj1.T = 2 - obj2.x + val y: obj2.T = 2 - obj1.x + + + diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala index 07fe5a31ce5d..2bf7f76f0804 100644 --- a/tests/pos/typeclasses.scala +++ b/tests/pos/typeclasses.scala @@ -1,7 +1,6 @@ -class Common: +//> using options -source future -language:experimental.modularity - // this should go in Predef - infix type at [A <: { type This}, B] = A { type This = B } +class Common: trait Ord: type This @@ -26,41 +25,23 @@ class Common: extension [A](x: This[A]) def flatMap[B](f: A => This[B]): This[B] def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + infix type is[A <: AnyKind, B <: {type This <: AnyKind}] = B { type This = A } + end Common object Instances extends Common: -/* - instance Int: Ord as intOrd with - extension (x: Int) - def compareTo(y: 
Int) = - if x < y then -1 - else if x > y then +1 - else 0 -*/ - given intOrd: Ord with + given intOrd: (Int is Ord) with type This = Int extension (x: Int) def compareTo(y: Int) = if x < y then -1 else if x > y then +1 else 0 -/* - instance List[T: Ord]: Ord as listOrd with - extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs1, y :: ys1) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs1.compareTo(ys1) -*/ - // Proposed short syntax: - // given listOrd[T: Ord as ord]: Ord at T with - given listOrd[T](using ord: Ord { type This = T}): Ord with - type This = List[T] + given listOrd[T](using ord: T is Ord): (List[T] is Ord) with extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -70,32 +51,18 @@ object Instances extends Common: if (fst != 0) fst else xs1.compareTo(ys1) end listOrd -/* - instance List: Monad as listMonad with + given listMonad: (List is Monad) with extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = List(x) -*/ - given listMonad: Monad with - type This[A] = List[A] - extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = - xs.flatMap(f) - def pure[A](x: A): List[A] = - List(x) -/* - type Reader[Ctx] = X =>> Ctx => X - instance Reader[Ctx: _]: Monad as readerMonad with - extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = - ctx => f(r(ctx))(ctx) - def pure[A](x: A): Ctx => A = - ctx => x -*/ + type Reader[Ctx] = [X] =>> Ctx => X - given readerMonad[Ctx]: Monad with - type This[X] = Ctx => X + //given [Ctx] => Reader[Ctx] is Monad as readerMonad: + + given readerMonad[Ctx]: (Reader[Ctx] is Monad) with extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -110,29 +77,17 @@ object Instances extends Common: def second = xs.tail.head def third = xs.tail.tail.head - //Proposed short syntax: - //extension [M: Monad as m, A](xss: M[M[A]]) - // def flatten: M[A] = - // xs.flatMap(identity) - extension [M, A](using m: Monad)(xss: m.This[m.This[A]]) def flatten: m.This[A] = xss.flatMap(identity) - // Proposed short syntax: - //def maximum[T: Ord](xs: List[T]: T = - def maximum[T](xs: List[T])(using Ord at T): T = + def maximum[T](xs: List[T])(using T is Ord): T = xs.reduceLeft((x, y) => if (x < y) y else x) - // Proposed short syntax: - // def descending[T: Ord as asc]: Ord at T = new Ord: - def descending[T](using asc: Ord at T): Ord at T = new Ord: - type This = T + def descending[T](using asc: T is Ord): T is Ord = new: extension (x: T) def compareTo(y: T) = asc.compareTo(y)(x) - // Proposed short syntax: - // def minimum[T: Ord](xs: List[T]) = - def minimum[T](xs: List[T])(using Ord at T) = + def minimum[T](xs: List[T])(using T is Ord) = maximum(xs)(using descending) def test(): Unit = @@ -177,10 +132,10 @@ instance Sheep: Animal with override def talk(): Unit = println(s"$name pauses briefly... $noise") */ +import Instances.is // Implement the `Animal` trait for `Sheep`. 
-given Animal with - type This = Sheep +given (Sheep is Animal) with def apply(name: String) = Sheep(name) extension (self: This) def name: String = self.name From 5189e6854ad1dacc3454542c2f124f5bcb7e2a9c Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 19:11:11 +0200 Subject: [PATCH 013/827] Introduce tracked class parameters For a tracked class parameter we add a refinement in the constructor type that the class member is the same as the parameter. E.g. ```scala class C { type T } class D(tracked val x: C) { type T = x.T } ``` This will generate the constructor type: ```scala (x1: C): D { val x: x1.type } ``` Without `tracked` the refinement would not be added. This can solve several problems with dependent class types where previously we lost track of type dependencies. --- .../src/dotty/tools/dotc/ast/Desugar.scala | 13 +- compiler/src/dotty/tools/dotc/ast/untpd.scala | 2 + .../src/dotty/tools/dotc/core/Flags.scala | 9 +- .../src/dotty/tools/dotc/core/NamerOps.scala | 17 +- .../dotc/core/PatternTypeConstrainer.scala | 9 +- .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../tools/dotc/core/SymDenotations.scala | 12 +- .../src/dotty/tools/dotc/core/TypeUtils.scala | 15 +- .../tools/dotc/core/tasty/TreePickler.scala | 1 + .../tools/dotc/core/tasty/TreeUnpickler.scala | 6 +- .../dotty/tools/dotc/parsing/Parsers.scala | 15 +- .../tools/dotc/printing/PlainPrinter.scala | 2 +- .../dotty/tools/dotc/printing/Printer.scala | 5 +- .../tools/dotc/transform/PostTyper.scala | 16 +- .../src/dotty/tools/dotc/typer/Checking.scala | 13 +- .../src/dotty/tools/dotc/typer/Namer.scala | 55 +++-- .../dotty/tools/dotc/typer/RefChecks.scala | 17 +- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- .../test/dotc/pos-test-pickling.blacklist | 5 + docs/_docs/internals/syntax.md | 2 +- .../reference/experimental/modularity.md | 189 ++++++++++++++++++ docs/sidebar.yml | 1 + project/MiMaFilters.scala | 3 + tasty/src/dotty/tools/tasty/TastyFormat.scala | 5 +- tests/neg/i3964.scala | 12 ++ tests/neg/tracked.check | 50 +++++ tests/neg/tracked.scala | 20 ++ tests/neg/tracked2.scala | 1 + tests/new/tracked-mixin-traits.scala | 16 ++ tests/pos/depclass-1.scala | 19 ++ tests/pos/i3920.scala | 32 +++ tests/pos/i3964.scala | 32 +++ tests/pos/i3964a/Defs_1.scala | 18 ++ tests/pos/i3964a/Uses_2.scala | 16 ++ tests/pos/parsercombinators-expanded.scala | 64 ++++++ tests/pos/parsercombinators-givens-2.scala | 52 +++++ tests/pos/parsercombinators-givens.scala | 54 +++++ tests/run/i3920.scala | 26 +++ 38 files changed, 758 insertions(+), 69 deletions(-) create mode 100644 docs/_docs/reference/experimental/modularity.md create mode 100644 tests/neg/i3964.scala create mode 100644 tests/neg/tracked.check create mode 100644 tests/neg/tracked.scala create mode 100644 tests/neg/tracked2.scala create mode 100644 tests/new/tracked-mixin-traits.scala create mode 100644 tests/pos/depclass-1.scala create mode 100644 tests/pos/i3920.scala create mode 100644 tests/pos/i3964.scala create mode 100644 tests/pos/i3964a/Defs_1.scala create mode 100644 tests/pos/i3964a/Uses_2.scala create mode 100644 tests/pos/parsercombinators-expanded.scala create mode 100644 tests/pos/parsercombinators-givens-2.scala create mode 100644 tests/pos/parsercombinators-givens.scala create mode 100644 tests/run/i3920.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 1801a7fada7c..c3a0c05088cb 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ 
b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -429,13 +429,13 @@ object desugar { private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { var mods = tparam.rawMods if (!keepAnnotations) mods = mods.withAnnotations(Nil) - tparam.withMods(mods & (EmptyFlags | Sealed) | Param) + tparam.withMods(mods & EmptyFlags | Param) } private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { var mods = vparam.rawMods if (!keepAnnotations) mods = mods.withAnnotations(Nil) val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) + vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = @@ -860,9 +860,8 @@ object desugar { // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. DefDef( - className.toTermName, joinParams(constrTparams, defParamss), - classTypeRef, creatorExpr) - .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) + className.toTermName, joinParams(constrTparams, defParamss), classTypeRef, creatorExpr + ) .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) .withSpan(cdef.span) :: Nil } @@ -890,7 +889,9 @@ object desugar { } if mods.isAllOf(Given | Inline | Transparent) then report.error("inline given instances cannot be trasparent", cdef) - val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + var classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + if vparamAccessors.exists(_.mods.is(Tracked)) then + classMods |= Dependent cpy.TypeDef(cdef: TypeDef)( name = className, rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 0dfe52c421d9..91ef462bcf05 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -230,6 +230,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) + case class Tracked()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Tracked) + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 98c57a96a5c0..2bc7610bb0ce 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -377,6 +377,9 @@ object Flags { /** Symbol cannot be found as a member during typer */ val (Invisible @ _, _, _) = newFlags(45, "") + /** Tracked modifier for class parameter / a class with some tracked parameters */ + val (Tracked @ _, _, Dependent @ _) = newFlags(46, "tracked") + // ------------ Flags following this one are not pickled ---------------------------------- /** Symbol is not a member of its owner */ @@ -452,7 +455,7 @@ object Flags { CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open val TermSourceModifierFlags: FlagSet = - CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy + 
CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy | Tracked /** Flags representing modifiers that can appear in trees */ val ModifierFlags: FlagSet = @@ -466,7 +469,7 @@ object Flags { val FromStartFlags: FlagSet = commonFlags( Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessor, Scala2SpecialFlags, MutableOrOpen, Opaque, Touched, JavaStatic, - OuterOrCovariant, LabelOrContravariant, CaseAccessor, + OuterOrCovariant, LabelOrContravariant, CaseAccessor, Tracked, Extension, NonMember, Implicit, Given, Permanent, Synthetic, Exported, SuperParamAliasOrScala2x, Inline, Macro, ConstructorProxy, Invisible) @@ -477,7 +480,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent, Tracked) /** A value that's unstable unless complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 8d096913e285..af03573da4a8 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -16,8 +16,21 @@ object NamerOps: */ def effectiveResultType(ctor: Symbol, paramss: List[List[Symbol]])(using Context): Type = paramss match - case TypeSymbols(tparams) :: _ => ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)) - case _ => ctor.owner.typeRef + case TypeSymbols(tparams) :: rest => + addParamRefinements(ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)), rest) + case _ => + addParamRefinements(ctor.owner.typeRef, paramss) + + /** Given a method with tracked term-parameters `p1, ..., pn`, and result type `R`, add the + * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the term parameter ref + * of the parameter and pi is its name. This matters only under experimental.modularity, + * since wothout it there are no tracked parameters. Parameter refinements are added for + * constructors and given companion methods. + */ + def addParamRefinements(resType: Type, paramss: List[List[Symbol]])(using Context): Type = + paramss.flatten.foldLeft(resType): (rt, param) => + if param.is(Tracked) then RefinedType(rt, param.name, param.termRef) + else rt /** Split dependent class refinements off parent type. Add them to `refinements`, * unless it is null. 
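A minimal sketch (not part of the diffs above) of the refinement that `addParamRefinements` adds to a constructor's result type, assuming `-source:future` and the experimental `modularity` import; the class and member names below are invented for illustration:

```scala
//> using options -source future -language:experimental.modularity

class Keys:
  type Elem

class Store(tracked val keys: Keys):
  def first: keys.Elem = ???

// The primary constructor of Store is typed roughly as
//   (keys1: Keys): Store { val keys: keys1.type }
// so the type member of the argument stays visible after instantiation:
val intKeys: Keys { type Elem = Int } = new Keys { type Elem = Int }
val s = Store(intKeys)
val n: Int = s.first   // s.keys.Elem is intKeys.Elem, i.e. Int
```

Without `tracked`, the constructor result would be plain `Store`, and `s.first` would only have the type `?.Elem` for an unknown `Keys` value.
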
diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 6d6a47cf6a1e..9baf0c40a80b 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -88,11 +88,6 @@ trait PatternTypeConstrainer { self: TypeComparer => } } - def stripRefinement(tp: Type): Type = tp match { - case tp: RefinedOrRecType => stripRefinement(tp.parent) - case tp => tp - } - def tryConstrainSimplePatternType(pat: Type, scrut: Type) = { val patCls = pat.classSymbol val scrCls = scrut.classSymbol @@ -182,14 +177,14 @@ trait PatternTypeConstrainer { self: TypeComparer => case AndType(scrut1, scrut2) => constrainPatternType(pat, scrut1) && constrainPatternType(pat, scrut2) case scrut: RefinedOrRecType => - constrainPatternType(pat, stripRefinement(scrut)) + constrainPatternType(pat, scrut.stripRefinement) case scrut => dealiasDropNonmoduleRefs(pat) match { case OrType(pat1, pat2) => either(constrainPatternType(pat1, scrut), constrainPatternType(pat2, scrut)) case AndType(pat1, pat2) => constrainPatternType(pat1, scrut) && constrainPatternType(pat2, scrut) case pat: RefinedOrRecType => - constrainPatternType(stripRefinement(pat), scrut) + constrainPatternType(pat.stripRefinement, scrut) case pat => tryConstrainSimplePatternType(pat, scrut) || classesMayBeCompatible && constrainUpcasted(scrut) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 62d7afa22ed2..7545cf5c4ba1 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -629,6 +629,7 @@ object StdNames { val toString_ : N = "toString" val toTypeConstructor: N = "toTypeConstructor" val tpe : N = "tpe" + val tracked: N = "tracked" val transparent : N = "transparent" val tree : N = "tree" val true_ : N = "true" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 09d45dbdf06b..49c466f0bfd5 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1187,21 +1187,25 @@ object SymDenotations { final def isExtensibleClass(using Context): Boolean = isClass && !isOneOf(FinalOrModuleClass) && !isAnonymousClass - /** A symbol is effectively final if it cannot be overridden in a subclass */ + /** A symbol is effectively final if it cannot be overridden */ final def isEffectivelyFinal(using Context): Boolean = isOneOf(EffectivelyFinalFlags) || is(Inline, butNot = Deferred) || is(JavaDefinedVal, butNot = Method) || isConstructor - || !owner.isExtensibleClass + || !owner.isExtensibleClass && !is(Deferred) + // Deferred symbols can arise through parent refinements. + // For them, the overriding relationship reverses anyway, so + // being in a final class does not mean the symbol cannot be + // implemented concretely in a superclass. /** A class is effectively sealed if has the `final` or `sealed` modifier, or it * is defined in Scala 3 and is neither abstract nor open. 
*/ final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) - || isClass && (!isOneOf(EffectivelyOpenFlags) - || isLocalToCompilationUnit) + || isClass + && (!isOneOf(EffectivelyOpenFlags) || isLocalToCompilationUnit) final def isLocalToCompilationUnit(using Context): Boolean = is(Private) diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index d4be03e9aae4..dd881bb1adf6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -7,12 +7,13 @@ import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* import Names.{Name, TermName} import Constants.Constant -class TypeUtils { +import Names.Name +class TypeUtils: /** A decorator that provides methods on types * that are needed in the transformer pipeline. */ - extension (self: Type) { + extension (self: Type) def isErasedValueType(using Context): Boolean = self.isInstanceOf[ErasedValueType] @@ -178,5 +179,11 @@ class TypeUtils { def isThisTypeOf(cls: Symbol)(using Context) = self match case self: Types.ThisType => self.cls == cls case _ => false - } -} + + /** Strip all outer refinements off this type */ + def stripRefinement: Type = self match + case self: RefinedOrRecType => self.parent.stripRefinement + case seld => self + +end TypeUtils + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 186e039c4d74..8d1eca8fb5f0 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -867,6 +867,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { if (flags.is(Exported)) writeModTag(EXPORTED) if (flags.is(Given)) writeModTag(GIVEN) if (flags.is(Implicit)) writeModTag(IMPLICIT) + if (flags.is(Tracked)) writeModTag(TRACKED) if (isTerm) { if (flags.is(Lazy, butNot = Module)) writeModTag(LAZY) if (flags.is(AbsOverride)) { writeModTag(ABSTRACT); writeModTag(OVERRIDE) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index f6fa9faf0114..15f58956fbe3 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -31,7 +31,8 @@ import util.{SourceFile, Property} import ast.{Trees, tpd, untpd} import Trees.* import Decorators.* -import dotty.tools.dotc.quoted.QuotePatterns +import config.Feature +import quoted.QuotePatterns import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer.* @@ -755,6 +756,7 @@ class TreeUnpickler(reader: TastyReader, case INVISIBLE => addFlag(Invisible) case TRANSPARENT => addFlag(Transparent) case INFIX => addFlag(Infix) + case TRACKED => addFlag(Tracked) case PRIVATEqualified => readByte() privateWithin = readWithin @@ -922,6 +924,8 @@ class TreeUnpickler(reader: TastyReader, val resType = if name == nme.CONSTRUCTOR then effectiveResultType(sym, paramss) + else if sym.isAllOf(Given | Method) && Feature.enabled(Feature.modularity) then + addParamRefinements(tpt.tpe, paramss) else tpt.tpe sym.info = methodType(paramss, resType) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 8d5c50d6d608..94814457523e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3189,6 
+3189,7 @@ object Parsers { case nme.open => Mod.Open() case nme.transparent => Mod.Transparent() case nme.infix => Mod.Infix() + case nme.tracked => Mod.Tracked() } } @@ -3255,7 +3256,8 @@ object Parsers { * | AccessModifier * | override * | opaque - * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | inline | transparent | infix | erased + * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | erased | + * inline | transparent | infix */ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { @tailrec @@ -3408,8 +3410,8 @@ object Parsers { /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} - * ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’)] Param - * + * ClsParam ::= {Annotation} + * [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param * TypelessClause ::= DefTermParamClause * | UsingParamClause * @@ -3445,6 +3447,8 @@ object Parsers { if isErasedKw then mods = addModifier(mods) if paramOwner.isClass then + if isIdent(nme.tracked) && in.featureEnabled(Feature.modularity) && !in.lookahead.isColon then + mods = addModifier(mods) mods = addFlag(modifiers(start = mods), ParamAccessor) mods = if in.token == VAL then @@ -3516,7 +3520,8 @@ object Parsers { val isParams = !impliedMods.is(Given) || startParamTokens.contains(in.token) - || isIdent && (in.name == nme.inline || in.lookahead.isColon) + || isIdent + && (in.name == nme.inline || in.name == nme.tracked || in.lookahead.isColon) (mods, isParams) (if isParams then commaSeparated(() => param()) else contextTypes(paramOwner, numLeadParams, impliedMods)) match { @@ -4104,7 +4109,7 @@ object Parsers { def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = paramss.nestedMap: param => if !param.mods.isAllOf(PrivateLocal) then - syntaxError(em"method parameter ${param.name} may not be `a val`", param.span) + syntaxError(em"method parameter ${param.name} may not be a `val`", param.span) param.withMods(param.mods &~ (AccessFlags | ParamAccessor | Mutable) | Param) .asInstanceOf[List[ParamClause]] diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 87f7c88e0407..5808707326a0 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -113,7 +113,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Text = + def toTextRefinement(rt: RefinedType): Text = val keyword = rt.refinedInfo match { case _: ExprType | _: MethodOrPoly => "def " case _: TypeBounds => "type " diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 8687925ed5fb..297dc31ea94a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -4,7 +4,7 @@ package printing import core.* import Texts.*, ast.Trees.* -import Types.{Type, SingletonType, LambdaParam, NamedType}, +import Types.{Type, SingletonType, LambdaParam, NamedType, RefinedType}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context import 
typer.Implicits.* @@ -104,6 +104,9 @@ abstract class Printer { /** Textual representation of a prefix of some reference, ending in `.` or `#` */ def toTextPrefixOf(tp: NamedType): Text + /** textual representation of a refinement, with no enclosing {...} */ + def toTextRefinement(rt: RefinedType): Text + /** Textual representation of a reference in a capture set */ def toTextCaptureRef(tp: Type): Text diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index d107de31829f..954b08c24ac1 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -369,11 +369,15 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => case Select(nu: New, nme.CONSTRUCTOR) if isCheckable(nu) => // need to check instantiability here, because the type of the New itself // might be a type constructor. - ctx.typer.checkClassType(tree.tpe, tree.srcPos, traitReq = false, stablePrefixReq = true) + def checkClassType(tpe: Type, stablePrefixReq: Boolean) = + ctx.typer.checkClassType(tpe, tree.srcPos, + traitReq = false, stablePrefixReq = stablePrefixReq, + refinementOK = Feature.enabled(Feature.modularity)) + checkClassType(tree.tpe, true) if !nu.tpe.isLambdaSub then // Check the constructor type as well; it could be an illegal singleton type // which would not be reflected as `tree.tpe` - ctx.typer.checkClassType(nu.tpe, tree.srcPos, traitReq = false, stablePrefixReq = false) + checkClassType(nu.tpe, false) Checking.checkInstantiable(tree.tpe, nu.tpe, nu.srcPos) withNoCheckNews(nu :: Nil)(app1) case _ => @@ -448,8 +452,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => // Constructor parameters are in scope when typing a parent. // While they can safely appear in a parent tree, to preserve // soundness we need to ensure they don't appear in a parent - // type (#16270). - val illegalRefs = parent.tpe.namedPartsWith(p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym)) + // type (#16270). We can strip any refinement of a parent type since + // these refinements are split off from the parent type constructor + // application `parent` in Namer and don't show up as parent types + // of the class. + val illegalRefs = parent.tpe.dealias.stripRefinement.namedPartsWith: + p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym) if illegalRefs.nonEmpty then report.error( em"The type of a class parent cannot refer to constructor parameters, but ${parent.tpe} refers to ${illegalRefs.map(_.name.show).mkString(",")}", parent.srcPos) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 7745c620312c..5839ec1766af 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -33,8 +33,7 @@ import Applications.UnapplyArgs import Inferencing.isFullyDefined import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern} import transform.ValueClasses.underlyingOfValueClass -import config.Feature -import config.Feature.sourceVersion +import config.Feature, Feature.{sourceVersion, modularity} import config.SourceVersion.* import config.MigrationVersion import printing.Formatting.hlAsKeyword @@ -198,7 +197,7 @@ object Checking { * and that the instance conforms to the self type of the created class. 
*/ def checkInstantiable(tp: Type, srcTp: Type, pos: SrcPos)(using Context): Unit = - tp.underlyingClassRef(refinementOK = false) match + tp.underlyingClassRef(refinementOK = Feature.enabled(modularity)) match case tref: TypeRef => val cls = tref.symbol if (cls.isOneOf(AbstractOrTrait)) { @@ -601,6 +600,7 @@ object Checking { // The issue with `erased inline` is that the erased semantics get lost // as the code is inlined and the reference is removed before the erased usage check. checkCombination(Erased, Inline) + checkNoConflict(Tracked, Mutable, em"mutable variables may not be `tracked`") checkNoConflict(Lazy, ParamAccessor, em"parameter may not be `lazy`") } @@ -1067,8 +1067,8 @@ trait Checking { * check that class prefix is stable. * @return `tp` itself if it is a class or trait ref, ObjectType if not. */ - def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = - tp.underlyingClassRef(refinementOK = false) match { + def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean = false)(using Context): Type = + tp.underlyingClassRef(refinementOK) match case tref: TypeRef => if (traitReq && !tref.symbol.is(Trait)) report.error(TraitIsExpected(tref.symbol), pos) if (stablePrefixReq && ctx.phase <= refchecksPhase) checkStable(tref.prefix, pos, "class prefix") @@ -1076,7 +1076,6 @@ trait Checking { case _ => report.error(NotClassType(tp), pos) defn.ObjectType - } /** If `sym` is an old-style implicit conversion, check that implicit conversions are enabled. * @pre sym.is(GivenOrImplicit) @@ -1626,7 +1625,7 @@ trait NoChecking extends ReChecking { override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = () override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () - override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp + override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () override def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = () override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 530423fd2613..e48c2fdf5066 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -122,7 +122,8 @@ class Namer { typer: Typer => /** Record `sym` as the symbol defined by `tree` */ def recordSym(sym: Symbol, tree: Tree)(using Context): Symbol = { - for (refs <- tree.removeAttachment(References); ref <- refs) ref.watching(sym) + for refs <- tree.removeAttachment(References); ref <- refs do + ref.watching(sym) tree.pushAttachment(SymOfTree, sym) sym } @@ -295,12 +296,15 @@ class Namer { typer: Typer => createOrRefine[Symbol](tree, name, flags, ctx.owner, _ => info, (fs, _, pwithin) => newSymbol(ctx.owner, name, fs, info, pwithin, tree.nameSpan)) case tree: Import => - recordSym(newImportSymbol(ctx.owner, Completer(tree)(ctx), tree.span), tree) + recordSym(newImportSym(tree), tree) case _ => NoSymbol } } + private 
def newImportSym(imp: Import)(using Context): Symbol = + newImportSymbol(ctx.owner, Completer(imp)(ctx), imp.span) + /** If `sym` exists, enter it in effective scope. Check that * package members are not entered twice in the same run. */ @@ -525,11 +529,9 @@ class Namer { typer: Typer => } /** Transfer all references to `from` to `to` */ - def transferReferences(from: ValDef, to: ValDef): Unit = { - val fromRefs = from.removeAttachment(References).getOrElse(Nil) - val toRefs = to.removeAttachment(References).getOrElse(Nil) - to.putAttachment(References, fromRefs ++ toRefs) - } + def transferReferences(from: ValDef, to: ValDef): Unit = + for ref <- from.removeAttachment(References).getOrElse(Nil) do + ref.watching(to) /** Merge the module class `modCls` in the expanded tree of `mdef` with the * body and derived clause of the synthetic module class `fromCls`. @@ -707,7 +709,18 @@ class Namer { typer: Typer => enterSymbol(companion) end addAbsentCompanions - stats.foreach(expand) + /** Expand each statement, keeping track of language imports in the context. This is + * necessary since desugaring might depend on language imports. + */ + def expandTopLevel(stats: List[Tree])(using Context): Unit = stats match + case (imp @ Import(qual, _)) :: stats1 if untpd.languageImport(qual).isDefined => + expandTopLevel(stats1)(using ctx.importContext(imp, newImportSym(imp))) + case stat :: stats1 => + expand(stat) + expandTopLevel(stats1) + case Nil => + + expandTopLevel(stats) mergeCompanionDefs() val ctxWithStats = stats.foldLeft(ctx)((ctx, stat) => indexExpanded(stat)(using ctx)) inContext(ctxWithStats) { @@ -1530,8 +1543,9 @@ class Namer { typer: Typer => core match case Select(New(tpt), nme.CONSTRUCTOR) => val targs1 = targs map (typedAheadType(_)) - val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes - if (ptype.typeParams.isEmpty) ptype + val ptype = typedAheadType(tpt).tpe.appliedTo(targs1.tpes) + if ptype.typeParams.isEmpty && !ptype.dealias.typeSymbol.is(Dependent) then + ptype else if (denot.is(ModuleClass) && denot.sourceModule.isOneOf(GivenOrImplicit)) missingType(denot.symbol, "parent ")(using creationContext) @@ -1612,7 +1626,8 @@ class Namer { typer: Typer => for (name, tp) <- refinements do if decls.lookupEntry(name) == null then val flags = tp match - case tp: MethodOrPoly => Method | Synthetic | Deferred + case tp: MethodOrPoly => Method | Synthetic | Deferred | Tracked + case _ if name.isTermName => Synthetic | Deferred | Tracked case _ => Synthetic | Deferred refinedSyms += newSymbol(cls, name, flags, tp, coord = original.rhs.span.startPos).entered if refinedSyms.nonEmpty then @@ -1660,11 +1675,9 @@ class Namer { typer: Typer => val parentTypes = defn.adjustForTuple(cls, cls.typeParams, defn.adjustForBoxedUnit(cls, - addUsingTraits( - locally: - val isJava = ctx.isJava - ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) - ) + addUsingTraits: + val isJava = ctx.isJava + ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) ) ) typr.println(i"completing $denot, parents = $parents%, %, parentTypes = $parentTypes%, %") @@ -1824,7 +1837,7 @@ class Namer { typer: Typer => } /** The type signature of a DefDef with given symbol */ - def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = { + def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = // Beware: ddef.name need not match sym.name if sym was freshened! 
val isConstructor = sym.name == nme.CONSTRUCTOR @@ -1863,13 +1876,19 @@ class Namer { typer: Typer => def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) + + def wrapRefinedMethType(restpe: Type): Type = + wrapMethType(addParamRefinements(restpe, paramSymss)) + if isConstructor then // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) wrapMethType(effectiveResultType(sym, paramSymss)) + else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then + valOrDefDefSig(ddef, sym, paramSymss, wrapRefinedMethType) else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) - } + end defDefSig def inferredResultType( mdef: ValOrDefDef, diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 2bf4b959ebca..7cd1d67e9aa5 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -610,8 +610,13 @@ object RefChecks { overrideError("is not inline, cannot implement an inline method") else if (other.isScala2Macro && !member.isScala2Macro) // (1.11) overrideError("cannot be used here - only Scala-2 macros can override Scala-2 macros") - else if (!compatTypes(memberTp(self), otherTp(self)) && - !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf))) + else if !compatTypes(memberTp(self), otherTp(self)) + && !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf)) + && !member.is(Tracked) + // Tracked members need to be excluded since they are abstract type members with + // singleton types. Concrete overrides usually have a wider type. + // TODO: Should we exclude all refinements inherited from parents? + then overrideError("has incompatible type", compareTypes = true) else if (member.targetName != other.targetName) if (other.targetName != other.name) @@ -620,7 +625,9 @@ object RefChecks { overrideError("cannot have a @targetName annotation since external names would be different") else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then overrideError("has different occurrences of `into` modifiers", compareTypes = true) - else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.12) + else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) + && !member.is(Tracked) + then // (1.12) report.errorOrMigrationWarning( em"cannot override val parameter ${other.showLocated}", member.srcPos, @@ -670,6 +677,10 @@ object RefChecks { mbr.isType || mbr.isSuperAccessor // not yet synthesized || mbr.is(JavaDefined) && hasJavaErasedOverriding(mbr) + || mbr.is(Tracked) + // Tracked members correspond to existing val parameters, so they don't + // count as deferred. The val parameter could not implement the tracked + // refinement since it usually has a wider type. 
def isImplemented(mbr: Symbol) = val mbrDenot = mbr.asSeenFrom(clazz.thisType) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index c5b6faf455f7..8f2b7ce95785 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4416,7 +4416,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = false) + val tycon = ctorResultType.underlyingClassRef(refinementOK = true) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index a856a5b84d92..ad9befa72f5f 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -124,3 +124,8 @@ i19955a.scala i19955b.scala i20053b.scala +# alias types at different levels of dereferencing +parsercombinators-givens.scala +parsercombinators-givens-2.scala + + diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 8cc070d5dbc5..c711d5f63db8 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -372,7 +372,7 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var - [{Modifier} (‘val’ | ‘var’)] Param + [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent DefParamClause ::= DefTypeParamClause diff --git a/docs/_docs/reference/experimental/modularity.md b/docs/_docs/reference/experimental/modularity.md new file mode 100644 index 000000000000..2062c4d5eda2 --- /dev/null +++ b/docs/_docs/reference/experimental/modularity.md @@ -0,0 +1,189 @@ +--- +layout: doc-page +title: "Modularity Improvements" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/modularity.html +--- + +# Modularity Improvements + +Martin Odersky, 7.1.2024 + +Scala is a language in the SML tradition, in the sense that it has +abstract and alias types as members of modules (which in Scala take the form of objects and classes). This leads to a simple dependently +typed system, where dependencies in types are on paths instead of full terms. + +So far, some key ingredients were lacking which meant that module composition with functors is harder in Scala than in SML. In particular, one often needs to resort the infamous `Aux` pattern that lifts type members into type parameters so that they can be tracked across class instantiations. This makes modular, dependently typed programs +much harder to write and read, and makes such programming only accessible to experts. + +In this note I propose some small changes to Scala's dependent typing that makes +modular programming much more straightforward. + +The suggested improvements have been implemented and are available +in source version `future` if the additional experimental language import `modularity` is present. 
For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +## Tracked Parameters + +Scala is dependently typed for functions, but unfortunately not for classes. +For instance, consider the following definitions: + +```scala + class C: + type T + ... + + def f(x: C): x.T = ... + + val y: C { type T = Int } +``` +Then `f(y)` would have type `Int`, since the compiler will substitute the +concrete parameter reference `y` for the formal parameter `x` in the result +type of `f`, and `y.T = Int` + +However, if we use a class `F` instead of a method `f`, things go wrong. + +```scala + class F(val x: C): + val result: x.T = ... +``` +Now `F(y).result` would not have type `Int` but instead the rather less useful type `?1.T` where `?1` is a so-called skolem constant of type `C` (a skolem represents an unknown value). + +This shortcoming means that classes cannot really be used for advanced +modularity constructs that rely on dependent typing. + +**Proposal:** Introduce a `tracked` modifier that can be added to +a `val` parameter of a class or trait. For every tracked class parameter of a class `C`, add a refinement in the constructor type of `C` that the class member is the same as the parameter. + +**Example:** In the setting above, assume `F` is instead declared like this: +```scala + class F(tracked val x: C): + val result: x.T = ... +``` +Then the constructor `F` would get roughly the following type: +```scala + F(x1: C): F { val x: x1.type } +``` +_Aside:_ More precisely, both parameter and refinement would apply to the same name `x` but the refinement still refers to the parameter. We unfortunately can't express that in source, however, so we chose the new name `x1` for the parameter in the explanation. + +With the new constructor type, the expression `F(y).result` would now have the type `Int`, as hoped for. The reasoning to get there is as follows: + + - The result of the constructor `F(y)` has type `F { val x: y.type }` by + the standard typing for dependent functions. + - The type of `result` inside `F` is `x.T`. + - Hence, the type of `result` as a member of `F { val x: y.type }` is `y.T`, which is equal to `Int`. + +The addition of tracked parameters makes classes suitable as a fundamental modularity construct supporting dependent typing. Here is an example, taken from issue #3920: + +```scala +trait Ordering: + type T + def compare(t1:T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) +``` +This works as it should now. Without the addition of `tracked` to the +parameter of `SetFunctor` typechecking would immediately lose track of +the element type `T` after an `add`, and would therefore fail. + +**Syntax Change** + +``` +ClsParam ::= {Annotation} [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param +``` + +The (soft) `tracked` modifier is only allowed for `val` parameters of classes. + +**Discussion** + +Since `tracked` is so useful, why not assume it by default? 
First, `tracked` makes sense only for `val` parameters. If a class parameter is not also a field declared using `val` then there's nothing to refine in the constructor result type. One could think of at least making all `val` parameters tracked by default, but that would be a backwards incompatible change. For instance, the following code would break: + +```scala +case class Foo(x: Int) +var foo = Foo(1) +if someCondition then foo = Foo(2) +``` +If we assume `tracked` for parameter `x` (which is implicitly a `val`), +then `foo` would get inferred type `Foo { val x: 1 }`, so it could not +be reassigned to a value of type `Foo { val x: 2 }` on the next line. + +Another approach might be to assume `tracked` for a `val` parameter `x` +only if the class refers to a type member of `x`. But it turns out that this +scheme is unimplementable since it would quickly lead to cyclic references +when typechecking recursive class graphs. So an explicit `tracked` looks like the best available option. + +## Allow Class Parents to be Refined Types + +Since `tracked` parameters create refinements in constructor types, +it is now possible that a class has a parent that is a refined type. +Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing so +admit such types. + +**Proposal** Allow refined types as parent types of classes. All refinements that are inherited in this way become synthetic members of the class. + +**Example** + +```scala +class C: + type T + def m(): T + +type R = C: + type T = Int + def m(): 22 + +class D extends R: + def next(): D +``` +This code now compiles. The definition of `D` is expanded as follows: + +```scala +class D extends C: + def next(): D + /*synthetic*/ type T = Int + /*synthetic*/ def m(): 22 +``` +Note how class refinements are moved from the parent constructor of `D` into the body of class `D` itself. + +This change does not entail a syntax change. Syntactically, parent types cannot be refined types themselves. So the following would be illegal: +```scala +class D extends C { type T = Int; def m(): 22 }: // error + def next(): D +``` +If a refined type should be used directly as a parent type of a class, it needs to come in parentheses: +```scala +class D extends (C { type T = Int; def m(): 22 }) // ok + def next(): D +``` + +## A Small Relaxation To Export Rules + +The rules for export forwarders are changed as follows. + +Previously, all export forwarders were declared `final`. Now, only term members are declared `final`. Type aliases are left aside. + +This makes it possible to export the same type member into several traits and then mix these traits in the same class. The test file `tests/pos/typeclass-aggregates.scala` shows why this is essential if we want to combine multiple givens with type members in a new given that aggregates all these givens in an intersection type. + +The change does not lose safety since different type aliases would in any case lead to uninstantiatable classes. 
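A small sketch of what the relaxed export rule is meant to allow (the names below are invented for illustration; the real motivating case is `tests/pos/typeclass-aggregates.scala`):

```scala
trait Decoder:
  type Value

trait Encoder:
  type Value

object IntDecoder extends Decoder { type Value = Int }
object IntEncoder extends Encoder { type Value = Int }

trait HasDecoder:
  export IntDecoder.Value   // forwarder is now a plain, non-final type alias

trait HasEncoder:
  export IntEncoder.Value   // exports a member with the same name again

// Mixing both traits in one class should now be accepted, since neither
// forwarder is final and both aliases are Int; with differing aliases the
// combined class would simply be uninstantiable, as noted above.
class Codec extends HasDecoder, HasEncoder
```
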
\ No newline at end of file diff --git a/docs/sidebar.yml b/docs/sidebar.yml index b38e057f06b1..160698f1f44b 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -155,6 +155,7 @@ subsection: - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md - page: reference/experimental/named-tuples.md + - page: reference/experimental/modularity.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 40a3918b5943..3b28733226a0 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -18,6 +18,8 @@ object MiMaFilters { ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromIArray"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), ), // Additions since last LTS @@ -95,6 +97,7 @@ object MiMaFilters { // Additions that require a new minor version of tasty core Build.mimaPreviousDottyVersion -> Seq( ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype") + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.TRACKED"), ), // Additions since last LTS diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 164243d3b469..c29ea99bcd8d 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -228,6 +228,7 @@ Standard-Section: "ASTs" TopLevelStat* EXPORTED -- An export forwarder OPEN -- an open class INVISIBLE -- invisible during typechecking + TRACKED -- a tracked class parameter / a dependent class Annotation Variance = STABLE -- invariant @@ -509,6 +510,7 @@ object TastyFormat { final val INVISIBLE = 44 final val EMPTYCLAUSE = 45 final val SPLITCLAUSE = 46 + final val TRACKED = 47 // Tree Cat. 
2: tag Nat final val firstNatTreeTag = SHAREDterm @@ -700,7 +702,8 @@ object TastyFormat { | INVISIBLE | ANNOTATION | PRIVATEqualified - | PROTECTEDqualified => true + | PROTECTEDqualified + | TRACKED => true case _ => false } diff --git a/tests/neg/i3964.scala b/tests/neg/i3964.scala new file mode 100644 index 000000000000..eaf3953bc230 --- /dev/null +++ b/tests/neg/i3964.scala @@ -0,0 +1,12 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test1: + + abstract class Bar { val x: Animal } + val bar: Bar { val x: Cat } = new Bar { val x = new Cat } // error, but should work + + trait Foo { val x: Animal } + val foo: Foo { val x: Cat } = new Foo { val x = new Cat } // error, but should work diff --git a/tests/neg/tracked.check b/tests/neg/tracked.check new file mode 100644 index 000000000000..ae734e7aa0b4 --- /dev/null +++ b/tests/neg/tracked.check @@ -0,0 +1,50 @@ +-- Error: tests/neg/tracked.scala:2:16 --------------------------------------------------------------------------------- +2 |class C(tracked x: Int) // error + | ^ + | `val` or `var` expected +-- [E040] Syntax Error: tests/neg/tracked.scala:7:18 ------------------------------------------------------------------- +7 | def foo(tracked a: Int) = // error + | ^ + | ':' expected, but identifier found +-- Error: tests/neg/tracked.scala:8:12 --------------------------------------------------------------------------------- +8 | tracked val b: Int = 2 // error + | ^^^ + | end of statement expected but 'val' found +-- Error: tests/neg/tracked.scala:11:10 -------------------------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^ + | end of statement expected but 'object' found +-- Error: tests/neg/tracked.scala:14:10 -------------------------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^ + | end of statement expected but 'class' found +-- Error: tests/neg/tracked.scala:17:10 -------------------------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^ + | end of statement expected but 'type' found +-- Error: tests/neg/tracked.scala:20:29 -------------------------------------------------------------------------------- +20 | given g2(using tracked val x: Int): C = C(x) // error + | ^^^^^^^^^^^^^^^^^^ + | method parameter x may not be a `val` +-- Error: tests/neg/tracked.scala:4:21 --------------------------------------------------------------------------------- +4 |class C2(tracked var x: Int) // error + | ^ + | mutable variables may not be `tracked` +-- [E006] Not Found Error: tests/neg/tracked.scala:11:2 ---------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:14:2 ---------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:17:2 ---------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/tracked.scala 
b/tests/neg/tracked.scala new file mode 100644 index 000000000000..8d315a7b89ac --- /dev/null +++ b/tests/neg/tracked.scala @@ -0,0 +1,20 @@ +//> using options -source future -language:experimental.modularity +class C(tracked x: Int) // error + +class C2(tracked var x: Int) // error + +object A: + def foo(tracked a: Int) = // error + tracked val b: Int = 2 // error + +object B: + tracked object Foo // error // error + +object C: + tracked class D // error // error + +object D: + tracked type T = Int // error // error + +object E: + given g2(using tracked val x: Int): C = C(x) // error diff --git a/tests/neg/tracked2.scala b/tests/neg/tracked2.scala new file mode 100644 index 000000000000..2e6fa8cf6045 --- /dev/null +++ b/tests/neg/tracked2.scala @@ -0,0 +1 @@ +class C(tracked val x: Int) // error diff --git a/tests/new/tracked-mixin-traits.scala b/tests/new/tracked-mixin-traits.scala new file mode 100644 index 000000000000..21d890d44f42 --- /dev/null +++ b/tests/new/tracked-mixin-traits.scala @@ -0,0 +1,16 @@ +trait A: + type T +object a extends A: + type T = Int + +trait B(tracked val b: A): + type T = b.T + +trait C(tracked val c: A): + type T = c.T + +class D extends B(a), C(a): + val x: T = 2 + + + diff --git a/tests/pos/depclass-1.scala b/tests/pos/depclass-1.scala new file mode 100644 index 000000000000..38daef85ae98 --- /dev/null +++ b/tests/pos/depclass-1.scala @@ -0,0 +1,19 @@ +//> using options -source future -language:experimental.modularity +class A(tracked val source: String) + +class B(x: Int, tracked val source1: String) extends A(source1) + +class C(tracked val source2: String) extends B(1, source2) + +//class D(source1: String) extends C(source1) +val x = C("hello") +val _: A{ val source: "hello" } = x + +class Vec[Elem](tracked val size: Int) +class Vec8 extends Vec[Float](8) + +val v = Vec[Float](10) +val v2 = Vec8() +val xx: 10 = v.size +val x2: 8 = v2.size + diff --git a/tests/pos/i3920.scala b/tests/pos/i3920.scala new file mode 100644 index 000000000000..6cd74187098f --- /dev/null +++ b/tests/pos/i3920.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Ordering { + type T + def compare(t1:T, t2: T): Int +} + +class SetFunctor(tracked val ord: Ordering) { + type Set = List[ord.T] + def empty: Set = Nil + + implicit class helper(s: Set) { + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def member(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + } +} + +object Test { + val orderInt = new Ordering { + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + } + + val IntSet = new SetFunctor(orderInt) + import IntSet.* + + def main(args: Array[String]) = { + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.member(7)) + assert(set.member(8)) + } +} \ No newline at end of file diff --git a/tests/pos/i3964.scala b/tests/pos/i3964.scala new file mode 100644 index 000000000000..42412b910899 --- /dev/null +++ b/tests/pos/i3964.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +object Test3: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) + class Lst8 
extends Lst(8) + + val v8a: Vec { val size: 8 } = new Vec8 + val v8b: Vec { val size: 8 } = new Vec(8) {} + + val l8a: Lst { val size: 8 } = new Lst8 + val l8b: Lst { val size: 8 } = new Lst(8) {} + + class VecN(tracked val n: Int) extends Vec(n) + class Vec9 extends VecN(9) + val v9a = VecN(9) + val _: Vec { val size: 9 } = v9a + val v9b = Vec9() + val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/i3964a/Defs_1.scala b/tests/pos/i3964a/Defs_1.scala new file mode 100644 index 000000000000..7dcc89f7003e --- /dev/null +++ b/tests/pos/i3964a/Defs_1.scala @@ -0,0 +1,18 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +package coll: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) \ No newline at end of file diff --git a/tests/pos/i3964a/Uses_2.scala b/tests/pos/i3964a/Uses_2.scala new file mode 100644 index 000000000000..9d1b6ebaa58b --- /dev/null +++ b/tests/pos/i3964a/Uses_2.scala @@ -0,0 +1,16 @@ +//> using options -source future -language:experimental.modularity +import coll.* +class Lst8 extends Lst(8) + +val v8a: Vec { val size: 8 } = new Vec8 +val v8b: Vec { val size: 8 } = new Vec(8) {} + +val l8a: Lst { val size: 8 } = new Lst8 +val l8b: Lst { val size: 8 } = new Lst(8) {} + +class VecN(tracked val n: Int) extends Vec(n) +class Vec9 extends VecN(9) +val v9a = VecN(9) +val _: Vec { val size: 9 } = v9a +val v9b = Vec9() +val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/parsercombinators-expanded.scala b/tests/pos/parsercombinators-expanded.scala new file mode 100644 index 000000000000..cf8137bfe8eb --- /dev/null +++ b/tests/pos/parsercombinators-expanded.scala @@ -0,0 +1,64 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +object test: + + class apply[C, E] extends Combinator[Apply[C, E]]: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + + def apply[C, E]: apply[C, E] = new apply[C, E] + + class combine[A, B]( + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context} + ) extends Combinator[Combine[A, B]]: + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + + def combine[A, B]( + _f: Combinator[A], + _s: Combinator[B] { type Context = _f.Context} + ) = new combine[A, B](_f, _s) + // cast is needed since the type of new combine[A, B](_f, _s) + // drops the required refinement. 
+ + extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + + @main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val c = combine( + apply[mutable.ListBuffer[Int], Int], + apply[mutable.ListBuffer[Int], Int] + ) + val r = c.parse(m)(stream) // was type mismatch, now OK + val rc: Option[(Int, Int)] = r + } diff --git a/tests/pos/parsercombinators-givens-2.scala b/tests/pos/parsercombinators-givens-2.scala new file mode 100644 index 000000000000..8349d69a30af --- /dev/null +++ b/tests/pos/parsercombinators-givens-2.scala @@ -0,0 +1,52 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B, C](using + f: Combinator[A] { type Context = C }, + s: Combinator[B] { type Context = C } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? + } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // works, but Element type is not resolved correctly +} diff --git a/tests/pos/parsercombinators-givens.scala b/tests/pos/parsercombinators-givens.scala new file mode 100644 index 000000000000..5b5588c93840 --- /dev/null +++ b/tests/pos/parsercombinators-givens.scala @@ -0,0 +1,54 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B](using + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? 
+ } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/run/i3920.scala b/tests/run/i3920.scala new file mode 100644 index 000000000000..c66fd8908976 --- /dev/null +++ b/tests/run/i3920.scala @@ -0,0 +1,26 @@ +//> using options -source future -language:experimental.modularity +trait Ordering: + type T + def compare(t1:T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) \ No newline at end of file From ea3c688b94d9982cceda7b63969cd7e2a1887a46 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 19:44:35 +0200 Subject: [PATCH 014/827] Make explicit arguments for context bounds an error from 3.5 --- compiler/src/dotty/tools/dotc/typer/ReTyper.scala | 1 + compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 +++ tests/warn/context-bounds-migration.scala | 9 +++++++++ 3 files changed, 13 insertions(+) create mode 100644 tests/warn/context-bounds-migration.scala diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 9741a366da89..7a5c838848ac 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -182,4 +182,5 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override protected def checkEqualityEvidence(tree: tpd.Tree, pt: Type)(using Context): Unit = () override protected def matchingApply(methType: MethodOrPoly, pt: FunProto)(using Context): Boolean = true override protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = promote(call) + override protected def migrate[T](migration: => T, disabled: => T = ()): T = disabled } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 8f2b7ce95785..17a2cba25019 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -183,6 +183,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Overridden in derived typers def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel) + // Overridden to do nothing in derived typers + protected def migrate[T](migration: => T, disabled: => T = ()): T = migration + /** Find the type of an identifier with given `name` in given context `ctx`. 
* @param name the name of the identifier * @param pt the expected type diff --git a/tests/warn/context-bounds-migration.scala b/tests/warn/context-bounds-migration.scala new file mode 100644 index 000000000000..cdd3eca62b5c --- /dev/null +++ b/tests/warn/context-bounds-migration.scala @@ -0,0 +1,9 @@ + +class C[T] +def foo[X: C] = () + +given [T]: C[T] = C[T]() + +def Test = + foo(C[Int]()) // warning + foo(using C[Int]()) // ok From f96a769b17f362d14d2265693e72ad7311301172 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 20:02:13 +0200 Subject: [PATCH 015/827] Drop restriction against typedefs at level * only Allow the RHS of a type def to be higher-kinded. But keep the restriction for opaque type aliases; their RHS must be fully applied. I am not sure why the restriction applies to them, but there was a test specifically about that, so there night be a reason. # Conflicts: # compiler/src/dotty/tools/dotc/typer/Typer.scala # Conflicts: # compiler/src/dotty/tools/dotc/typer/Typer.scala # tests/pos/typeclasses-this.scala --- .../src/dotty/tools/dotc/typer/Checking.scala | 16 ++++++++-------- compiler/src/dotty/tools/dotc/typer/Typer.scala | 5 +++-- tests/neg/i12456.scala | 2 +- tests/neg/i13757-match-type-anykind.scala | 2 +- tests/neg/i9328.scala | 2 +- tests/neg/parser-stability-12.scala | 2 +- tests/neg/unapplied-types.scala | 7 ------- tests/pos/unapplied-types.scala | 7 +++++++ 8 files changed, 22 insertions(+), 21 deletions(-) delete mode 100644 tests/neg/unapplied-types.scala create mode 100644 tests/pos/unapplied-types.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 5839ec1766af..073055ba5b58 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -1331,20 +1331,20 @@ trait Checking { } /** Check that user-defined (result) type is fully applied */ - def checkFullyAppliedType(tree: Tree)(using Context): Unit = tree match + def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = tree match case TypeBoundsTree(lo, hi, alias) => - checkFullyAppliedType(lo) - checkFullyAppliedType(hi) - checkFullyAppliedType(alias) + checkFullyAppliedType(lo, prefix) + checkFullyAppliedType(hi, prefix) + checkFullyAppliedType(alias, prefix) case Annotated(arg, annot) => - checkFullyAppliedType(arg) + checkFullyAppliedType(arg, prefix) case LambdaTypeTree(_, body) => - checkFullyAppliedType(body) + checkFullyAppliedType(body, prefix) case _: TypeTree => case _ => if tree.tpe.typeParams.nonEmpty then val what = if tree.symbol.exists then tree.symbol.show else i"type $tree" - report.error(em"$what takes type parameters", tree.srcPos) + report.error(em"$prefix$what takes type parameters", tree.srcPos) /** Check that we are in an inline context (inside an inline method or in inline code) */ def checkInInlineContext(what: String, pos: SrcPos)(using Context): Unit = @@ -1609,7 +1609,7 @@ trait ReChecking extends Checking { override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = () - override def checkFullyAppliedType(tree: Tree)(using Context): Unit = () + override def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = () override def 
checkEnumCaseRefsLegal(cdef: TypeDef, enumCtx: Context)(using Context): Unit = () override def checkAnnotApplicable(annot: Tree, sym: Symbol)(using Context): Boolean = true override def checkMatchable(tp: Type, pos: SrcPos, pattern: Boolean)(using Context): Unit = () diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 17a2cba25019..a357f06e4ee8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2780,8 +2780,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typeIndexedLambdaTypeTree(rhs, tparams, body) case rhs => typedType(rhs) - checkFullyAppliedType(rhs1) - if sym.isOpaqueAlias then checkNoContextFunctionType(rhs1) + if sym.isOpaqueAlias then + checkFullyAppliedType(rhs1, "Opaque type alias must be fully applied, but ") + checkNoContextFunctionType(rhs1) assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } diff --git a/tests/neg/i12456.scala b/tests/neg/i12456.scala index b9fb0283dcd7..c1a3ada5a420 100644 --- a/tests/neg/i12456.scala +++ b/tests/neg/i12456.scala @@ -1 +1 @@ -object F { type T[G[X] <: X, F <: G[F]] } // error // error +object F { type T[G[X] <: X, F <: G[F]] } // error diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index a80e8b2b289b..998c54292b15 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -8,7 +8,7 @@ object Test: type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType4[X <: Option] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded diff --git a/tests/neg/i9328.scala b/tests/neg/i9328.scala index dabde498e1dc..c13d33e103b9 100644 --- a/tests/neg/i9328.scala +++ b/tests/neg/i9328.scala @@ -3,7 +3,7 @@ type Id[T] = T match { case _ => T } -class Foo2[T <: Id[T]] // error // error +class Foo2[T <: Id[T]] // error object Foo { // error object Foo { } diff --git a/tests/neg/parser-stability-12.scala b/tests/neg/parser-stability-12.scala index 78ff178d010c..17a611d70e34 100644 --- a/tests/neg/parser-stability-12.scala +++ b/tests/neg/parser-stability-12.scala @@ -1,4 +1,4 @@ trait x0[]: // error - trait x1[x1 <:x0] // error: type x0 takes type parameters + trait x1[x1 <:x0] extends x1[ // error // error \ No newline at end of file diff --git a/tests/neg/unapplied-types.scala b/tests/neg/unapplied-types.scala deleted file mode 100644 index 2f2339baa026..000000000000 --- a/tests/neg/unapplied-types.scala +++ /dev/null @@ -1,7 +0,0 @@ -trait T { - type L[X] = List[X] - type T1 <: L // error: takes type parameters - type T2 = L // error: takes type parameters - type T3 = List // error: takes type parameters - type T4 <: List // error: takes type parameters -} diff --git a/tests/pos/unapplied-types.scala b/tests/pos/unapplied-types.scala new file mode 100644 index 000000000000..604e63deb8ad --- /dev/null +++ b/tests/pos/unapplied-types.scala @@ -0,0 +1,7 @@ +trait T { + type L[X] = List[X] + type T1 <: L // was error: takes type parameters + type T2 = L // was error: takes type parameters + type T3 = List // was error: takes type parameters + type T4 <: List // was error: 
takes type parameters +} From ef71dcb45a0f31b72c5fe05fc48764865e1cea8e Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 20:10:36 +0200 Subject: [PATCH 016/827] Allow types in given definitions to be infix types A type implemented in a given definition can now be an infix type, without enclosing parens being necessary. By contrast, it cannot anymore be a refined type. Refined types have to be enclosed in parens. This second point aligns the dotty parser with the published syntax and the scala meta parser. # Conflicts: # tests/pos/typeclasses-this.scala --- .../dotty/tools/dotc/parsing/Parsers.scala | 26 +++++++++++++------ docs/_docs/internals/syntax.md | 4 ++- docs/_docs/reference/syntax.md | 9 ++++--- tests/neg/i12348.check | 16 ++++++------ tests/neg/i12348.scala | 3 +-- tests/neg/i7045.scala | 7 +++++ tests/pos/i7045.scala | 9 ------- tests/pos/typeclass-aggregates.scala | 6 ++--- 8 files changed, 45 insertions(+), 35 deletions(-) create mode 100644 tests/neg/i7045.scala delete mode 100644 tests/pos/i7045.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 94814457523e..6c0f19de3dd1 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1806,8 +1806,8 @@ object Parsers { */ def infixType(): Tree = infixTypeRest(refinedType()) - def infixTypeRest(t: Tree): Tree = - infixOps(t, canStartInfixTypeTokens, refinedTypeFn, Location.ElseWhere, ParseKind.Type, + def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = + infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, isOperator = !followingIsVararg() && !isPureArrow && nextCanFollowOperator(canStartInfixTypeTokens)) @@ -1872,6 +1872,10 @@ object Parsers { */ def annotType(): Tree = annotTypeRest(simpleType()) + /** AnnotType1 ::= SimpleType1 {Annotation} + */ + def annotType1(): Tree = annotTypeRest(simpleType1()) + def annotTypeRest(t: Tree): Tree = if (in.token == AT) annotTypeRest(atSpan(startOffset(t)) { @@ -4097,8 +4101,10 @@ object Parsers { syntaxError(em"extension clause can only define methods", stat.span) } - /** GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) - * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + /** GivenDef ::= [GivenSig] (GivenType [‘=’ Expr] | StructuralInstance) + * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + * GivenType ::= AnnotType1 {id [nl] AnnotType1} + * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] */ def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) @@ -4124,8 +4130,12 @@ object Parsers { val noParams = tparams.isEmpty && vparamss.isEmpty if !(name.isEmpty && noParams) then acceptColon() val parents = - if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else refinedTypeRest(constrApp()) :: withConstrApps() + if isSimpleLiteral then + rejectWildcardType(annotType()) :: Nil + else constrApp() match + case parent: Apply => parent :: withConstrApps() + case parent if in.isIdent => infixTypeRest(parent, _ => annotType1()) :: Nil + case parent => parent :: withConstrApps() val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) @@ -4219,10 +4229,10 @@ object Parsers { /* -------- TEMPLATES ------------------------------------------- */ - /** ConstrApp ::= 
SimpleType1 {Annotation} {ParArgumentExprs} + /** ConstrApp ::= AnnotType1 {ParArgumentExprs} */ val constrApp: () => Tree = () => - val t = rejectWildcardType(annotTypeRest(simpleType1()), + val t = rejectWildcardType(annotType1(), fallbackTree = Ident(tpnme.ERROR)) // Using Ident(tpnme.ERROR) to avoid causing cascade errors on non-user-written code if in.token == LPAREN then parArgumentExprss(wrapNew(t)) else t diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index c711d5f63db8..6ef346ab22cc 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -191,6 +191,7 @@ MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) +AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) SimpleType ::= SimpleLiteral SingletonTypeTree(l) | ‘?’ TypeBounds @@ -466,8 +467,9 @@ ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) +GivenDef ::= [GivenSig] (GivenType [‘=’ Expr] | StructuralInstance) GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +GivenType ::= AnnotType1 {id [nl] AnnotType1} StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index ae541b65d8c4..66cf5a18fac9 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -200,8 +200,8 @@ SimpleType ::= SimpleLiteral | Singleton ‘.’ ‘type’ | ‘(’ [Types] ‘)’ | Refinement - | SimpleType1 TypeArgs - | SimpleType1 ‘#’ id + | SimpleType TypeArgs + | SimpleType ‘#’ id Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id @@ -392,7 +392,7 @@ LocalModifier ::= ‘abstract’ AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ id ‘]’ -Annotation ::= ‘@’ SimpleType1 {ParArgumentExprs} +Annotation ::= ‘@’ SimpleType {ParArgumentExprs} Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} @@ -444,6 +444,7 @@ ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +GivenType ::= AnnotType {id [nl] AnnotType} StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods @@ -453,7 +454,7 @@ ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef Template ::= InheritClauses [TemplateBody] InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] ConstrApps ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp}) -ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} +ConstrApp ::= SimpleType {Annotation} {ParArgumentExprs} ConstrExpr ::= SelfInvocation | <<< SelfInvocation {semi BlockStat} >>> 
SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index ccc2b9f7ed00..eded51f70f31 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -1,8 +1,8 @@ --- [E040] Syntax Error: tests/neg/i12348.scala:2:15 -------------------------------------------------------------------- -2 | given inline x: Int = 0 // error - | ^ - | 'with' expected, but identifier found --- [E040] Syntax Error: tests/neg/i12348.scala:3:10 -------------------------------------------------------------------- -3 |} // error - | ^ - | '}' expected, but eof found +-- [E040] Syntax Error: tests/neg/i12348.scala:2:16 -------------------------------------------------------------------- +2 | given inline x: Int = 0 // error // error + | ^ + | an identifier expected, but ':' found +-- [E067] Syntax Error: tests/neg/i12348.scala:2:8 --------------------------------------------------------------------- +2 | given inline x: Int = 0 // error // error + | ^ + |Declaration of given instance given_x_inline_ not allowed here: only classes can have declared but undefined members diff --git a/tests/neg/i12348.scala b/tests/neg/i12348.scala index 69fc77fb532e..43daf9a2801b 100644 --- a/tests/neg/i12348.scala +++ b/tests/neg/i12348.scala @@ -1,3 +1,2 @@ object A { - given inline x: Int = 0 // error -} // error \ No newline at end of file + given inline x: Int = 0 // error // error diff --git a/tests/neg/i7045.scala b/tests/neg/i7045.scala new file mode 100644 index 000000000000..b4c6d60cd35a --- /dev/null +++ b/tests/neg/i7045.scala @@ -0,0 +1,7 @@ +trait Bar { type Y } +trait Foo { type X } + +class Test: + given a1(using b: Bar): Foo = new Foo { type X = b.Y } // ok + given a2(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } // ok + given a3(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } // error \ No newline at end of file diff --git a/tests/pos/i7045.scala b/tests/pos/i7045.scala deleted file mode 100644 index e683654dd5c3..000000000000 --- a/tests/pos/i7045.scala +++ /dev/null @@ -1,9 +0,0 @@ -trait Bar { type Y } -trait Foo { type X } - -class Test: - given a1(using b: Bar): Foo = new Foo { type X = b.Y } - - given a2(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } - - given a3(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala index 77b0f1a9f04a..9bb576603b7b 100644 --- a/tests/pos/typeclass-aggregates.scala +++ b/tests/pos/typeclass-aggregates.scala @@ -30,8 +30,8 @@ trait OrdWithMonoid extends Ord, Monoid def ordWithMonoid2(ord: Ord, monoid: Monoid{ type This = ord.This }) = //: OrdWithMonoid { type This = ord.This} = new OrdWithMonoid with ord.OrdProxy with monoid.MonoidProxy {} -given intOrd: Ord { type This = Int } = ??? -given intMonoid: Monoid { type This = Int } = ??? +given intOrd: (Ord { type This = Int }) = ??? +given intMonoid: (Monoid { type This = Int }) = ??? //given (using ord: Ord, monoid: Monoid{ type This = ord.This }): (Ord & Monoid { type This = ord.This}) = // ordWithMonoid2(ord, monoid) @@ -42,6 +42,6 @@ val y: Int = ??? 
: x.This // given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = // new ord.OrdProxy with monoid.MonoidProxy {} -given [A](using ord: Ord { type This = A }, monoid: Monoid { type This = A}): (Ord & Monoid) { type This = A} = +given [A](using ord: Ord { type This = A }, monoid: Monoid { type This = A}): ((Ord & Monoid) { type This = A}) = new ord.OrdProxy with monoid.MonoidProxy {} From 2f58cbc145dec06679b571f8b90b8729fc2a1094 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 20:44:59 +0200 Subject: [PATCH 017/827] New syntax for given defs given [A: Ord] => A is Ord: ... given [A: Ord] => A is Ord as listOrd: ... --- .../dotty/tools/dotc/parsing/Parsers.scala | 70 +++++++-- .../test/dotc/pos-test-pickling.blacklist | 2 + docs/_docs/internals/syntax.md | 9 +- tests/pos/typeclasses-arrow0.scala | 136 ++++++++++++++++++ 4 files changed, 201 insertions(+), 16 deletions(-) create mode 100644 tests/pos/typeclasses-arrow0.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 6c0f19de3dd1..a5b33994d4a9 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -976,12 +976,14 @@ object Parsers { * i.e. an identifier followed by type and value parameters, followed by `:`? * @pre The current token is an identifier */ - def followingIsGivenSig() = + def followingIsOldStyleGivenSig() = val lookahead = in.LookaheadScanner() if lookahead.isIdent then lookahead.nextToken() + var paramsSeen = false def skipParams(): Unit = if lookahead.token == LPAREN || lookahead.token == LBRACKET then + paramsSeen = true lookahead.skipParens() skipParams() else if lookahead.isNewLine then @@ -989,6 +991,16 @@ object Parsers { skipParams() skipParams() lookahead.isColon + && { + !in.featureEnabled(Feature.modularity) + || { // with modularity language import, a `:` at EOL after an identifier represents a single identifier given + // Example: + // given C: + // def f = ... 
+ lookahead.nextToken() + !lookahead.isAfterLineEnd + } + } def followingIsExtension() = val next = in.lookahead.token @@ -1808,7 +1820,9 @@ object Parsers { def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, - isOperator = !followingIsVararg() && !isPureArrow + isOperator = !followingIsVararg() + && !isPureArrow + && !(isIdent(nme.as) && in.featureEnabled(Feature.modularity)) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -4101,15 +4115,30 @@ object Parsers { syntaxError(em"extension clause can only define methods", stat.span) } - /** GivenDef ::= [GivenSig] (GivenType [‘=’ Expr] | StructuralInstance) - * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ - * GivenType ::= AnnotType1 {id [nl] AnnotType1} + /** GivenDef ::= OldGivenDef | NewGivenDef + * OldGivenDef ::= [OldGivenSig] (GivenType [‘=’ Expr] | StructuralInstance) + * OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + * + * NewGivenDef ::= [GivenConditional '=>'] NewGivenSig + * GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} + * NewGivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + * | ConstrApps ['as' id] TemplateBody + * + * GivenType ::= AnnotType1 {id [nl] AnnotType1} */ def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) val nameStart = in.offset - val name = if isIdent && followingIsGivenSig() then ident() else EmptyTermName + var name = if isIdent && followingIsOldStyleGivenSig() then ident() else EmptyTermName + var newSyntaxAllowed = in.featureEnabled(Feature.modularity) + + def moreConstrApps() = + if newSyntaxAllowed && in.token == COMMA then + in.nextToken() + constrApps() + else // need to be careful with last `with` + withConstrApps() // TODO Change syntax description def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = @@ -4128,14 +4157,24 @@ object Parsers { else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty - if !(name.isEmpty && noParams) then acceptColon() + if !(name.isEmpty && noParams) then + if in.isColon then + newSyntaxAllowed = false + in.nextToken() + else if newSyntaxAllowed then accept(ARROW) + else acceptColon() val parents = if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil else constrApp() match - case parent: Apply => parent :: withConstrApps() - case parent if in.isIdent => infixTypeRest(parent, _ => annotType1()) :: Nil - case parent => parent :: withConstrApps() + case parent: Apply => parent :: moreConstrApps() + case parent if in.isIdent => + infixTypeRest(parent, _ => annotType1()) :: Nil + case parent => parent :: moreConstrApps() + if newSyntaxAllowed && in.isIdent(nme.as) then + in.nextToken() + name = ident() + val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) @@ -4145,7 +4184,7 @@ object Parsers { ValDef(name, parents.head, subExpr()) else DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) - else if (isStatSep || isStatSeqEnd) && parentsIsType then + else if (isStatSep || isStatSeqEnd) && parentsIsType && !newSyntaxAllowed then if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") DefDef(name, 
adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) @@ -4156,8 +4195,13 @@ object Parsers { else vparam val constr = makeConstructor(tparams, vparamss1) val templ = - if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) - else withTemplate(constr, parents) + if isStatSep || isStatSeqEnd then + Template(constr, parents, Nil, EmptyValDef, Nil) + else if !newSyntaxAllowed || in.token == WITH then + withTemplate(constr, parents) + else + possibleTemplateStart() + templateBodyOpt(constr, parents, Nil) if noParams && !mods.is(Inline) then ModuleDef(name, templ) else TypeDef(name.toTypeName, templ) end gdef diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index ad9befa72f5f..3b14ce28569d 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -127,5 +127,7 @@ i20053b.scala # alias types at different levels of dereferencing parsercombinators-givens.scala parsercombinators-givens-2.scala +parsercombinators-arrow.scala + diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 6ef346ab22cc..db858ba05fbc 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -467,10 +467,13 @@ ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (GivenType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present + +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody GivenType ::= AnnotType1 {id [nl] AnnotType1} -StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/tests/pos/typeclasses-arrow0.scala b/tests/pos/typeclasses-arrow0.scala new file mode 100644 index 000000000000..22d84fe6478d --- /dev/null +++ b/tests/pos/typeclasses-arrow0.scala @@ -0,0 +1,136 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord[A]: + extension (x: A) + def compareTo(y: A): Int + def < (y: A): Boolean = compareTo(y) < 0 + def > (y: A): Boolean = compareTo(y) > 0 + def <= (y: A): Boolean = compareTo(y) <= 0 + def >= (y: A): Boolean = compareTo(y) >= 0 + def max(y: A): A = if x < y then y else x + + trait Show[A]: + extension (x: A) def show: String + + trait SemiGroup[A]: + extension (x: A) def combine(y: A): A + + trait Monoid[A] extends SemiGroup[A]: + def unit: A + + trait Functor[F[_]]: + extension [A](x: F[A]) def map[B](f: A => B): F[B] + + trait Monad[F[_]] extends Functor[F]: + def pure[A](x: A): F[A] + extension [A](x: F[A]) + def flatMap[B](f: A => F[B]): F[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Ord[Int] as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => Ord[List[T]]: + extension (xs: List[T]) 
def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given Monad[List] as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Monad[Reader[Ctx]] as readerMonad: + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => Ord[T] as descending: + extension (x: T) def compareTo(y: T) = summon[Ord[T]].compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal[Self]: + + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Animal[Sheep]: + def apply(name: String) = Sheep(name) + extension (self: Sheep) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). 
+ - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ From 598c6adff60179e1533a3dd0226d58363ea19d29 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Apr 2024 21:59:15 +0200 Subject: [PATCH 018/827] Allow multiple context bounds in `{...}` --- .../src/dotty/tools/dotc/ast/Desugar.scala | 2 ++ compiler/src/dotty/tools/dotc/ast/untpd.scala | 8 +++++++ .../dotty/tools/dotc/parsing/Parsers.scala | 11 +++++++--- .../tools/dotc/printing/RefinedPrinter.scala | 21 ++++++++++++++----- .../src/dotty/tools/dotc/typer/Typer.scala | 11 ++++++++++ tests/neg/i9330.scala | 2 +- tests/pos/FromString-typeparam.scala | 13 ++++++++++++ tests/semanticdb/expect/Methods.expect.scala | 2 +- .../semanticdb/expect/Synthetic.expect.scala | 2 +- tests/semanticdb/metac.expect | 9 ++++---- 10 files changed, 66 insertions(+), 15 deletions(-) create mode 100644 tests/pos/FromString-typeparam.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index c3a0c05088cb..774e77aa4b44 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1144,6 +1144,8 @@ object desugar { case tree: TypeDef => tree.name.toString case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" + case ContextBoundTypeTree(tycon, paramName, _) => + s"${apply(x, tycon)}_$paramName" case InfixOp(left, op, right) => if followArgs then s"${op.name}_${extractArgs(List(left, right))}" else op.name.toString diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 91ef462bcf05..0486e2e6d3d7 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -118,6 +118,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -677,6 +678,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def ContextBoundTypeTree(tree: Tree)(tycon: Tree, paramName: TypeName, ownName: TermName)(using Context): Tree = tree match + case tree: ContextBoundTypeTree if (tycon eq tree.tycon) && paramName == tree.paramName && ownName == tree.ownName => tree + case _ => finalize(tree, untpd.ContextBoundTypeTree(tycon, paramName, ownName)(tree.source)) def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { case tree: ImportSelector if 
(imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) @@ -742,6 +746,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) case ExtMethods(paramss, methods) => cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case ContextBoundTypeTree(tycon, paramName, ownName) => + cpy.ContextBoundTypeTree(tree)(transform(tycon), paramName, ownName) case ImportSelector(imported, renamed, bound) => cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) case Number(_, _) | TypedSplice(_) => @@ -797,6 +803,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case ExtMethods(paramss, methods) => this(paramss.foldLeft(x)(apply), methods) + case ContextBoundTypeTree(tycon, paramName, ownName) => + this(x, tycon) case ImportSelector(imported, renamed, bound) => this(this(this(x, imported), renamed), bound) case Number(_, _) => diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index a5b33994d4a9..8680ba8c1335 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -2205,11 +2205,16 @@ object Parsers { else atSpan((t.span union cbs.head.span).start) { ContextBounds(t, cbs) } } + /** ContextBound ::= Type [`as` id] */ + def contextBound(pname: TypeName): Tree = + ContextBoundTypeTree(toplevelTyp(), pname, EmptyTermName) + def contextBounds(pname: TypeName): List[Tree] = if in.isColon then - atSpan(in.skipToken()) { - AppliedTypeTree(toplevelTyp(), Ident(pname)) - } :: contextBounds(pname) + in.nextToken() + if in.token == LBRACE && in.featureEnabled(Feature.modularity) + then inBraces(commaSeparated(() => contextBound(pname))) + else contextBound(pname) :: contextBounds(pname) else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 0329f0639d87..1ff4c8cae339 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -386,7 +386,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec) { keywordStr("for ") ~ Text(enums map enumText, "; ") ~ sep ~ toText(expr) } def cxBoundToText(bound: untpd.Tree): Text = bound match { // DD - case AppliedTypeTree(tpt, _) => " : " ~ toText(tpt) + case ContextBoundTypeTree(tpt, _, _) => " : " ~ toText(tpt) case untpd.Function(_, tpt) => " <% " ~ toText(tpt) } @@ -658,7 +658,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextAnnot = toTextLocal(arg) ~~ annotText(annot.symbol.enclosingClass, annot) def toTextRetainsAnnot = - try changePrec(GlobalPrec)(toText(arg) ~ "^" ~ toTextCaptureSet(captureSet)) + try changePrec(GlobalPrec)(toTextLocal(arg) ~ "^" ~ toTextCaptureSet(captureSet)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner.isRetains && Feature.ccEnabled && !printDebug @@ -747,9 +747,18 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case GenAlias(pat, expr) => toText(pat) ~ " = " ~ toText(expr) case 
ContextBounds(bounds, cxBounds) => - cxBounds.foldLeft(toText(bounds)) {(t, cxb) => - t ~ cxBoundToText(cxb) - } + if Feature.enabled(Feature.modularity) then + def boundsText(bounds: Tree) = bounds match + case ContextBoundTypeTree(tpt, _, ownName) => + toText(tpt) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) + case bounds => toText(bounds) + cxBounds match + case bound :: Nil => ": " ~ boundsText(bound) + case _ => ": {" ~ Text(cxBounds.map(boundsText), ", ") ~ "}" + else + cxBounds.foldLeft(toText(bounds)) {(t, cxb) => + t ~ cxBoundToText(cxb) + } case PatDef(mods, pats, tpt, rhs) => modText(mods, NoSymbol, keywordStr("val"), isType = false) ~~ toText(pats, ", ") ~ optAscription(tpt) ~ optText(rhs)(" = " ~ _) @@ -794,6 +803,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { prefix ~~ idx.toString ~~ "|" ~~ tpeText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix case CapturesAndResult(refs, parent) => changePrec(GlobalPrec)("^{" ~ Text(refs.map(toText), ", ") ~ "}" ~ toText(parent)) + case ContextBoundTypeTree(tycon, pname, ownName) => + toText(pname) ~ " : " ~ toText(tycon) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) case _ => tree.fallbackToText(this) } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a357f06e4ee8..b90b742aa0ec 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2284,6 +2284,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.tpFun(tsyms, vsyms) completeTypeTree(InferredTypeTree(), tp, tree) + def typedContextBoundTypeTree(tree: untpd.ContextBoundTypeTree)(using Context): Tree = + val tycon = typedType(tree.tycon) + val tyconSplice = untpd.TypedSplice(tycon) + val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) + if tycon.tpe.typeParams.nonEmpty then + typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) + else + errorTree(tree, + em"""Illegal context bound: ${tycon.tpe} does not take type parameters.""") + def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") @@ -3269,6 +3279,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.UnApply => typedUnApply(tree, pt) case tree: untpd.Tuple => typedTuple(tree, pt) case tree: untpd.InLambdaTypeTree => typedInLambdaTypeTree(tree, pt) + case tree: untpd.ContextBoundTypeTree => typedContextBoundTypeTree(tree) case tree: untpd.InfixOp => typedInfixOp(tree, pt) case tree: untpd.ParsedTry => typedTry(tree, pt) case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt) diff --git a/tests/neg/i9330.scala b/tests/neg/i9330.scala index ca25582ef7e8..6ba57c033473 100644 --- a/tests/neg/i9330.scala +++ b/tests/neg/i9330.scala @@ -1,4 +1,4 @@ val x = { - () == "" // error + () == "" implicit def foo[A: A] // error // error // error } diff --git a/tests/pos/FromString-typeparam.scala b/tests/pos/FromString-typeparam.scala new file mode 100644 index 000000000000..893bcfd3decc --- /dev/null +++ b/tests/pos/FromString-typeparam.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[A]: + def fromString(s: String): A + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString, Numeric}](a: String, b: String): N = + val num = 
summon[Numeric[N]] + val N = summon[FromString[N]] + num.plus(N.fromString(a), N.fromString(b)) diff --git a/tests/semanticdb/expect/Methods.expect.scala b/tests/semanticdb/expect/Methods.expect.scala index f34c657b2f6d..4ec723ad584e 100644 --- a/tests/semanticdb/expect/Methods.expect.scala +++ b/tests/semanticdb/expect/Methods.expect.scala @@ -15,7 +15,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] { def m6/*<-example::Methods#m6().*/(x/*<-example::Methods#m6().(x)*/: Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+1).*/(x/*<-example::Methods#m6(+1).(x)*/: List/*->example::Methods#List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+2).*/(x/*<-example::Methods#m6(+2).(x)*/: scala.List/*->scala::package.List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ - def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/: Ordering/*->scala::math::Ordering#*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ + def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ def `m8()./*<-example::Methods#`m8().`().*/`() = ???/*->scala::Predef.`???`().*/ class `m9()./*<-example::Methods#`m9().`#*/` def m9/*<-example::Methods#m9().*/(x/*<-example::Methods#m9().(x)*/: `m9().`/*->example::Methods#`m9().`#*/) = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/expect/Synthetic.expect.scala b/tests/semanticdb/expect/Synthetic.expect.scala index a4419aa8bd82..4d797ce2b856 100644 --- a/tests/semanticdb/expect/Synthetic.expect.scala +++ b/tests/semanticdb/expect/Synthetic.expect.scala @@ -30,7 +30,7 @@ class Synthetic/*<-example::Synthetic#*/ { null.asInstanceOf/*->scala::Any#asInstanceOf().*/[Int/*->scala::Int#*/ => Int/*->scala::Int#*/](2) } - class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*//*<-example::Synthetic#J#evidence$1.*/: Manifest/*->scala::Predef.Manifest#*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } + class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*/: /*<-example::Synthetic#J#evidence$1.*/Manifest/*->scala::Predef.Manifest#*//*->example::Synthetic#J#[T]*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } class F/*<-example::Synthetic#F#*/ implicit val ordering/*<-example::Synthetic#ordering.*/: Ordering/*->scala::package.Ordering#*/[F/*->example::Synthetic#F#*/] = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 2120cc633da8..84c3e7c6a110 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2732,8 +2732,8 @@ Occurrences: [16:29..16:32): ??? -> scala/Predef.`???`(). [17:6..17:8): m7 <- example/Methods#m7(). 
[17:9..17:10): U <- example/Methods#m7().[U] -[17:10..17:10): <- example/Methods#m7().(evidence$1) -[17:12..17:20): Ordering -> scala/math/Ordering# +[17:12..17:20): Ordering -> example/Methods#m7().[U] +[17:12..17:12): <- example/Methods#m7().(evidence$1) [17:22..17:23): c <- example/Methods#m7().(c) [17:25..17:32): Methods -> example/Methods# [17:33..17:34): T -> example/Methods#[T] @@ -3533,7 +3533,7 @@ Uri => Synthetic.scala Text => empty Language => Scala Symbols => 52 entries -Occurrences => 136 entries +Occurrences => 137 entries Synthetics => 39 entries Symbols: @@ -3659,8 +3659,9 @@ Occurrences: [32:8..32:9): J <- example/Synthetic#J# [32:9..32:9): <- example/Synthetic#J#``(). [32:10..32:11): T <- example/Synthetic#J#[T] -[32:11..32:11): <- example/Synthetic#J#evidence$1. +[32:13..32:13): <- example/Synthetic#J#evidence$1. [32:13..32:21): Manifest -> scala/Predef.Manifest# +[32:13..32:21): Manifest -> example/Synthetic#J#[T] [32:29..32:32): arr <- example/Synthetic#J#arr. [32:35..32:40): Array -> scala/Array. [32:41..32:46): empty -> scala/Array.empty(). From a61d2bc7b5c4ba97c037a2e46856fb8290594310 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 12:27:52 +0200 Subject: [PATCH 019/827] Allow renamings `as N` in context bounds Also, provide the possibility to use the parameter name for single context bounds. This is controlled by a Config setting, which is off by default. --- .../src/dotty/tools/dotc/ast/Desugar.scala | 180 +++++++++++------- .../src/dotty/tools/dotc/config/Config.scala | 7 + .../dotty/tools/dotc/parsing/Parsers.scala | 16 +- docs/_docs/internals/syntax.md | 8 +- tests/pos/FromString-named.scala | 11 ++ 5 files changed, 146 insertions(+), 76 deletions(-) create mode 100644 tests/pos/FromString-named.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 774e77aa4b44..04fd1afca8be 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -10,7 +10,7 @@ import Annotations.Annotation import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, SrcPos, Chars} -import config.Feature.{sourceVersion, migrateTo3, enabled} +import config.{Feature, Config} import config.SourceVersion.* import collection.mutable import reporting.* @@ -46,6 +46,11 @@ object desugar { */ val UntupledParam: Property.Key[Unit] = Property.StickyKey() + /** An attachment key to indicate that a ValDef is an evidence parameter + * for a context bound. + */ + val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() + /** What static check should be applied to a Match? 
*/ enum MatchCheck { case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom @@ -195,17 +200,6 @@ object desugar { else vdef1 end valDef - def makeImplicitParameters( - tpts: List[Tree], implicitFlag: FlagSet, - mkParamName: Int => TermName, - forPrimaryConstructor: Boolean = false - )(using Context): List[ValDef] = - for (tpt, i) <- tpts.zipWithIndex yield { - val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param - val epname = mkParamName(i) - ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) - } - def mapParamss(paramss: List[ParamClause]) (mapTypeParam: TypeDef => TypeDef) (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = @@ -232,34 +226,57 @@ object desugar { private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) - private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = - val DefDef(_, paramss, tpt, rhs) = meth - val evidenceParamBuf = mutable.ListBuffer[ValDef]() + private def desugarContextBounds( + tdef: TypeDef, + evidenceBuf: mutable.ListBuffer[ValDef], + flags: FlagSet, + freshName: untpd.Tree => TermName, + allParamss: List[ParamClause])(using Context): TypeDef = - var seenContextBounds: Int = 0 - def desugarContextBounds(rhs: Tree): Tree = rhs match + val evidenceNames = mutable.ListBuffer[TermName]() + + def desugarRhs(rhs: Tree): Tree = rhs match case ContextBounds(tbounds, cxbounds) => - val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit - evidenceParamBuf ++= makeImplicitParameters( - cxbounds, iflag, - // Just like with `makeSyntheticParameter` on nameless parameters of - // using clauses, we only need names that are unique among the - // parameters of the method since shadowing does not affect - // implicit resolution in Scala 3. - mkParamName = i => - val index = seenContextBounds + 1 // Start at 1 like FreshNameCreator. - val ret = ContextBoundParamName(EmptyTermName, index) - seenContextBounds += 1 - ret, - forPrimaryConstructor = isPrimaryConstructor) + for bound <- cxbounds do + val evidenceName = bound match + case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => + ownName + case _ if Config.nameSingleContextBounds && cxbounds.tail.isEmpty + && Feature.enabled(Feature.modularity) => + tdef.name.toTermName + case _ => + freshName(bound) + evidenceNames += evidenceName + val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(flags) + evidenceParam.pushAttachment(ContextBoundParam, ()) + evidenceBuf += evidenceParam tbounds case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) + cpy.LambdaTypeTree(rhs)(tparams, desugarRhs(body)) case _ => rhs + + cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + end desugarContextBounds + + private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = + val DefDef(_, paramss, tpt, rhs) = meth + val evidenceParamBuf = mutable.ListBuffer[ValDef]() + + var seenContextBounds: Int = 0 + def freshName(unused: Tree) = + seenContextBounds += 1 // Start at 1 like FreshNameCreator. + ContextBoundParamName(EmptyTermName, seenContextBounds) + // Just like with `makeSyntheticParameter` on nameless parameters of + // using clauses, we only need names that are unique among the + // parameters of the method since shadowing does not affect + // implicit resolution in Scala 3. 
+ val paramssNoContextBounds = + val iflag = if Feature.sourceVersion.isAtLeast(`future`) then Given else Implicit + val flags = if isPrimaryConstructor then iflag | LocalParamAccessor else iflag | Param mapParamss(paramss) { - tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) + tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) }(identity) rhs match @@ -399,43 +416,70 @@ object desugar { (Nil, tree) /** Add all evidence parameters in `params` as implicit parameters to `meth`. - * If the parameters of `meth` end in an implicit parameter list or using clause, - * evidence parameters are added in front of that list. Otherwise they are added - * as a separate parameter clause. + * The position of the added parameters is determined as follows: + * + * - If there is an existing parameter list that refers to one of the added + * parameters in one of its parameter types, add the new parameters + * in front of the first such parameter list. + * - Otherwise, if the last parameter list consists implicit or using parameters, + * join the new parameters in front of this parameter list, creating one + * parameter list (this is equilavent to Scala 2's scheme). + * - Otherwise, add the new parameter list at the end as a separate parameter clause. */ private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = - params match + if params.isEmpty then return meth + + val boundNames = params.map(_.name).toSet + + //println(i"add ev params ${meth.name}, ${boundNames.toList}") + + def references(vdef: ValDef): Boolean = + vdef.tpt.existsSubTree: + case Ident(name: TermName) => boundNames.contains(name) + case _ => false + + def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match + case ValDefs(mparams) :: _ if mparams.exists(references) => + params :: mparamss + case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => + (params ++ mparams) :: Nil + case mparams :: mparamss1 => + mparams :: recur(mparamss1) case Nil => - meth - case evidenceParams => - val paramss1 = meth.paramss.reverse match - case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => - ((evidenceParams ++ vparams) :: rparamss).reverse - case _ => - meth.paramss :+ evidenceParams - cpy.DefDef(meth)(paramss = paramss1) + params :: Nil + + cpy.DefDef(meth)(paramss = recur(meth.paramss)) + end addEvidenceParams /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = meth.paramss.reverse match { case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.name.is(ContextBoundParamName)) + vparams.takeWhile(_.hasAttachment(ContextBoundParam)) case _ => Nil } @sharable private val synthetic = Modifiers(Synthetic) - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { - var mods = tparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) + /** Filter annotations in `mods` according to `keep` */ + private def filterAnnots(mods: Modifiers, keep: Boolean)(using Context) = + if keep then mods else mods.withAnnotations(Nil) + + private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean)(using Context): TypeDef = + val mods = filterAnnots(tparam.rawMods, keepAnnotations) tparam.withMods(mods & EmptyFlags | Param) - } - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, 
keepDefault: Boolean): ValDef = { - var mods = vparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) + + private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean)(using Context): ValDef = { + val mods = filterAnnots(vparam.rawMods, keepAnnotations) val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) + // Need to ensure that tree is duplicated since term parameters can be watched + // and cloning a term parameter will copy its watchers to the clone, which means + // we'd get cross-talk between the original parameter and the clone. + ValDef(vparam.name, vparam.tpt, vparam.rhs) + .withSpan(vparam.span) + .withAttachmentsFrom(vparam) + .withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = @@ -609,6 +653,11 @@ object desugar { case _ => false } + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + case PostfixOp(_, Ident(tpnme.raw.STAR)) => true + case _ => false + } + def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { val targs = for (tparam <- tparams) yield { val targ = refOfDef(tparam) @@ -625,11 +674,6 @@ object desugar { appliedTypeTree(tycon, targs) } - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { - case PostfixOp(_, Ident(tpnme.raw.STAR)) => true - case _ => false - } - // a reference to the class type bound by `cdef`, with type parameters coming from the constructor val classTypeRef = appliedRef(classTycon) @@ -667,7 +711,7 @@ object desugar { } ensureApplied(nu) - val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags + val copiedAccessFlags = if Feature.migrateTo3 then EmptyFlags else AccessFlags // Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) // def _1: T1 = this.p1 @@ -850,12 +894,11 @@ object desugar { Nil } else { - val defParamss = constrVparamss match { + val defParamss = constrVparamss match case Nil :: paramss => paramss // drop leading () that got inserted by class // TODO: drop this once we do not silently insert empty class parameters anymore case paramss => paramss - } val finalFlag = if ctx.settings.YcompileScala2Library.value then EmptyFlags else Final // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. 
@@ -1681,14 +1724,13 @@ object desugar { .collect: case vd: ValDef => vd - def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = { - val mods = Given - val params = makeImplicitParameters(formals, mods, - mkParamName = i => - if paramNamesOrNil.isEmpty then ContextFunctionParamName.fresh() - else paramNamesOrNil(i)) - FunctionWithMods(params, body, Modifiers(mods), erasedParams) - } + def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = + val paramNames = + if paramNamesOrNil.nonEmpty then paramNamesOrNil + else formals.map(_ => ContextFunctionParamName.fresh()) + val params = for (tpt, pname) <- formals.zip(paramNames) yield + ValDef(pname, tpt, EmptyTree).withFlags(Given | Param) + FunctionWithMods(params, body, Modifiers(Given), erasedParams) private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { val vdef = ValDef(named.name.asTermName, tpt, rhs) diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 2746476261e5..293044c245ef 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -235,4 +235,11 @@ object Config { */ inline val checkLevelsOnConstraints = false inline val checkLevelsOnInstantiation = true + + /** If a type parameter `X` has a single context bound `X: C`, should the + * witness parameter be named `X`? This would prevent the creation of a + * context bound companion. + */ + inline val nameSingleContextBounds = false } + diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 8680ba8c1335..bbc4096f266b 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -2196,9 +2196,9 @@ object Parsers { if (in.token == tok) { in.nextToken(); toplevelTyp() } else EmptyTree - /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} + /** TypeAndCtxBounds ::= TypeBounds [`:` ContextBounds] */ - def typeParamBounds(pname: TypeName): Tree = { + def typeAndCtxBounds(pname: TypeName): Tree = { val t = typeBounds() val cbs = contextBounds(pname) if (cbs.isEmpty) t @@ -2207,8 +2207,16 @@ object Parsers { /** ContextBound ::= Type [`as` id] */ def contextBound(pname: TypeName): Tree = - ContextBoundTypeTree(toplevelTyp(), pname, EmptyTermName) + val t = toplevelTyp() + val ownName = + if isIdent(nme.as) && in.featureEnabled(Feature.modularity) then + in.nextToken() + ident() + else EmptyTermName + ContextBoundTypeTree(t, pname, ownName) + /** ContextBounds ::= ContextBound | `{` ContextBound {`,` ContextBound} `}` + */ def contextBounds(pname: TypeName): List[Tree] = if in.isColon then in.nextToken() @@ -3411,7 +3419,7 @@ object Parsers { } else ident().toTypeName val hkparams = typeParamClauseOpt(ParamOwner.Type) - val bounds = if (isAbstractOwner) typeBounds() else typeParamBounds(name) + val bounds = if (isAbstractOwner) typeBounds() else typeAndCtxBounds(name) TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) } } diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index db858ba05fbc..e123fa900258 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -221,7 +221,9 @@ IntoTargetType ::= Type TypeArgs ::= ‘[’ 
Types ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) -TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] ContextBounds(typeBounds, tps) +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} NamesAndTypes ::= NameAndType {‘,’ NameAndType} NameAndType ::= id ':' Type @@ -359,7 +361,7 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ```ebnf ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) - id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) + id [HkTypeParamClause] TypeAndCtxBounds Bound(below, above, context) TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -384,7 +386,7 @@ TypelessClause ::= DefTermParamClause | UsingParamClause DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ diff --git a/tests/pos/FromString-named.scala b/tests/pos/FromString-named.scala new file mode 100644 index 000000000000..efa0882ae347 --- /dev/null +++ b/tests/pos/FromString-named.scala @@ -0,0 +1,11 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[A]: + def fromString(s: String): A + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString as N, Numeric as num}](a: String, b: String): N = + num.plus(N.fromString(a), N.fromString(b)) From b48fb99fd607bd3955477db8c1d94ceec295b1a1 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 13:38:52 +0200 Subject: [PATCH 020/827] Implement `deferred` givens A definition like `given T = deferred` in a trait will be expanded to an abstract given in the trait that is implemented automatically in all classes inheriting the trait. 
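
For illustration only, a rough sketch of the intended usage (the `Ord`,
`Sorted` and `SortedInts` names are invented for this note and are not part
of the patch; like other changes in this series, the feature presumably
requires -language:experimental.modularity):

    import scala.compiletime.deferred

    trait Ord[T]:
      def compare(x: T, y: T): Int

    trait Sorted:
      type Element
      given Ord[Element] = deferred   // abstract given; no implementation written here

    given Ord[Int] = (x, y) => Integer.compare(x, y)

    class SortedInts extends Sorted:
      type Element = Int
      // an implementing given is synthesized here automatically,
      // presumably by resolving Ord[Int] from the enclosing scope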
--- .../dotty/tools/dotc/core/Definitions.scala | 1 + .../src/dotty/tools/dotc/core/Flags.scala | 1 + .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../dotty/tools/dotc/transform/Erasure.scala | 8 +- .../dotty/tools/dotc/typer/Implicits.scala | 4 +- .../src/dotty/tools/dotc/typer/Namer.scala | 12 + .../dotty/tools/dotc/typer/RefChecks.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 81 +++- .../test/dotc/pos-test-pickling.blacklist | 4 +- library/src/scala/compiletime/package.scala | 13 + tests/neg/deferred-givens.check | 13 + tests/neg/deferred-givens.scala | 30 ++ tests/neg/deferredSummon.check | 17 + tests/neg/deferredSummon.scala | 19 + tests/pos/deferred-givens.scala | 26 ++ tests/pos/deferredSummon.scala | 30 ++ .../pos/hylolib-deferred-given-extract.scala | 19 + .../AnyCollection.scala | 69 ++++ .../pos/hylolib-deferred-given/AnyValue.scala | 76 ++++ .../pos/hylolib-deferred-given/BitArray.scala | 375 ++++++++++++++++++ .../hylolib-deferred-given/Collection.scala | 281 +++++++++++++ .../hylolib-deferred-given/CoreTraits.scala | 57 +++ tests/pos/hylolib-deferred-given/Hasher.scala | 38 ++ .../pos/hylolib-deferred-given/HyArray.scala | 224 +++++++++++ .../pos/hylolib-deferred-given/Integers.scala | 58 +++ tests/pos/hylolib-deferred-given/Range.scala | 37 ++ tests/pos/hylolib-deferred-given/Slice.scala | 49 +++ .../StringConvertible.scala | 14 + 28 files changed, 1545 insertions(+), 14 deletions(-) create mode 100644 tests/neg/deferred-givens.check create mode 100644 tests/neg/deferred-givens.scala create mode 100644 tests/neg/deferredSummon.check create mode 100644 tests/neg/deferredSummon.scala create mode 100644 tests/pos/deferred-givens.scala create mode 100644 tests/pos/deferredSummon.scala create mode 100644 tests/pos/hylolib-deferred-given-extract.scala create mode 100644 tests/pos/hylolib-deferred-given/AnyCollection.scala create mode 100644 tests/pos/hylolib-deferred-given/AnyValue.scala create mode 100644 tests/pos/hylolib-deferred-given/BitArray.scala create mode 100644 tests/pos/hylolib-deferred-given/Collection.scala create mode 100644 tests/pos/hylolib-deferred-given/CoreTraits.scala create mode 100644 tests/pos/hylolib-deferred-given/Hasher.scala create mode 100644 tests/pos/hylolib-deferred-given/HyArray.scala create mode 100644 tests/pos/hylolib-deferred-given/Integers.scala create mode 100644 tests/pos/hylolib-deferred-given/Range.scala create mode 100644 tests/pos/hylolib-deferred-given/Slice.scala create mode 100644 tests/pos/hylolib-deferred-given/StringConvertible.scala diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 15880207b3c8..9ee5891f1606 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -240,6 +240,7 @@ class Definitions { @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") + @tu lazy val Compiletime_deferred : Symbol = CompiletimePackageClass.requiredMethod("deferred") @tu lazy val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") @tu lazy val Compiletime_constValue : Symbol = 
CompiletimePackageClass.requiredMethod("constValue") diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 2bc7610bb0ce..e17834d61fdc 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -573,6 +573,7 @@ object Flags { val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without right-hand sides val DeferredOrTypeParam: FlagSet = Deferred | TypeParam // type symbols without right-hand sides + val DeferredGivenFlags = Deferred | Given | HasDefault val EnumValue: FlagSet = Enum | StableRealizable // A Scala enum value val FinalOrInline: FlagSet = Final | Inline val FinalOrModuleClass: FlagSet = Final | ModuleClass // A module class or a final class diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 7545cf5c4ba1..c0eb8a690eb4 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -455,6 +455,7 @@ object StdNames { val create: N = "create" val currentMirror: N = "currentMirror" val curried: N = "curried" + val deferred: N = "deferred" val definitions: N = "definitions" val delayedInit: N = "delayedInit" val delayedInitArg: N = "delayedInit$body" diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 8bfbb90a0700..a25a2fcb5c6d 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -567,7 +567,13 @@ object Erasure { case Some(annot) => val message = annot.argumentConstant(0) match case Some(c) => - c.stringValue.toMessage + val addendum = tree match + case tree: RefTree + if tree.symbol == defn.Compiletime_deferred && tree.name != nme.deferred => + i".\nNote that `deferred` can only be used under its own name when implementing a given in a trait; `${tree.name}` is not accepted." 
+ case _ => + "" + (c.stringValue ++ addendum).toMessage case _ => em"""Reference to ${tree.symbol.showLocated} should not have survived, |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index bc19e97b85d8..5ac12ce1aa0c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -924,10 +924,10 @@ trait Implicits: /** Search an implicit argument and report error if not found */ - def implicitArgTree(formal: Type, span: Span)(using Context): Tree = { + def implicitArgTree(formal: Type, span: Span, where: => String = "")(using Context): Tree = { val arg = inferImplicitArg(formal, span) if (arg.tpe.isInstanceOf[SearchFailureType]) - report.error(missingArgMsg(arg, formal, ""), ctx.source.atSpan(span)) + report.error(missingArgMsg(arg, formal, where), ctx.source.atSpan(span)) arg } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index e48c2fdf5066..22a12ed0f468 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1830,6 +1830,18 @@ class Namer { typer: Typer => case _ => WildcardType } + + // translate `given T = deferred` to an abstract given with HasDefault flag + if sym.is(Given) then + mdef.rhs match + case rhs: RefTree + if rhs.name == nme.deferred + && typedAheadExpr(rhs).symbol == defn.Compiletime_deferred + && sym.maybeOwner.is(Trait) => + sym.resetFlag(Final) + sym.setFlag(Deferred | HasDefault) + case _ => + val mbrTpe = paramFn(checkSimpleKinded(typedAheadType(mdef.tpt, tptProto)).tpe) if (ctx.explicitNulls && mdef.mods.is(JavaDefined)) JavaNullInterop.nullifyMember(sym, mbrTpe, mdef.mods.isAllOf(JavaEnumValue)) diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 7cd1d67e9aa5..266b69d029c1 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -552,7 +552,7 @@ object RefChecks { overrideError("is an extension method, cannot override a normal method") else if (other.is(ExtensionMethod) && !member.is(ExtensionMethod)) // (1.3) overrideError("is a normal method, cannot override an extension method") - else if !other.is(Deferred) + else if (!other.is(Deferred) || other.isAllOf(Given | HasDefault)) && !member.is(Deferred) && !other.name.is(DefaultGetterName) && !member.isAnyOverride diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index b90b742aa0ec..c467a4507730 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2649,12 +2649,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) - if (sym.isOneOf(GivenOrImplicit)) checkImplicitConversionDefOK(sym) + if sym.is(Implicit) then checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) val tpt1 = checkSimpleKinded(typedType(tpt)) val rhs1 = vdef.rhs match { - case rhs @ Ident(nme.WILDCARD) => rhs withType tpt1.tpe - case rhs => typedExpr(rhs, tpt1.tpe.widenExpr) + case rhs @ Ident(nme.WILDCARD) => + rhs.withType(tpt1.tpe) + case rhs: RefTree + if rhs.name == 
nme.deferred && sym.isAllOf(DeferredGivenFlags, butNot = Param) => + EmptyTree + case rhs => + typedExpr(rhs, tpt1.tpe.widenExpr) } val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) postProcessInfo(vdef1, sym) @@ -2715,9 +2720,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.isInlineMethod then rhsCtx.addMode(Mode.InlineableBody) if sym.is(ExtensionMethod) then rhsCtx.addMode(Mode.InExtensionMethod) - val rhs1 = PrepareInlineable.dropInlineIfError(sym, - if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) - else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) + val rhs1 = ddef.rhs match + case Ident(nme.deferred) if sym.isAllOf(DeferredGivenFlags) => + EmptyTree + case rhs => + PrepareInlineable.dropInlineIfError(sym, + if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) + else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then if StagingLevel.level > 0 then @@ -2898,6 +2907,59 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case None => body + /** Implement givens that were declared with a `deferred` rhs. + * The a given value matching the declared type is searched in a + * context directly enclosing the current class, in which all given + * parameters of the current class are also defined. + */ + def implementDeferredGivens(body: List[Tree]): List[Tree] = + if cls.is(Trait) || ctx.isAfterTyper then body + else + def isGivenValue(mbr: TermRef) = + val dcl = mbr.symbol + if dcl.is(Method) then + report.error( + em"""Cannnot infer the implementation of the deferred ${dcl.showLocated} + |since that given is parameterized. An implementing given needs to be written explicitly.""", + cdef.srcPos) + false + else true + + def givenImpl(mbr: TermRef): ValDef = + val dcl = mbr.symbol + val target = dcl.info.asSeenFrom(cls.thisType, dcl.owner) + val constr = cls.primaryConstructor + val usingParamAccessors = cls.paramAccessors.filter(_.is(Given)) + val paramScope = newScopeWith(usingParamAccessors*) + val searchCtx = ctx.outer.fresh.setScope(paramScope) + val rhs = implicitArgTree(target, cdef.span, + where = i"inferring the implementation of the deferred ${dcl.showLocated}" + )(using searchCtx) + + val impl = dcl.copy(cls, + flags = dcl.flags &~ (HasDefault | Deferred) | Final | Override, + info = target, + coord = rhs.span).entered.asTerm + + def anchorParams = new TreeMap: + override def transform(tree: Tree)(using Context): Tree = tree match + case id: Ident if usingParamAccessors.contains(id.symbol) => + cpy.Select(id)(This(cls), id.name) + case _ => + super.transform(tree) + ValDef(impl, anchorParams.transform(rhs)) + end givenImpl + + val givenImpls = + cls.thisType.implicitMembers + //.showing(i"impl def givens for $cls/$result") + .filter(_.symbol.isAllOf(DeferredGivenFlags, butNot = Param)) + //.showing(i"impl def filtered givens for $cls/$result") + .filter(isGivenValue) + .map(givenImpl) + body ++ givenImpls + end implementDeferredGivens + ensureCorrectSuperClass() completeAnnotations(cdef, cls) val constr1 = typed(constr).asInstanceOf[DefDef] @@ -2919,9 +2981,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else { val dummy = localDummy(cls, impl) val body1 = - addParentRefinements( - addAccessorDefs(cls, - typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1)) + implementDeferredGivens( + addParentRefinements( + addAccessorDefs(cls, + typedStats(impl.body, dummy)(using 
ctx.inClassContext(self1.symbol))._1))) checkNoDoubleDeclaration(cls) val impl1 = cpy.Template(impl)(constr1, parents1, Nil, self1, body1) diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 3b14ce28569d..5c715faa504b 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -103,7 +103,7 @@ i13842.scala # Position change under captureChecking boxmap-paper.scala -# Function types print differnt after unpickling since test mispredicts Feature.preFundsEnabled +# Function types print different after unpickling since test mispredicts Feature.preFundsEnabled caps-universal.scala # GADT cast applied to singleton type difference @@ -128,6 +128,8 @@ i20053b.scala parsercombinators-givens.scala parsercombinators-givens-2.scala parsercombinators-arrow.scala +hylolib-deferred-given + diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index 3eca997554a0..be76941a680b 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -42,6 +42,19 @@ def erasedValue[T]: T = erasedValue[T] @compileTimeOnly("`uninitialized` can only be used as the right hand side of a mutable field definition") def uninitialized: Nothing = ??? +/** Used as the right hand side of a given in a trait, like this + * + * ``` + * given T = deferred + * ``` + * + * This signifies that the given will get a synthesized definition in all classes + * that implement the enclosing trait and that do not contain an explicit overriding + * definition of that given. + */ +@compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait") +def deferred: Nothing = ??? + /** The error method is used to produce user-defined compile errors during inline expansion. * If an inline expansion results in a call error(msgStr) the compiler produces an error message containing the given msgStr. * diff --git a/tests/neg/deferred-givens.check b/tests/neg/deferred-givens.check new file mode 100644 index 000000000000..cc15901d087f --- /dev/null +++ b/tests/neg/deferred-givens.check @@ -0,0 +1,13 @@ +-- [E172] Type Error: tests/neg/deferred-givens.scala:11:6 ------------------------------------------------------------- +11 |class B extends A // error + |^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- [E172] Type Error: tests/neg/deferred-givens.scala:13:15 ------------------------------------------------------------ +13 |abstract class C extends A // error + |^^^^^^^^^^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- Error: tests/neg/deferred-givens.scala:26:8 ------------------------------------------------------------------------- +26 | class E extends A2 // error, can't summon polymorphic given + | ^^^^^^^^^^^^^^^^^^ + | Cannnot infer the implementation of the deferred given instance given_Ctx3_T in trait A2 + | since that given is parameterized. An implementing given needs to be written explicitly. 
diff --git a/tests/neg/deferred-givens.scala b/tests/neg/deferred-givens.scala new file mode 100644 index 000000000000..7ff67d784714 --- /dev/null +++ b/tests/neg/deferred-givens.scala @@ -0,0 +1,30 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +class Ctx +class Ctx2 + +trait A: + given Ctx as ctx = deferred + given Ctx2 = deferred + +class B extends A // error + +abstract class C extends A // error + +class D extends A: + given Ctx as ctx = Ctx() // ok, was implemented + given Ctx2 = Ctx2() // ok + +class Ctx3[T] + +trait A2: + given [T] => Ctx3[T] = deferred + +object O: + given [T] => Ctx3[T] = Ctx3[T]() + class E extends A2 // error, can't summon polymorphic given + +class E extends A2: + given [T] => Ctx3[T] = Ctx3[T]() // ok + diff --git a/tests/neg/deferredSummon.check b/tests/neg/deferredSummon.check new file mode 100644 index 000000000000..bd76ad73467e --- /dev/null +++ b/tests/neg/deferredSummon.check @@ -0,0 +1,17 @@ +-- Error: tests/neg/deferredSummon.scala:4:26 -------------------------------------------------------------------------- +4 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:7:26 -------------------------------------------------------------------------- +7 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:12:16 ------------------------------------------------------------------------- +12 | given Int = deferred // error + | ^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:16:14 ------------------------------------------------------------------------- +16 | given Int = defered // error + | ^^^^^^^ + |`deferred` can only be used as the right hand side of a given definition in a trait. + |Note that `deferred` can only be used under its own name when implementing a given in a trait; `defered` is not accepted. diff --git a/tests/neg/deferredSummon.scala b/tests/neg/deferredSummon.scala new file mode 100644 index 000000000000..cddde82535fb --- /dev/null +++ b/tests/neg/deferredSummon.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity + +object Test: + given Int = compiletime.deferred // error + +abstract class C: + given Int = compiletime.deferred // error + +trait A: + import compiletime.deferred + locally: + given Int = deferred // error + +trait B: + import compiletime.deferred as defered + given Int = defered // error + + + diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala new file mode 100644 index 000000000000..51fa43866d1e --- /dev/null +++ b/tests/pos/deferred-givens.scala @@ -0,0 +1,26 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +class Ord[Elem] + +given Ord[Double] + +trait B: + type Elem + given Ord[Elem] = deferred + def foo = summon[Ord[Elem]] + +class C extends B: + type Elem = String + override given Ord[Elem] = ??? 
+ +def bar(using Ord[String]) = 1 + +class D(using Ord[String]) extends B: + type Elem = String + +class E(using x: Ord[String]) extends B: + type Elem = String + override given Ord[Elem] = x + +class F[X: Ord] extends B: + type Elem = X diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala new file mode 100644 index 000000000000..d12a98e52736 --- /dev/null +++ b/tests/pos/deferredSummon.scala @@ -0,0 +1,30 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +trait Ord[Self]: + def less(x: Self, y: Self): Boolean + +trait A: + type Elem + given Ord[Elem] = deferred + def foo = summon[Ord[Elem]] + +object Inst: + given Ord[Int]: + def less(x: Int, y: Int) = x < y + +object Test: + import Inst.given + class C extends A: + type Elem = Int + object E extends A: + type Elem = Int + given A: + type Elem = Int + +class D[T: Ord] extends A: + type Elem = T + + + + diff --git a/tests/pos/hylolib-deferred-given-extract.scala b/tests/pos/hylolib-deferred-given-extract.scala new file mode 100644 index 000000000000..02d889dc9aac --- /dev/null +++ b/tests/pos/hylolib-deferred-given-extract.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity -source future +package hylotest +import compiletime.deferred + +trait Value[Self] + +/** A collection of elements accessible by their position. */ +trait Collection[Self]: + + /** The type of the elements in the collection. */ + type Element + given elementIsValue: Value[Element] = compiletime.deferred + +class BitArray + +given Value[Boolean] {} + +given Collection[BitArray] with + type Element = Boolean diff --git a/tests/pos/hylolib-deferred-given/AnyCollection.scala b/tests/pos/hylolib-deferred-given/AnyCollection.scala new file mode 100644 index 000000000000..55e453d6dc87 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/AnyCollection.scala @@ -0,0 +1,69 @@ +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base](using b: Collection[Base])(base: Base): AnyCollection[b.Element] = + // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` + // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these + // choices is even correct! Note also that the ambiguity is suppressed if the constructor of + // `AnyValue` is declared with a context bound rather than an implicit parameter. 
+ given Value[b.Position] = b.positionIsValue + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[b.Position])) + + def at(p: AnyValue): b.Element = + base.at(p.unsafelyUnwrappedAs[b.Position]) + + new AnyCollection[b.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { + + type Element = T + //given elementIsValue: Value[Element] = tIsValue + + type Position = AnyValue + given positionIsValue: Value[Position] = anyValueIsValue + + extension (self: AnyCollection[T]) { + + def startPosition = + self._start() + + def endPosition = + self._end() + + def positionAfter(p: Position) = + self._after(p) + + def at(p: Position) = + self._at(p) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/AnyValue.scala b/tests/pos/hylolib-deferred-given/AnyValue.scala new file mode 100644 index 000000000000..b9d39869c09a --- /dev/null +++ b/tests/pos/hylolib-deferred-given/AnyValue.scala @@ -0,0 +1,76 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. */ + def apply[T](using Value[T])(wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given anyValueIsValue: Value[AnyValue] with { + + extension (self: AnyValue) { + + def copy(): AnyValue = + self.copy() + + def eq(other: AnyValue): Boolean = + self `eq` other + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/BitArray.scala b/tests/pos/hylolib-deferred-given/BitArray.scala new file mode 100644 index 000000000000..485f30472847 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/BitArray.scala @@ -0,0 +1,375 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. 
*/ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. 
+ */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. 
+ */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. */ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given bitArrayPositionIsValue: Value[BitArray.Position] with { + + extension (self: BitArray.Position) { + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} + +given bitArrayIsCollection: Collection[BitArray] with { + + type Element = Boolean + //given elementIsValue: Value[Boolean] = booleanIsValue + + type Position = BitArray.Position + given positionIsValue: Value[BitArray.Position] = bitArrayPositionIsValue + + extension (self: BitArray) { + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + + } + +} + +given bitArrayIsStringConvertible: StringConvertible[BitArray] with { + + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + +} diff --git a/tests/pos/hylolib-deferred-given/Collection.scala b/tests/pos/hylolib-deferred-given/Collection.scala new file mode 100644 index 000000000000..6b5e7a762dc8 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Collection.scala @@ -0,0 +1,281 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection[Self] { + + /** The type of the elements in the collection. */ + type Element + given elementIsValue: Value[Element] = compiletime.deferred + + /** The type of a position in the collection. */ + type Position + given positionIsValue: Value[Position] + + extension (self: Self) { + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. 
+ * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def _count(p: Position, n: Int): Int = + if p `eq` e then n else _count(self.positionAfter(p), n + 1) + _count(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if (i.eq(e)) { + false + } else if (j.eq(e)) { + true + } else { + def _isBefore(n: Position): Boolean = + if (n.eq(j)) { + true + } else if (n.eq(e)) { + false + } else { + _isBefore(self.positionAfter(n)) + } + _isBefore(self.positionAfter(i)) + } + + } + +} + +extension [Self](self: Self)(using s: Collection[Self]) { + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(s.Element, Slice[Self])] = + if (self.isEmpty) { + None + } else { + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + } + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T, combine: (T, s.Element) => T): T = + val e = self.endPosition + def loop(p: s.Position, r: T): T = + if (p.eq(e)) { + r + } else { + loop(self.positionAfter(p), combine(r, self.at(p))) + } + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: (s.Element) => Boolean): Boolean = + val e = self.endPosition + def loop(p: s.Position): Boolean = + if (p.eq(e)) { + true + } else if (!action(self.at(p))) { + false + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T](using Value[T])(transform: (s.Element) => T): HyArray[T] = + self.reduce( + HyArray[T](), + (r, e) => r.append(transform(e), assumeUniqueness = true) + ) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. 
+ * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: (s.Element) => Boolean): HyArray[s.Element] = + self.reduce( + HyArray[s.Element](), + (r, e) => if (isIncluded(e)) then r.append(e, assumeUniqueness = true) else r + ) + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def firstPositionWhere(predicate: (s.Element) => Boolean): Option[s.Position] = + val e = self.endPosition + def loop(p: s.Position): Option[s.Position] = + if (p.eq(e)) { + None + } else if (predicate(self.at(p))) { + Some(p) + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Comparable[s.Element]): Option[s.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Comparable[s.Element]): Option[s.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (s.Element, s.Element) => Boolean): Option[s.Element] = + if (self.isEmpty) { + None + } else { + val e = self.endPosition + def _least(p: s.Position, least: s.Element): s.Element = + if (p.eq(e)) { + least + } else { + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + _least(self.positionAfter(p), y) + } + + val b = self.startPosition + Some(_least(self.positionAfter(b), self.at(b))) + } + +} + +extension [Self](self: Self)(using + s: Collection[Self], + e: Value[s.Element] +) { + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. 
*/ + def elementsEqual[T](using o: Collection[T] { type Element = s.Element })(other: T): Boolean = + def loop(i: s.Position, j: o.Position): Boolean = + if (i `eq` self.endPosition) { + j `eq` other.endPosition + } else if (j `eq` other.endPosition) { + false + } else if (self.at(i) `neq` other.at(j)) { + false + } else { + loop(self.positionAfter(i), other.positionAfter(j)) + } + loop(self.startPosition, other.startPosition) + +} diff --git a/tests/pos/hylolib-deferred-given/CoreTraits.scala b/tests/pos/hylolib-deferred-given/CoreTraits.scala new file mode 100644 index 000000000000..01b2c5242af9 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/CoreTraits.scala @@ -0,0 +1,57 @@ +package hylo + +/** A type whose instance can be treated as independent values. + * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value[Self] { + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher + + } + +} + +extension [Self: Value](self: Self) def neq(other: Self): Boolean = !self.eq(other) + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable[Self] extends Value[Self] { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib-deferred-given/Hasher.scala b/tests/pos/hylolib-deferred-given/Hasher.scala new file mode 100644 index 000000000000..ef6813df6b60 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Hasher.scala @@ -0,0 +1,38 @@ +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. 
*/ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib-deferred-given/HyArray.scala b/tests/pos/hylolib-deferred-given/HyArray.scala new file mode 100644 index 000000000000..98632dcb65bc --- /dev/null +++ b/tests/pos/hylolib-deferred-given/HyArray.scala @@ -0,0 +1,224 @@ +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element] private (using + elementIsValue: Value[Element] +)( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + // NOTE: Can't refine `C.Element` without renaming the generic parameter of `HyArray`. + // /** Adds the contents of `source` at the end of the array. */ + // def appendContents[C](using + // s: Collection[C] + // )( + // source: C { type Element = Element }, + // assumeUniqueness: Boolean = false + // ): HyArray[Element] = + // val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + // source.reduce(result, (r, e) => r.append(e, assumeUniqueness = true)) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). 
+ */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T](using t: Value[T])(elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { + + extension (self: HyArray[T]) { + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher, (h, e) => e.hashInto(h)) + + } + +} + +given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { + + type Element = T + //given elementIsValue: Value[T] = tIsValue + + type Position = Int + given positionIsValue: Value[Int] = intIsValue + + extension (self: HyArray[T]) { + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + + } + +} + +// NOTE: This should work. +// given hyArrayIsStringConvertible[T](using +// tIsValue: Value[T], +// tIsStringConvertible: StringConvertible[T] +// ): StringConvertible[HyArray[T]] with { +// +// given Collection[HyArray[T]] = hyArrayIsCollection[T] +// +// extension (self: HyArray[T]) +// override def description: String = +// var contents = mutable.StringBuilder() +// self.forEach((e) => { contents ++= e.description; true }) +// s"[${contents.mkString(", ")}]" +// +// } diff --git a/tests/pos/hylolib-deferred-given/Integers.scala b/tests/pos/hylolib-deferred-given/Integers.scala new file mode 100644 index 000000000000..b9bc203a88ea --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Integers.scala @@ -0,0 +1,58 @@ +package hylo + +given booleanIsValue: Value[Boolean] with { + + extension (self: Boolean) { + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. 
+ self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + + } + +} + +given intIsValue: Value[Int] with { + + extension (self: Int) { + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + } + +} + +given intIsComparable: Comparable[Int] with { + + extension (self: Int) { + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + + } + +} + +given intIsStringConvertible: StringConvertible[Int] with {} diff --git a/tests/pos/hylolib-deferred-given/Range.scala b/tests/pos/hylolib-deferred-given/Range.scala new file mode 100644 index 000000000000..1f597652ead1 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. */ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound](lowerBound: Bound, upperBound: Bound)(using Comparable[Bound]) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib-deferred-given/Slice.scala b/tests/pos/hylolib-deferred-given/Slice.scala new file mode 100644 index 000000000000..57cdb38f6e53 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Slice.scala @@ -0,0 +1,49 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base](using + val b: Collection[Base] +)( + val base: Base, + val bounds: Range[b.Position] +) { + + /** Returns `true` iff `this` is empty. 
*/ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: b.Position = + bounds.lowerBound + + def endPosition: b.Position = + bounds.upperBound + + def positionAfter(p: b.Position): b.Position = + base.positionAfter(p) + + def at(p: b.Position): b.Element = + base.at(p) + +} + +given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { + + type Element = c.Element + //given elementIsValue: Value[Element] = c.elementIsValue + + type Position = c.Position + given positionIsValue: Value[Position] = c.positionIsValue + + extension (self: Slice[T]) { + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] // NOTE: Ugly hack + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/StringConvertible.scala b/tests/pos/hylolib-deferred-given/StringConvertible.scala new file mode 100644 index 000000000000..0702f79f2794 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/StringConvertible.scala @@ -0,0 +1,14 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible[Self] { + + extension (self: Self) { + + /** Returns a textual description of `self`. */ + def description: String = + self.toString + + } + +} From 600293ee2a74e945ad8870b9034b416e2294c0e6 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 17:06:18 +0200 Subject: [PATCH 021/827] FIX: Allow ContextBoundParamNames to be unmangled. Also, fix the unmangling of UniqueExtNames, which seemingly never worked. --- .../src/dotty/tools/dotc/core/NameKinds.scala | 37 +++++++++---------- 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index d4f009cbbbd5..74d440562824 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -182,13 +182,13 @@ object NameKinds { case DerivedName(underlying, info: this.NumberedInfo) => Some((underlying, info.num)) case _ => None } - protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = { + protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = var i = name.length - while (i > 0 && name(i - 1).isDigit) i -= 1 - if (i > separator.length && i < name.length && - name.slice(i - separator.length, i).toString == separator) i + while i > 0 && name(i - 1).isDigit do i -= 1 + if i >= separator.length && i < name.length + && name.slice(i - separator.length, i).toString == separator + then i else -1 - } numberedNameKinds(tag) = this: @unchecked } @@ -240,6 +240,16 @@ object NameKinds { } } + /** Unique names that can be unmangled */ + class UniqueNameKindWithUnmangle(separator: String) extends UniqueNameKind(separator): + override def unmangle(name: SimpleName): TermName = + val i = skipSeparatorAndNum(name, separator) + if i > 0 then + val index = name.drop(i).toString.toInt + val original = name.take(i - separator.length).asTermName + apply(original, index) + else name + /** Names of the form `prefix . name` */ val QualifiedName: QualifiedNameKind = new QualifiedNameKind(QUALIFIED, ".") @@ -288,7 +298,7 @@ object NameKinds { * * The "evidence$" prefix is a convention copied from Scala 2. 
*/ - val ContextBoundParamName: UniqueNameKind = new UniqueNameKind("evidence$") + val ContextBoundParamName: UniqueNameKind = new UniqueNameKindWithUnmangle("evidence$") /** The name of an inferred contextual function parameter: * @@ -323,20 +333,7 @@ object NameKinds { val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") - /** A kind of unique extension methods; Unlike other unique names, these can be - * unmangled. - */ - val UniqueExtMethName: UniqueNameKind = new UniqueNameKind("$extension") { - override def unmangle(name: SimpleName): TermName = { - val i = skipSeparatorAndNum(name, separator) - if (i > 0) { - val index = name.drop(i).toString.toInt - val original = name.take(i - separator.length).asTermName - apply(original, index) - } - else name - } - } + val UniqueExtMethName: UniqueNameKind = new UniqueNameKindWithUnmangle("$extension") /** Kinds of unique names generated by the pattern matcher */ val PatMatStdBinderName: UniqueNameKind = new UniqueNameKind("x") From 48000ee3f578201279094c7d76152a9fbf0992cc Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 21 Dec 2023 11:32:24 +0100 Subject: [PATCH 022/827] Change rules for given prioritization Consider the following program: ```scala class A class B extends A class C extends A given A = A() given B = B() given C = C() def f(using a: A, b: B, c: C) = println(a.getClass) println(b.getClass) println(c.getClass) @main def Test = f ``` With the current rules, this would fail with an ambiguity error between B and C when trying to synthesize the A parameter. This is a problem without an easy remedy. We can fix this problem by flipping the priority for implicit arguments. Instead of requiring an argument to be most _specific_, we now require it to be most _general_ while still conforming to the formal parameter. There are three justifications for this change, which at first glance seems quite drastic: - It gives us a natural way to deal with inheritance triangles like the one in the code above. Such triangles are quite common. - Intuitively, we want to get the closest possible match between required formal parameter type and synthetisized argument. The "most general" rule provides that. - We already do a crucial part of this. Namely, with current rules we interpolate all type variables in an implicit argument downwards, no matter what their variance is. This makes no sense in theory, but solves hairy problems with contravariant typeclasses like `Comparable`. Instead of this hack, we now do something more principled, by flipping the direction everywhere, preferring general over specific, instead of just flipping contravariant type parameters. The behavior is dependent on the Scala version - Old behavior: up to 3.4 - New behavior: from 3.5, 3.5-migration warns on behavior change The CB builds under the new rules. One fix was needed for a shapeless 3 deriving test. There was a typo: mkInstances instead of mkProductInstances, which previously got healed by accident because of the most specific rule. Also: Don't flip contravariant type arguments for overloading resolution Flipping contravariant type arguments was needed for implicit search where it will be replaced by a more general scheme. But it makes no sense for overloading resolution. For overloading resolution, we want to pick the most specific alternative, analogous to us picking the most specific instantiation when we force a fully defined type. 
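To make the contrast concrete, here is a small sketch (it reuses the `A`/`B`/`C` classes from the example above; the object and method names are invented for illustration, and the expected outcomes restate the rules described here rather than verified compiler output):

```scala
class A
class B extends A
class C extends A

object ImplicitSide:
  given A = A()
  given B = B()
  given C = C()
  // Under the new rules (3.5 and later), the most general conforming given
  // is preferred, so this resolves to `given A` instead of being reported
  // as ambiguous between the more specific B and C instances.
  val a = summon[A]

object OverloadSide:
  def g(x: A): String = "picked the A overload"
  def g(x: B): String = "picked the B overload"
  // Overloading resolution is unchanged: the most specific applicable
  // alternative still wins, so this call picks the B overload.
  val s = g(B())
```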
Also: Disable implicit search everywhere for disambiaguation Previously, one disambiguation step missed that, whereas implicits were turned off everywhere else. --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 76d057f15408..63e86e3a321d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1886,7 +1886,7 @@ trait Applications extends Compatibility { then // Intermediate rules: better means specialize, but map all type arguments downwards // These are enabled for 3.0-3.5, and for all comparisons between old-style implicits, - // and in 3.5 amd 3.6-migration when we compare with previous rules. + // and in 3.5 and 3.6-migration when we compare with previous rules. val flip = new TypeMap: def apply(t: Type) = t match case t @ AppliedType(tycon, args) => diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5ac12ce1aa0c..fd22f0ec5529 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -531,7 +531,7 @@ object Implicits: |must be more specific than $target""" :: Nil override def msg(using Context) = - super.msg.append("\nThe expected type $target is not specific enough, so no search was attempted") + super.msg.append(i"\nThe expected type $target is not specific enough, so no search was attempted") override def toString = s"TooUnspecific" end TooUnspecific From d923cac0f70b357d75721daf0cf316b4393f2beb Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 15:54:37 +0200 Subject: [PATCH 023/827] Allow context bounds in type declarations Expand them to deferred givens --- .../src/dotty/tools/dotc/ast/Desugar.scala | 15 +- .../dotty/tools/dotc/parsing/Parsers.scala | 53 +-- .../test/dotc/pos-test-pickling.blacklist | 1 + docs/_docs/internals/syntax.md | 2 +- tests/pos/deferredSummon.scala | 21 +- tests/pos/dep-context-bounds.scala | 10 + tests/pos/hylolib-cb-extract.scala | 18 + tests/pos/hylolib-cb/AnyCollection.scala | 66 ++++ tests/pos/hylolib-cb/AnyValue.scala | 76 ++++ tests/pos/hylolib-cb/BitArray.scala | 372 ++++++++++++++++++ tests/pos/hylolib-cb/Collection.scala | 279 +++++++++++++ tests/pos/hylolib-cb/CoreTraits.scala | 57 +++ tests/pos/hylolib-cb/Hasher.scala | 38 ++ tests/pos/hylolib-cb/HyArray.scala | 221 +++++++++++ tests/pos/hylolib-cb/Integers.scala | 58 +++ tests/pos/hylolib-cb/Range.scala | 37 ++ tests/pos/hylolib-cb/Slice.scala | 46 +++ tests/pos/hylolib-cb/StringConvertible.scala | 14 + .../pos/hylolib-deferred-given/AnyValue.scala | 2 +- tests/pos/hylolib-deferred-given/Range.scala | 2 +- 20 files changed, 1355 insertions(+), 33 deletions(-) create mode 100644 tests/pos/dep-context-bounds.scala create mode 100644 tests/pos/hylolib-cb-extract.scala create mode 100644 tests/pos/hylolib-cb/AnyCollection.scala create mode 100644 tests/pos/hylolib-cb/AnyValue.scala create mode 100644 tests/pos/hylolib-cb/BitArray.scala create mode 100644 tests/pos/hylolib-cb/Collection.scala create mode 100644 tests/pos/hylolib-cb/CoreTraits.scala create mode 100644 tests/pos/hylolib-cb/Hasher.scala create mode 100644 tests/pos/hylolib-cb/HyArray.scala create mode 100644 tests/pos/hylolib-cb/Integers.scala create mode 
100644 tests/pos/hylolib-cb/Range.scala create mode 100644 tests/pos/hylolib-cb/Slice.scala create mode 100644 tests/pos/hylolib-cb/StringConvertible.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 04fd1afca8be..d6e442ed4a0c 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -237,12 +237,13 @@ object desugar { def desugarRhs(rhs: Tree): Tree = rhs match case ContextBounds(tbounds, cxbounds) => + val isMember = flags.isAllOf(DeferredGivenFlags) for bound <- cxbounds do val evidenceName = bound match case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => ownName - case _ if Config.nameSingleContextBounds && cxbounds.tail.isEmpty - && Feature.enabled(Feature.modularity) => + case _ if Config.nameSingleContextBounds && !isMember + && cxbounds.tail.isEmpty && Feature.enabled(Feature.modularity) => tdef.name.toTermName case _ => freshName(bound) @@ -492,6 +493,14 @@ object desugar { Apply(fn, params.map(refOfDef)) } + def typeDef(tdef: TypeDef)(using Context): Tree = + val evidenceBuf = new mutable.ListBuffer[ValDef] + val result = desugarContextBounds( + tdef, evidenceBuf, + (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, + inventGivenOrExtensionName, Nil) + if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) + /** The expansion of a class definition. See inline comments for what is involved */ def classDef(cdef: TypeDef)(using Context): Tree = { val impl @ Template(constr0, _, self, _) = cdef.rhs: @unchecked @@ -1426,7 +1435,7 @@ object desugar { case tree: TypeDef => if (tree.isClassDef) classDef(tree) else if (ctx.mode.isQuotedPattern) quotedPatternTypeDef(tree) - else tree + else typeDef(tree) case tree: DefDef => if (tree.name.isConstructorName) tree // was already handled by enclosing classDef else defDef(tree) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index bbc4096f266b..f3d02dda5c48 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3930,14 +3930,16 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] + /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atSpan(start, nameStart) { val nameIdent = typeIdent() + val tname = nameIdent.name.asTypeName val tparams = typeParamClauseOpt(ParamOwner.Type) val vparamss = funParamClauses() + def makeTypeDef(rhs: Tree): Tree = { val rhs1 = lambdaAbstractAll(tparams :: vparamss, rhs) val tdef = TypeDef(nameIdent.name.toTypeName, rhs1) @@ -3945,36 +3947,37 @@ object Parsers { tdef.pushAttachment(Backquoted, ()) finalizeDef(tdef, mods, start) } + in.token match { case EQUALS => in.nextToken() makeTypeDef(toplevelTyp()) case SUBTYPE | SUPERTYPE => - val bounds = typeBounds() - if (in.token == EQUALS) { - val eqOffset = in.skipToken() - var rhs = toplevelTyp() - rhs match { - case mtt: MatchTypeTree => - bounds match { - case TypeBoundsTree(EmptyTree, upper, _) => - rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) - case _ => - syntaxError(em"cannot combine lower bound and match type alias", eqOffset) - } - case _ => - if mods.is(Opaque) then - rhs = TypeBoundsTree(bounds.lo, bounds.hi, 
rhs) - else - syntaxError(em"cannot combine bound and alias", eqOffset) - } - makeTypeDef(rhs) - } - else makeTypeDef(bounds) + typeAndCtxBounds(tname) match + case bounds: TypeBoundsTree if in.token == EQUALS => + val eqOffset = in.skipToken() + var rhs = toplevelTyp() + rhs match { + case mtt: MatchTypeTree => + bounds match { + case TypeBoundsTree(EmptyTree, upper, _) => + rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) + case _ => + syntaxError(em"cannot combine lower bound and match type alias", eqOffset) + } + case _ => + if mods.is(Opaque) then + rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) + else + syntaxError(em"cannot combine bound and alias", eqOffset) + } + makeTypeDef(rhs) + case bounds => makeTypeDef(bounds) case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => - makeTypeDef(typeBounds()) - case _ if (staged & StageKind.QuotedPattern) != 0 => - makeTypeDef(typeBounds()) + makeTypeDef(typeAndCtxBounds(tname)) + case _ if (staged & StageKind.QuotedPattern) != 0 + || in.featureEnabled(Feature.modularity) && in.isColon => + makeTypeDef(typeAndCtxBounds(tname)) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) return EmptyTree // return to avoid setting the span to EmptyTree diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 5c715faa504b..e58277bdc0e5 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -129,6 +129,7 @@ parsercombinators-givens.scala parsercombinators-givens-2.scala parsercombinators-arrow.scala hylolib-deferred-given +hylolib-cb diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index e123fa900258..05f89a344148 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -457,7 +457,7 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala index d12a98e52736..31a9697eda6b 100644 --- a/tests/pos/deferredSummon.scala +++ b/tests/pos/deferredSummon.scala @@ -9,11 +9,15 @@ trait A: given Ord[Elem] = deferred def foo = summon[Ord[Elem]] +trait B: + type Elem: Ord + def foo = summon[Ord[Elem]] + object Inst: given Ord[Int]: def less(x: Int, y: Int) = x < y -object Test: +object Test1: import Inst.given class C extends A: type Elem = Int @@ -22,9 +26,22 @@ object Test: given A: type Elem = Int -class D[T: Ord] extends A: +class D1[T: Ord] extends B: + type Elem = T + +object Test2: + import Inst.given + class C extends B: + type Elem = Int + object E extends B: + type Elem = Int + given B: + type Elem = Int + +class D2[T: Ord] extends B: type Elem = T + diff --git a/tests/pos/dep-context-bounds.scala b/tests/pos/dep-context-bounds.scala new file mode 100644 index 000000000000..434805762622 --- /dev/null +++ b/tests/pos/dep-context-bounds.scala @@ -0,0 +1,10 @@ +//> using options -language:experimental.modularity -source future +trait A[X]: + type Self = X + +object Test2: + def foo[X: A as x](a: x.Self) = ??? 
+ + def bar[X: A as x](a: Int) = ??? + + def baz[X: A as x](a: Int)(using String) = ??? diff --git a/tests/pos/hylolib-cb-extract.scala b/tests/pos/hylolib-cb-extract.scala new file mode 100644 index 000000000000..b80a88485a2b --- /dev/null +++ b/tests/pos/hylolib-cb-extract.scala @@ -0,0 +1,18 @@ +//> using options -language:experimental.modularity -source future +package hylotest +import compiletime.deferred + +trait Value[Self] + +/** A collection of elements accessible by their position. */ +trait Collection[Self]: + + /** The type of the elements in the collection. */ + type Element: Value + +class BitArray + +given Value[Boolean] {} + +given Collection[BitArray] with + type Element = Boolean diff --git a/tests/pos/hylolib-cb/AnyCollection.scala b/tests/pos/hylolib-cb/AnyCollection.scala new file mode 100644 index 000000000000..1a44344d0e51 --- /dev/null +++ b/tests/pos/hylolib-cb/AnyCollection.scala @@ -0,0 +1,66 @@ +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base](using b: Collection[Base])(base: Base): AnyCollection[b.Element] = + // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` + // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these + // choices is even correct! Note also that the ambiguity is suppressed if the constructor of + // `AnyValue` is declared with a context bound rather than an implicit parameter. + given Value[b.Position] = b.positionIsValue + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[b.Position])) + + def at(p: AnyValue): b.Element = + base.at(p.unsafelyUnwrappedAs[b.Position]) + + new AnyCollection[b.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) { + + def startPosition = + self._start() + + def endPosition = + self._end() + + def positionAfter(p: Position) = + self._after(p) + + def at(p: Position) = + self._at(p) + + } + +} diff --git a/tests/pos/hylolib-cb/AnyValue.scala b/tests/pos/hylolib-cb/AnyValue.scala new file mode 100644 index 000000000000..b9d39869c09a --- /dev/null +++ b/tests/pos/hylolib-cb/AnyValue.scala @@ -0,0 +1,76 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. 
*/ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. */ + def apply[T](using Value[T])(wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given anyValueIsValue: Value[AnyValue] with { + + extension (self: AnyValue) { + + def copy(): AnyValue = + self.copy() + + def eq(other: AnyValue): Boolean = + self `eq` other + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} diff --git a/tests/pos/hylolib-cb/BitArray.scala b/tests/pos/hylolib-cb/BitArray.scala new file mode 100644 index 000000000000..3a0b4658f747 --- /dev/null +++ b/tests/pos/hylolib-cb/BitArray.scala @@ -0,0 +1,372 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. 
*/ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. + */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. 
+ * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. 
*/ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given bitArrayPositionIsValue: Value[BitArray.Position] with { + + extension (self: BitArray.Position) { + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} + +given bitArrayIsCollection: Collection[BitArray] with { + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) { + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + + } + +} + +given bitArrayIsStringConvertible: StringConvertible[BitArray] with { + + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + +} diff --git a/tests/pos/hylolib-cb/Collection.scala b/tests/pos/hylolib-cb/Collection.scala new file mode 100644 index 000000000000..073a99cdd16b --- /dev/null +++ b/tests/pos/hylolib-cb/Collection.scala @@ -0,0 +1,279 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection[Self] { + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value as positionIsValue + + extension (self: Self) { + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def _count(p: Position, n: Int): Int = + if p `eq` e then n else _count(self.positionAfter(p), n + 1) + _count(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if (i.eq(e)) { + false + } else if (j.eq(e)) { + true + } else { + def _isBefore(n: Position): Boolean = + if (n.eq(j)) { + true + } else if (n.eq(e)) { + false + } else { + _isBefore(self.positionAfter(n)) + } + _isBefore(self.positionAfter(i)) + } + + } + +} + +extension [Self](self: Self)(using s: Collection[Self]) { + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(s.Element, Slice[Self])] = + if (self.isEmpty) { + None + } else { + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + } + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T, combine: (T, s.Element) => T): T = + val e = self.endPosition + def loop(p: s.Position, r: T): T = + if (p.eq(e)) { + r + } else { + loop(self.positionAfter(p), combine(r, self.at(p))) + } + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: (s.Element) => Boolean): Boolean = + val e = self.endPosition + def loop(p: s.Position): Boolean = + if (p.eq(e)) { + true + } else if (!action(self.at(p))) { + false + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T](using Value[T])(transform: (s.Element) => T): HyArray[T] = + self.reduce( + HyArray[T](), + (r, e) => r.append(transform(e), assumeUniqueness = true) + ) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: (s.Element) => Boolean): HyArray[s.Element] = + self.reduce( + HyArray[s.Element](), + (r, e) => if (isIncluded(e)) then r.append(e, assumeUniqueness = true) else r + ) + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def firstPositionWhere(predicate: (s.Element) => Boolean): Option[s.Position] = + val e = self.endPosition + def loop(p: s.Position): Option[s.Position] = + if (p.eq(e)) { + None + } else if (predicate(self.at(p))) { + Some(p) + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Comparable[s.Element]): Option[s.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Comparable[s.Element]): Option[s.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (s.Element, s.Element) => Boolean): Option[s.Element] = + if (self.isEmpty) { + None + } else { + val e = self.endPosition + def _least(p: s.Position, least: s.Element): s.Element = + if (p.eq(e)) { + least + } else { + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + _least(self.positionAfter(p), y) + } + + val b = self.startPosition + Some(_least(self.positionAfter(b), self.at(b))) + } + +} + +extension [Self](self: Self)(using + s: Collection[Self], + e: Value[s.Element] +) { + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. */ + def elementsEqual[T](using o: Collection[T] { type Element = s.Element })(other: T): Boolean = + def loop(i: s.Position, j: o.Position): Boolean = + if (i `eq` self.endPosition) { + j `eq` other.endPosition + } else if (j `eq` other.endPosition) { + false + } else if (self.at(i) `neq` other.at(j)) { + false + } else { + loop(self.positionAfter(i), other.positionAfter(j)) + } + loop(self.startPosition, other.startPosition) + +} diff --git a/tests/pos/hylolib-cb/CoreTraits.scala b/tests/pos/hylolib-cb/CoreTraits.scala new file mode 100644 index 000000000000..01b2c5242af9 --- /dev/null +++ b/tests/pos/hylolib-cb/CoreTraits.scala @@ -0,0 +1,57 @@ +package hylo + +/** A type whose instance can be treated as independent values. + * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value[Self] { + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + /** Hashes the salient parts of `self` into `hasher`. 
*/ + def hashInto(hasher: Hasher): Hasher + + } + +} + +extension [Self: Value](self: Self) def neq(other: Self): Boolean = !self.eq(other) + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable[Self] extends Value[Self] { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib-cb/Hasher.scala b/tests/pos/hylolib-cb/Hasher.scala new file mode 100644 index 000000000000..ef6813df6b60 --- /dev/null +++ b/tests/pos/hylolib-cb/Hasher.scala @@ -0,0 +1,38 @@ +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib-cb/HyArray.scala b/tests/pos/hylolib-cb/HyArray.scala new file mode 100644 index 000000000000..9347f7eb12cc --- /dev/null +++ b/tests/pos/hylolib-cb/HyArray.scala @@ -0,0 +1,221 @@ +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element] private (using + elementIsValue: Value[Element] +)( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. 
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + // NOTE: Can't refine `C.Element` without renaming the generic parameter of `HyArray`. + // /** Adds the contents of `source` at the end of the array. */ + // def appendContents[C](using + // s: Collection[C] + // )( + // source: C { type Element = Element }, + // assumeUniqueness: Boolean = false + // ): HyArray[Element] = + // val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + // source.reduce(result, (r, e) => r.append(e, assumeUniqueness = true)) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. 
+ reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T](using t: Value[T])(elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { + + extension (self: HyArray[T]) { + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher, (h, e) => e.hashInto(h)) + + } + +} + +given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { + + type Element = T + type Position = Int + + extension (self: HyArray[T]) { + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + + } + +} + +// NOTE: This should work. +// given hyArrayIsStringConvertible[T](using +// tIsValue: Value[T], +// tIsStringConvertible: StringConvertible[T] +// ): StringConvertible[HyArray[T]] with { +// +// given Collection[HyArray[T]] = hyArrayIsCollection[T] +// +// extension (self: HyArray[T]) +// override def description: String = +// var contents = mutable.StringBuilder() +// self.forEach((e) => { contents ++= e.description; true }) +// s"[${contents.mkString(", ")}]" +// +// } diff --git a/tests/pos/hylolib-cb/Integers.scala b/tests/pos/hylolib-cb/Integers.scala new file mode 100644 index 000000000000..b9bc203a88ea --- /dev/null +++ b/tests/pos/hylolib-cb/Integers.scala @@ -0,0 +1,58 @@ +package hylo + +given booleanIsValue: Value[Boolean] with { + + extension (self: Boolean) { + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + + } + +} + +given intIsValue: Value[Int] with { + + extension (self: Int) { + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + } + +} + +given intIsComparable: Comparable[Int] with { + + extension (self: Int) { + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + + } + +} + +given intIsStringConvertible: StringConvertible[Int] with {} diff --git a/tests/pos/hylolib-cb/Range.scala b/tests/pos/hylolib-cb/Range.scala new file mode 100644 index 000000000000..1f597652ead1 --- /dev/null +++ b/tests/pos/hylolib-cb/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. */ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. 
*/ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound](lowerBound: Bound, upperBound: Bound)(using Comparable[Bound]) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib-cb/Slice.scala b/tests/pos/hylolib-cb/Slice.scala new file mode 100644 index 000000000000..2289ac2a085b --- /dev/null +++ b/tests/pos/hylolib-cb/Slice.scala @@ -0,0 +1,46 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base](using + val b: Collection[Base] +)( + val base: Base, + val bounds: Range[b.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: b.Position = + bounds.lowerBound + + def endPosition: b.Position = + bounds.upperBound + + def positionAfter(p: b.Position): b.Position = + base.positionAfter(p) + + def at(p: b.Position): b.Element = + base.at(p) + +} + +given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { + + type Element = c.Element + type Position = c.Position + + extension (self: Slice[T]) { + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] // NOTE: Ugly hack + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + + } + +} diff --git a/tests/pos/hylolib-cb/StringConvertible.scala b/tests/pos/hylolib-cb/StringConvertible.scala new file mode 100644 index 000000000000..0702f79f2794 --- /dev/null +++ b/tests/pos/hylolib-cb/StringConvertible.scala @@ -0,0 +1,14 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible[Self] { + + extension (self: Self) { + + /** Returns a textual description of `self`. */ + def description: String = + self.toString + + } + +} diff --git a/tests/pos/hylolib-deferred-given/AnyValue.scala b/tests/pos/hylolib-deferred-given/AnyValue.scala index b9d39869c09a..21f2965e102e 100644 --- a/tests/pos/hylolib-deferred-given/AnyValue.scala +++ b/tests/pos/hylolib-deferred-given/AnyValue.scala @@ -44,7 +44,7 @@ final class AnyValue private ( object AnyValue { /** Creates an instance wrapping `wrapped`. */ - def apply[T](using Value[T])(wrapped: T): AnyValue = + def apply[T: Value](wrapped: T): AnyValue = def copy(a: AnyRef): AnyValue = AnyValue(a.asInstanceOf[Ref[T]].value.copy()) diff --git a/tests/pos/hylolib-deferred-given/Range.scala b/tests/pos/hylolib-deferred-given/Range.scala index 1f597652ead1..b0f50dd55c8c 100644 --- a/tests/pos/hylolib-deferred-given/Range.scala +++ b/tests/pos/hylolib-deferred-given/Range.scala @@ -30,7 +30,7 @@ object Range { * @requires * `lowerBound` is lesser than or equal to `upperBound`. 
*/ - def apply[Bound](lowerBound: Bound, upperBound: Bound)(using Comparable[Bound]) = + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = require(lowerBound `le` upperBound) new Range(lowerBound, upperBound) From 4d62692a69e994b10a1386e8d1a73a06b1528b85 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 17:46:23 +0200 Subject: [PATCH 024/827] Make some context bound evidence params tracked Make context bound evidence params tracked if they have types with abstract type members. --- .../src/dotty/tools/dotc/core/Symbols.scala | 8 ++--- .../src/dotty/tools/dotc/typer/Namer.scala | 30 +++++++++++++++++++ tests/pos/hylolib-cb/AnyCollection.scala | 4 +-- tests/pos/hylolib-cb/Collection.scala | 9 +++--- tests/pos/hylolib-cb/HyArray.scala | 11 ++++--- tests/pos/hylolib-cb/Slice.scala | 6 ++-- 6 files changed, 47 insertions(+), 21 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 0020efa5018d..da0ecac47b7d 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -312,7 +312,6 @@ object Symbols extends SymUtils { * With the given setup, all such calls will give implicit-not found errors */ final def symbol(implicit ev: DontUseSymbolOnSymbol): Nothing = unsupported("symbol") - type DontUseSymbolOnSymbol final def source(using Context): SourceFile = { def valid(src: SourceFile): SourceFile = @@ -402,13 +401,12 @@ object Symbols extends SymUtils { flags: FlagSet = this.flags, info: Type = this.info, privateWithin: Symbol = this.privateWithin, - coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap - compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.associatedFile` once we bootstrap + coord: Coord = NoCoord, // Can be `= owner.coord` once we have new default args + compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.compilationUnitInfo` once we have new default args ): Symbol = { val coord1 = if (coord == NoCoord) owner.coord else coord val compilationUnitInfo1 = if (compilationUnitInfo == null) owner.compilationUnitInfo else compilationUnitInfo - if isClass then newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, compilationUnitInfo1) else @@ -936,6 +934,8 @@ object Symbols extends SymUtils { case (x: Symbol) :: _ if x.isType => Some(xs.asInstanceOf[List[TypeSymbol]]) case _ => None + type DontUseSymbolOnSymbol + // ----- Locating predefined symbols ---------------------------------------- def requiredPackage(path: PreName)(using Context): TermSymbol = { diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 22a12ed0f468..85678b9685f7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1885,6 +1885,28 @@ class Namer { typer: Typer => ddef.trailingParamss.foreach(completeParams) val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) + + /** We add `tracked` to context bound witnesses that have abstract type members */ + def needsTracked(sym: Symbol, param: ValDef)(using Context) = + !sym.is(Tracked) + && param.hasAttachment(ContextBoundParam) + && sym.info.memberNames(abstractTypeNameFilter).nonEmpty + + /** Set every context bound evidence parameter of a class to be tracked, + * provided it has a type that has an abstract type member. 
Reset private and local flags + * so that the parameter becomes a `val`. + */ + def setTracked(param: ValDef): Unit = + val sym = symbolOfTree(param) + sym.maybeOwner.maybeOwner.infoOrCompleter match + case info: TempClassInfo if needsTracked(sym, param) => + typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") + for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do + acc.resetFlag(PrivateLocal) + acc.setFlag(Tracked) + sym.setFlag(Tracked) + case _ => + def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) @@ -1893,10 +1915,18 @@ class Namer { typer: Typer => wrapMethType(addParamRefinements(restpe, paramSymss)) if isConstructor then + if sym.isPrimaryConstructor && Feature.enabled(modularity) then + ddef.termParamss.foreach(_.foreach(setTracked)) // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) wrapMethType(effectiveResultType(sym, paramSymss)) else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then + // set every context bound evidence parameter of a given companion method + // to be tracked, provided it has a type that has an abstract type member. + // Add refinements for all tracked parameters to the result type. + for params <- ddef.termParamss; param <- params do + val psym = symbolOfTree(param) + if needsTracked(psym, param) then psym.setFlag(Tracked) valOrDefDefSig(ddef, sym, paramSymss, wrapRefinedMethType) else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) diff --git a/tests/pos/hylolib-cb/AnyCollection.scala b/tests/pos/hylolib-cb/AnyCollection.scala index 1a44344d0e51..50f4313e46ce 100644 --- a/tests/pos/hylolib-cb/AnyCollection.scala +++ b/tests/pos/hylolib-cb/AnyCollection.scala @@ -14,7 +14,7 @@ final class AnyCollection[Element] private ( object AnyCollection { /** Creates an instance forwarding its operations to `base`. */ - def apply[Base](using b: Collection[Base])(base: Base): AnyCollection[b.Element] = + def apply[Base: Collection as b](base: Base): AnyCollection[b.Element] = // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these // choices is even correct! Note also that the ambiguity is suppressed if the constructor of @@ -42,7 +42,7 @@ object AnyCollection { } -given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { +given anyCollectionIsCollection[T: Value]: Collection[AnyCollection[T]] with { type Element = T type Position = AnyValue diff --git a/tests/pos/hylolib-cb/Collection.scala b/tests/pos/hylolib-cb/Collection.scala index 073a99cdd16b..2fc04f02b9ac 100644 --- a/tests/pos/hylolib-cb/Collection.scala +++ b/tests/pos/hylolib-cb/Collection.scala @@ -89,7 +89,7 @@ trait Collection[Self] { } -extension [Self](self: Self)(using s: Collection[Self]) { +extension [Self: Collection as s](self: Self) { /** Returns the first element of `self` along with a slice containing the suffix after this * element, or `None` if `self` is empty. @@ -148,7 +148,7 @@ extension [Self](self: Self)(using s: Collection[Self]) { * @complexity * O(n) where n is the number of elements in `self`. 
*/ - def map[T](using Value[T])(transform: (s.Element) => T): HyArray[T] = + def map[T: Value](transform: (s.Element) => T): HyArray[T] = self.reduce( HyArray[T](), (r, e) => r.append(transform(e), assumeUniqueness = true) @@ -257,9 +257,8 @@ extension [Self](self: Self)(using s: Collection[Self]) { } -extension [Self](self: Self)(using - s: Collection[Self], - e: Value[s.Element] +extension [Self: Collection as s](self: Self)(using + Value[s.Element] ) { /** Returns `true` if `self` contains the same elements as `other`, in the same order. */ diff --git a/tests/pos/hylolib-cb/HyArray.scala b/tests/pos/hylolib-cb/HyArray.scala index 9347f7eb12cc..0fff45e744ec 100644 --- a/tests/pos/hylolib-cb/HyArray.scala +++ b/tests/pos/hylolib-cb/HyArray.scala @@ -1,12 +1,11 @@ +//> using options -language:experimental.modularity -source future package hylo import java.util.Arrays import scala.collection.mutable /** An ordered, random-access collection. */ -final class HyArray[Element] private (using - elementIsValue: Value[Element] -)( +final class HyArray[Element: Value as elementIsCValue]( private var _storage: scala.Array[AnyRef | Null] | Null, private var _count: Int // NOTE: where do I document private fields ) { @@ -155,14 +154,14 @@ final class HyArray[Element] private (using object HyArray { /** Creates an array with the given `elements`. */ - def apply[T](using t: Value[T])(elements: T*): HyArray[T] = + def apply[T: Value](elements: T*): HyArray[T] = var a = new HyArray[T](null, 0) for (e <- elements) a = a.append(e, assumeUniqueness = true) a } -given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { +given [T: Value] => Value[HyArray[T]] with { extension (self: HyArray[T]) { @@ -179,7 +178,7 @@ given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { } -given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { +given [T: Value] => Collection[HyArray[T]] with { type Element = T type Position = Int diff --git a/tests/pos/hylolib-cb/Slice.scala b/tests/pos/hylolib-cb/Slice.scala index 2289ac2a085b..b577ceeb3739 100644 --- a/tests/pos/hylolib-cb/Slice.scala +++ b/tests/pos/hylolib-cb/Slice.scala @@ -1,9 +1,7 @@ package hylo /** A view into a collection. */ -final class Slice[Base](using - val b: Collection[Base] -)( +final class Slice[Base: Collection as b]( val base: Base, val bounds: Range[b.Position] ) { @@ -26,7 +24,7 @@ final class Slice[Base](using } -given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { +given sliceIsCollection[T: Collection as c]: Collection[Slice[T]] with { type Element = c.Element type Position = c.Position From 11d7fa39372c430220f1818632ff1fe0c25ba60d Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 18:01:56 +0200 Subject: [PATCH 025/827] FIX: Fix typing of RefinedTypes with watching parents If a refined type has a parent type watching some other type, the parent should not be mapped to Object. Previously, the parent counted as `isEmpty` which caused this mapping. 
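As a rough illustration, here is a condensed form of the i10929 regression test added
in this commit (the full test additionally uses a `tracked` context parameter in a
given instance; this reduced sketch is only meant to show the shape of the code):

  trait TupleOf[T, +A]:
    type Mapped[+X] <: Tuple
    def map[B](x: T)(f: A => B): Mapped[B]

  // A dependent result type referring to the context parameter `tup`;
  // trees of this shape previously could get their refinement parent
  // erroneously mapped to Object while it was still being inferred.
  def foo[T](xs: T)(using tup: TupleOf[T, Int]): tup.Mapped[Int] =
    tup.map(xs)(_ + 1)
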
Fixes #10929 --- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- tests/pos/hylolib-deferred-given/Hasher.scala | 1 + tests/pos/i10929.scala | 21 +++++++++++++++++++ tests/pos/i13580.scala | 13 ++++++++++++ 4 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i10929.scala create mode 100644 tests/pos/i13580.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index c467a4507730..f744eb392d7c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2301,7 +2301,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = { - val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) + val tpt1 = if tree.tpt == EmptyTree then TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements).withSpan(tree.span) val refineCls = createSymbol(refineClsDef).asClass val TypeDef(_, impl: Template) = typed(refineClsDef): @unchecked diff --git a/tests/pos/hylolib-deferred-given/Hasher.scala b/tests/pos/hylolib-deferred-given/Hasher.scala index ef6813df6b60..ca45550ed002 100644 --- a/tests/pos/hylolib-deferred-given/Hasher.scala +++ b/tests/pos/hylolib-deferred-given/Hasher.scala @@ -1,3 +1,4 @@ +//> using options -language:experimental.modularity -source future package hylo import scala.util.Random diff --git a/tests/pos/i10929.scala b/tests/pos/i10929.scala new file mode 100644 index 000000000000..e916e4547e59 --- /dev/null +++ b/tests/pos/i10929.scala @@ -0,0 +1,21 @@ +//> using options -language:experimental.modularity -source future +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + (f(x.head) *: tup.map(x.tail)(f)) + +def foo[T](xs: T)(using tup: T TupleOf Int): tup.Mapped[Int] = tup.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok \ No newline at end of file diff --git a/tests/pos/i13580.scala b/tests/pos/i13580.scala new file mode 100644 index 000000000000..c3c491a19dbe --- /dev/null +++ b/tests/pos/i13580.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +trait IntWidth: + type Out +given IntWidth: + type Out = 155 + +trait IntCandidate: + type Out +given (using tracked val w: IntWidth) => IntCandidate: + type Out = w.Out + +val x = summon[IntCandidate] +val xx = summon[x.Out =:= 155] From 96fbf2942a296df3f63b05e2503f6a1a904e28cf Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 6 Jan 2024 13:53:17 +0100 Subject: [PATCH 026/827] Also reduce term projections We already reduce `R { type A = T } # A` to `T` in most situations when we create types. We now also reduce `R { val x: S } # x` to `S` if `S` is a singleton type. This will simplify types as we go to more term-dependent typing. As a concrete benefit, it will avoid several test-pickling failures due to pickling differences when using dependent types. 
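For a surface-level intuition (illustrative only; the rewrite applies to types the
compiler constructs internally, not to anything written in source syntax):

  trait P { val x: Int }

  // A type such as (P { val x: (42 : Int) }) # x, when it arises internally,
  // now reduces to the singleton type (42 : Int) instead of staying a projection.
  def f(p: P { val x: 42 }): 42 = p.x
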
--- .../src/dotty/tools/dotc/core/Types.scala | 66 +++++++++---------- 1 file changed, 32 insertions(+), 34 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index a6136a20cf32..ac3aef2a59d2 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1642,17 +1642,19 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Does not perform the reduction if the resulting type would contain - * a reference to the "this" of the current refined type, except in the following situation + * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` is `S` + * is a singleton type. * - * (1) The "this" reference can be avoided by following an alias. Example: + * Does not perform the reduction if the resulting type would contain + * a reference to the "this" of the current refined type, except if the "this" + * reference can be avoided by following an alias. Example: * * P { type T = String, type R = P{...}.T } # R --> String * * (*) normalizes means: follow instantiated typevars and aliases. */ - def lookupRefined(name: Name)(using Context): Type = { - @tailrec def loop(pre: Type): Type = pre.stripTypeVar match { + def lookupRefined(name: Name)(using Context): Type = + @tailrec def loop(pre: Type): Type = pre match case pre: RefinedType => pre.refinedInfo match { case tp: AliasingBounds => @@ -1675,12 +1677,13 @@ object Types extends TypeUtils { case TypeAlias(alias) => loop(alias) case _ => NoType } + case pre: (TypeVar | AnnotatedType) => + loop(pre.underlying) case _ => NoType - } loop(this) - } + end lookupRefined /** The type , reduced if possible */ def select(name: Name)(using Context): Type = @@ -2820,35 +2823,30 @@ object Types extends TypeUtils { def derivedSelect(prefix: Type)(using Context): Type = if prefix eq this.prefix then this else if prefix.isExactlyNothing then prefix - else { - val res = - if (isType && currentValidSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) + else + val reduced = + if isType && currentValidSymbol.isAllOf(ClassTypeParam) then argForParam(prefix) else prefix.lookupRefined(name) - if (res.exists) return res - if (isType) { - if (Config.splitProjections) - prefix match { - case prefix: AndType => - def isMissing(tp: Type) = tp match { - case tp: TypeRef => !tp.info.exists - case _ => false - } - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return ( - if (isMissing(derived1)) derived2 - else if (isMissing(derived2)) derived1 - else prefix.derivedAndType(derived1, derived2)) - case prefix: OrType => - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return prefix.derivedOrType(derived1, derived2) - case _ => - } - } - if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) + if reduced.exists then return reduced + if Config.splitProjections && isType then + prefix match + case prefix: AndType => + def isMissing(tp: Type) = tp match + case tp: TypeRef => !tp.info.exists + case _ => false + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return + if isMissing(derived1) then derived2 + else if isMissing(derived2) then derived1 + else prefix.derivedAndType(derived1, derived2) + case prefix: OrType => + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return prefix.derivedOrType(derived1, derived2) + case _ => + if 
prefix.isInstanceOf[WildcardType] then WildcardType.sameKindAs(this) else withPrefix(prefix) - } /** A reference like this one, but with the given symbol, if it exists */ private def withSym(sym: Symbol)(using Context): ThisType = From ce09ef3bc4a49c4f851b3f8ab3c4b3c2ba64bb7d Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 20:43:48 +0200 Subject: [PATCH 027/827] Implement context bound companions --- .../src/dotty/tools/dotc/ast/Desugar.scala | 50 ++++++++---- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 31 ++++++++ .../src/dotty/tools/dotc/core/Contexts.scala | 13 ++-- .../dotty/tools/dotc/core/Definitions.scala | 9 +++ .../src/dotty/tools/dotc/core/NamerOps.scala | 53 +++++++++++++ .../src/dotty/tools/dotc/core/StdNames.scala | 2 + .../src/dotty/tools/dotc/core/SymUtils.scala | 3 + .../tools/dotc/core/tasty/TreeUnpickler.scala | 1 + .../tools/dotc/printing/PlainPrinter.scala | 4 +- .../tools/dotc/reporting/ErrorMessageID.scala | 4 +- .../dotty/tools/dotc/reporting/messages.scala | 36 +++++++++ .../tools/dotc/transform/PostTyper.scala | 22 ++++-- .../tools/dotc/transform/TreeChecker.scala | 21 ++--- .../src/dotty/tools/dotc/typer/Namer.scala | 35 +++++++-- .../src/dotty/tools/dotc/typer/Typer.scala | 76 +++++++++++++++++++ .../annotation/internal/WitnessNames.scala | 53 +++++++++++++ project/MiMaFilters.scala | 2 + tests/neg/cb-companion-leaks.check | 66 ++++++++++++++++ tests/neg/cb-companion-leaks.scala | 16 ++++ tests/pos-macros/i8325/Macro_1.scala | 4 +- tests/pos-macros/i8325/Test_2.scala | 2 +- tests/pos-macros/i8325b/Macro_1.scala | 4 +- tests/pos-macros/i8325b/Test_2.scala | 2 +- tests/pos/FromString-cb-companion.scala | 14 ++++ tests/pos/cb-companion-joins.scala | 21 +++++ 25 files changed, 496 insertions(+), 48 deletions(-) create mode 100644 library/src/scala/annotation/internal/WitnessNames.scala create mode 100644 tests/neg/cb-companion-leaks.check create mode 100644 tests/neg/cb-companion-leaks.scala create mode 100644 tests/pos/FromString-cb-companion.scala create mode 100644 tests/pos/cb-companion-joins.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index d6e442ed4a0c..08953f1dec6b 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -257,7 +257,16 @@ object desugar { case _ => rhs - cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + if Feature.enabled(Feature.modularity) + && evidenceNames.nonEmpty + && !evidenceNames.contains(tdef.name.toTermName) + && !allParamss.nestedExists(_.name == tdef.name.toTermName) + then + tdef1.withAddedAnnotation: + WitnessNamesAnnot(evidenceNames.toList).withSpan(tdef.span) + else + tdef1 end desugarContextBounds private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = @@ -323,9 +332,9 @@ object desugar { def getterParamss(n: Int): List[ParamClause] = mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) + tparam => dropContextBounds(toDefParam(tparam, KeepAnnotations.All)) } { - vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) + vparam => toDefParam(vparam, KeepAnnotations.All, keepDefault = false) } def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match @@ -430,7 +439,12 @@ object desugar { private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = 
if params.isEmpty then return meth - val boundNames = params.map(_.name).toSet + var boundNames = params.map(_.name).toSet + for mparams <- meth.paramss; mparam <- mparams do + mparam match + case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => + boundNames += tparam.name.toTermName + case _ => //println(i"add ev params ${meth.name}, ${boundNames.toList}") @@ -463,16 +477,26 @@ object desugar { @sharable private val synthetic = Modifiers(Synthetic) + /** Which annotations to keep in derived parameters */ + private enum KeepAnnotations: + case None, All, WitnessOnly + /** Filter annotations in `mods` according to `keep` */ - private def filterAnnots(mods: Modifiers, keep: Boolean)(using Context) = - if keep then mods else mods.withAnnotations(Nil) + private def filterAnnots(mods: Modifiers, keep: KeepAnnotations)(using Context) = keep match + case KeepAnnotations.None => mods.withAnnotations(Nil) + case KeepAnnotations.All => mods + case KeepAnnotations.WitnessOnly => + mods.withAnnotations: + mods.annotations.filter: + case WitnessNamesAnnot(_) => true + case _ => false - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean)(using Context): TypeDef = - val mods = filterAnnots(tparam.rawMods, keepAnnotations) + private def toDefParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = + val mods = filterAnnots(tparam.rawMods, keep) tparam.withMods(mods & EmptyFlags | Param) - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean)(using Context): ValDef = { - val mods = filterAnnots(vparam.rawMods, keepAnnotations) + private def toDefParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { + val mods = filterAnnots(vparam.rawMods, keep) val hasDefault = if keepDefault then HasDefault else EmptyFlags // Need to ensure that tree is duplicated since term parameters can be watched // and cloning a term parameter will copy its watchers to the clone, which means @@ -573,7 +597,7 @@ object desugar { // Annotations on class _type_ parameters are set on the derived parameters // but not on the constructor parameters. The reverse is true for // annotations on class _value_ parameters. 
- val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) + val constrTparams = impliedTparams.map(toDefParam(_, KeepAnnotations.WitnessOnly)) val constrVparamss = if (originalVparamss.isEmpty) { // ensure parameter list is non-empty if (isCaseClass) @@ -584,7 +608,7 @@ object desugar { report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) ListOfNil } - else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) + else originalVparamss.nestedMap(toDefParam(_, KeepAnnotations.All, keepDefault = true)) val derivedTparams = constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) @@ -606,7 +630,7 @@ object desugar { defDef( addEvidenceParams( cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) + evidenceParams(constr1).map(toDefParam(_, KeepAnnotations.None, keepDefault = false))))) case stat => stat } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 941e7b8f1219..990fb37f4e60 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -5,6 +5,8 @@ package ast import core.* import Flags.*, Trees.*, Types.*, Contexts.* import Names.*, StdNames.*, NameOps.*, Symbols.* +import Annotations.Annotation +import NameKinds.ContextBoundParamName import typer.ConstFold import reporting.trace @@ -380,6 +382,35 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree.tpe.isInstanceOf[ThisType] } + + /** Extractor for annotation.internal.WitnessNames(name_1, ..., name_n)` + * represented as an untyped or typed tree. 
+ */ + object WitnessNamesAnnot: + def apply(names0: List[TermName])(using Context): untpd.Tree = + untpd.TypedSplice(tpd.New( + defn.WitnessNamesAnnot.typeRef, + tpd.SeqLiteral(names0.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil + )) + + def unapply(tree: Tree)(using Context): Option[List[TermName]] = + def isWitnessNames(tp: Type) = tp match + case tp: TypeRef => + tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot + case _ => + false + unsplice(tree) match + case Apply( + Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), + SeqLiteral(elems, _) :: Nil + ) if isWitnessNames(tpt.tpe) => + Some: + elems.map: + case Literal(Constant(str: String)) => + ContextBoundParamName.unmangle(str.toTermName.asSimpleName) + case _ => + None + end WitnessNamesAnnot } trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index d0c30a665289..a5b0e2dba254 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -12,6 +12,7 @@ import Symbols.* import Scopes.* import Uniques.* import ast.Trees.* +import Flags.ParamAccessor import ast.untpd import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} @@ -399,7 +400,8 @@ object Contexts { * * - as owner: The primary constructor of the class * - as outer context: The context enclosing the class context - * - as scope: The parameter accessors in the class context + * - as scope: type parameters, the parameter accessors, and + * the context bound companions in the class context, * * The reasons for this peculiar choice of attributes are as follows: * @@ -413,10 +415,11 @@ object Contexts { * context see the constructor parameters instead, but then we'd need a final substitution step * from constructor parameters to class parameter accessors. */ - def superCallContext: Context = { - val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors*) - superOrThisCallContext(owner.primaryConstructor, locals) - } + def superCallContext: Context = + val locals = owner.typeParams + ++ owner.asClass.unforcedDecls.filter: sym => + sym.is(ParamAccessor) || sym.isContextBoundCompanion + superOrThisCallContext(owner.primaryConstructor, newScopeWith(locals*)) /** The context for the arguments of a this(...) constructor call. * The context is computed from the local auxiliary constructor context. 
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 9ee5891f1606..b408883009ab 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -459,6 +459,13 @@ class Definitions { @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) + @tu lazy val CBCompanion: TypeSymbol = // type ``[-Refs] + enterPermanentSymbol(tpnme.CBCompanion, + TypeBounds(NothingType, + HKTypeLambda(tpnme.syntheticTypeParamName(0) :: Nil, Contravariant :: Nil)( + tl => TypeBounds.empty :: Nil, + tl => AnyType))).asType + /** Method representing a throw */ @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, MethodType(List(ThrowableType), NothingType)) @@ -1062,6 +1069,7 @@ class Definitions { @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") @tu lazy val RetainsArgAnnot: ClassSymbol = requiredClass("scala.annotation.retainsArg") @tu lazy val PublicInBinaryAnnot: ClassSymbol = requiredClass("scala.annotation.publicInBinary") + @tu lazy val WitnessNamesAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WitnessNames") @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") @@ -2158,6 +2166,7 @@ class Definitions { NullClass, NothingClass, SingletonClass, + CBCompanion, MaybeCapabilityAnnot) @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index af03573da4a8..58b4ad681c6f 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -4,8 +4,10 @@ package core import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme +import ContextOps.enter import TypeApplications.EtaExpansion import collection.mutable +import config.Printers.typr /** Operations that are shared between Namer and TreeUnpickler */ object NamerOps: @@ -256,4 +258,55 @@ object NamerOps: rhsCtx.gadtState.addBound(psym, tr, isUpper = true) } + /** Create a context-bound companion for type symbol `tsym`, which has a context + * bound that defines a set of witnesses with names `witnessNames`. + * + * @param parans If `tsym` is a type parameter, a list of parameter symbols + * that include all witnesses, otherwise the empty list. + * + * The context-bound companion has as name the name of `tsym` translated to + * a term name. We create a synthetic val of the form + * + * val A: ``[witnessRef1 | ... | witnessRefN] + * + * where + * + * is the CBCompanion type created in Definitions + * withnessRefK is a refence to the K'th witness. + * + * The companion has the same access flags as the original type. 
+ */ + def addContextBoundCompanionFor(tsym: Symbol, witnessNames: List[TermName], params: List[Symbol])(using Context): Unit = + val prefix = ctx.owner.thisType + val companionName = tsym.name.toTermName + val witnessRefs = + if params.nonEmpty then + witnessNames.map: witnessName => + prefix.select(params.find(_.name == witnessName).get) + else + witnessNames.map(TermRef(prefix, _)) + val cbtype = defn.CBCompanion.typeRef.appliedTo: + witnessRefs.reduce[Type](OrType(_, _, soft = false)) + val cbc = newSymbol( + ctx.owner, companionName, + (tsym.flagsUNSAFE & (AccessFlags)).toTermFlags | Synthetic, + cbtype) + typr.println(s"context bound companion created $cbc for $witnessNames in ${ctx.owner}") + ctx.enter(cbc) + end addContextBoundCompanionFor + + /** Add context bound companions to all context-bound types declared in + * this class. This assumes that these types already have their + * WitnessNames annotation set even before they are completed. This is + * the case for unpickling but currently not for Namer. So the method + * is only called during unpickling, and is not part of NamerOps. + */ + def addContextBoundCompanions(cls: ClassSymbol)(using Context): Unit = + for sym <- cls.info.decls do + if sym.isType && !sym.isClass then + for ann <- sym.annotationsUNSAFE do + if ann.symbol == defn.WitnessNamesAnnot then + ann.tree match + case ast.tpd.WitnessNamesAnnot(witnessNames) => + addContextBoundCompanionFor(sym, witnessNames, Nil) end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index c0eb8a690eb4..ab7e4eea0b46 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -288,6 +288,7 @@ object StdNames { // Compiler-internal val CAPTURE_ROOT: N = "cap" + val CBCompanion: N = "" val CONSTRUCTOR: N = "" val STATIC_CONSTRUCTOR: N = "" val EVT2U: N = "evt2u$" @@ -396,6 +397,7 @@ object StdNames { val TypeApply: N = "TypeApply" val TypeRef: N = "TypeRef" val UNIT : N = "UNIT" + val WitnessNames: N = "WitnessNames" val acc: N = "acc" val adhocExtensions: N = "adhocExtensions" val andThen: N = "andThen" diff --git a/compiler/src/dotty/tools/dotc/core/SymUtils.scala b/compiler/src/dotty/tools/dotc/core/SymUtils.scala index 65634241b790..3a97a0053dbd 100644 --- a/compiler/src/dotty/tools/dotc/core/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/SymUtils.scala @@ -87,6 +87,9 @@ class SymUtils: !d.isPrimitiveValueClass } + def isContextBoundCompanion(using Context): Boolean = + self.is(Synthetic) && self.infoOrCompleter.typeSymbol == defn.CBCompanion + /** Is this a case class for which a product mirror is generated? * Excluded are value classes, abstract classes and case classes with more than one * parameter section. 
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 15f58956fbe3..91a5899146cc 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1138,6 +1138,7 @@ class TreeUnpickler(reader: TastyReader, }) defn.patchStdLibClass(cls) NamerOps.addConstructorProxies(cls) + NamerOps.addContextBoundCompanions(cls) setSpan(start, untpd.Template(constr, mappedParents, self, lazyStats) .withType(localDummy.termRef)) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 5808707326a0..c06b43cafe17 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -434,11 +434,11 @@ class PlainPrinter(_ctx: Context) extends Printer { sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName /** String representation of a definition's type following its name, - * if symbol is completed, "?" otherwise. + * if symbol is completed, ": ?" otherwise. */ protected def toTextRHS(optType: Option[Type]): Text = optType match { case Some(tp) => toTextRHS(tp) - case None => "?" + case None => ": ?" } protected def decomposeLambdas(bounds: TypeBounds): (Text, TypeBounds) = diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index e51f0a8b77ac..04380a7b8e4a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -208,7 +208,9 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case UnstableInlineAccessorID // errorNumber: 192 case VolatileOnValID // errorNumber: 193 case ExtensionNullifiedByMemberID // errorNumber: 194 - case InlinedAnonClassWarningID // errorNumber: 195 + case ConstructorProxyNotValueID // errorNumber: 195 + case ContextBoundCompanionNotValueID // errorNumber: 196 + case InlinedAnonClassWarningID // errorNumber: 197 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 51556a5c93ac..ceb8ecbc8e03 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3203,3 +3203,39 @@ class VolatileOnVal()(using Context) extends SyntaxMsg(VolatileOnValID): protected def msg(using Context): String = "values cannot be volatile" protected def explain(using Context): String = "" + +class ConstructorProxyNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"constructor proxy $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A constructor proxy is a symbol made up by the compiler to represent a non-existent + |factory method of a class. For instance, in + | + | class C(x: Int) + | + |C does not have an apply method since it is not a case class. Yet one can + |still create instances with applications like `C(3)` which expand to `new C(3)`. + |The `C` in this call is a constructor proxy. 
It can only be used as applications + |but not as a stand-alone value.""" + +class ContextBoundCompanionNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"context bound companion $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A context bound companion is a symbol made up by the compiler to represent the + |witness or witnesses generated for the context bound(s) of a type parameter or type. + |For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + |there is just a type `A` declared but not a value `A`. Nevertheless, one can write + |the selection `A.unit`, which works because the compiler created a context bound + |companion value with the (term-)name `A`. However, these context bound companions + |are not values themselves, they can only be referred to in selections.""" + diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 954b08c24ac1..a110ec53abc0 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -279,9 +279,13 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } } - def checkNoConstructorProxy(tree: Tree)(using Context): Unit = + def checkUsableAsValue(tree: Tree)(using Context): Unit = + def unusable(msg: Symbol => Message) = + report.error(msg(tree.symbol), tree.srcPos) if tree.symbol.is(ConstructorProxy) then - report.error(em"constructor proxy ${tree.symbol} cannot be used as a value", tree.srcPos) + unusable(ConstructorProxyNotValue(_)) + if tree.symbol.isContextBoundCompanion then + unusable(ContextBoundCompanionNotValue(_)) def checkStableSelection(tree: Tree)(using Context): Unit = def check(qual: Tree) = @@ -326,7 +330,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if tree.isType then checkNotPackage(tree) else - checkNoConstructorProxy(tree) + checkUsableAsValue(tree) registerNeedsInlining(tree) tree.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) @@ -338,7 +342,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) else - checkNoConstructorProxy(tree) + checkUsableAsValue(tree) transformSelect(tree, Nil) case tree: Apply => val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] @@ -469,8 +473,14 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) sym.addAnnotation(Annotation(defn.SourceFileAnnot, Literal(Constants.Constant(relativePath)), tree.span)) else - if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then - Checking.checkGoodBounds(tree.symbol) + if !sym.is(Param) then + if !sym.owner.isOneOf(AbstractOrTrait) then + Checking.checkGoodBounds(tree.symbol) + if sym.owner.isClass && sym.hasAnnotation(defn.WitnessNamesAnnot) then + val decls = sym.owner.info.decls + for cbCompanion <- decls.lookupAll(sym.name.toTermName) do + if cbCompanion.isContextBoundCompanion then + decls.openForMutations.unlink(cbCompanion) (tree.rhs, sym.info) match case (rhs: LambdaTypeTree, bounds: TypeBounds) => VarianceChecker.checkLambda(rhs, bounds) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala 
b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 2ebe33a9a14f..c4e1c7892e8d 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -311,9 +311,11 @@ object TreeChecker { def assertDefined(tree: untpd.Tree)(using Context): Unit = if (tree.symbol.maybeOwner.isTerm) { val sym = tree.symbol + def isAllowed = // constructor proxies and context bound companions are flagged at PostTyper + isSymWithoutDef(sym) && ctx.phase.id < postTyperPhase.id assert( - nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym), - i"undefined symbol ${sym} at line " + tree.srcPos.line + nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym) || isAllowed, + i"undefined symbol ${sym} in ${sym.owner} at line " + tree.srcPos.line ) if (!ctx.phase.patternTranslated) @@ -384,6 +386,9 @@ object TreeChecker { case _ => } + def isSymWithoutDef(sym: Symbol)(using Context): Boolean = + sym.is(ConstructorProxy) || sym.isContextBoundCompanion + /** Exclude from double definition checks any erased symbols that were * made `private` in phase `UnlinkErasedDecls`. These symbols will be removed * completely in phase `Erasure` if they are defined in a currently compiled unit. @@ -614,14 +619,12 @@ object TreeChecker { val decls = cls.classInfo.decls.toList.toSet.filter(isNonMagicalMember) val defined = impl.body.map(_.symbol) - def isAllowed(sym: Symbol): Boolean = sym.is(ConstructorProxy) - - val symbolsNotDefined = (decls -- defined - constr.symbol).filterNot(isAllowed) + val symbolsMissingDefs = (decls -- defined - constr.symbol).filterNot(isSymWithoutDef) - assert(symbolsNotDefined.isEmpty, - i" $cls tree does not define members: ${symbolsNotDefined.toList}%, %\n" + - i"expected: ${decls.toList}%, %\n" + - i"defined: ${defined}%, %") + assert(symbolsMissingDefs.isEmpty, + i"""$cls tree does not define members: ${symbolsMissingDefs.toList}%, % + |expected: ${decls.toList}%, % + |defined: ${defined}%, %""") super.typedClassDef(cdef, cls) } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 85678b9685f7..393b38c5ff57 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -406,6 +406,11 @@ class Namer { typer: Typer => enterSymbol(sym) setDocstring(sym, origStat) addEnumConstants(mdef, sym) + mdef match + case tdef: TypeDef if ctx.owner.isClass => + for case WitnessNamesAnnot(witnessNames) <- tdef.mods.annotations do + addContextBoundCompanionFor(symbolOfTree(tdef), witnessNames, Nil) + case _ => ctx case stats: Thicket => stats.toList.foreach(recur) @@ -1749,12 +1754,6 @@ class Namer { typer: Typer => val sym = tree.symbol if sym.isConstructor then sym.owner else sym - /** Enter and typecheck parameter list */ - def completeParams(params: List[MemberDef])(using Context): Unit = { - index(params) - for (param <- params) typedAheadExpr(param) - } - /** The signature of a module valdef. * This will compute the corresponding module class TypeRef immediately * without going through the defined type of the ValDef. This is necessary @@ -1853,6 +1852,30 @@ class Namer { typer: Typer => // Beware: ddef.name need not match sym.name if sym was freshened! 
val isConstructor = sym.name == nme.CONSTRUCTOR + val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() + if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then + for params <- ddef.paramss; case tdef: TypeDef <- params do + for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do + witnessNamesOfParam(tdef) = ws + + /** Are all names in `wnames` defined by the longest prefix of all `params` + * that have been typed ahead (i.e. that carry the TypedAhead attachment)? + */ + def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = + (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty + + /** Enter and typecheck parameter list, add context companions as. + * Once all witness parameters for a context bound are seen, create a + * context bound companion for it. + */ + def completeParams(params: List[MemberDef])(using Context): Unit = + index(params) + for param <- params do + typedAheadExpr(param) + for (tdef, wnames) <- witnessNamesOfParam do + if wnames.contains(param.name) && allParamsSeen(wnames, params) then + addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) + // The following 3 lines replace what was previously just completeParams(tparams). // But that can cause bad bounds being computed, as witnessed by // tests/pos/paramcycle.scala. The problematic sequence is this: diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index f744eb392d7c..37da51157e91 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -840,6 +840,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return dynSelected.ensureConforms(fieldType) case _ => + // Otherwise, if the qualifier is a context bound companion, handle + // by selecting a witness in typedCBSelect + if qual.tpe.typeSymbol == defn.CBCompanion then + val witnessSelection = typedCBSelect(tree0, pt, qual) + if !witnessSelection.isEmpty then return witnessSelection + // Otherwise, report an error assignType(tree, rawType match @@ -849,6 +855,76 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer notAMemberErrorType(tree, qual, pt)) end typedSelect + /** Expand a selection A.m on a context bound companion A with type + * `[ref_1 | ... | ref_N]` as described by + * Step 3 of the doc comment of annotation.internal.WitnessNames. + * @return the best alternative if it exists, + * or EmptyTree if no witness admits selecting with the given name, + * or EmptyTree and report an ambiguity error of there are several + * possible witnesses and no selection is better than the other + * according to the critera given in Step 3. + */ + def typedCBSelect(tree: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = + + type Alts = List[(/*prev: */Tree, /*prevState: */TyperState, /*prevWitness: */TermRef)] + + /** Compare two alternative selections `alt1` and `alt2` from witness types + * `wit1`, `wit2` according to the 3 criteria in the enclosing doc comment. I.e. 
+ * + * alt1 = qual1.m, alt2 = qual2.m, qual1: wit1, qual2: wit2 + * + * @return 1 if 1st alternative is preferred over 2nd + * -1 if 2nd alternative is preferred over 1st + * 0 if neither alternative is preferred over the other + */ + def compareAlts(alt1: Tree, alt2: Tree, wit1: TermRef, wit2: TermRef): Int = + val cmpPrefix = compare(wit1, wit2, preferGeneral = true) + typr.println(i"compare witnesses $wit1: ${wit1.info}, $wit2: ${wit2.info} = $cmpPrefix") + if cmpPrefix != 0 then cmpPrefix + else (alt1.tpe, alt2.tpe) match + case (tp1: TypeRef, tp2: TypeRef) => + if tp1.dealias == tp2.dealias then 1 else 0 + case (tp1: TermRef, tp2: TermRef) => + if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 + else compare(tp1, tp2, preferGeneral = false) + case (tp1: TermRef, _) => 1 + case (_, tp2: TermRef) => -1 + case _ => 0 + + /** Find the set of maximally preferred alternative among `prev` and the + * remaining alternatives generated from `witnesses` with is a union type + * of witness references. + */ + def tryAlts(prevs: Alts, witnesses: Type): Alts = witnesses match + case OrType(wit1, wit2) => + tryAlts(tryAlts(prevs, wit1), wit2) + case witness: TermRef => + val altQual = tpd.ref(witness).withSpan(qual.span) + val altCtx = ctx.fresh.setNewTyperState() + val alt = typedSelect(tree, pt, altQual)(using altCtx) + def current = (alt, altCtx.typerState, witness) + if altCtx.reporter.hasErrors then prevs + else + val cmps = prevs.map: (prevTree, prevState, prevWitness) => + compareAlts(prevTree, alt, prevWitness, witness) + if cmps.exists(_ == 1) then prevs + else current :: prevs.zip(cmps).collect{ case (prev, cmp) if cmp != -1 => prev } + + qual.tpe.widen match + case AppliedType(_, arg :: Nil) => + tryAlts(Nil, arg) match + case Nil => EmptyTree + case (best @ (bestTree, bestState, _)) :: Nil => + bestState.commit() + bestTree + case multiAlts => + report.error( + em"""Ambiguous witness reference. None of the following alternatives is more specific than the other: + |${multiAlts.map((alt, _, witness) => i"\n $witness.${tree.name}: ${alt.tpe.widen}")}""", + tree.srcPos) + EmptyTree + end typedCBSelect + def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { record("typedSelect") diff --git a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala new file mode 100644 index 000000000000..f859cda96d06 --- /dev/null +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -0,0 +1,53 @@ +package scala.annotation +package internal + +/** An annotation that is used for marking type definitions that should get + * context bound companions. The scheme is as follows: + * + * 1. When desugaring a context-bounded type A, add a @WitnessNames(n_1, ... , n_k) + * annotation to the type declaration node, where n_1, ..., n_k are the names of + * all the witnesses generated for the context bounds of A. This annotation will + * be pickled as usual. + * + * 2. During Namer or Unpickling, when encountering a type declaration A with + * a WitnessNames(n_1, ... , n_k) annotation, create a CB companion `val A` with + * rtype ``[ref_1 | ... | ref_k] where ref_i is a TermRef + * with the same prefix as A and name n_i. Except, don't do this if the type in + * question is a type parameter and there is already a term parameter with name A + * defined for the same method. 
+ * + * ContextBoundCompanion is defined as an internal abstract type like this: + * + * type ``[-Refs] + * + * The context bound companion's variance is negative, so that unons in the + * arguments are joined when encountering multiple definfitions and forming a glb. + * + * 3. Add a special case for typing a selection A.m on a value A of type + * ContextBoundCompanion[ref_1, ..., ref_k]. Namely, try to typecheck all + * selections ref_1.m, ..., ref_k.m with the expected type. There must be + * a unique selection ref_i.m that typechecks and such that for all other + * selections ref_j.m that also typecheck one of the following three criteria + * applies: + * + * 1. ref_i.m and ref_j.m are the same. This means: If they are types then + * ref_i.m is an alias of ref_j.m. If they are terms then they are both + * singleton types and ref_i.m =:= ref_j.m. + * 2. The underlying type (under widen) of ref_i is a true supertype of the + * underlying type of ref_j. + * 3. ref_i.m is a term, the underlying type of ref_j is not a strict subtype + * of the underlying type of ref_j, and the underlying type ref_i.m is a + * strict subtype of the underlying type of ref_j.m. + * + * If there is such a selection, map A.m to ref_i.m, otherwise report an error. + * + * (2) might surprise. It is the analogue of given disambiguation, where we also + * pick the most general candidate that matches the expected type. E.g. we have + * context bounds for Functor, Monad, and Applicable. In this case we want to + * select the `map` method of `Functor`. + * + * 4. At PostTyper, issue an error when encountering any reference to a CB companion. + */ +class WitnessNames(names: String*) extends StaticAnnotation + + diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 3b28733226a0..6c3640eed12c 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -20,6 +20,8 @@ object MiMaFilters { ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.WitnessNames"), ), // Additions since last LTS diff --git a/tests/neg/cb-companion-leaks.check b/tests/neg/cb-companion-leaks.check new file mode 100644 index 000000000000..156f8a7ab3ee --- /dev/null +++ b/tests/neg/cb-companion-leaks.check @@ -0,0 +1,66 @@ +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:9:23 ---------------------------------------------------------- +9 | def foo[A: {C, D}] = A // error + | ^ + | context bound companion value A cannot be used as a value + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. 
Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + --------------------------------------------------------------------------------------------------------------------- +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:13:10 --------------------------------------------------------- +13 | val x = A // error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- +-- [E194] Type Error: tests/neg/cb-companion-leaks.scala:15:9 ---------------------------------------------------------- +15 | val y: A.type = ??? // error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/cb-companion-leaks.scala b/tests/neg/cb-companion-leaks.scala new file mode 100644 index 000000000000..07155edb05dc --- /dev/null +++ b/tests/neg/cb-companion-leaks.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future -explain + +class C[Self] + +class D[Self] + +trait Test: + + def foo[A: {C, D}] = A // error + + type A: C + + val x = A // error + + val y: A.type = ??? 
// error + diff --git a/tests/pos-macros/i8325/Macro_1.scala b/tests/pos-macros/i8325/Macro_1.scala index 18466e17b3df..92a54d21b00a 100644 --- a/tests/pos-macros/i8325/Macro_1.scala +++ b/tests/pos-macros/i8325/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -15,7 +15,7 @@ object A: import quotes.reflect.* expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) - case Apply(fun,args) => '{ A.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } + case Apply(fun,args) => '{ O.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325/Test_2.scala b/tests/pos-macros/i8325/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325/Test_2.scala +++ b/tests/pos-macros/i8325/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos-macros/i8325b/Macro_1.scala b/tests/pos-macros/i8325b/Macro_1.scala index 181efa260f9b..139abed94078 100644 --- a/tests/pos-macros/i8325b/Macro_1.scala +++ b/tests/pos-macros/i8325b/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -16,7 +16,7 @@ object A: expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) case r@Apply(fun,args) => '{ - A.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } + O.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325b/Test_2.scala b/tests/pos-macros/i8325b/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325b/Test_2.scala +++ b/tests/pos-macros/i8325b/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos/FromString-cb-companion.scala b/tests/pos/FromString-cb-companion.scala new file mode 100644 index 000000000000..d086420761ee --- /dev/null +++ b/tests/pos/FromString-cb-companion.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[Self]: + def fromString(s: String): Self + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/cb-companion-joins.scala b/tests/pos/cb-companion-joins.scala new file mode 100644 index 000000000000..97e0a8a7e4ac --- /dev/null +++ b/tests/pos/cb-companion-joins.scala @@ -0,0 +1,21 @@ +import language.experimental.modularity +import language.future + +trait M[Self]: + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num[Self]: + def zero: Self + +trait A extends M[A] +trait B extends M[A] + +trait AA: + type X: M +trait BB: + type X: Num +class CC[X1: {M, Num}] extends AA, BB: + type X = X1 + X.zero + X.unit From c6388c2785f628b7e4a8680b6d4f1e7be0b0a925 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Apr 2024 23:33:36 +0200 Subject: [PATCH 028/827] Allow contecxt bounds with abstract `Self` types If a context bound type `T` for type parameter `A` does not have type parameters, demand evidence of type `T { type Self = A }` instead. 
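For example (adapted from the deferredSummon and FromString tests updated in this
commit; the `max` method itself is only illustrative):

  //> using options -language:experimental.modularity -source future
  trait Ord:
    type Self
    def less(x: Self, y: Self): Boolean

  given Int is Ord:
    def less(x: Int, y: Int) = x < y

  // `A: Ord` now demands a given of type `Ord { type Self = A }` (i.e. `A is Ord`),
  // since `Ord` takes no type parameters but declares an abstract `Self` member.
  def max[A: Ord as ord](x: A, y: A): A =
    if ord.less(x, y) then y else x
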
--- .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../src/dotty/tools/dotc/typer/Typer.scala | 6 +- .../test/dotc/pos-test-pickling.blacklist | 7 +- .../scala/runtime/stdLibPatches/Predef.scala | 13 + tests/pos/FromString.scala | 15 + tests/pos/deferred-givens.scala | 12 +- tests/pos/deferredSummon.scala | 11 +- tests/pos/dep-context-bounds.scala | 11 +- tests/pos/hylolib-extract.scala | 29 ++ tests/pos/hylolib/AnyCollection.scala | 51 +++ tests/pos/hylolib/AnyValue.scala | 67 ++++ tests/pos/hylolib/AnyValueTests.scala | 15 + tests/pos/hylolib/BitArray.scala | 362 ++++++++++++++++++ tests/pos/hylolib/Collection.scala | 267 +++++++++++++ tests/pos/hylolib/CollectionTests.scala | 67 ++++ tests/pos/hylolib/CoreTraits.scala | 56 +++ tests/pos/hylolib/Hasher.scala | 39 ++ tests/pos/hylolib/HyArray.scala | 202 ++++++++++ tests/pos/hylolib/HyArrayTests.scala | 17 + tests/pos/hylolib/Integers.scala | 46 +++ tests/pos/hylolib/IntegersTests.scala | 14 + tests/pos/hylolib/Range.scala | 37 ++ tests/pos/hylolib/Slice.scala | 63 +++ tests/pos/hylolib/StringConvertible.scala | 9 + tests/pos/hylolib/Test.scala | 16 + tests/pos/i10929-new-syntax.scala | 22 ++ tests/pos/ord-over-tracked.scala | 15 + tests/pos/parsercombinators-arrow.scala | 48 +++ tests/pos/parsercombinators-ctx-bounds.scala | 49 +++ tests/pos/parsercombinators-new-syntax.scala | 45 +++ tests/pos/parsercombinators-this.scala | 53 +++ tests/pos/sets-tc.scala | 46 +++ tests/pos/typeclass-aggregates.scala | 32 +- tests/pos/typeclasses-arrow.scala | 140 +++++++ tests/pos/typeclasses-this.scala | 141 +++++++ tests/pos/typeclasses.scala | 47 ++- tests/run/for-desugar-strawman.scala | 96 +++++ tests/run/given-disambiguation.scala | 58 +++ tests/run/i15840.scala | 27 ++ 39 files changed, 2199 insertions(+), 53 deletions(-) create mode 100644 tests/pos/FromString.scala create mode 100644 tests/pos/hylolib-extract.scala create mode 100644 tests/pos/hylolib/AnyCollection.scala create mode 100644 tests/pos/hylolib/AnyValue.scala create mode 100644 tests/pos/hylolib/AnyValueTests.scala create mode 100644 tests/pos/hylolib/BitArray.scala create mode 100644 tests/pos/hylolib/Collection.scala create mode 100644 tests/pos/hylolib/CollectionTests.scala create mode 100644 tests/pos/hylolib/CoreTraits.scala create mode 100644 tests/pos/hylolib/Hasher.scala create mode 100644 tests/pos/hylolib/HyArray.scala create mode 100644 tests/pos/hylolib/HyArrayTests.scala create mode 100644 tests/pos/hylolib/Integers.scala create mode 100644 tests/pos/hylolib/IntegersTests.scala create mode 100644 tests/pos/hylolib/Range.scala create mode 100644 tests/pos/hylolib/Slice.scala create mode 100644 tests/pos/hylolib/StringConvertible.scala create mode 100644 tests/pos/hylolib/Test.scala create mode 100644 tests/pos/i10929-new-syntax.scala create mode 100644 tests/pos/ord-over-tracked.scala create mode 100644 tests/pos/parsercombinators-arrow.scala create mode 100644 tests/pos/parsercombinators-ctx-bounds.scala create mode 100644 tests/pos/parsercombinators-new-syntax.scala create mode 100644 tests/pos/parsercombinators-this.scala create mode 100644 tests/pos/sets-tc.scala create mode 100644 tests/pos/typeclasses-arrow.scala create mode 100644 tests/pos/typeclasses-this.scala create mode 100644 tests/run/for-desugar-strawman.scala create mode 100644 tests/run/given-disambiguation.scala create mode 100644 tests/run/i15840.scala diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index ab7e4eea0b46..b935488695e0 
100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -388,6 +388,7 @@ object StdNames { val RootPackage: N = "RootPackage" val RootClass: N = "RootClass" val Select: N = "Select" + val Self: N = "Self" val Shape: N = "Shape" val StringContext: N = "StringContext" val This: N = "This" diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 37da51157e91..6ac41ed619b6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2366,9 +2366,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) if tycon.tpe.typeParams.nonEmpty then typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) + else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractType then + val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) + typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else errorTree(tree, - em"""Illegal context bound: ${tycon.tpe} does not take type parameters.""") + em"""Illegal context bound: ${tycon.tpe} does not take type parameters and + |does not have an abstract type member named `Self` either.""") def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index e58277bdc0e5..d6f962176ecc 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -127,10 +127,11 @@ i20053b.scala # alias types at different levels of dereferencing parsercombinators-givens.scala parsercombinators-givens-2.scala +parsercombinators-ctx-bounds.scala +parsercombinators-this.scala parsercombinators-arrow.scala +parsercombinators-new-syntax.scala hylolib-deferred-given hylolib-cb - - - +hylolib diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 7abd92e408f8..a68a628623bf 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -66,4 +66,17 @@ object Predef: extension (opt: Option.type) @experimental inline def fromNullable[T](t: T | Null): Option[T] = Option(t).asInstanceOf[Option[T]] + + /** A type supporting Self-based type classes. + * + * A is TC + * + * expands to + * + * TC { type Self = A } + * + * which is what is needed for a context bound `[A: TC]`. 
+ */ + infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } + end Predef diff --git a/tests/pos/FromString.scala b/tests/pos/FromString.scala new file mode 100644 index 000000000000..333a4c002989 --- /dev/null +++ b/tests/pos/FromString.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future + +trait FromString: + type Self + def fromString(s: String): Self + +given Int is FromString = _.toInt + +given Double is FromString = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala index 51fa43866d1e..b9018c97e151 100644 --- a/tests/pos/deferred-givens.scala +++ b/tests/pos/deferred-givens.scala @@ -1,9 +1,19 @@ //> using options -language:experimental.modularity -source future import compiletime.* class Ord[Elem] - given Ord[Double] +trait A: + type Elem : Ord + def foo = summon[Ord[Elem]] + +class AC extends A: + type Elem = Double + override given Ord[Elem] = ??? + +class AD extends A: + type Elem = Double + trait B: type Elem given Ord[Elem] = deferred diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala index 31a9697eda6b..f8252576d81a 100644 --- a/tests/pos/deferredSummon.scala +++ b/tests/pos/deferredSummon.scala @@ -1,20 +1,21 @@ //> using options -language:experimental.modularity -source future import compiletime.deferred -trait Ord[Self]: +trait Ord: + type Self def less(x: Self, y: Self): Boolean trait A: type Elem - given Ord[Elem] = deferred - def foo = summon[Ord[Elem]] + given Elem is Ord = deferred + def foo = summon[Elem is Ord] trait B: type Elem: Ord - def foo = summon[Ord[Elem]] + def foo = summon[Elem is Ord] object Inst: - given Ord[Int]: + given Int is Ord: def less(x: Int, y: Int) = x < y object Test1: diff --git a/tests/pos/dep-context-bounds.scala b/tests/pos/dep-context-bounds.scala index 434805762622..c724d92e9809 100644 --- a/tests/pos/dep-context-bounds.scala +++ b/tests/pos/dep-context-bounds.scala @@ -1,6 +1,13 @@ //> using options -language:experimental.modularity -source future -trait A[X]: - type Self = X +trait A: + type Self + +object Test1: + def foo[X: A](x: X.Self) = ??? + + def bar[X: A](a: Int) = ??? + + def baz[X: A](a: Int)(using String) = ??? object Test2: def foo[X: A as x](a: x.Self) = ??? diff --git a/tests/pos/hylolib-extract.scala b/tests/pos/hylolib-extract.scala new file mode 100644 index 000000000000..846e52f30df6 --- /dev/null +++ b/tests/pos/hylolib-extract.scala @@ -0,0 +1,29 @@ +//> using options -language:experimental.modularity -source future +package hylotest + +trait Value: + type Self + extension (self: Self) def eq(other: Self): Boolean + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. */ + type Element: Value + +class BitArray + +given Boolean is Value: + extension (self: Self) def eq(other: Self): Boolean = + self == other + +given BitArray is Collection: + type Element = Boolean + +extension [Self: Value](self: Self) + def neq(other: Self): Boolean = !self.eq(other) + +extension [Self: Collection](self: Self) + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + ??? 
diff --git a/tests/pos/hylolib/AnyCollection.scala b/tests/pos/hylolib/AnyCollection.scala new file mode 100644 index 000000000000..6c2b835852e6 --- /dev/null +++ b/tests/pos/hylolib/AnyCollection.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base: Collection](base: Base): AnyCollection[Base.Element] = + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[Base.Position])) + + def at(p: AnyValue): Base.Element = + base.at(p.unsafelyUnwrappedAs[Base.Position]) + + new AnyCollection[Base.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given [T: Value] => AnyCollection[T] is Collection: + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) + def startPosition = self._start() + def endPosition = self._end() + def positionAfter(p: Position) = self._after(p) + def at(p: Position) = self._at(p) + diff --git a/tests/pos/hylolib/AnyValue.scala b/tests/pos/hylolib/AnyValue.scala new file mode 100644 index 000000000000..6844135b646b --- /dev/null +++ b/tests/pos/hylolib/AnyValue.scala @@ -0,0 +1,67 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. 
*/ + def apply[T: Value](wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given AnyValue is Value: + + extension (self: AnyValue) + def copy(): AnyValue = self.copy() + def eq(other: AnyValue): Boolean = self `eq` other + def hashInto(hasher: Hasher): Hasher = self.hashInto(hasher) + diff --git a/tests/pos/hylolib/AnyValueTests.scala b/tests/pos/hylolib/AnyValueTests.scala new file mode 100644 index 000000000000..96d3563f4f53 --- /dev/null +++ b/tests/pos/hylolib/AnyValueTests.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class AnyValueTests extends munit.FunSuite: + + test("eq"): + val a = AnyValue(1) + assert(a `eq` a) + assert(!(a `neq` a)) + + val b = AnyValue(2) + assert(!(a `eq` b)) + assert(a `neq` b) + diff --git a/tests/pos/hylolib/BitArray.scala b/tests/pos/hylolib/BitArray.scala new file mode 100644 index 000000000000..6ef406e5ad83 --- /dev/null +++ b/tests/pos/hylolib/BitArray.scala @@ -0,0 +1,362 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. 
*/ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. + */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). 
+ */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. 
*/ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given BitArray.Position is Value: + + extension (self: BitArray.Position) + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + +given BitArray is Collection: + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + +given BitArray is StringConvertible: + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + diff --git a/tests/pos/hylolib/Collection.scala b/tests/pos/hylolib/Collection.scala new file mode 100644 index 000000000000..bef86a967e6e --- /dev/null +++ b/tests/pos/hylolib/Collection.scala @@ -0,0 +1,267 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value + + extension (self: Self) + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def loop(p: Position, n: Int): Int = + if p `eq` e then n else loop(self.positionAfter(p), n + 1) + loop(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if i `eq` e then false + else if j `eq` e then true + else + def recur(n: Position): Boolean = + if n `eq` j then true + else if n `eq` e then false + else recur(self.positionAfter(n)) + recur(self.positionAfter(i)) + + class Slice2(val base: Self, val bounds: Range[Position]): + + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Position = + bounds.lowerBound + + def endPosition: Position = + bounds.upperBound + + def at(p: Position): Element = + base.at(p) + end Slice2 + +end Collection + +extension [Self: Collection](self: Self) + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(Self.Element, Slice[Self])] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + def headAndTail2: Option[(Self.Element, Self.Slice2)] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Self.Slice2(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T)(combine: (T, Self.Element) => T): T = + val e = self.endPosition + def loop(p: Self.Position, r: T): T = + if p `eq` e then r + else loop(self.positionAfter(p), combine(r, self.at(p))) + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: Self.Element => Boolean): Boolean = + val e = self.endPosition + def loop(p: Self.Position): Boolean = + if p `eq` e then true + else if !action(self.at(p)) then false + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T: Value](transform: Self.Element => T): HyArray[T] = + self.reduce(HyArray[T]()): (r, e) => + r.append(transform(e), assumeUniqueness = true) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: Self.Element => Boolean): HyArray[Self.Element] = + self.reduce(HyArray[Self.Element]()): (r, e) => + if isIncluded(e) then r.append(e, assumeUniqueness = true) else r + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def allSatisfy(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def firstPositionWhere(predicate: Self.Element => Boolean): Option[Self.Position] = + val e = self.endPosition + def loop(p: Self.Position): Option[Self.Position] = + if p `eq` e then None + else if predicate(self.at(p)) then Some(p) + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Self.Element is Comparable): Option[Self.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Self.Element is Comparable): Option[Self.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + if self.isEmpty then + None + else + val e = self.endPosition + def loop(p: Self.Position, least: Self.Element): Self.Element = + if p `eq` e then + least + else + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + loop(self.positionAfter(p), y) + val b = self.startPosition + Some(loop(self.positionAfter(b), self.at(b))) + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. 
*/ + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + def loop(i: Self.Position, j: T.Position): Boolean = + if i `eq` self.endPosition then + j `eq` other.endPosition + else if j `eq` other.endPosition then + false + else if self.at(i) `neq` other.at(j)then + false + else + loop(self.positionAfter(i), other.positionAfter(j)) + loop(self.startPosition, other.startPosition) +end extension diff --git a/tests/pos/hylolib/CollectionTests.scala b/tests/pos/hylolib/CollectionTests.scala new file mode 100644 index 000000000000..d884790f64d7 --- /dev/null +++ b/tests/pos/hylolib/CollectionTests.scala @@ -0,0 +1,67 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class CollectionTests extends munit.FunSuite: + + test("isEmpty"): + val empty = AnyCollection(HyArray[Int]()) + assert(empty.isEmpty) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2)) + assert(!nonEmpty.isEmpty) + + test("count"): + val a = AnyCollection(HyArray[Int](1, 2)) + assertEquals(a.count, 2) + + test("isBefore"): + val empty = AnyCollection(HyArray[Int]()) + assert(!empty.isBefore(empty.startPosition, empty.endPosition)) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2)) + val p0 = nonEmpty.startPosition + val p1 = nonEmpty.positionAfter(p0) + val p2 = nonEmpty.positionAfter(p1) + assert(nonEmpty.isBefore(p0, nonEmpty.endPosition)) + assert(nonEmpty.isBefore(p1, nonEmpty.endPosition)) + assert(!nonEmpty.isBefore(p2, nonEmpty.endPosition)) + + test("headAndTail"): + val empty = AnyCollection(HyArray[Int]()) + assertEquals(empty.headAndTail, None) + + val one = AnyCollection(HyArray[Int](1)) + val Some((h0, t0)) = one.headAndTail: @unchecked + assert(h0 eq 1) + assert(t0.isEmpty) + + val two = AnyCollection(HyArray[Int](1, 2)) + val Some((h1, t1)) = two.headAndTail: @unchecked + assertEquals(h1, 1) + assertEquals(t1.count, 1) + + test("reduce"): + val empty = AnyCollection(HyArray[Int]()) + assertEquals(empty.reduce(0)((s, x) => s + x), 0) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3)) + assertEquals(nonEmpty.reduce(0)((s, x) => s + x), 6) + + test("forEach"): + val empty = AnyCollection(HyArray[Int]()) + assert(empty.forEach((e) => false)) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3)) + var s = 0 + assert(nonEmpty.forEach((e) => { s += e; true })) + assertEquals(s, 6) + + s = 0 + assert(!nonEmpty.forEach((e) => { s += e; false })) + assertEquals(s, 1) + + test("elementsEqual"): + val a = HyArray(1, 2) + assert(a.elementsEqual(a)) +end CollectionTests diff --git a/tests/pos/hylolib/CoreTraits.scala b/tests/pos/hylolib/CoreTraits.scala new file mode 100644 index 000000000000..f4b3699b430e --- /dev/null +++ b/tests/pos/hylolib/CoreTraits.scala @@ -0,0 +1,56 @@ +package hylo + +/** A type whose instance can be treated as independent values. + * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value: + type Self + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + def neq(other: Self): Boolean = !self.eq(other) + + /** Hashes the salient parts of `self` into `hasher`. 
*/ + def hashInto(hasher: Hasher): Hasher + + } + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable extends Value { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib/Hasher.scala b/tests/pos/hylolib/Hasher.scala new file mode 100644 index 000000000000..ca45550ed002 --- /dev/null +++ b/tests/pos/hylolib/Hasher.scala @@ -0,0 +1,39 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib/HyArray.scala b/tests/pos/hylolib/HyArray.scala new file mode 100644 index 000000000000..de5e83d3b1a3 --- /dev/null +++ b/tests/pos/hylolib/HyArray.scala @@ -0,0 +1,202 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element: Value as elementIsCValue]( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. 
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + /** Adds the contents of `source` at the end of the array. */ + def appendContents[C: Collection { type Element = HyArray.this.Element }]( + source: C, assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + source.reduce(result): (r, e) => + r.append(e, assumeUniqueness = true) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. 
+ reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T: Value](elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given [T: Value] => HyArray[T] is Value: + + extension (self: HyArray[T]) + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher)((h, e) => e.hashInto(h)) + +given [T: Value] => HyArray[T] is Collection: + + type Element = T + type Position = Int + + extension (self: HyArray[T]) + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + +given [T: {Value, StringConvertible}] => HyArray[T] is StringConvertible: + extension (self: HyArray[T]) + override def description: String = + val contents = mutable.StringBuilder() + self.forEach: e => + contents ++= e.description + true + s"[${contents.mkString(", ")}]" diff --git a/tests/pos/hylolib/HyArrayTests.scala b/tests/pos/hylolib/HyArrayTests.scala new file mode 100644 index 000000000000..0de65603d0c7 --- /dev/null +++ b/tests/pos/hylolib/HyArrayTests.scala @@ -0,0 +1,17 @@ +import hylo.* +import hylo.given + +class HyArrayTests extends munit.FunSuite: + + test("reserveCapacity"): + var a = HyArray[Int]() + a = a.append(1) + a = a.append(2) + + a = a.reserveCapacity(10) + assert(a.capacity >= 10) + assertEquals(a.count, 2) + assertEquals(a.at(0), 1) + assertEquals(a.at(1), 2) + +end HyArrayTests diff --git a/tests/pos/hylolib/Integers.scala b/tests/pos/hylolib/Integers.scala new file mode 100644 index 000000000000..f7334ae40786 --- /dev/null +++ b/tests/pos/hylolib/Integers.scala @@ -0,0 +1,46 @@ +package hylo + +given Boolean is Value: + + extension (self: Boolean) + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + +given Int is Value: + + extension (self: Int) + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. 
+ self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + +given Int is Comparable: + + extension (self: Int) + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + +given Int is StringConvertible diff --git a/tests/pos/hylolib/IntegersTests.scala b/tests/pos/hylolib/IntegersTests.scala new file mode 100644 index 000000000000..74dedf30d83e --- /dev/null +++ b/tests/pos/hylolib/IntegersTests.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class IntegersTests extends munit.FunSuite: + + test("Int.hashInto"): + val x = Hasher.hash(42) + val y = Hasher.hash(42) + assertEquals(x, y) + + val z = Hasher.hash(1337) + assertNotEquals(x, z) + diff --git a/tests/pos/hylolib/Range.scala b/tests/pos/hylolib/Range.scala new file mode 100644 index 000000000000..b0f50dd55c8c --- /dev/null +++ b/tests/pos/hylolib/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. */ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib/Slice.scala b/tests/pos/hylolib/Slice.scala new file mode 100644 index 000000000000..d54f855b1041 --- /dev/null +++ b/tests/pos/hylolib/Slice.scala @@ -0,0 +1,63 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base: Collection]( + val base: Base, + val bounds: Range[Base.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Base.Position = + bounds.lowerBound + + def endPosition: Base.Position = + bounds.upperBound + + def positionAfter(p: Base.Position): Base.Position = + base.positionAfter(p) + + def at(p: Base.Position): Base.Element = + base.at(p) + +} + +given [C: Collection] => Slice[C] is Collection: + + type Element = C.Element + type Position = C.Position + + extension (self: Slice[C]) + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] + // This is actually unsafe. We have: + // self.bounds: Range(Slice[C].Base.Position) + // But the _value_ of Slice[C].Base is not necssarily this given, even + // though it is true that `type Slice[C].Base = C`. There might be multiple + // implementations of `Slice[C] is Collection` that define different `Position` + // types. So we cannot conclude that `Slice[C].Base.Position = this.Position`. 
+ // To make this safe, we'd need some form of coherence, where we ensure that + // there is only one way to implement `Slice is Collection`. + // + // As an alternativem we can make Slice dependent on the original Collection + // _instance_ instead of the original Collection _type_. This design is + // realized by the Slice2 definitions. It works without casts. + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + +given [C: Collection] => C.Slice2 is Collection: + type Element = C.Element + type Position = C.Position + + extension (self: C.Slice2) + + def startPosition = self.bounds.lowerBound + def endPosition = self.bounds.upperBound + def positionAfter(p: Position) = self.base.positionAfter(p) + def at(p: Position) = self.base.at(p) diff --git a/tests/pos/hylolib/StringConvertible.scala b/tests/pos/hylolib/StringConvertible.scala new file mode 100644 index 000000000000..cf901d9a3313 --- /dev/null +++ b/tests/pos/hylolib/StringConvertible.scala @@ -0,0 +1,9 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible: + type Self + + /** Returns a textual description of `self`. */ + extension (self: Self) + def description: String = self.toString diff --git a/tests/pos/hylolib/Test.scala b/tests/pos/hylolib/Test.scala new file mode 100644 index 000000000000..9e8d6181affd --- /dev/null +++ b/tests/pos/hylolib/Test.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +object munit: + open class FunSuite: + def test(name: String)(op: => Unit): Unit = op + def assertEquals[T](x: T, y: T) = assert(x == y) + def assertNotEquals[T](x: T, y: T) = assert(x != y) + +@main def Test = + CollectionTests() + AnyValueTests() + HyArrayTests() + IntegersTests() + println("done") diff --git a/tests/pos/i10929-new-syntax.scala b/tests/pos/i10929-new-syntax.scala new file mode 100644 index 000000000000..11c5e9313d4c --- /dev/null +++ b/tests/pos/i10929-new-syntax.scala @@ -0,0 +1,22 @@ +//> using options -language:experimental.modularity -source future +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple is TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest is TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + (f(x.head) *: Rest.map(x.tail)(f)) + +def foo[T: TupleOf[Int]](xs: T): T.Mapped[Int] = T.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok diff --git a/tests/pos/ord-over-tracked.scala b/tests/pos/ord-over-tracked.scala new file mode 100644 index 000000000000..a9b4aba556e1 --- /dev/null +++ b/tests/pos/ord-over-tracked.scala @@ -0,0 +1,15 @@ +import language.experimental.modularity + +trait Ord[T]: + def lt(x: T, y: T): Boolean + +given Ord[Int] = ??? + +case class D(tracked val x: Int) +given [T <: D]: Ord[T] = (a, b) => a.x < b.x + +def mySort[T: Ord](x: Array[T]): Array[T] = ??? 
+ +def test = + val arr = Array(D(1)) + val arr1 = mySort(arr) // error: no given instance of type Ord[D{val x: (1 : Int)}] \ No newline at end of file diff --git a/tests/pos/parsercombinators-arrow.scala b/tests/pos/parsercombinators-arrow.scala new file mode 100644 index 000000000000..f8bec02067e5 --- /dev/null +++ b/tests/pos/parsercombinators-arrow.scala @@ -0,0 +1,48 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given [C, E] => Apply[C, E] is Combinator: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + +given [A: Combinator, B: Combinator { type Context = A.Context }] + => Combine[A, B] is Combinator: + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked diff --git a/tests/pos/parsercombinators-ctx-bounds.scala b/tests/pos/parsercombinators-ctx-bounds.scala new file mode 100644 index 000000000000..d77abea5e539 --- /dev/null +++ b/tests/pos/parsercombinators-ctx-bounds.scala @@ -0,0 +1,49 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: [X] =>> Combinator[X] { type Context = A.Context }] + : Combinator[Combine[A, B]] with + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? 
+ +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/parsercombinators-new-syntax.scala b/tests/pos/parsercombinators-new-syntax.scala new file mode 100644 index 000000000000..f984972b915d --- /dev/null +++ b/tests/pos/parsercombinators-new-syntax.scala @@ -0,0 +1,45 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + type Self + type Input + type Result + + extension (self: Self) + /// Parses and returns an element from input `in`. + def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](first: A, second: B) + +given [I, R] => Apply[I, R] is Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] is Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for x <- self.first.parse(in); y <- self.second.parse(in) yield (x, y) + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // was error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Input` + val rc: Option[(Int, Int)] = r + diff --git a/tests/pos/parsercombinators-this.scala b/tests/pos/parsercombinators-this.scala new file mode 100644 index 000000000000..70b423985400 --- /dev/null +++ b/tests/pos/parsercombinators-this.scala @@ -0,0 +1,53 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator with { + type Self = Apply[C, E] + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: Combinator { type Context = A.Context }] + : Combinator with + type Self = Combine[A, B] + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? 
+ +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/sets-tc.scala b/tests/pos/sets-tc.scala new file mode 100644 index 000000000000..86349bf6a405 --- /dev/null +++ b/tests/pos/sets-tc.scala @@ -0,0 +1,46 @@ +import language.experimental.modularity + +// First version: higher-kinded self type +object v1: + trait Set: + type Self[A] + def empty[A]: Self[A] + def union[A](self: Self[A], other: Self[A]): Self[A] + + case class ListSet[A](elems: List[A]) + + given ListSet is Set: + def empty[A]: ListSet[A] = ListSet(Nil) + + def union[A](self: ListSet[A], other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S[_]: Set](xs: List[S[A]]): S[A] = + xs.foldLeft(S.empty)(S.union) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + + // Second version: parameterized type class +object v2: + trait Set[A]: + type Self + def empty: Self + extension (s: Self) def union (other: Self): Self + + case class ListSet[A](elems: List[A]) + + given [A] => ListSet[A] is Set[A]: + def empty: ListSet[A] = ListSet(Nil) + + extension (self: ListSet[A]) def union(other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S: Set[A]](xs: List[S]): S = + xs.foldLeft(S.empty)(_ `union` _) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala index 9bb576603b7b..5e4551b226b7 100644 --- a/tests/pos/typeclass-aggregates.scala +++ b/tests/pos/typeclass-aggregates.scala @@ -1,47 +1,47 @@ //> using options -source future -language:experimental.modularity trait Ord: - type This - extension (x: This) - def compareTo(y: This): Int - def < (y: This): Boolean = compareTo(y) < 0 - def > (y: This): Boolean = compareTo(y) > 0 + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 trait OrdProxy extends Ord: export Ord.this.* trait SemiGroup: - type This - extension (x: This) def combine(y: This): This + type Self + extension (x: Self) def combine(y: Self): Self trait SemiGroupProxy extends SemiGroup: export SemiGroup.this.* trait Monoid extends SemiGroup: - def unit: This + def unit: Self trait MonoidProxy extends Monoid: export Monoid.this.* -def ordWithMonoid(ord: Ord, monoid: Monoid{ type This = ord.This }): Ord & Monoid = +def ordWithMonoid(ord: Ord, monoid: Monoid{ type Self = ord.Self }): Ord & Monoid = new ord.OrdProxy with monoid.MonoidProxy {} trait OrdWithMonoid extends Ord, Monoid -def ordWithMonoid2(ord: Ord, monoid: Monoid{ type This = ord.This }) = //: OrdWithMonoid { type This = ord.This} = +def ordWithMonoid2(ord: Ord, monoid: Monoid{ type Self = ord.Self }) = //: OrdWithMonoid { type Self = ord.Self} = new OrdWithMonoid with ord.OrdProxy with monoid.MonoidProxy {} -given intOrd: (Ord { type This = Int }) = ??? -given intMonoid: (Monoid { type This = Int }) = ??? 
+given intOrd: (Ord { type Self = Int }) = ??? +given intMonoid: (Monoid { type Self = Int }) = ??? -//given (using ord: Ord, monoid: Monoid{ type This = ord.This }): (Ord & Monoid { type This = ord.This}) = +//given (using ord: Ord, monoid: Monoid{ type Self = ord.Self }): (Ord & Monoid { type Self = ord.Self}) = // ordWithMonoid2(ord, monoid) -val x = summon[Ord & Monoid { type This = Int}] -val y: Int = ??? : x.This +val x = summon[Ord & Monoid { type Self = Int}] +val y: Int = ??? : x.Self // given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = // new ord.OrdProxy with monoid.MonoidProxy {} -given [A](using ord: Ord { type This = A }, monoid: Monoid { type This = A}): ((Ord & Monoid) { type This = A}) = +given [A](using ord: Ord { type Self = A }, monoid: Monoid { type Self = A}): ((Ord & Monoid) { type Self = A}) = new ord.OrdProxy with monoid.MonoidProxy {} diff --git a/tests/pos/typeclasses-arrow.scala b/tests/pos/typeclasses-arrow.scala new file mode 100644 index 000000000000..379365ffa1c5 --- /dev/null +++ b/tests/pos/typeclasses-arrow.scala @@ -0,0 +1,140 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Int is Ord as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] is Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List is Monad as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] is Monad as readerMonad: + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T is Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + 
println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep is Animal: + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses-this.scala b/tests/pos/typeclasses-this.scala new file mode 100644 index 000000000000..20ce78678b22 --- /dev/null +++ b/tests/pos/typeclasses-this.scala @@ -0,0 +1,141 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given intOrd: Int is Ord with + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + +// given [T](using tracked val ev: Ord { type Self = T}): Ord { type Self = List[T] } with + given [T: Ord]: List[T] is Ord with + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given listMonad: List is Monad with + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given readerMonad[Ctx]: Reader[Ctx] is Monad with + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + 
extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given descending[T: Ord]: T is Ord with + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep is Animal with + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala index 2bf7f76f0804..d0315a318310 100644 --- a/tests/pos/typeclasses.scala +++ b/tests/pos/typeclasses.scala @@ -3,38 +3,36 @@ class Common: trait Ord: - type This - extension (x: This) - def compareTo(y: This): Int - def < (y: This): Boolean = compareTo(y) < 0 - def > (y: This): Boolean = compareTo(y) > 0 + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 trait SemiGroup: - type This - extension (x: This) def combine(y: This): This + type Self + extension (x: Self) def combine(y: Self): Self trait Monoid extends SemiGroup: - def unit: This + def unit: Self trait Functor: - type This[A] - extension [A](x: This[A]) def map[B](f: A => B): This[B] + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] trait Monad extends Functor: - def pure[A](x: A): This[A] - extension [A](x: This[A]) - def flatMap[B](f: A => This[B]): This[B] + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] def map[B](f: A => B) = x.flatMap(f `andThen` pure) - infix type is[A <: AnyKind, B <: {type This <: AnyKind}] = B { type This = A } - end Common object Instances extends Common: given intOrd: (Int is Ord) with - type This = Int + type Self = Int extension (x: Int) def compareTo(y: Int) = if x < y then -1 @@ -77,8 +75,8 @@ object Instances extends Common: def second = xs.tail.head def third = xs.tail.tail.head - extension [M, A](using m: Monad)(xss: m.This[m.This[A]]) - def flatten: m.This[A] = + extension [M, A](using m: Monad)(xss: m.Self[m.Self[A]]) + def flatten: m.Self[A] = xss.flatMap(identity) def maximum[T](xs: List[T])(using T is Ord): T = @@ -103,12 +101,12 @@ object Instances extends Common: // wc 
Scala: 30 115 853 // wc Rust : 57 193 1466 trait Animal: - type This - // Associated function signature; `This` refers to the implementor type. - def apply(name: String): This + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self // Method signatures; these will return a string. - extension (self: This) + extension (self: Self) def name: String def noise: String def talk(): Unit = println(s"$name, $noise") @@ -126,18 +124,17 @@ class Sheep(val name: String): /* instance Sheep: Animal with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" override def talk(): Unit = println(s"$name pauses briefly... $noise") */ -import Instances.is // Implement the `Animal` trait for `Sheep`. given (Sheep is Animal) with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" override def talk(): Unit = diff --git a/tests/run/for-desugar-strawman.scala b/tests/run/for-desugar-strawman.scala new file mode 100644 index 000000000000..a92b19b9150a --- /dev/null +++ b/tests/run/for-desugar-strawman.scala @@ -0,0 +1,96 @@ + +@main def Test = + println: + for + x <- List(1, 2, 3) + y = x + x + if x >= 2 + i <- List.range(0, y) + z = i * i + if z % 2 == 0 + yield + i * x + + println: + val xs = List(1, 2, 3) + xs.flatMapDefined: x => + val y = x + x + xs.applyFilter(x >= 2): + val is = List.range(0, y) + is.mapDefined: i => + val z = i * i + is.applyFilter(z % 2 == 0): + i * x + +extension [A](as: List[A]) + + def applyFilter[B](p: => Boolean)(b: => B) = + if p then Some(b) else None + + def flatMapDefined[B](f: A => Option[IterableOnce[B]]): List[B] = + as.flatMap: x => + f(x).getOrElse(Nil) + + def mapDefined[B](f: A => Option[B]): List[B] = + as.flatMap(f) + +object UNDEFINED + +extension [A](as: Vector[A]) + + def applyFilter[B](p: => Boolean)(b: => B) = + if p then b else UNDEFINED + + def flatMapDefined[B](f: A => IterableOnce[B] | UNDEFINED.type): Vector[B] = + as.flatMap: x => + f(x) match + case UNDEFINED => Nil + case y: IterableOnce[B] => y + + def mapDefined[B](f: A => B | UNDEFINED.type): Vector[B] = + as.flatMap: x => + f(x) match + case UNDEFINED => Nil + case y: B => y :: Nil + +/* +F ::= val x = E; F + x <- E; G +G ::= [] + val x = E; G + if E; G + x <- E; G + +Translation scheme: + +{ for F yield E }c where c = undefined +{ for G yield E }c where c is a reference to the generator preceding the G sequence + +{ for [] yield E }c = E +{ for p = Ep; G yield E }c = val p = Ep; { for G yield E }c +{ for if Ep; G yield E}c = c.applyFilter(Ep)({ for G yield E }c) +{ for p <- Ep; G yield E }c = val c1 = Ep; c1.BIND{ case p => { for G yield E }c1 } (c1 fresh) + + where BIND = flatMapDefined if isGen(G), isFilter(G) + = mapDefined if !isGen(G), isFilter(G) + = flatMap if isGen(G), !isFilter(G) + = map if !isGen(G), !isFilter(G) + +{ for case p <- Ep; G yield E }c = { for $x <- Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E }c +{ for case p = Ep; G yield E }c = { for $x = Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E}c + +isFilter(if E; S) +isFilter(val x = E; S) if isFilter(S) + +isGen(x <- E; S) +isGen(val x = E; S) if isGen(S) +isGen(if E; S) if isGen(S) + +*/ + +val foo = 1 + +def main2 = + foo + ??? + ??? 
match { case _ => 0 } \ No newline at end of file diff --git a/tests/run/given-disambiguation.scala b/tests/run/given-disambiguation.scala new file mode 100644 index 000000000000..637c02a5621f --- /dev/null +++ b/tests/run/given-disambiguation.scala @@ -0,0 +1,58 @@ +import language.experimental.modularity +import language.future + +trait M: + type Self + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num: + type Self + def zero: Self + +trait A extends M +trait B extends M + +def f[X: {M, A, B}](x: X) = + summon[X is M] + x.combine(x) + +trait AA: + type XX: {M, A, B} + val x = XX.unit + val A: String = "hello" + +trait AAA: + type X: M +trait BBB: + type X: Num +class CCC[X1: {M, Num}] extends AAA, BBB: + type X = X1 + X.zero + X.unit + +@main def Test = + class C + + given C is M: + extension (x: Self) def combine (y: Self) = "M" + def unit = C() + + given C is A: + extension (x: Self) def combine (y: Self) = "A" + def unit = C() + + given C is B: + extension (x: Self) def combine (y: Self) = "B" + def unit = C() + + assert(f(C()) == "M") + + class CC extends AA: + type XX = C + assert(A.length == 5) + assert(A.toString == "hello") + + CC() + + diff --git a/tests/run/i15840.scala b/tests/run/i15840.scala new file mode 100644 index 000000000000..0f238e2e7148 --- /dev/null +++ b/tests/run/i15840.scala @@ -0,0 +1,27 @@ +//> using options -language:experimental.modularity -source future + +trait Nat: + type N <: Nat + +class _0 extends Nat: + type N = _0 + +class NatOps[N <: Nat](tracked val n: N): + def toInt(using toIntN: ToInt[n.N]): Int = toIntN() + +// works +def toInt[N <: Nat](n: N)(using toIntN: ToInt[n.N]) = toIntN() + +sealed abstract class ToInt[N <: Nat]: + def apply(): Int + +object ToInt: + given ToInt[_0] { + def apply() = 0 + } + +@main def Test() = + assert(toInt(new _0) == 0) + assert(NatOps[_0](new _0).toInt == 0) + assert: + NatOps(new _0).toInt == 0 // did not work From f444b4605c39ff38c8e41c61fdc93efec3bd02d8 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 3 Apr 2024 10:06:39 +0200 Subject: [PATCH 029/827] Add a doc page --- .../reference/experimental/typeclasses.md | 776 ++++++++++++++++++ docs/sidebar.yml | 1 + .../runtime/stdLibPatches/language.scala | 1 + 3 files changed, 778 insertions(+) create mode 100644 docs/_docs/reference/experimental/typeclasses.md diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md new file mode 100644 index 000000000000..5ac81061e42d --- /dev/null +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -0,0 +1,776 @@ + +--- +layout: doc-page +title: "Type Classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html +--- + +# Some Proposed Changes for Better Support of Type Classes + +Martin Odersky, 8.1.2024 + +A type class in Scala is a pattern where we define + + - a trait with one type parameter (the _type class_) + - given instances at specific instantiations of that trait, + - using clauses or context bounds abstracting over that trait. + +Type classes as a pattern work overall OK, but if we compare them to native implementations in Haskell, or protocols in Swift, or traits in Rust, then there are some idiosyncrasies and rough corners which in the end make them +a bit cumbersome and limiting for standard generic programming patterns. Much has improved since Scala 2's implicits, but there is still some gap to bridge to get to parity with these languages. 
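+
+For orientation, here is the pattern as it is written today. This is a small sketch for illustration only; the `Show` type class, its instance, and `describe` are made up here and are not part of the proposal:
+
+```scala
+// a trait with one type parameter: the type class
+trait Show[A]:
+  extension (x: A) def show: String
+
+// a given instance at a specific instantiation of that trait
+given Show[Int] with
+  extension (x: Int) def show: String = x.toString
+
+// a context bound abstracting over the trait
+def describe[A: Show](x: A): String = x.show
+
+val s = describe(42) // "42"
+```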
+ +This note shows that with some fairly small and reasonable tweaks to Scala's syntax and typing rules we can obtain a much better scheme for working with type classes, or do generic programming in general. + +The bulk of the suggested improvements has been implemented and is available +under source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +## Generalizing Context Bounds + + The only place in Scala's syntax where the type class pattern is relevant is + in context bounds. A context bound such as + +```scala + def min[A: Ordering](x: List[A]): A +``` +requires that `Ordering` is a trait or class with a single type parameter (which makes it a type class) and expands to a `using` clause that instantiates that parameter. Here is the expansion of `min`: +```scala + def min[A](x: List[A])(using Ordering[A]): A +``` + +**Proposal** Allow type classes to define an abstract type member named `Self` instead of a type parameter. + +**Example** + +```scala + trait Ord: + type Self + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + def reduce[A: Monoid](xs: List[A]): A = + xs.foldLeft(Monoid.unit)(_ `combine` _) + + trait ParserCombinator: + type Self + type Input + type Result + extension (self: Self) + def parse(input: Input): Option[Result] = ... + + def combine[A: ParserCombinator, B: ParserCombinator { type Input = A.Input }] = ... +``` + +**Advantages** + + - Avoids repetitive type parameters, concentrates on what's essential, namely the type class hierarchy. + - Gives a clear indication of traits intended as type classes. A trait is a type class + if it has type `Self` as a member + - Allows to create aggregate type classes that combine givens via intersection types. + - Allows to use refinements in context bounds (the `combine` example above would be very awkward to express using the old way of context bounds expanding to type constructors). + +`Self`-based context bounds are a better fit for a dependently typed language like Scala than parameter-based ones. The main reason is that we are dealing with proper types, not type constructors. Proper types can be parameterized, intersected, or refined. This makes `Self`-based designs inherently more compositional than parameterized ones. + + + +**Details** + +When a trait has both a type parameter and an abstract `Self` type, we + resolve a context bound to the `Self` type. This allows type classes + that carry type parameters, as in + +```scala +trait Sequential[E]: + type Self +``` + +Here, +```scala +[S: Sequential[Int]] +``` +should resolve to: +```scala +[S](using Sequential[Int] { type Self = S }) +``` +and not to: +```scala +[S](using Sequential[S]) +``` + +**Discussion** + + Why not use `This` for the self type? The name `This` suggests that it is the type of `this`. But this is not true for type class traits. `Self` is the name of the type implementing a distinguished _member type_ of the trait in a `given` definition. `Self` is an established term in both Rust and Swift with the meaning used here. 
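+
+To make the preceding details concrete for the simple, unparameterized case as well — a conceptual sketch of the expansion, not normative compiler output:
+
+```scala
+def min[A: Ord](xs: List[A]): A
+// with a `Self`-based `Ord`, this expands to a refinement of `Self`
+def min[A](xs: List[A])(using Ord { type Self = A }): A
+// rather than to a using clause over a parameterized `Ord[A]`
+```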
+ + One possible objection to the `Self` based design is that it does not cover "multi-parameter" type classes. But neither do context bounds! "Multi-parameter" type classes in Scala are simply givens that can be synthesized with the standard mechanisms. Type classes in the strict sense abstract only over a single type, namely the implementation type of a trait. + + +## Auxiliary Type Alias `is` + +We introduce a standard type alias `is` in the Scala package or in `Predef`, defined like this: + +```scala + infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } +``` + +This makes writing instance definitions quite pleasant. Examples: + +```scala + given Int is Ord ... + given Int is Monoid ... + + type Reader = [X] =>> Env => X + given Reader is Monad ... +``` + +(more examples will follow below) + + + +## Naming Context Bounds + +Context bounds are a convenient and legible abbreviation. A problem so far is that they are always anonymous, +one cannot name the using parameter to which a context bound expands. + +For instance, consider a `reduce` method over `Monoid`s defined like this: + +```scala +def reduce[A : Monoid](xs: List[A]): A = ??? +``` +Since we don't have a name for the `Monoid` instance of `A`, we need to resort to `summon` in the body of `reduce`: +```scala +def reduce[A : Monoid](xs: List[A]): A = + xs.foldLeft(summon Monoid[A])(_ `combine` _) +``` +That's generally considered too painful to write and read, hence people usually adopt one of two alternatives. Either, eschew context bounds and switch to using clauses: +```scala +def reduce[A](xs: List[A])(using m: Monoid[A]): A = + xs.foldLeft(m)(_ `combine` _) +``` +Or, plan ahead and define a "trampoline" method in `Monoid`'s companion object: +```scala + trait Monoid[A] extends SemiGroup[A]: + def unit: A + object Monoid: + def unit[A](using m: Monoid[A]): A = m.unit + ... + def reduce[A : Monoid](xs: List[A]): A = + xs.foldLeft(Monoid.unit)(_ `combine` _) +``` +This is all accidental complexity which can be avoided by the following proposal. + +**Proposal:** Allow to name a context bound, like this: +```scala + def reduce[A : Monoid as m](xs: List[A]): A = + xs.foldLeft(m.unit)(_ `combine` _) +``` + +We use `as x` after the type to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before. + +**Benefits:** The new syntax is simple and clear. +It avoids the awkward choice between concise context bounds that can't be named and verbose using clauses that can. + +### New Syntax for Aggregate Context Bounds + +Aggregate context bounds like `A : X : Y` are not obvious to read, and it becomes worse when we add names, e.g. `A : X as x : Y as y`. + +**Proposal:** Allow to combine several context bounds inside `{...}`, analogous +to import clauses. Example: + +```scala + trait: + def showMax[X : {Ordering, Show}](x: X, y: X): String + class B extends A: + def showMax[X : {Ordering as ordering, Show as show}](x: X, y: X): String = + show.asString(ordering.max(x, y)) +``` + +The old syntax with multiple `:` should be phased out over time. + +**Benefits:** The new syntax is much clearer than the old one, in particular for newcomers that don't know context bounds well. + +### Better Default Names for Context Bounds + +So far, an unnamed context bound for a type parameter gets a synthesized fresh name. It would be much more useful if it got the name of the constrained type parameter instead, translated to be a term name. 
This means our `reduce` method over monoids would not even need an `as` binding. We could simply formulate it as follows: +``` + def reduce[A : Monoid](xs: List[A]) = + xs.foldLeft(A.unit)(_ `combine` _) +``` + +The use of a name like `A` above in two variants, both as a type name and as a term name is of course familiar to Scala programmers. We use the same convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before. + +**Proposed Rules** + + 1. The generated evidence parameter for a context bound `A : C as a` has name `a` + 2. The generated evidence for a context bound `A : C` without an `as` binding has name `A` (seen as a term name). So, `A : C` is equivalent to `A : C as A`. + 3. If there are multiple context bounds for a type parameter, as in `A : {C_1, ..., C_n}`, the generated evidence parameter for every context bound `C_i` has a fresh synthesized name, unless the context bound carries an `as` clause, in which case rule (1) applies. + +The default naming convention reduces the need for named context bounds. But named context bounds are still essential, for at least two reasons: + + - They are needed to give names to multiple context bounds. + - They give an explanation what a single unnamed context bound expands to. + + +### Expansion of Context Bounds + +Context bounds are currently translated to implicit parameters in the last parameter list of a method or class. This is a problem if a context bound is mentioned in one of the preceding parameter types. For example, consider a type class of parsers with associated type members `Input` and `Result` describing the input type on which the parsers operate and the type of results they produce: +```scala +trait Parser[P]: + type Input + type Result +``` +Here is a method `run` that runs a parser on an input of the required type: + +```scala +def run[P : Parser](in: P.Input): P.Result +``` +Or, making clearer what happens by using an explicit name for the context bound: +```scala +def run[P : Parser as p](in: p.Input): p.Result +``` +With the current translation this does not work since it would be expanded to: +```scala + def run[P](x: p.Input)(using p: Parser[P]): p.Result +``` +Note that the `p` in `p.Input` refers to the `p` introduced in the using clause, which comes later. So this is ill-formed. + +This problem would be fixed by changing the translation of context bounds so that they expand to using clauses immediately after the type parameter. But such a change is infeasible, for two reasons: + + 1. It would be a binary-incompatible change. + 2. Putting using clauses earlier can impair type inference. A type in + a using clause can be constrained by term arguments coming before that + clause. Moving the using clause first would miss those constraints, which could cause ambiguities in implicit search. + +But there is an alternative which is feasible: + +**Proposal:** Map the context bounds of a method or class as follows: + + 1. If one of the bounds is referred to by its term name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. + 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. + 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. 
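+
+To illustrate rule (1) on the `run` example above — a conceptual sketch of the resulting expansion, not normative compiler output:
+
+```scala
+def run[P : Parser as p](in: p.Input): p.Result
+// `p` is referred to in the parameter type `p.Input`, so by rule (1) the
+// using clause is placed immediately before that parameter clause:
+def run[P](using p: Parser[P])(in: p.Input): p.Result
+```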
+ +Rules (2) and (3) are the status quo, and match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility is maintained. + +**Discussion** More refined rules could be envisaged where context bounds are spread over different using clauses so that each comes as late as possible. But it would make matters more complicated and the gain in expressiveness is not clear to me. + +Named (either explicitly, or by default) context bounds in givens that produce classes are mapped to tracked val's of these classes (see #18958). This allows +references to these parameters to be precise, so that information about dependent type members is preserved. + + +## Context Bounds for Type Members + +It's not very orthogonal to allow subtype bounds for both type parameters and abstract type members, but context bounds only for type parameters. What's more, we don't even have the fallback of an explicit using clause for type members. The only alternative is to also introduce a set of abstract givens that get implemented in each subclass. This is extremely heavyweight and opaque to newcomers. + +**Proposal**: Allow context bounds for type members. Example: + +```scala + class Collection: + type Element : Ord +``` + +The question is how these bounds are expanded. Context bounds on type parameters +are expanded into using clauses. But for type members this does not work, since we cannot refer to a member type of a class in a parameter type of that class. What we are after is an equivalent of using parameter clauses but represented as class members. + +**Proposal:** Introduce a new way to implement a given definition in a trait like this: +```scala +given T = deferred +``` +`deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. + +Deferred givens allow a clean implementation of context bounds in traits, +as in the following example: +```scala +trait Sorted: + type Element : Ord + +class SortedSet[A : Ord] extends Sorted: + type Element = A +``` +The compiler expands this to the following implementation: +```scala +trait Sorted: + type Element + given Ord[Element] = compiletime.deferred + +class SortedSet[A](using A: Ord[A]) extends Sorted: + type Element = A + override given Ord[Element] = A // i.e. the A defined by the using clause +``` + +The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. + +**Benefits:** + + - Better orthogonality, type parameters and abstract type members now accept the same kinds of bounds. + - Better ergonomics, since deferred givens get naturally implemented in inheriting classes, no need for boilerplate to fill in definitions of abstract givens. + +**Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. 
But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. +It is a concrete definition where the compiler will provide the correct implementation. + +## New Given Syntax + +A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. Dissonances and irregularities should be avoided. + +When designing Scala 3, I believe that, by and large, we achieved that goal, except in one area, which is the syntax of givens. There _are_ some glaring dissonances, as seen in this code for defining an ordering on lists: +```scala +given [A](using Ord[A]): Ord[List[A]] with + def compare(x: List[A], y: List[A]) = ... +``` +The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need of `with` sticks out like a sore thumb. + +We arrived at that syntax not because of a flight of fancy but because even after trying for about a year to find other solutions it seemed like the least bad alternative. The awkwardness of the given syntax arose because we insisted that givens could be named or anonymous, with the default on anonymous, that we would not use underscore for an anonymous given, and that the name, if present, had to come first, and have the form `name [parameters] :`. In retrospect, that last requirement showed a lack of creativity on our part. + +Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. Since the current syntax is unnatural and esoteric, this means it's difficult to discover and very foreign even after that. This makes it much harder to learn and apply givens than it need be. + +Things become much simpler if we introduce the optional name instead with an `as name` clause at the end, just like we did for context bounds. We can then use a more intuitive syntax for givens like this: +```scala +given String is Ord: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] is Ord: + def compare(x: List[A], y: List[A]) = ... + +given Int is Monoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` +If explicit names are desired, we add them with `as` clauses: +```scala +given String is Ord as intOrd: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] is Ord as listOrd: + def compare(x: List[A], y: List[A]) = ... + +given Int is Monoid as intMonoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` + +The underlying principles are: + + - A `given` clause consists of the following elements: + + - An optional _precondition_, which introduces type parameters and/or using clauses and which ends in `=>`, + - the implemented _type_, + - an optional name binding using `as`, + - an implementation which consists of either an `=` and an expression, + or a template body. 
+ + - Since there is no longer a middle `:` separating name and parameters from the implemented type, we can use a `:` to start the class body without looking unnatural, as is done everywhere else. That eliminates the special case where `with` was used before. + +This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about a 900K definitions of `implicit def`s +in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again. + +Changing something introduced just recently in Scala 3 is not fun, +but I believe these adjustments are preferable to let bad syntax +sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code +starts migrating. + +Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time. + + +### Abolish Abstract Givens + +Another simplification is possible. So far we have special syntax for abstract givens: +```scala +given x: T +``` +The problem is that this syntax clashes with the quite common case where we want to establish a given without any nested definitions. For instance +consider a given that constructs a type tag: +```scala +class Tag[T] +``` +Then this works: +```scala +given Tag[String]() +given Tag[String] with {} +``` +But the following more natural syntax fails: +```scala +given Tag[String] +``` +The last line gives a rather cryptic error: +``` +1 |given Tag[String] + | ^ + | anonymous given cannot be abstract +``` +The problem is that the compiler thinks that the last given is intended to be abstract, and complains since abstract givens need to be named. This is another annoying dissonance. Nowhere else in Scala's syntax does adding a +`()` argument to a class cause a drastic change in meaning. And it's also a violation of the principle that it should be possible to define all givens without providing names for them. + +Fortunately, abstract givens are no longer necessary since they are superseded by the new `deferred` scheme. So we can deprecate that syntax over time. Abstract givens are a highly specialized mechanism with a so far non-obvious syntax. We have seen that this syntax clashes with reasonable expectations of Scala programmers. My estimate is that maybe a dozen people world-wide have used abstract givens in anger so far. + +**Proposal** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. + +This is less of a disruption than it might appear at first: + + - `given T` was illegal before since abstract givens could not be anonymous. + It now means a concrete given of class `T` with no member definitions. + - `given x: T` is legacy syntax for an abstract given. 
+ - `given T as x = deferred` is the analogous new syntax, which is more powerful since + it allows for automatic instantiation. + - `given T = deferred` is the anonymous version in the new syntax, which was not expressible before. + +**Benefits:** + + - Simplification of the language since a feature is dropped + - Eliminate non-obvious and misleading syntax. + +## Summary of Syntax Changes + +Here is the complete context-free syntax for all proposed features. +Overall the syntax for givens becomes a lot simpler than what it was before. + +``` +TmplDef ::= 'given' GivenDef +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType {id [nl] AnnotType} + +TypeDef ::= id [TypeParamClause] TypeAndCtxBounds +TypeParamBounds ::= TypeAndCtxBounds +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] +``` + + + +## Examples + + +### Example 1 + +Here are some standard type classes, which were mostly already introduced at the start of this note, now with associated instance givens and some test code: + +```scala + // Type classes + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] // Here, Self is a type constructor with parameter A + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + // Instances + + given Int is Ord: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] is Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = + (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List is Monad: + extension [A](xs: List[A]) + def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] is Monad: + extension [A](r: Ctx => A) + def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + // Usages + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T is Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) +``` + + +### Example 2 + +The following contributed code by @LPTK (issue 
#10929) did _not_ work at first since +references were not tracked correctly. The version below adds explicit tracked parameters which makes the code compile. +```scala +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: tup.map(x.tail)(f) +``` + +Note the quite convoluted syntax, which makes the code hard to understand. Here is the same example in the new type class syntax, which also compiles correctly: +```scala +//> using options -language:experimental.modularity -source future + +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple is TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest is TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: Rest.map(x.tail)(f) +``` +Note in particular the following points: + + - In the original code, it was not clear that `TupleOf` is a type class, + since it contained two type parameters, one of which played the role + of the instance type `Self`. The new version is much clearer: `TupleOf` is + a type class over `Self` with one additional parameter, the common type of all tuple elements. + - The two given definitions are obfuscated in the old code. Their version + in the new code makes it clear what kind of instances they define: + + - `EmptyTuple` is a tuple of `Nothing`. + - if `Rest` is a tuple of `A`, then `A *: Rest` is also a tuple of `A`. + + - There's no need to introduce names for parameter instances in using clauses; the default naming scheme for context bound evidences works fine, and is more concise. + - There's no need to manually declare implicit parameters as `tracked`, + context bounds provide that automatically. + - Everything in the new code feels like idiomatic Scala 3, whereas the original code exhibits the awkward corner case that requires a `with` in + front of given definitions. + +### Example 3 + +Dimi Racordon tried to [define parser combinators](https://users.scala-lang.org/t/create-an-instance-of-a-type-class-with-methods-depending-on-type-members/9613) in Scala that use dependent type members for inputs and results. It was intended as a basic example of type class constraints, but it did not work in current Scala. + +Here is the problem solved with the new syntax. Note how much clearer that syntax is compared to Dimi's original version, which did not work out in the end. 
+ +```scala +/** A parser combinator */ +trait Combinator: + type Self + + type Input + type Result + + extension (self: Self) + /** Parses and returns an element from input `in` */ + def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](a: A, b: B) + +given [I, R] => Apply[I, R] is Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] is Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for + x <- self.a.parse(in) + y <- self.b.parse(in) + yield (x, y) +``` +The example is now as expressed as straightforwardly as it should be: + + - `Combinator` is a type class with two associated types, `Input` and `Result`, and a `parse` method. + - `Apply` and `Combine` are two data constructors representing parser combinators. They are declared to be `Combinators` in the two subsequent `given` declarations. + - `Apply`'s parse method applies the `action` function to the input. + - `Combine[A, B]` is a parser combinator provided `A` and `B` are parser combinators + that process the same type of `Input`, which is also the input type of + `Combine[A, B]`. Its `Result` type is a pair of the `Result` types of `A` and `B`. + Results are produced by a simple for-expression. + +Compared to the original example, which required serious contortions, this is now all completely straightforward. + +_Note 1:_ One could also explore improvements, for instance making this purely functional. But that's not the point of the demonstration here, where I wanted +to take the original example and show how it can be made to work with the new constructs, and be expressed more clearly as well. + +_Note 2:_ One could improve the notation even further by adding equality constraints in the style of Swift, which in turn resemble the _sharing constraints_ of SML. A hypothetical syntax applied to the second given would be: +```scala +given [A: Combinator, B: Combinator with A.Input == B.Input] + => Combine[A, B] is Combinator: +``` +This variant is aesthetically pleasing since it makes the equality constraint symmetric. The original version had to use an asymmetric refinement on the second type parameter bound instead. For now, such constraints are neither implemented nor proposed. This is left as a possibility for future work. Note also the analogy with +the work of @mbovel and @Sporarum on refinement types, where similar `with` clauses can appear for term parameters. If that work goes ahead, we could possibly revisit the issue of `with` clauses also for type parameters. + +### Example 4 + +Dimi Racordon tried to [port some core elements](https://github.com/kyouko-taiga/scala-hylolib) of the type class based [Hylo standard library to Scala](https://github.com/hylo-lang/hylo/tree/main/StandardLibrary/Sources). It worked to some degree, but there were some things that could not be expressed, and more things that could be expressed only awkwardly. + +With the improvements proposed here, the library can now be expressed quite clearly and straightforwardly. See tests/pos/hylolib in this PR for details. + +## Suggested Improvements unrelated to Type Classes + +The following improvements elsewhere would make sense alongside the suggested changes to type classes. 
But they are currently not part of this proposal or implementation. + +### Fixing Singleton + +We know the current treatment of `Singleton` as a type bound is broken since +`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. + +A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. + +We can do this in a backwards-compatible way by defining `Singleton` like this: + +```scala +trait Singleton: + type Self +``` + +Then, instead of using an unsound upper bound we can use a context bound: + +```scala +def f[X: Singleton](x: X) = ... +``` + +The context bound would be treated specially by the compiler so that no using clause is generated at runtime. + +_Aside_: This can also lead to a solution how to express precise type variables. We can introduce another special type class `Precise` and use it like this: + +```scala +def f[X: Precise](x: X) = ... +``` +This would disable automatic widening of singleton types in inferred instances of type variable `X`. + +### Using `as` also in Patterns + +Since we have now more precedents of `as` as a postfix binder, I want to come back to the proposal to use it in patterns as well, in favor of `@`, which should be deprecated. + +Examples: + +```scala + xs match + case (Person(name, age) as p) :: rest => ... + + tp match + case Param(tl, _) :: _ as tparams => ... + + val x :: xs1 as xs = ys.checkedCast +``` + +These would replace the previous syntax using `@`: + +```scala + xs match + case p @ Person(name, age) :: rest => ... + + tp match + case tparams @ (Param(tl, _) :: _) => ... + + val xs @ (x :: xs1) = ys.checkedCast +``` +**Advantages:** No unpronounceable and non-standard symbol like `@`. More regularity. + +Generally, we want to use `as name` to attach a name for some entity that could also have been used stand-alone. + +**Proposed Syntax Change** + +``` +Pattern2 ::= InfixPattern ['as' id] +``` + +## Summary + +I have proposed some tweaks to Scala 3, which would greatly increase its usability for modular, type class based, generic programming. The proposed changes are: + + 1. Allow context bounds over classes that define a `Self` member type. + 1. Allow context bounds to be named with `as`. Use the bound parameter name as a default name for the generated context bound evidence. + 1. Add a new `{...}` syntax for multiple context bounds. + 1. Make context bounds also available for type members, which expand into a new form of deferred given. Phase out the previous abstract givens in favor of the new form. + 1. Add a predefined type alias `is`. + 1. Introduce a new cleaner syntax of given clauses. + +It's interesting that givens, which are a very general concept in Scala, were "almost there" when it comes to full support of concepts and generic programming. We only needed to add a few usability tweaks to context bounds, +alongside two syntactic changes that supersede the previous forms of `given .. with` clauses and abstract givens. Also interesting is that the superseded syntax constructs were the two areas where we collectively felt that the previous solutions were a bit awkward, but we could not think of better ones at the time. It's very nice that more satisfactory solutions are now emerging. + +## Conclusion + +Generic programming can be expressed in a number of languages. For instance, with +type classes in Haskell, or with traits in Rust, or with protocols in Swift, or with concepts in C++. 
Each of these is constructed from a fairly heavyweight set of new constructs, different from expressions and types. By contrast, equivalent solutions in Scala rely on regular types. Type classes are simply traits that define a `Self` type member. + +The proposed scheme has similar expressiveness to Protocols in Swift or Traits in Rust. Both of these were largely influenced by Jeremy Siek's PdD thesis "[A language for generic programming](https://scholarworks.iu.edu/dspace/handle/2022/7067)", which was first proposed as a way to implement concepts in C++. C++ did not follow Siek's approach, but Swift and Rust did. + +In Siek's thesis and in the formal treatments of Rust and Swift, + type class concepts are explained by mapping them to a lower level language of explicit dictionaries with representations for terms and types. Crucially, that lower level is not expressible without loss of granularity in the source language itself, since type representations are mapped to term dictionaries. By contrast, the current proposal expands type class concepts into other well-typed Scala constructs, which ultimately map into well-typed DOT programs. Type classes are simply a convenient notation for something that can already be expressed in Scala. In that sense, we stay true to the philosophy of a _scalable language_, where a small core can support a large range of advanced use cases. + diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 160698f1f44b..efdab80595a6 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -156,6 +156,7 @@ subsection: - page: reference/experimental/tupled-function.md - page: reference/experimental/named-tuples.md - page: reference/experimental/modularity.md + - page: reference/experimental/typeclasses.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index b6d256b240f9..76a3be1579a9 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -105,6 +105,7 @@ object language: * - ability to merge exported types in intersections * * @see [[https://dotty.epfl.ch/docs/reference/experimental/modularity]] + * @see [[https://dotty.epfl.ch/docs/reference/experimental/typeclasses]] */ @compileTimeOnly("`modularity` can only be used at compile time in import statements") object modularity From f71365250688a6bc886b9900f8535e8babdd94be Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Apr 2024 20:21:30 +0200 Subject: [PATCH 030/827] Fix Singleton Allow to constrain type variables to be singletons by a context bound [X: Singleton] instead of an unsound supertype [X <: Singleton]. This fixes the soundness hole of singletons. 
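
For illustration, a sketch of the intended usage (not taken from the patch's test files):

```scala
// previously one would write the unsound upper bound `X <: Singleton`;
// the constraint is now expressed as a context bound instead:
def f[X: Singleton](x: X): X = x

val a = f(42) // X should be inferred as the singleton type 42, not widened to Int
```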
--- .../tools/dotc/core/ConstraintHandling.scala | 18 +++----- .../dotty/tools/dotc/core/Definitions.scala | 12 ++--- .../dotty/tools/dotc/core/TypeComparer.scala | 8 ++-- .../src/dotty/tools/dotc/core/TypeOps.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 43 +++++++++++++++--- .../src/dotty/tools/dotc/typer/Namer.scala | 2 +- .../dotty/tools/dotc/typer/ProtoTypes.scala | 37 ++++++++++++---- .../dotty/tools/dotc/typer/Synthesizer.scala | 13 +++++- .../src/dotty/tools/dotc/typer/Typer.scala | 4 +- .../reference/experimental/typeclasses.md | 15 +++++-- .../scala/runtime/stdLibPatches/Predef.scala | 2 +- tests/neg/singleton-ctx-bound.scala | 20 +++++++++ tests/pos/singleton-ctx-bound.scala | 44 +++++++++++++++++++ 13 files changed, 175 insertions(+), 45 deletions(-) create mode 100644 tests/neg/singleton-ctx-bound.scala create mode 100644 tests/pos/singleton-ctx-bound.scala diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 109929f0c6f5..06711ec97abf 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -647,9 +647,9 @@ trait ConstraintHandling { * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, * as those could leak the annotation to users (see run/inferred-repeated-result). */ - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = + def widenInferred(inst: Type, bound: Type, widen: Widen)(using Context): Type = def widenOr(tp: Type) = - if widenUnions then + if widen == Widen.Unions then val tpw = tp.widenUnion if tpw ne tp then if tpw.isTransparent() then @@ -667,14 +667,10 @@ trait ConstraintHandling { val tpw = tp.widenSingletons(skipSoftUnions) if (tpw ne tp) && (tpw <:< bound) then tpw else tp - def isSingleton(tp: Type): Boolean = tp match - case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) - case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) - val wideInst = - if isSingleton(bound) then inst + if widen == Widen.None || bound.isSingletonBounded(frozen = true) then inst else - val widenedFromSingle = widenSingle(inst, skipSoftUnions = widenUnions) + val widenedFromSingle = widenSingle(inst, skipSoftUnions = widen == Widen.Unions) val widenedFromUnion = widenOr(widenedFromSingle) val widened = dropTransparentTraits(widenedFromUnion, bound) widenIrreducible(widened) @@ -713,10 +709,10 @@ trait ConstraintHandling { * The instance type is not allowed to contain references to types nested deeper * than `maxLevel`. */ - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { + def instanceType(param: TypeParamRef, fromBelow: Boolean, widen: Widen, maxLevel: Int)(using Context): Type = { val approx = approximation(param, fromBelow, maxLevel).simplified if fromBelow then - val widened = widenInferred(approx, param, widenUnions) + val widened = widenInferred(approx, param, widen) // Widening can add extra constraints, in particular the widened type might // be a type variable which is now instantiated to `param`, and therefore // cannot be used as an instantiation of `param` without creating a loop. @@ -724,7 +720,7 @@ trait ConstraintHandling { // (we do not check for non-toplevel occurrences: those should never occur // since `addOneBound` disallows recursive lower bounds). 
if constraint.occursAtToplevel(param, widened) then - instanceType(param, fromBelow, widenUnions, maxLevel) + instanceType(param, fromBelow, widen, maxLevel) else widened else diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index b408883009ab..6d3a4de7b026 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -59,10 +59,10 @@ class Definitions { private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered - private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope): TypeSymbol = scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) - private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope): TypeSymbol = enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope) private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = @@ -538,9 +538,11 @@ class Definitions { @tu lazy val SingletonClass: ClassSymbol = // needed as a synthetic class because Scala 2.x refers to it in classfiles // but does not define it as an explicit class. - enterCompleteClassSymbol( - ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, - List(AnyType), EmptyScope) + val cls = enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final | Erased, + List(AnyType)) + enterTypeField(cls, tpnme.Self, Deferred, cls.info.decls.openForMutations) + cls @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef @tu lazy val MaybeCapabilityAnnot: ClassSymbol = diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index cee1ec7fffa8..a849d28c81d6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3257,8 +3257,8 @@ object TypeComparer { def subtypeCheckInProgress(using Context): Boolean = comparing(_.subtypeCheckInProgress) - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = - comparing(_.instanceType(param, fromBelow, widenUnions, maxLevel)) + def instanceType(param: TypeParamRef, fromBelow: Boolean, widen: Widen, maxLevel: Int = Int.MaxValue)(using Context): Type = + comparing(_.instanceType(param, fromBelow, widen: Widen, maxLevel)) def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = comparing(_.approximation(param, fromBelow, maxLevel)) @@ -3278,8 +3278,8 @@ object TypeComparer { def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = comparing(_.addToConstraint(tl, tvars)) - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = - comparing(_.widenInferred(inst, bound, widenUnions)) + def widenInferred(inst: Type, bound: Type, widen: Widen)(using Context): Type = + comparing(_.widenInferred(inst, bound, widen: Widen)) def dropTransparentTraits(tp: Type, bound: 
Type)(using Context): Type = comparing(_.dropTransparentTraits(tp, bound)) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 8461c0f091fe..1282b77f013e 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -545,7 +545,7 @@ object TypeOps: val lo = TypeComparer.instanceType( tp.origin, fromBelow = variance > 0 || variance == 0 && tp.hasLowerBound, - widenUnions = tp.widenUnions)(using mapCtx) + tp.widenPolicy)(using mapCtx) val lo1 = apply(lo) if (lo1 ne lo) lo1 else tp case _ => diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ac3aef2a59d2..27931bad0bc3 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -44,8 +44,6 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe - - object Types extends TypeUtils { @sharable private var nextId = 0 @@ -330,6 +328,21 @@ object Types extends TypeUtils { /** Is this type a (possibly aliased) singleton type? */ def isSingleton(using Context): Boolean = dealias.isInstanceOf[SingletonType] + /** Is this upper-bounded by a (possibly aliased) singleton type? + * Overridden in TypeVar + */ + def isSingletonBounded(frozen: Boolean)(using Context): Boolean = this.dealias.normalized match + case tp: SingletonType => tp.isStable + case tp: TypeRef => + tp.name == tpnme.Singleton && tp.symbol == defn.SingletonClass + || tp.superType.isSingletonBounded(frozen) + case tp: TypeVar if !tp.isInstantiated => + if frozen then tp frozen_<:< defn.SingletonType else tp <:< defn.SingletonType + case tp: HKTypeLambda => false + case tp: TypeProxy => tp.superType.isSingletonBounded(frozen) + case AndType(tpL, tpR) => tpL.isSingletonBounded(frozen) || tpR.isSingletonBounded(frozen) + case _ => false + /** Is this type of kind `AnyKind`? */ def hasAnyKind(using Context): Boolean = { @tailrec def loop(tp: Type): Boolean = tp match { @@ -4924,7 +4937,11 @@ object Types extends TypeUtils { * @param creatorState the typer state in which the variable was created. * @param initNestingLevel the initial nesting level of the type variable. (c.f. nestingLevel) */ - final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState | Null, val initNestingLevel: Int) extends CachedProxyType with ValueType { + final class TypeVar private( + initOrigin: TypeParamRef, + creatorState: TyperState | Null, + val initNestingLevel: Int, + precise: Boolean) extends CachedProxyType with ValueType { private var currentOrigin = initOrigin def origin: TypeParamRef = currentOrigin @@ -5012,7 +5029,7 @@ object Types extends TypeUtils { } def typeToInstantiateWith(fromBelow: Boolean)(using Context): Type = - TypeComparer.instanceType(origin, fromBelow, widenUnions, nestingLevel) + TypeComparer.instanceType(origin, fromBelow, widenPolicy, nestingLevel) /** Instantiate variable from the constraints over its `origin`. * If `fromBelow` is true, the variable is instantiated to the lub @@ -5029,7 +5046,10 @@ object Types extends TypeUtils { instantiateWith(tp) /** Widen unions when instantiating this variable in the current context? 
*/ - def widenUnions(using Context): Boolean = !ctx.typerState.constraint.isHard(this) + def widenPolicy(using Context): Widen = + if precise then Widen.None + else if ctx.typerState.constraint.isHard(this) then Widen.Singletons + else Widen.Unions /** For uninstantiated type variables: the entry in the constraint (either bounds or * provisional instance value) @@ -5070,8 +5090,17 @@ object Types extends TypeUtils { } } object TypeVar: - def apply(using Context)(initOrigin: TypeParamRef, creatorState: TyperState | Null, nestingLevel: Int = ctx.nestingLevel) = - new TypeVar(initOrigin, creatorState, nestingLevel) + def apply(using Context)( + initOrigin: TypeParamRef, + creatorState: TyperState | Null, + nestingLevel: Int = ctx.nestingLevel, + precise: Boolean = false) = + new TypeVar(initOrigin, creatorState, nestingLevel, precise) + + enum Widen: + case None // no widening + case Singletons // widen singletons but not unions + case Unions // widen singletons and unions type TypeVars = SimpleIdentitySet[TypeVar] diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 393b38c5ff57..b69d9f76852a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -2087,7 +2087,7 @@ class Namer { typer: Typer => if defaultTp.exists then TypeOps.SimplifyKeepUnchecked() else null) match case ctp: ConstantType if sym.isInlineVal => ctp - case tp => TypeComparer.widenInferred(tp, pt, widenUnions = true) + case tp => TypeComparer.widenInferred(tp, pt, Widen.Unions) // Replace aliases to Unit by Unit itself. If we leave the alias in // it would be erased to BoxedUnit. diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 46c12b244fbb..7afdc836f656 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -701,6 +701,12 @@ object ProtoTypes { case FunProto((arg: untpd.TypedSplice) :: Nil, _) => arg.isExtensionReceiver case _ => false + object SingletonConstrained: + def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match + case RefinedType(parent, tpnme.Self, TypeAlias(tp)) + if parent.typeSymbol == defn.SingletonClass => Some(tp) + case _ => None + /** Add all parameters of given type lambda `tl` to the constraint's domain. * If the constraint contains already some of these parameters in its domain, * make a copy of the type lambda and add the copy's type parameters instead. 
@@ -713,26 +719,41 @@ object ProtoTypes { tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean, nestingLevel: Int = ctx.nestingLevel - ): (TypeLambda, List[TypeVar]) = { + ): (TypeLambda, List[TypeVar]) = val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty if (tl.isInstanceOf[PolyType]) assert(!ctx.typerState.isCommittable || addTypeVars, s"inconsistent: no typevars were added to committable constraint ${state.constraint}") // hk type lambdas can be added to constraints without typevars during match reduction + val added = state.constraint.ensureFresh(tl) + + def singletonConstrainedRefs(tp: Type): Set[TypeParamRef] = tp match + case tp: MethodType if tp.isContextualMethod => + val ownBounds = + for case SingletonConstrained(ref: TypeParamRef) <- tp.paramInfos + yield ref + ownBounds.toSet ++ singletonConstrainedRefs(tp.resType) + case tp: LambdaType => + singletonConstrainedRefs(tp.resType) + case _ => + Set.empty + + val singletonRefs = singletonConstrainedRefs(added) + def isSingleton(ref: TypeParamRef) = singletonRefs.contains(ref) - def newTypeVars(tl: TypeLambda): List[TypeVar] = - for paramRef <- tl.paramRefs - yield - val tvar = TypeVar(paramRef, state, nestingLevel) + def newTypeVars: List[TypeVar] = + for paramRef <- added.paramRefs yield + val tvar = TypeVar(paramRef, state, nestingLevel, precise = isSingleton(paramRef)) state.ownedVars += tvar tvar - val added = state.constraint.ensureFresh(tl) - val tvars = if addTypeVars then newTypeVars(added) else Nil + val tvars = if addTypeVars then newTypeVars else Nil TypeComparer.addToConstraint(added, tvars) + for paramRef <- added.paramRefs do + if isSingleton(paramRef) then paramRef <:< defn.SingletonType (added, tvars) - } + end constrained def constrained(tl: TypeLambda, owningTree: untpd.Tree)(using Context): (TypeLambda, List[TypeVar]) = constrained(tl, owningTree, diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 21d1151bcfd3..9fb091e3306c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -237,6 +237,16 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): EmptyTreeNoError end synthesizedValueOf + val synthesizedSingleton: SpecialHandler = (formal, span) => formal match + case SingletonConstrained(tp) => + if tp.isSingletonBounded(frozen = false) then + withNoErrors: + ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) + else + withErrors(i"$tp is not a singleton") + case _ => + EmptyTreeNoError + /** Create an anonymous class `new Object { type MirroredMonoType = ... }` * and mark it with given attachment so that it is made into a mirror at PostTyper. 
*/ @@ -536,7 +546,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val tparams = poly.paramRefs val variances = childClass.typeParams.map(_.paramVarianceSign) val instanceTypes = tparams.lazyZip(variances).map((tparam, variance) => - TypeComparer.instanceType(tparam, fromBelow = variance < 0, widenUnions = true) + TypeComparer.instanceType(tparam, fromBelow = variance < 0, Widen.Unions) ) val instanceType = resType.substParams(poly, instanceTypes) // this is broken in tests/run/i13332intersection.scala, @@ -738,6 +748,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): defn.MirrorClass -> synthesizedMirror, defn.ManifestClass -> synthesizedManifest, defn.OptManifestClass -> synthesizedOptManifest, + defn.SingletonClass -> synthesizedSingleton, ) def tryAll(formal: Type, span: Span)(using Context): TreeWithErrors = diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 6ac41ed619b6..d23f77143e14 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3321,8 +3321,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) if ctx.mode.is(Mode.Pattern) then app1 else - val elemTpes = elems.lazyZip(pts).map((elem, pt) => - TypeComparer.widenInferred(elem.tpe, pt, widenUnions = true)) + val elemTpes = elems.lazyZip(pts).map: (elem, pt) => + TypeComparer.widenInferred(elem.tpe, pt, Widen.Unions) val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index 5ac81061e42d..8c95152b8e46 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -7,7 +7,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses # Some Proposed Changes for Better Support of Type Classes -Martin Odersky, 8.1.2024 +Martin Odersky, 8.1.2024, edited 5.4.2024 A type class in Scala is a pattern where we define @@ -27,6 +27,8 @@ under source version `future` if the additional experimental language import `mo scala compile -source:future -language:experimental.modularity ``` +It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. The order of exposition described in this note is different from the planned proposals of SIPs. This doc is not a guide on how to sequence details, but instead wants to present a vision of what is possible. For instance, we start here with a feature (Self types and `is` syntax) that has turned out to be controversial and that will probably be proposed only late in the sequence of SIPs. 
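As a point of reference, the test files added later in this series (e.g. `tests/pos/singleton-ctx-bound.scala` and `tests/pos/precise-ctx-bound.scala`) enable the same flag combination per-file with a using directive rather than command-line options:

```scala
//> using options -language:experimental.modularity -source future
```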
+ ## Generalizing Context Bounds The only place in Scala's syntax where the type class pattern is relevant is @@ -54,6 +56,8 @@ requires that `Ordering` is a trait or class with a single type parameter (which trait Monoid extends SemiGroup: def unit: Self + object Monoid: + def unit[M](using m: Monoid { type Self = M}): M trait Functor: type Self[A] @@ -129,7 +133,7 @@ We introduce a standard type alias `is` in the Scala package or in `Predef`, def infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } ``` -This makes writing instance definitions quite pleasant. Examples: +This makes writing instance definitions and using clauses quite pleasant. Examples: ```scala given Int is Ord ... @@ -137,6 +141,9 @@ This makes writing instance definitions quite pleasant. Examples: type Reader = [X] =>> Env => X given Reader is Monad ... + + object Monoid: + def unit[M](using m: M is Monoid): M ``` (more examples will follow below) @@ -682,7 +689,7 @@ With the improvements proposed here, the library can now be expressed quite clea ## Suggested Improvements unrelated to Type Classes -The following improvements elsewhere would make sense alongside the suggested changes to type classes. But they are currently not part of this proposal or implementation. +The following two improvements elsewhere would make sense alongside the suggested changes to type classes. But only the first (fixing singleton) forms a part of this proposal and is implemented. ### Fixing Singleton @@ -704,7 +711,7 @@ Then, instead of using an unsound upper bound we can use a context bound: def f[X: Singleton](x: X) = ... ``` -The context bound would be treated specially by the compiler so that no using clause is generated at runtime. +The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). _Aside_: This can also lead to a solution how to express precise type variables. We can introduce another special type class `Precise` and use it like this: diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index a68a628623bf..6c286f322ba7 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -77,6 +77,6 @@ object Predef: * * which is what is needed for a context bound `[A: TC]`. */ - infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } + infix type is[A <: AnyKind, B <: Any{type Self <: AnyKind}] = B { type Self = A } end Predef diff --git a/tests/neg/singleton-ctx-bound.scala b/tests/neg/singleton-ctx-bound.scala new file mode 100644 index 000000000000..64bb63a288b0 --- /dev/null +++ b/tests/neg/singleton-ctx-bound.scala @@ -0,0 +1,20 @@ +//> using options -language:experimental.modularity -source future +object Test: + + def someInt = 1 + + def f1[T <: Singleton](x: T): T = x + f1(someInt) // error + f1(if ??? then 1 else 2) // OK, but should be error + f1(3 * 2) // OK + + def f2[T](x: T)(using T is Singleton): T = x + f2(someInt) // error + f2(if ??? then 1 else 2) // error + f2(3 * 2) // OK + + def f3[T: Singleton](x: T): T = x + f3(someInt) // error + f3(if ??? 
then 1 else 2) // error + f3(3 * 2) // OK + f3(6) // OK diff --git a/tests/pos/singleton-ctx-bound.scala b/tests/pos/singleton-ctx-bound.scala new file mode 100644 index 000000000000..5d15cf53836e --- /dev/null +++ b/tests/pos/singleton-ctx-bound.scala @@ -0,0 +1,44 @@ +//> using options -language:experimental.modularity -source future +object Test: + + class Wrap[T](x: T) + + def f0[T](x: T): Wrap[T] = Wrap(x) + val x0 = f0(1) + val _: Wrap[Int] = x0 + + def f1[T <: Singleton](x: T): Wrap[T] = Wrap(x) + val x1 = f1(1) + val _: Wrap[1] = x1 + + def f2[T](x: T)(using Singleton { type Self = T}): Wrap[T] = Wrap(x) + val x2 = f2(1) + val _: Wrap[1] = x2 + + def f3[T: Singleton](x: T): Wrap[T] = Wrap(x) + val x3 = f3(1) + val _: Wrap[1] = x3 + + def f4[T](x: T)(using T is Singleton): Wrap[T] = Wrap(x) + val x4 = f4(1) + val _: Wrap[1] = x4 + + class C0[T](x: T): + def fld: T = x + val y0 = C0("hi") + val _: String = y0.fld + + class C1[T <: Singleton](x: T): + def fld: T = x + val y1 = C1("hi") + val _: "hi" = y1.fld + + class C2[T](x: T)(using T is Singleton): + def fld: T = x + val y2 = C2("hi") + val _: "hi" = y1.fld + + class C3[T: Singleton](x: T): + def fld: T = x + val y3 = C3("hi") + val _: "hi" = y1.fld \ No newline at end of file From 1f2e735565a7cb95b8b4ea3f71d330511da1f516 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 6 Apr 2024 15:13:07 +0200 Subject: [PATCH 031/827] Tweaks to doc pages --- docs/_docs/reference/experimental/modularity.md | 2 +- docs/_docs/reference/experimental/typeclasses.md | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/_docs/reference/experimental/modularity.md b/docs/_docs/reference/experimental/modularity.md index 2062c4d5eda2..a989b71770af 100644 --- a/docs/_docs/reference/experimental/modularity.md +++ b/docs/_docs/reference/experimental/modularity.md @@ -138,7 +138,7 @@ when typechecking recursive class graphs. So an explicit `tracked` looks like th Since `tracked` parameters create refinements in constructor types, it is now possible that a class has a parent that is a refined type. -Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing so +Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing to admit such types. **Proposal** Allow refined types as parent types of classes. All refinements that are inherited in this way become synthetic members of the class. diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index 8c95152b8e46..dab612512579 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -220,7 +220,7 @@ So far, an unnamed context bound for a type parameter gets a synthesized fresh n xs.foldLeft(A.unit)(_ `combine` _) ``` -The use of a name like `A` above in two variants, both as a type name and as a term name is of course familiar to Scala programmers. We use the same convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before. +In Scala we are already familiar with using one name for two related things where one version names a type and the other an associated value. For instance, we use that convention for classes and companion objects. 
In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before. **Proposed Rules** @@ -228,6 +228,8 @@ The use of a name like `A` above in two variants, both as a type name and as a t 2. The generated evidence for a context bound `A : C` without an `as` binding has name `A` (seen as a term name). So, `A : C` is equivalent to `A : C as A`. 3. If there are multiple context bounds for a type parameter, as in `A : {C_1, ..., C_n}`, the generated evidence parameter for every context bound `C_i` has a fresh synthesized name, unless the context bound carries an `as` clause, in which case rule (1) applies. +TODO: Present context bound proxy concept. + The default naming convention reduces the need for named context bounds. But named context bounds are still essential, for at least two reasons: - They are needed to give names to multiple context bounds. @@ -357,6 +359,8 @@ given Int is Monoid: extension (x: Int) def combine(y: Int) = x + y def unit = 0 ``` +Here, the second given can be read as if `A` is an `Ord` then `List[A]` is also an`Ord`. Or: for all `A: Ord`, `List[A]` is `Ord`. The arrow can be seen as an implication, note also the analogy to pattern matching syntax. + If explicit names are desired, we add them with `as` clauses: ```scala given String is Ord as intOrd: @@ -558,6 +562,7 @@ Here are some standard type classes, which were mostly already introduced at the def minimum[T: Ord](xs: List[T]) = maximum(xs)(using descending) ``` +The `Reader` type is a bit hairy. It is a type class (written in the parameterized syntax) where we fix a context `Ctx` and then let `Reader` be the polymorphic function type over `X` that takes a context `Ctx` and returns an `X`. Type classes like this are commonly used in monadic effect systems. 
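A minimal sketch of what such a Reader instance could look like in the Self-based style; the environment type `Env` and the exact `Monad` members (`pure`, `flatMap`) used here are illustrative assumptions, not quotes from the proposal:

```scala
//> using options -language:experimental.modularity -source future

// A cut-down Self-style Monad, just enough to state the Reader instance.
trait Monad:
  type Self[A]
  def pure[A](a: A): Self[A]
  extension [A](m: Self[A])
    def flatMap[B](f: A => Self[B]): Self[B]

class Env                        // the fixed context
type Reader = [X] =>> Env => X   // Reader fixes Env and is polymorphic in the result X

given Reader is Monad:
  def pure[A](a: A): Reader[A] = _ => a
  extension [A](r: Reader[A])
    def flatMap[B](f: A => Reader[B]): Reader[B] =
      env => f(r(env))(env)      // run r, then feed the same environment to the next step
```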
### Example 2 From 94bc6fee3aa23e0d00fb5a044b3f99ea13a3cc37 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 6 Apr 2024 15:13:46 +0200 Subject: [PATCH 032/827] Add Precise type class for precise type inference --- .../dotty/tools/dotc/core/Definitions.scala | 2 + .../src/dotty/tools/dotc/core/Types.scala | 14 +++- .../dotty/tools/dotc/typer/ProtoTypes.scala | 71 +++++++++++++------ .../dotty/tools/dotc/typer/Synthesizer.scala | 10 ++- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- .../dotty/tools/repl/TabcompleteTests.scala | 4 +- .../reference/experimental/typeclasses.md | 65 +++++++++-------- library/src/scala/Precise.scala | 11 +++ tests/neg/singleton-ctx-bound.check | 34 +++++++++ tests/neg/singleton-ctx-bound.scala | 15 ++++ tests/pos/deferred-givens-singletons.scala | 13 ++++ tests/pos/precise-ctx-bound.scala | 51 +++++++++++++ tests/pos/precise-indexof.scala | 46 ++++++++++++ tests/pos/singleton-ctx-bound.scala | 7 +- .../stdlibExperimentalDefinitions.scala | 3 + 15 files changed, 287 insertions(+), 61 deletions(-) create mode 100644 library/src/scala/Precise.scala create mode 100644 tests/neg/singleton-ctx-bound.check create mode 100644 tests/pos/deferred-givens-singletons.scala create mode 100644 tests/pos/precise-ctx-bound.scala create mode 100644 tests/pos/precise-indexof.scala diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 6d3a4de7b026..11a4a8473e79 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -535,6 +535,8 @@ class Definitions { def ConsType: TypeRef = ConsClass.typeRef @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") + @tu lazy val PreciseClass: ClassSymbol = requiredClass("scala.Precise") + @tu lazy val SingletonClass: ClassSymbol = // needed as a synthetic class because Scala 2.x refers to it in classfiles // but does not define it as an explicit class. diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 27931bad0bc3..3c6d9ecbf204 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4941,7 +4941,7 @@ object Types extends TypeUtils { initOrigin: TypeParamRef, creatorState: TyperState | Null, val initNestingLevel: Int, - precise: Boolean) extends CachedProxyType with ValueType { + val precise: Boolean) extends CachedProxyType with ValueType { private var currentOrigin = initOrigin def origin: TypeParamRef = currentOrigin @@ -5045,9 +5045,19 @@ object Types extends TypeUtils { else instantiateWith(tp) + def isPrecise(using Context) = + precise + || { + val constr = ctx.typerState.constraint + constr.upper(origin).exists: tparam => + constr.typeVarOfParam(tparam) match + case tvar: TypeVar => tvar.precise + case _ => false + } + /** Widen unions when instantiating this variable in the current context? 
*/ def widenPolicy(using Context): Widen = - if precise then Widen.None + if isPrecise then Widen.None else if ctx.typerState.constraint.isHard(this) then Widen.Singletons else Widen.Unions diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 7afdc836f656..bb1d5ac71269 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -11,6 +11,7 @@ import Constants.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet} import Decorators.* import Uniques.* +import Flags.Method import inlines.Inlines import config.Printers.typr import Inferencing.* @@ -26,7 +27,7 @@ object ProtoTypes { import tpd.* /** A trait defining an `isCompatible` method. */ - trait Compatibility { + trait Compatibility: /** Is there an implicit conversion from `tp` to `pt`? */ def viewExists(tp: Type, pt: Type)(using Context): Boolean @@ -106,19 +107,34 @@ object ProtoTypes { if !res then ctx.typerState.constraint = savedConstraint res - /** Constrain result with special case if `meth` is an inlineable method in an inlineable context. - * In that case, we should always succeed and not constrain type parameters in the expected type, - * because the actual return type can be a subtype of the currently known return type. - * However, we should constrain parameters of the declared return type. This distinction is - * achieved by replacing expected type parameters with wildcards. + /** Constrain result with two special cases: + * 1. If `meth` is an inlineable method in an inlineable context, + * we should always succeed and not constrain type parameters in the expected type, + * because the actual return type can be a subtype of the currently known return type. + * However, we should constrain parameters of the declared return type. This distinction is + * achieved by replacing expected type parameters with wildcards. + * 2. When constraining the result of a primitive value operation against + * a precise typevar, don't lower-bound the typevar with a non-singleton type. */ def constrainResult(meth: Symbol, mt: Type, pt: Type)(using Context): Boolean = - if (Inlines.isInlineable(meth)) { + + def constFoldException(pt: Type): Boolean = pt.dealias match + case tvar: TypeVar => + tvar.isPrecise + && meth.is(Method) && meth.owner.isPrimitiveValueClass + && mt.resultType.isPrimitiveValueType && !mt.resultType.isSingleton + case tparam: TypeParamRef => + constFoldException(ctx.typerState.constraint.typeVarOfParam(tparam)) + case _ => + false + + if Inlines.isInlineable(meth) then constrainResult(mt, wildApprox(pt)) true - } - else constrainResult(mt, pt) - } + else + constFoldException(pt) || constrainResult(mt, pt) + end constrainResult + end Compatibility object NoViewsAllowed extends Compatibility { override def viewExists(tp: Type, pt: Type)(using Context): Boolean = false @@ -701,10 +717,18 @@ object ProtoTypes { case FunProto((arg: untpd.TypedSplice) :: Nil, _) => arg.isExtensionReceiver case _ => false - object SingletonConstrained: - def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match - case RefinedType(parent, tpnme.Self, TypeAlias(tp)) - if parent.typeSymbol == defn.SingletonClass => Some(tp) + /** An extractor for Singleton and Precise witness types. 
+ * + * Singleton { type Self = T } returns Some(T, true) + * Precise { type Self = T } returns Some(T, false) + */ + object PreciseConstrained: + def unapply(tp: Type)(using Context): Option[(Type, Boolean)] = tp.dealias match + case RefinedType(parent, tpnme.Self, TypeAlias(tp)) => + val tsym = parent.typeSymbol + if tsym == defn.SingletonClass then Some((tp, true)) + else if tsym == defn.PreciseClass then Some((tp, false)) + else None case _ => None /** Add all parameters of given type lambda `tl` to the constraint's domain. @@ -728,30 +752,31 @@ object ProtoTypes { // hk type lambdas can be added to constraints without typevars during match reduction val added = state.constraint.ensureFresh(tl) - def singletonConstrainedRefs(tp: Type): Set[TypeParamRef] = tp match + def preciseConstrainedRefs(tp: Type, singletonOnly: Boolean): Set[TypeParamRef] = tp match case tp: MethodType if tp.isContextualMethod => val ownBounds = - for case SingletonConstrained(ref: TypeParamRef) <- tp.paramInfos + for + case PreciseConstrained(ref: TypeParamRef, singleton) <- tp.paramInfos + if !singletonOnly || singleton yield ref - ownBounds.toSet ++ singletonConstrainedRefs(tp.resType) + ownBounds.toSet ++ preciseConstrainedRefs(tp.resType, singletonOnly) case tp: LambdaType => - singletonConstrainedRefs(tp.resType) + preciseConstrainedRefs(tp.resType, singletonOnly) case _ => Set.empty - val singletonRefs = singletonConstrainedRefs(added) - def isSingleton(ref: TypeParamRef) = singletonRefs.contains(ref) - def newTypeVars: List[TypeVar] = + val preciseRefs = preciseConstrainedRefs(added, singletonOnly = false) for paramRef <- added.paramRefs yield - val tvar = TypeVar(paramRef, state, nestingLevel, precise = isSingleton(paramRef)) + val tvar = TypeVar(paramRef, state, nestingLevel, precise = preciseRefs.contains(paramRef)) state.ownedVars += tvar tvar val tvars = if addTypeVars then newTypeVars else Nil TypeComparer.addToConstraint(added, tvars) + val singletonRefs = preciseConstrainedRefs(added, singletonOnly = true) for paramRef <- added.paramRefs do - if isSingleton(paramRef) then paramRef <:< defn.SingletonType + if singletonRefs.contains(paramRef) then paramRef <:< defn.SingletonType (added, tvars) end constrained diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 9fb091e3306c..6b18540b6551 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -238,7 +238,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): end synthesizedValueOf val synthesizedSingleton: SpecialHandler = (formal, span) => formal match - case SingletonConstrained(tp) => + case PreciseConstrained(tp, true) => if tp.isSingletonBounded(frozen = false) then withNoErrors: ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) @@ -247,6 +247,13 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case _ => EmptyTreeNoError + val synthesizedPrecise: SpecialHandler = (formal, span) => formal match + case PreciseConstrained(tp, false) => + withNoErrors: + ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) + case _ => + EmptyTreeNoError + /** Create an anonymous class `new Object { type MirroredMonoType = ... }` * and mark it with given attachment so that it is made into a mirror at PostTyper. 
*/ @@ -749,6 +756,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): defn.ManifestClass -> synthesizedManifest, defn.OptManifestClass -> synthesizedOptManifest, defn.SingletonClass -> synthesizedSingleton, + defn.PreciseClass -> synthesizedPrecise, ) def tryAll(formal: Type, span: Span)(using Context): TreeWithErrors = diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index d23f77143e14..b1b21bd1eee5 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3027,7 +3027,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Select(id)(This(cls), id.name) case _ => super.transform(tree) - ValDef(impl, anchorParams.transform(rhs)) + ValDef(impl, anchorParams.transform(rhs)).withSpan(impl.span.endPos) end givenImpl val givenImpls = diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index e4c3a2557e7d..f719752be353 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -122,11 +122,11 @@ class TabcompleteTests extends ReplTest { } @Test def moduleCompletion = initially { - assertEquals(List("Predef"), tabComplete("object Foo { type T = Pre")) + assertEquals(List("Predef"), tabComplete("object Foo { type T = Pred")) } @Test def i6415 = initially { - assertEquals(List("Predef"), tabComplete("object Foo { opaque type T = Pre")) + assertEquals(List("Predef"), tabComplete("object Foo { opaque type T = Pred")) } @Test def i6361 = initially { diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index dab612512579..cf5f3220faa6 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -444,6 +444,39 @@ This is less of a disruption than it might appear at first: - Simplification of the language since a feature is dropped - Eliminate non-obvious and misleading syntax. + +### Bonus: Fixing Singleton + +We know the current treatment of `Singleton` as a type bound is broken since +`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. + +A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. + +We can do this in a backwards-compatible way by defining `Singleton` like this: + +```scala +trait Singleton: + type Self +``` + +Then, instead of using an unsound upper bound we can use a context bound: + +```scala +def f[X: Singleton](x: X) = ... +``` + +The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). + +### Bonus: Precise Typing + +This approach also presents a solution to the problem how to express precise type variables. We can introduce another special type class `Precise` and use it like this: + +```scala +def f[X: Precise](x: X) = ... +``` +Like a `Singleton` bound, a `Precise` bound disables automatic widening of singleton types or union types in inferred instances of type variable `X`. But there is no requirement that the type argument _must_ be a singleton. + + ## Summary of Syntax Changes Here is the complete context-free syntax for all proposed features. 
@@ -692,38 +725,10 @@ Dimi Racordon tried to [port some core elements](https://github.com/kyouko-taiga With the improvements proposed here, the library can now be expressed quite clearly and straightforwardly. See tests/pos/hylolib in this PR for details. -## Suggested Improvements unrelated to Type Classes - -The following two improvements elsewhere would make sense alongside the suggested changes to type classes. But only the first (fixing singleton) forms a part of this proposal and is implemented. - -### Fixing Singleton - -We know the current treatment of `Singleton` as a type bound is broken since -`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. - -A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. +## Suggested Improvement unrelated to Type Classes -We can do this in a backwards-compatible way by defining `Singleton` like this: +The following improvement would make sense alongside the suggested changes to type classes. But it does not form part of this proposal and is not yet implemented. -```scala -trait Singleton: - type Self -``` - -Then, instead of using an unsound upper bound we can use a context bound: - -```scala -def f[X: Singleton](x: X) = ... -``` - -The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). - -_Aside_: This can also lead to a solution how to express precise type variables. We can introduce another special type class `Precise` and use it like this: - -```scala -def f[X: Precise](x: X) = ... -``` -This would disable automatic widening of singleton types in inferred instances of type variable `X`. ### Using `as` also in Patterns diff --git a/library/src/scala/Precise.scala b/library/src/scala/Precise.scala new file mode 100644 index 000000000000..aad42ca8950f --- /dev/null +++ b/library/src/scala/Precise.scala @@ -0,0 +1,11 @@ +package scala +import annotation.experimental +import language.experimental.erasedDefinitions + +/** A type class-like trait intended as a context bound for type variables. + * If we have `[X: Precise]`, instances of the type variable `X` are inferred + * in precise mode. This means that singleton types and union types are not + * widened. + */ +@experimental erased trait Precise: + type Self diff --git a/tests/neg/singleton-ctx-bound.check b/tests/neg/singleton-ctx-bound.check new file mode 100644 index 000000000000..785123c0e680 --- /dev/null +++ b/tests/neg/singleton-ctx-bound.check @@ -0,0 +1,34 @@ +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:7:5 ------------------------------------------------- +7 | f1(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:12:5 ------------------------------------------------ +12 | f2(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:13:26 -------------------------------------------------------- +13 | f2(if ??? then 1 else 2) // error + | ^ + |No given instance of type (1 : Int) | (2 : Int) is Singleton was found for parameter x$2 of method f2 in object Test. 
Failed to synthesize an instance of type (1 : Int) | (2 : Int) is Singleton: (1 : Int) | (2 : Int) is not a singleton +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:17:5 ------------------------------------------------ +17 | f3(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:18:26 -------------------------------------------------------- +18 | f3(if ??? then 1 else 2) // error + | ^ + |No given instance of type Singleton{type Self = (1 : Int) | (2 : Int)} was found for a context parameter of method f3 in object Test. Failed to synthesize an instance of type Singleton{type Self = (1 : Int) | (2 : Int)}: (1 : Int) | (2 : Int) is not a singleton +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:33:6 --------------------------------------------------------- +33 |class D extends A: // error + |^ + |No given instance of type Singleton{type Self = D.this.Elem} was found for inferring the implementation of the deferred given instance given_Singleton_Elem in trait A. Failed to synthesize an instance of type Singleton{type Self = D.this.Elem}: D.this.Elem is not a singleton +34 | type Elem = Int diff --git a/tests/neg/singleton-ctx-bound.scala b/tests/neg/singleton-ctx-bound.scala index 64bb63a288b0..e061ec54bb16 100644 --- a/tests/neg/singleton-ctx-bound.scala +++ b/tests/neg/singleton-ctx-bound.scala @@ -18,3 +18,18 @@ object Test: f3(if ??? then 1 else 2) // error f3(3 * 2) // OK f3(6) // OK + +import compiletime.* + +trait A: + type Elem: Singleton + +class B extends A: + type Elem = 1 // OK + +class C[X: Singleton] extends A: + type Elem = X // OK + +class D extends A: // error + type Elem = Int + diff --git a/tests/pos/deferred-givens-singletons.scala b/tests/pos/deferred-givens-singletons.scala new file mode 100644 index 000000000000..60a881340b75 --- /dev/null +++ b/tests/pos/deferred-givens-singletons.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* + +trait A: + type Elem: Singleton + +class B extends A: + type Elem = 1 + +class C[X: Singleton] extends A: + type Elem = X + + diff --git a/tests/pos/precise-ctx-bound.scala b/tests/pos/precise-ctx-bound.scala new file mode 100644 index 000000000000..3f17a5b4a54e --- /dev/null +++ b/tests/pos/precise-ctx-bound.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +object Test: + + class Wrap[T](x: T) + + def f0[T](x: T): Wrap[T] = Wrap(x) + val x0 = f0(1) + val _: Wrap[Int] = x0 + + def f1[T: Precise](x: T): Wrap[T] = Wrap(x) + def l = "hello".length + val x1 = Wrap(l) + val _: Wrap[Int] = x1 + + def f2[T](x: T)(using Precise { type Self = T}): Wrap[T] = Wrap(x) + val x2 = f2(1) + val _: Wrap[1] = x2 + + def f3[T: Precise](x: T): Wrap[T] = Wrap(x) + val x3 = f3(identity(1)) + val _: Wrap[1] = x3 + val x3a = f3(1 + 2) + val _: Wrap[3] = x3a + + def f4[T](x: T)(using T is Precise): Wrap[T] = Wrap(x) + val x4 = f4(1) + val _: Wrap[1] = x4 + val x4a = f4(1 + 2) + val _: Wrap[3] = x4a + val y4 = f4(if ??? then 1 else 2) + val _: Wrap[1 | 2] = y4 + val z4 = f4(if ??? 
then B() else C()) + val _: Wrap[B | C] = z4 + trait A + class B extends A + class C extends A + + class C0[T](x: T): + def fld: T = x + val y0 = C0("hi") + val _: String = y0.fld + + class C2[T](x: T)(using T is Precise): + def fld: T = x + val y2 = C2(identity("hi")) + val _: "hi" = y2.fld + + class C3[T: Precise](x: T): + def fld: T = x + val y3 = C3("hi") + val _: "hi" = y3.fld diff --git a/tests/pos/precise-indexof.scala b/tests/pos/precise-indexof.scala new file mode 100644 index 000000000000..af1e6c5b504b --- /dev/null +++ b/tests/pos/precise-indexof.scala @@ -0,0 +1,46 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +import compiletime.ops.int.* + +/** The index of `Y` in tuple `X` as a literal constant Int, + * or `Size[X]` if `Y` does not occur in `X` + */ +type IndexOf[X <: Tuple, Y] <: Int = X match + case Y *: _ => 0 + case x *: xs => S[IndexOf[xs, Y]] + case EmptyTuple => 0 + +extension [X <: Tuple](inline x: X) + + /** The index (starting at 0) of the first element in the type `X` of `x` + * that matches type `Y`. + */ + inline def indexOfType[Y] = constValue[IndexOf[X, Y]] + + inline def indexOf[Y: Precise](y: Y) = constValue[IndexOf[X, Y]] + +// Note: without the Precise, the index calcularion would go wrong. For instance, +// (1, 2, "hello", true).indexOf(2) would be 0, the same as (1, 2, "hello", true).indexOTypef[Int] +// (1, 2, "hello", true).indexOf("foo") would be 2, the same as (1, 2, "hello", true).indexOTypef[String] +// But we could alternatively pick Singleton + +@main def Test = + val t: (1, 2, "hello", true) = (1, 2, "hello", true) + val x1: 0 = t.indexOfType[1] + val x2: 1 = t.indexOfType[2] + val x3: 2 = t.indexOfType["hello"] + val x4: 3 = t.indexOfType[true] + val x5: 4 = t.indexOfType[77] + val x6: 0 = t.indexOfType[Int] + val x7: 2 = t.indexOfType[String] + val x8: 4 = t.indexOfType[Double] + + val y1: 0 = t.indexOf(1) + val y2: 1 = t.indexOf(2) + val y3: 2 = t.indexOf("hello") + val y4: 3 = t.indexOf(true) + val y5: 4 = t.indexOf(identity(77)) + val y6: 0 = t.indexOf(identity(1)) + val y7: 4 = t.indexOf("foo") + + diff --git a/tests/pos/singleton-ctx-bound.scala b/tests/pos/singleton-ctx-bound.scala index 5d15cf53836e..c6b0d2fb823c 100644 --- a/tests/pos/singleton-ctx-bound.scala +++ b/tests/pos/singleton-ctx-bound.scala @@ -36,9 +36,12 @@ object Test: class C2[T](x: T)(using T is Singleton): def fld: T = x val y2 = C2("hi") - val _: "hi" = y1.fld + val _: "hi" = y2.fld class C3[T: Singleton](x: T): def fld: T = x val y3 = C3("hi") - val _: "hi" = y1.fld \ No newline at end of file + val _: "hi" = y3.fld + + + diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 48ff5407ac87..df35bed19360 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -79,6 +79,9 @@ val experimentalDefinitionInLibrary = Set( "scala.NamedTuple$", "scala.NamedTupleDecomposition", "scala.NamedTupleDecomposition$", + + // New feature: Precise trait + "scala.Precise", ) From 887fbc4b4996d95360e5dd92492d8f3904cde27a Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 14 Apr 2024 15:26:30 +0200 Subject: [PATCH 033/827] Fix rebase breakage --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- tests/neg/cb-companion-leaks.check | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala 
b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 63e86e3a321d..c3369ac58e31 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1791,7 +1791,7 @@ trait Applications extends Compatibility { * a. always as good as a method or a polymorphic method. * b. as good as a member of any other type `tp2` if `asGoodValueType(tp1, tp2) = true` */ - def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { + def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsGood $tp1 $tp2", overload) { tp1 match case tp1: MethodType => // (1) tp1.paramInfos.isEmpty && tp2.isInstanceOf[LambdaType] diff --git a/tests/neg/cb-companion-leaks.check b/tests/neg/cb-companion-leaks.check index 156f8a7ab3ee..560561e0e261 100644 --- a/tests/neg/cb-companion-leaks.check +++ b/tests/neg/cb-companion-leaks.check @@ -1,4 +1,4 @@ --- [E194] Type Error: tests/neg/cb-companion-leaks.scala:9:23 ---------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:9:23 ---------------------------------------------------------- 9 | def foo[A: {C, D}] = A // error | ^ | context bound companion value A cannot be used as a value @@ -20,7 +20,7 @@ | companion value with the (term-)name `A`. However, these context bound companions | are not values themselves, they can only be referred to in selections. --------------------------------------------------------------------------------------------------------------------- --- [E194] Type Error: tests/neg/cb-companion-leaks.scala:13:10 --------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:13:10 --------------------------------------------------------- 13 | val x = A // error | ^ | context bound companion value A cannot be used as a value @@ -42,7 +42,7 @@ | companion value with the (term-)name `A`. However, these context bound companions | are not values themselves, they can only be referred to in selections. -------------------------------------------------------------------------------------------------------------------- --- [E194] Type Error: tests/neg/cb-companion-leaks.scala:15:9 ---------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:15:9 ---------------------------------------------------------- 15 | val y: A.type = ??? 
// error | ^ | context bound companion value A cannot be used as a value From 1e72282418d93a968b36fa43415f1ea63125d982 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 17 Apr 2024 23:01:54 +0200 Subject: [PATCH 034/827] Delay roll-out of new prioritization scheme: Now: 3.5: old scheme but warn if there are changes in the future 3.6-migration: new scheme, warn if prioritization has changed 3.6: new scheme, no warning --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index c3369ac58e31..fd4c634801be 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1880,7 +1880,7 @@ trait Applications extends Compatibility { val tp1p = prepare(tp1) val tp2p = prepare(tp2) - if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) + if Feature.sourceVersion.isAtMost(SourceVersion.`3.5`) || oldResolution || !alt1isGiven && !alt2isGiven then From 9d0ca20f949c4c390f4fa414f3c5ff4460013960 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 28 Apr 2024 13:12:43 +0200 Subject: [PATCH 035/827] Fix rebase breakage again --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- project/MiMaFilters.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index b1b21bd1eee5..a2291d55bac8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2366,7 +2366,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) if tycon.tpe.typeParams.nonEmpty then typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) - else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractType then + else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 6c3640eed12c..18d2e985f844 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -98,7 +98,7 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of tasty core Build.mimaPreviousDottyVersion -> Seq( - ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype") + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype"), ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.TRACKED"), ), From fd072dc686bf0f0cc789ef0b7385d8189d64e374 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 28 Apr 2024 14:13:26 +0200 Subject: [PATCH 036/827] Make best effort compilation work with context bound companions If they are illegally used as values, we need to return an error tree, not a tree with a symbol that can't be pickled. 
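For context, a sketch of the shape of source that triggers this, reconstructed from the `cb-companion-leaks.check` expectations above; only the flagged lines come from the check file, the `C`/`D` trait bodies and the surrounding structure are assumed:

```scala
//> using options -language:experimental.modularity -source future

trait C:
  type Self
trait D:
  type Self

def foo[A: {C, D}] = A        // error: context bound companion value A cannot be used as a value

def bar[A: {C, D}](a: A) =
  val x = A                   // error: context bound companion value A cannot be used as a value
  val y: A.type = ???         // error: context bound companion value A cannot be used as a value
```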
--- .../tools/dotc/transform/PostTyper.scala | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index a110ec53abc0..22370e923a4b 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -279,13 +279,15 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } } - def checkUsableAsValue(tree: Tree)(using Context): Unit = + def checkUsableAsValue(tree: Tree)(using Context): Tree = def unusable(msg: Symbol => Message) = - report.error(msg(tree.symbol), tree.srcPos) + errorTree(tree, msg(tree.symbol)) if tree.symbol.is(ConstructorProxy) then unusable(ConstructorProxyNotValue(_)) - if tree.symbol.isContextBoundCompanion then + else if tree.symbol.isContextBoundCompanion then unusable(ContextBoundCompanionNotValue(_)) + else + tree def checkStableSelection(tree: Tree)(using Context): Unit = def check(qual: Tree) = @@ -330,11 +332,11 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if tree.isType then checkNotPackage(tree) else - checkUsableAsValue(tree) registerNeedsInlining(tree) - tree.tpe match { + val tree1 = checkUsableAsValue(tree) + tree1.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) - case _ => tree + case _ => tree1 } case tree @ Select(qual, name) => registerNeedsInlining(tree) @@ -342,8 +344,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) else - checkUsableAsValue(tree) - transformSelect(tree, Nil) + checkUsableAsValue(tree) match + case tree1: Select => transformSelect(tree1, Nil) + case tree1 => tree1 case tree: Apply => val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] val app = From 21f5e678e6a58380d47b8f68edf89317402595a9 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 30 Apr 2024 11:01:33 +0200 Subject: [PATCH 037/827] Tweaks after review --- .../src/dotty/tools/dotc/ast/Desugar.scala | 93 +++++++++++-------- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 8 +- compiler/src/dotty/tools/dotc/ast/untpd.scala | 1 + .../src/dotty/tools/dotc/config/Config.scala | 3 +- compiler/src/dotty/tools/dotc/core/Mode.scala | 4 +- .../src/dotty/tools/dotc/core/NamerOps.scala | 8 +- .../tools/dotc/core/SymDenotations.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 11 ++- .../dotty/tools/dotc/parsing/Parsers.scala | 30 ++++-- .../tools/dotc/transform/PostTyper.scala | 6 +- .../src/dotty/tools/dotc/typer/Namer.scala | 18 ++-- .../dotty/tools/dotc/typer/ProtoTypes.scala | 1 + .../dotty/tools/dotc/typer/RefChecks.scala | 8 +- .../src/dotty/tools/dotc/typer/Typer.scala | 64 ++++++++----- .../annotation/internal/WitnessNames.scala | 3 +- library/src/scala/compiletime/package.scala | 3 +- .../scala/runtime/stdLibPatches/Predef.scala | 1 + tests/neg/i12348.check | 12 +-- tests/neg/i12348.scala | 2 +- tests/pos/typeclasses-this.scala | 10 +- .../stdlibExperimentalDefinitions.scala | 5 +- 21 files changed, 175 insertions(+), 118 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 08953f1dec6b..0681492a4ba7 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -226,10 +226,21 @@ object desugar { private def 
defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) + /** Drop context bounds in given TypeDef, replacing them with evidence ValDefs that + * get added to a buffer. + * @param tdef The given TypeDef + * @param evidenceBuf The buffer to which evidence gets added. This buffer + * is shared between desugarings of different type parameters + * of the same method. + * @param evidenceFlags The flags to use for evidence definitions + * @param freshName A function to generate fresh names for evidence definitions + * @param allParams If `tdef` is a type paramter, all parameters of the owning method, + * otherwise the empty list. + */ private def desugarContextBounds( tdef: TypeDef, evidenceBuf: mutable.ListBuffer[ValDef], - flags: FlagSet, + evidenceFlags: FlagSet, freshName: untpd.Tree => TermName, allParamss: List[ParamClause])(using Context): TypeDef = @@ -237,18 +248,18 @@ object desugar { def desugarRhs(rhs: Tree): Tree = rhs match case ContextBounds(tbounds, cxbounds) => - val isMember = flags.isAllOf(DeferredGivenFlags) + val isMember = evidenceFlags.isAllOf(DeferredGivenFlags) for bound <- cxbounds do val evidenceName = bound match case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => - ownName + ownName // if there is an explicitly given name, use it. case _ if Config.nameSingleContextBounds && !isMember && cxbounds.tail.isEmpty && Feature.enabled(Feature.modularity) => tdef.name.toTermName case _ => freshName(bound) evidenceNames += evidenceName - val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(flags) + val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(evidenceFlags) evidenceParam.pushAttachment(ContextBoundParam, ()) evidenceBuf += evidenceParam tbounds @@ -258,9 +269,13 @@ object desugar { rhs val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + // Under x.modularity, if there was a context bound, and `tdef`s name as a term name is + // neither a name of an existing parameter nor a name of generated evidence for + // the same method, add a WitnessAnnotation with all generated evidence names to `tdef`. + // This means a context bound proxy will be created later. if Feature.enabled(Feature.modularity) && evidenceNames.nonEmpty - && !evidenceNames.contains(tdef.name.toTermName) + && !evidenceBuf.exists(_.name == tdef.name.toTermName) && !allParamss.nestedExists(_.name == tdef.name.toTermName) then tdef1.withAddedAnnotation: @@ -332,9 +347,9 @@ object desugar { def getterParamss(n: Int): List[ParamClause] = mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, KeepAnnotations.All)) + tparam => dropContextBounds(toMethParam(tparam, KeepAnnotations.All)) } { - vparam => toDefParam(vparam, KeepAnnotations.All, keepDefault = false) + vparam => toMethParam(vparam, KeepAnnotations.All, keepDefault = false) } def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match @@ -429,32 +444,30 @@ object desugar { * The position of the added parameters is determined as follows: * * - If there is an existing parameter list that refers to one of the added - * parameters in one of its parameter types, add the new parameters - * in front of the first such parameter list. 
- * - Otherwise, if the last parameter list consists implicit or using parameters, + * parameters or their future context bound proxies in one of its parameter + * types, add the new parameters in front of the first such parameter list. + * - Otherwise, if the last parameter list consists of implicit or using parameters, * join the new parameters in front of this parameter list, creating one - * parameter list (this is equilavent to Scala 2's scheme). + * parameter list (this is equivalent to Scala 2's scheme). * - Otherwise, add the new parameter list at the end as a separate parameter clause. */ private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = if params.isEmpty then return meth - var boundNames = params.map(_.name).toSet + var boundNames = params.map(_.name).toSet // all evidence parameter + context bound proxy names for mparams <- meth.paramss; mparam <- mparams do mparam match case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => boundNames += tparam.name.toTermName case _ => - //println(i"add ev params ${meth.name}, ${boundNames.toList}") - - def references(vdef: ValDef): Boolean = + def referencesBoundName(vdef: ValDef): Boolean = vdef.tpt.existsSubTree: case Ident(name: TermName) => boundNames.contains(name) case _ => false def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match - case ValDefs(mparams) :: _ if mparams.exists(references) => + case ValDefs(mparams) :: _ if mparams.exists(referencesBoundName) => params :: mparamss case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => (params ++ mparams) :: Nil @@ -468,12 +481,12 @@ object desugar { /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = - meth.paramss.reverse match { - case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.hasAttachment(ContextBoundParam)) - case _ => - Nil - } + for + case ValDefs(vparams @ (vparam :: _)) <- meth.paramss + if vparam.mods.isOneOf(GivenOrImplicit) + param <- vparams.takeWhile(_.hasAttachment(ContextBoundParam)) + yield + param @sharable private val synthetic = Modifiers(Synthetic) @@ -491,11 +504,13 @@ object desugar { case WitnessNamesAnnot(_) => true case _ => false - private def toDefParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = + /** Map type parameter accessor to corresponding method (i.e. constructor) parameter */ + private def toMethParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = val mods = filterAnnots(tparam.rawMods, keep) tparam.withMods(mods & EmptyFlags | Param) - private def toDefParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { + /** Map term parameter accessor to corresponding method (i.e. 
constructor) parameter */ + private def toMethParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { val mods = filterAnnots(vparam.rawMods, keep) val hasDefault = if keepDefault then HasDefault else EmptyFlags // Need to ensure that tree is duplicated since term parameters can be watched @@ -507,22 +522,16 @@ object desugar { .withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } - def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = - paramss.foldLeft(fn) { (fn, params) => params match - case TypeDefs(params) => - TypeApply(fn, params.map(refOfDef)) - case (vparam: ValDef) :: _ if vparam.mods.is(Given) => - Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) - case _ => - Apply(fn, params.map(refOfDef)) - } - + /** Desugar type def (not param): Under x.moduliity this can expand + * context bounds, which are expanded to evidence ValDefs. These will + * ultimately map to deferred givens. + */ def typeDef(tdef: TypeDef)(using Context): Tree = val evidenceBuf = new mutable.ListBuffer[ValDef] val result = desugarContextBounds( tdef, evidenceBuf, (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, - inventGivenOrExtensionName, Nil) + inventGivenName, Nil) if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) /** The expansion of a class definition. See inline comments for what is involved */ @@ -597,7 +606,7 @@ object desugar { // Annotations on class _type_ parameters are set on the derived parameters // but not on the constructor parameters. The reverse is true for // annotations on class _value_ parameters. - val constrTparams = impliedTparams.map(toDefParam(_, KeepAnnotations.WitnessOnly)) + val constrTparams = impliedTparams.map(toMethParam(_, KeepAnnotations.WitnessOnly)) val constrVparamss = if (originalVparamss.isEmpty) { // ensure parameter list is non-empty if (isCaseClass) @@ -608,7 +617,7 @@ object desugar { report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) ListOfNil } - else originalVparamss.nestedMap(toDefParam(_, KeepAnnotations.All, keepDefault = true)) + else originalVparamss.nestedMap(toMethParam(_, KeepAnnotations.All, keepDefault = true)) val derivedTparams = constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) @@ -630,7 +639,7 @@ object desugar { defDef( addEvidenceParams( cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, KeepAnnotations.None, keepDefault = false))))) + evidenceParams(constr1).map(toMethParam(_, KeepAnnotations.None, keepDefault = false))))) case stat => stat } @@ -1148,7 +1157,7 @@ object desugar { */ def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { var name = mdef.name - if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) + if (name.isEmpty) name = name.likeSpaced(inventGivenName(impl)) def errPos = mdef.source.atSpan(mdef.nameSpan) if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { val kind = if (name.isTypeName) "class" else "object" @@ -1195,7 +1204,7 @@ object desugar { end makePolyFunctionType /** Invent a name for an anonympus given of type or template `impl`. 
*/ - def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = + def inventGivenName(impl: Tree)(using Context): SimpleName = val str = impl match case impl: Template => if impl.parents.isEmpty then @@ -1207,6 +1216,10 @@ object desugar { "given_" ++ inventTypeName(impl) str.toTermName.asSimpleName + /** Extract a synthesized given name from a type tree. This is used for + * both anonymous givens and (under x.modularity) deferred givens. + * @param followArgs If true include argument types in the name + */ private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { private def extractArgs(args: List[Tree])(using Context): String = args.map(argNameExtractor.apply("", _)).mkString("_") diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 990fb37f4e60..11fb572b66c6 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -382,15 +382,15 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree.tpe.isInstanceOf[ThisType] } - - /** Extractor for annotation.internal.WitnessNames(name_1, ..., name_n)` + + /** Under x.modularity: Extractor for `annotation.internal.WitnessNames(name_1, ..., name_n)` * represented as an untyped or typed tree. */ object WitnessNamesAnnot: - def apply(names0: List[TermName])(using Context): untpd.Tree = + def apply(names: List[TermName])(using Context): untpd.Tree = untpd.TypedSplice(tpd.New( defn.WitnessNamesAnnot.typeRef, - tpd.SeqLiteral(names0.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil + tpd.SeqLiteral(names.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil )) def unapply(tree: Tree)(using Context): Option[List[TermName]] = diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 0486e2e6d3d7..64f9fb4df95e 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -119,6 +119,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree + // `paramName: tycon as ownName`, ownName != EmptyTermName only under x.modularity case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 293044c245ef..ee8ed4b215d7 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -236,7 +236,8 @@ object Config { inline val checkLevelsOnConstraints = false inline val checkLevelsOnInstantiation = true - /** If a type parameter `X` has a single context bound `X: C`, should the + /** Under x.modularity: + * If a type parameter `X` has a single context bound `X: C`, should the * witness parameter be named `X`? 
This would prevent the creation of a * context bound companion. */ diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 5dab5631c62a..14d7827974c0 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -104,8 +104,8 @@ object Mode { val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") /** Use previous Scheme for implicit resolution. Currently significant - * in 3.0-migration where we use Scala-2's scheme instead and in 3.5-migration - * where we use the previous scheme up to 3.4 instead. + * in 3.0-migration where we use Scala-2's scheme instead and in 3.5 and 3.6-migration + * where we use the previous scheme up to 3.4 for comparison with the new scheme. */ val OldImplicitResolution: Mode = newMode(15, "OldImplicitResolution") diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 58b4ad681c6f..5e76b09bbde6 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -24,9 +24,9 @@ object NamerOps: addParamRefinements(ctor.owner.typeRef, paramss) /** Given a method with tracked term-parameters `p1, ..., pn`, and result type `R`, add the - * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the term parameter ref + * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the TermParamRef * of the parameter and pi is its name. This matters only under experimental.modularity, - * since wothout it there are no tracked parameters. Parameter refinements are added for + * since without it there are no tracked parameters. Parameter refinements are added for * constructors and given companion methods. */ def addParamRefinements(resType: Type, paramss: List[List[Symbol]])(using Context): Type = @@ -261,7 +261,7 @@ object NamerOps: /** Create a context-bound companion for type symbol `tsym`, which has a context * bound that defines a set of witnesses with names `witnessNames`. * - * @param parans If `tsym` is a type parameter, a list of parameter symbols + * @param params If `tsym` is a type parameter, a list of parameter symbols * that include all witnesses, otherwise the empty list. * * The context-bound companion has as name the name of `tsym` translated to @@ -299,7 +299,7 @@ object NamerOps: * this class. This assumes that these types already have their * WitnessNames annotation set even before they are completed. This is * the case for unpickling but currently not for Namer. So the method - * is only called during unpickling, and is not part of NamerOps. + * is only called during unpickling. */ def addContextBoundCompanions(cls: ClassSymbol)(using Context): Unit = for sym <- cls.info.decls do diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 49c466f0bfd5..3904228756a0 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1194,7 +1194,7 @@ object SymDenotations { || is(JavaDefinedVal, butNot = Method) || isConstructor || !owner.isExtensibleClass && !is(Deferred) - // Deferred symbols can arise through parent refinements. + // Deferred symbols can arise through parent refinements under x.modularity. // For them, the overriding relationship reverses anyway, so // being in a final class does not mean the symbol cannot be // implemented concretely in a superclass. 
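// Illustration: a minimal sketch of what the single-context-bound naming
// described for Config.nameSingleContextBounds means in user code. It assumes
// the Self-based `Ord` type class used by the tests in this series and the
// `experimental.modularity` language import; `max2` and `max2Expanded` are
// illustrative names only.

//> using options -language:experimental.modularity -source future

trait Ord:
  type Self
  extension (x: Self) def compareTo(y: Self): Int

// With a single context bound, the witness parameter can reuse the name of the
// type parameter itself ...
def max2[T: Ord](x: T, y: T): T =
  if x.compareTo(y) >= 0 then x else y

// ... which corresponds roughly to this expansion, so no context bound
// companion needs to be generated for `T`:
def max2Expanded[T](x: T, y: T)(using T: Ord { type Self = T }): T =
  if x.compareTo(y) >= 0 then x else y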
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 3c6d9ecbf204..a92893678a17 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1655,7 +1655,7 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` is `S` + * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` if `S` * is a singleton type. * * Does not perform the reduction if the resulting type would contain @@ -4936,6 +4936,7 @@ object Types extends TypeUtils { * @param origin the parameter that's tracked by the type variable. * @param creatorState the typer state in which the variable was created. * @param initNestingLevel the initial nesting level of the type variable. (c.f. nestingLevel) + * @param precise whether we should use instantiation without widening for this TypeVar. */ final class TypeVar private( initOrigin: TypeParamRef, @@ -5045,6 +5046,9 @@ object Types extends TypeUtils { else instantiateWith(tp) + /** Should we suppress widening? True if this TypeVar is precise + * or if it has as an upper bound a precise TypeVar. + */ def isPrecise(using Context) = precise || { @@ -5055,7 +5059,9 @@ object Types extends TypeUtils { case _ => false } - /** Widen unions when instantiating this variable in the current context? */ + /** The policy used for widening singletons or unions when instantiating + * this variable in the current context. + */ def widenPolicy(using Context): Widen = if isPrecise then Widen.None else if ctx.typerState.constraint.isHard(this) then Widen.Singletons @@ -5107,6 +5113,7 @@ object Types extends TypeUtils { precise: Boolean = false) = new TypeVar(initOrigin, creatorState, nestingLevel, precise) + /** The three possible widening policies */ enum Widen: case None // no widening case Singletons // widen singletons but not unions diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index f3d02dda5c48..fe23d97d58c3 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3542,23 +3542,26 @@ object Parsers { paramMods() if paramOwner.takesOnlyUsingClauses && !impliedMods.is(Given) then syntaxError(em"`using` expected") - val (firstParamMod, isParams) = + val (firstParamMod, paramsAreNamed) = var mods = EmptyModifiers if in.lookahead.isColon then (mods, true) else if isErased then mods = addModifier(mods) - val isParams = + val paramsAreNamed = !impliedMods.is(Given) || startParamTokens.contains(in.token) || isIdent - && (in.name == nme.inline || in.name == nme.tracked || in.lookahead.isColon) - (mods, isParams) - (if isParams then commaSeparated(() => param()) - else contextTypes(paramOwner, numLeadParams, impliedMods)) match { + && (in.name == nme.inline + || in.name == nme.tracked && in.featureEnabled(Feature.modularity) + || in.lookahead.isColon) + (mods, paramsAreNamed) + val params = + if paramsAreNamed then commaSeparated(() => param()) + else contextTypes(paramOwner, numLeadParams, impliedMods) + params match case Nil => Nil case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t - } checkVarArgsRules(clause) clause } @@ -4156,7 +4159,10 @@ object Parsers { else // need to be careful with last `with` withConstrApps() - // TODO Change syntax description + // Adjust parameter modifiers so that they are now parameters of a method + // 
(originally, we created class parameters) + // TODO: syntax.md should be adjusted to reflect the difference that + // parameters of an alias given cannot be vals. def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = paramss.nestedMap: param => if !param.mods.isAllOf(PrivateLocal) then @@ -4173,7 +4179,8 @@ object Parsers { else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty - if !(name.isEmpty && noParams) then + val hasParamsOrId = !name.isEmpty || !noParams + if hasParamsOrId then if in.isColon then newSyntaxAllowed = false in.nextToken() @@ -4184,7 +4191,7 @@ object Parsers { rejectWildcardType(annotType()) :: Nil else constrApp() match case parent: Apply => parent :: moreConstrApps() - case parent if in.isIdent => + case parent if in.isIdent && newSyntaxAllowed => infixTypeRest(parent, _ => annotType1()) :: Nil case parent => parent :: moreConstrApps() if newSyntaxAllowed && in.isIdent(nme.as) then @@ -4193,6 +4200,7 @@ object Parsers { val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then + // given alias accept(EQUALS) mods1 |= Final if noParams && !mods.is(Inline) then @@ -4201,10 +4209,12 @@ object Parsers { else DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) else if (isStatSep || isStatSeqEnd) && parentsIsType && !newSyntaxAllowed then + // old-style abstract given if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else + // structural instance val vparamss1 = vparamss.nestedMap: vparam => if vparam.mods.is(Private) then vparam.withMods(vparam.mods &~ PrivateLocal | Protected) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 22370e923a4b..c6ad1bb860e8 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -476,9 +476,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) sym.addAnnotation(Annotation(defn.SourceFileAnnot, Literal(Constants.Constant(relativePath)), tree.span)) else - if !sym.is(Param) then - if !sym.owner.isOneOf(AbstractOrTrait) then - Checking.checkGoodBounds(tree.symbol) + if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then + Checking.checkGoodBounds(tree.symbol) + // Delete all context bound companions of this TypeDef if sym.owner.isClass && sym.hasAnnotation(defn.WitnessNamesAnnot) then val decls = sym.owner.info.decls for cbCompanion <- decls.lookupAll(sym.name.toTermName) do diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index b69d9f76852a..0588e27ea54f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -296,13 +296,13 @@ class Namer { typer: Typer => createOrRefine[Symbol](tree, name, flags, ctx.owner, _ => info, (fs, _, pwithin) => newSymbol(ctx.owner, name, fs, info, pwithin, tree.nameSpan)) case tree: Import => - recordSym(newImportSym(tree), tree) + recordSym(importSymbol(tree), tree) case _ => NoSymbol } } - private def newImportSym(imp: Import)(using Context): Symbol = + private def importSymbol(imp: Import)(using Context): Symbol = newImportSymbol(ctx.owner, Completer(imp)(ctx), imp.span) /** If `sym` exists, 
enter it in effective scope. Check that @@ -719,7 +719,7 @@ class Namer { typer: Typer => */ def expandTopLevel(stats: List[Tree])(using Context): Unit = stats match case (imp @ Import(qual, _)) :: stats1 if untpd.languageImport(qual).isDefined => - expandTopLevel(stats1)(using ctx.importContext(imp, newImportSym(imp))) + expandTopLevel(stats1)(using ctx.importContext(imp, importSymbol(imp))) case stat :: stats1 => expand(stat) expandTopLevel(stats1) @@ -1624,7 +1624,8 @@ class Namer { typer: Typer => } /** Enter all parent refinements as public class members, unless a definition - * with the same name already exists in the class. + * with the same name already exists in the class. Remember the refining symbols + * as an attachment on the ClassDef tree. */ def enterParentRefinementSyms(refinements: List[(Name, Type)]) = val refinedSyms = mutable.ListBuffer[Symbol]() @@ -1852,19 +1853,20 @@ class Namer { typer: Typer => // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR + // A map from context-bounded type parameters to associated evidence parameter names val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then for params <- ddef.paramss; case tdef: TypeDef <- params do for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do witnessNamesOfParam(tdef) = ws - /** Are all names in `wnames` defined by the longest prefix of all `params` + /** Is each name in `wnames` defined spmewhere in the longest prefix of all `params` * that have been typed ahead (i.e. that carry the TypedAhead attachment)? */ def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty - /** Enter and typecheck parameter list, add context companions as. + /** Enter and typecheck parameter list. * Once all witness parameters for a context bound are seen, create a * context bound companion for it. 
*/ @@ -1909,7 +1911,9 @@ class Namer { typer: Typer => val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) - /** We add `tracked` to context bound witnesses that have abstract type members */ + /** Under x.modularity, we add `tracked` to context bound witnesses + * that have abstract type members + */ def needsTracked(sym: Symbol, param: ValDef)(using Context) = !sym.is(Tracked) && param.hasAttachment(ContextBoundParam) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index bb1d5ac71269..ecf1da30cac1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -776,6 +776,7 @@ object ProtoTypes { TypeComparer.addToConstraint(added, tvars) val singletonRefs = preciseConstrainedRefs(added, singletonOnly = true) for paramRef <- added.paramRefs do + // Constrain all type parameters [T: Singleton] to T <: Singleton if singletonRefs.contains(paramRef) then paramRef <:< defn.SingletonType (added, tvars) end constrained diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 266b69d029c1..cb1aea27c444 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -552,7 +552,11 @@ object RefChecks { overrideError("is an extension method, cannot override a normal method") else if (other.is(ExtensionMethod) && !member.is(ExtensionMethod)) // (1.3) overrideError("is a normal method, cannot override an extension method") - else if (!other.is(Deferred) || other.isAllOf(Given | HasDefault)) + else if (!other.is(Deferred) + || other.isAllOf(Given | HasDefault) + // deferred givens have flags Given, HasDefault and Deferred set. These + // need to be checked for overriding as if they were concrete members + ) && !member.is(Deferred) && !other.name.is(DefaultGetterName) && !member.isAnyOverride @@ -626,7 +630,7 @@ object RefChecks { else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then overrideError("has different occurrences of `into` modifiers", compareTypes = true) else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) - && !member.is(Tracked) + && !member.is(Tracked) // see remark on tracked members above then // (1.12) report.errorOrMigrationWarning( em"cannot override val parameter ${other.showLocated}", diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a2291d55bac8..2eeccb6e477d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -183,7 +183,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Overridden in derived typers def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel) - // Overridden to do nothing in derived typers + /** Apply given migration. Overridden to use `disabled` instead in ReTypers. */ protected def migrate[T](migration: => T, disabled: => T = ()): T = migration /** Find the type of an identifier with given `name` in given context `ctx`. 
@@ -869,7 +869,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer type Alts = List[(/*prev: */Tree, /*prevState: */TyperState, /*prevWitness: */TermRef)] /** Compare two alternative selections `alt1` and `alt2` from witness types - * `wit1`, `wit2` according to the 3 criteria in the enclosing doc comment. I.e. + * `wit1`, `wit2` according to the 3 criteria in Step 3 of the doc comment + * of annotation.internal.WitnessNames. I.e. * * alt1 = qual1.m, alt2 = qual2.m, qual1: wit1, qual2: wit2 * @@ -887,13 +888,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case (tp1: TermRef, tp2: TermRef) => if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 else compare(tp1, tp2, preferGeneral = false) - case (tp1: TermRef, _) => 1 + case (tp1: TermRef, _) => 1 // should not happen, but prefer TermRefs over othersver others case (_, tp2: TermRef) => -1 case _ => 0 - /** Find the set of maximally preferred alternative among `prev` and the - * remaining alternatives generated from `witnesses` with is a union type - * of witness references. + /** Find the set of maximally preferred alternatives among `prevs` and + * alternatives referred to by `witnesses`. + * @param prevs a list of (ref tree, typer state, term ref) tripls that + * represents previously identified alternatives + * @param witnesses a type of the form ref_1 | ... | ref_n containing references + * still to be considered. */ def tryAlts(prevs: Alts, witnesses: Type): Alts = witnesses match case OrType(wit1, wit2) => @@ -905,10 +909,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def current = (alt, altCtx.typerState, witness) if altCtx.reporter.hasErrors then prevs else - val cmps = prevs.map: (prevTree, prevState, prevWitness) => + val comparisons = prevs.map: (prevTree, prevState, prevWitness) => compareAlts(prevTree, alt, prevWitness, witness) - if cmps.exists(_ == 1) then prevs - else current :: prevs.zip(cmps).collect{ case (prev, cmp) if cmp != -1 => prev } + if comparisons.exists(_ == 1) then prevs + else current :: prevs.zip(comparisons).collect{ case (prev, cmp) if cmp != -1 => prev } qual.tpe.widen match case AppliedType(_, arg :: Nil) => @@ -2370,9 +2374,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else + def selfNote = + if Feature.enabled(modularity) then + " and\ndoes not have an abstract type member named `Self` either" + else "" errorTree(tree, - em"""Illegal context bound: ${tycon.tpe} does not take type parameters and - |does not have an abstract type member named `Self` either.""") + em"Illegal context bound: ${tycon.tpe} does not take type parameters$selfNote.") def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) @@ -2605,7 +2612,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var name = tree.name if (name == nme.WILDCARD && tree.mods.is(Given)) { val Typed(_, tpt) = tree.body: @unchecked - name = desugar.inventGivenOrExtensionName(tpt) + name = desugar.inventGivenName(tpt) } if (name == nme.WILDCARD) body1 else { @@ -2725,6 +2732,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if filters == List(MessageFilter.None) then sup.markUsed() ctx.run.nn.suppressions.addSuppression(sup) + /** Run `typed` on `rhs` except if `rhs` is the right hand side of a 
deferred given, + * in which case the empty tree is returned. + */ + private inline def excludeDeferredGiven( + rhs: untpd.Tree, sym: Symbol)( + inline typed: untpd.Tree => Tree)(using Context): Tree = + rhs match + case rhs: RefTree + if rhs.name == nme.deferred && sym.isAllOf(DeferredGivenFlags, butNot = Param) => + EmptyTree + case _ => + typed(rhs) + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) @@ -2732,15 +2752,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.is(Implicit) then checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) val tpt1 = checkSimpleKinded(typedType(tpt)) - val rhs1 = vdef.rhs match { + val rhs1 = vdef.rhs match case rhs @ Ident(nme.WILDCARD) => rhs.withType(tpt1.tpe) - case rhs: RefTree - if rhs.name == nme.deferred && sym.isAllOf(DeferredGivenFlags, butNot = Param) => - EmptyTree case rhs => - typedExpr(rhs, tpt1.tpe.widenExpr) - } + excludeDeferredGiven(rhs, sym): + typedExpr(_, tpt1.tpe.widenExpr) val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) postProcessInfo(vdef1, sym) vdef1.setDefTree @@ -2800,13 +2817,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.isInlineMethod then rhsCtx.addMode(Mode.InlineableBody) if sym.is(ExtensionMethod) then rhsCtx.addMode(Mode.InExtensionMethod) - val rhs1 = ddef.rhs match - case Ident(nme.deferred) if sym.isAllOf(DeferredGivenFlags) => - EmptyTree - case rhs => - PrepareInlineable.dropInlineIfError(sym, - if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) - else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) + val rhs1 = excludeDeferredGiven(ddef.rhs, sym): rhs => + PrepareInlineable.dropInlineIfError(sym, + if sym.isScala2Macro then typedScala2MacroBody(rhs)(using rhsCtx) + else typedExpr(rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then if StagingLevel.level > 0 then diff --git a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala index f859cda96d06..80b8fea4a84d 100644 --- a/library/src/scala/annotation/internal/WitnessNames.scala +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -36,7 +36,7 @@ package internal * 2. The underlying type (under widen) of ref_i is a true supertype of the * underlying type of ref_j. * 3. ref_i.m is a term, the underlying type of ref_j is not a strict subtype - * of the underlying type of ref_j, and the underlying type ref_i.m is a + * of the underlying type of ref_i, and the underlying type ref_i.m is a * strict subtype of the underlying type of ref_j.m. * * If there is such a selection, map A.m to ref_i.m, otherwise report an error. @@ -48,6 +48,7 @@ package internal * * 4. At PostTyper, issue an error when encountering any reference to a CB companion. */ +@experimental class WitnessNames(names: String*) extends StaticAnnotation diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index be76941a680b..a3896a1eeb06 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -1,7 +1,7 @@ package scala package compiletime -import annotation.compileTimeOnly +import annotation.{compileTimeOnly, experimental} /** Use this method when you have a type, do not have a value for it but want to * pattern match on it. 
For example, given a type `Tup <: Tuple`, one can @@ -52,6 +52,7 @@ def uninitialized: Nothing = ??? * that implement the enclosing trait and that do not contain an explicit overriding * definition of that given. */ +@experimental @compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait") def deferred: Nothing = ??? diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 6c286f322ba7..77b014b80466 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -77,6 +77,7 @@ object Predef: * * which is what is needed for a context bound `[A: TC]`. */ + @experimental infix type is[A <: AnyKind, B <: Any{type Self <: AnyKind}] = B { type Self = A } end Predef diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index eded51f70f31..55806fa5ca1b 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -1,8 +1,4 @@ --- [E040] Syntax Error: tests/neg/i12348.scala:2:16 -------------------------------------------------------------------- -2 | given inline x: Int = 0 // error // error - | ^ - | an identifier expected, but ':' found --- [E067] Syntax Error: tests/neg/i12348.scala:2:8 --------------------------------------------------------------------- -2 | given inline x: Int = 0 // error // error - | ^ - |Declaration of given instance given_x_inline_ not allowed here: only classes can have declared but undefined members +-- [E040] Syntax Error: tests/neg/i12348.scala:2:15 -------------------------------------------------------------------- +2 | given inline x: Int = 0 // error + | ^ + | 'with' expected, but identifier found diff --git a/tests/neg/i12348.scala b/tests/neg/i12348.scala index 43daf9a2801b..bd8bf63994e6 100644 --- a/tests/neg/i12348.scala +++ b/tests/neg/i12348.scala @@ -1,2 +1,2 @@ object A { - given inline x: Int = 0 // error // error + given inline x: Int = 0 // error diff --git a/tests/pos/typeclasses-this.scala b/tests/pos/typeclasses-this.scala index 20ce78678b22..33ccb8d9d653 100644 --- a/tests/pos/typeclasses-this.scala +++ b/tests/pos/typeclasses-this.scala @@ -36,7 +36,7 @@ end Common object Instances extends Common: - given intOrd: Int is Ord with + given intOrd: (Int is Ord) with extension (x: Int) def compareTo(y: Int) = if x < y then -1 @@ -44,7 +44,7 @@ object Instances extends Common: else 0 // given [T](using tracked val ev: Ord { type Self = T}): Ord { type Self = List[T] } with - given [T: Ord]: List[T] is Ord with + given [T: Ord]: (List[T] is Ord) with extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -53,7 +53,7 @@ object Instances extends Common: val fst = x.compareTo(y) if (fst != 0) fst else xs1.compareTo(ys1) - given listMonad: List is Monad with + given listMonad: (List is Monad) with extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = @@ -61,7 +61,7 @@ object Instances extends Common: type Reader[Ctx] = [X] =>> Ctx => X - given readerMonad[Ctx]: Reader[Ctx] is Monad with + given readerMonad[Ctx]: (Reader[Ctx] is Monad) with extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -83,7 +83,7 @@ object Instances extends Common: def maximum[T: Ord](xs: List[T]): T = xs.reduce(_ `max` _) - given descending[T: Ord]: T is Ord with + given descending[T: Ord]: (T is Ord) 
with extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) def minimum[T: Ord](xs: List[T]) = diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index df35bed19360..9a01e711537b 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -80,8 +80,11 @@ val experimentalDefinitionInLibrary = Set( "scala.NamedTupleDecomposition", "scala.NamedTupleDecomposition$", - // New feature: Precise trait + // New feature: modularity "scala.Precise", + "scala.annotation.internal.WitnessNames", + "scala.compiletime.package$package$.deferred", + "scala.Predef$.is", ) From d3e6a952d4e3914d8f7cfc1054f6ddbeab9b33c5 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 16:07:04 +0200 Subject: [PATCH 038/827] Fix rebase breakage --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index fd4c634801be..c3369ac58e31 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1880,7 +1880,7 @@ trait Applications extends Compatibility { val tp1p = prepare(tp1) val tp2p = prepare(tp2) - if Feature.sourceVersion.isAtMost(SourceVersion.`3.5`) + if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) || oldResolution || !alt1isGiven && !alt2isGiven then diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 9a01e711537b..7079c7320ba0 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -84,7 +84,7 @@ val experimentalDefinitionInLibrary = Set( "scala.Precise", "scala.annotation.internal.WitnessNames", "scala.compiletime.package$package$.deferred", - "scala.Predef$.is", + "scala.runtime.stdLibPatches.Predef$.is", ) From b2f0791a0ac337474fdd223085f8da6ee03ac01e Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 16:07:33 +0200 Subject: [PATCH 039/827] Make Singleton an erased class only under modularity import --- compiler/src/dotty/tools/dotc/core/TypeUtils.scala | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index dd881bb1adf6..afc2cc39f9cf 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -8,6 +8,7 @@ import Names.{Name, TermName} import Constants.Constant import Names.Name +import config.Feature class TypeUtils: /** A decorator that provides methods on types @@ -22,7 +23,11 @@ class TypeUtils: self.classSymbol.isPrimitiveValueClass def isErasedClass(using Context): Boolean = - self.underlyingClassRef(refinementOK = true).typeSymbol.is(Flags.Erased) + val cls = self.underlyingClassRef(refinementOK = true).typeSymbol + cls.is(Flags.Erased) + && (cls != defn.SingletonClass || Feature.enabled(Feature.modularity)) + // Singleton counts as an erased class only under x.modularity + /** Is this type a checked exception? This is the case if the type * derives from Exception but not from RuntimeException. 
According to @@ -179,7 +184,7 @@ class TypeUtils: def isThisTypeOf(cls: Symbol)(using Context) = self match case self: Types.ThisType => self.cls == cls case _ => false - + /** Strip all outer refinements off this type */ def stripRefinement: Type = self match case self: RefinedOrRecType => self.parent.stripRefinement From 4f28d8a892b835a2598e10a7af48b05ed5a19e32 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 19:07:52 +0200 Subject: [PATCH 040/827] Address review comments --- .../src/dotty/tools/dotc/ast/Desugar.scala | 28 ++++++++++--------- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 24 ++++++---------- .../src/dotty/tools/dotc/core/Flags.scala | 2 +- .../src/dotty/tools/dotc/core/NamerOps.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 18 ++++++------ .../dotty/tools/dotc/parsing/Parsers.scala | 7 +++-- .../src/dotty/tools/dotc/typer/Namer.scala | 4 +-- .../src/dotty/tools/dotc/typer/Typer.scala | 4 +-- docs/_docs/internals/syntax.md | 2 +- .../annotation/internal/WitnessNames.scala | 4 +-- 10 files changed, 46 insertions(+), 49 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 0681492a4ba7..b1b771bc7512 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -234,7 +234,7 @@ object desugar { * of the same method. * @param evidenceFlags The flags to use for evidence definitions * @param freshName A function to generate fresh names for evidence definitions - * @param allParams If `tdef` is a type paramter, all parameters of the owning method, + * @param allParamss If `tdef` is a type paramter, all parameters of the owning method, * otherwise the empty list. */ private def desugarContextBounds( @@ -246,29 +246,31 @@ object desugar { val evidenceNames = mutable.ListBuffer[TermName]() - def desugarRhs(rhs: Tree): Tree = rhs match - case ContextBounds(tbounds, cxbounds) => + def desugarRHS(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, ctxbounds) => val isMember = evidenceFlags.isAllOf(DeferredGivenFlags) - for bound <- cxbounds do + for bound <- ctxbounds do val evidenceName = bound match case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => ownName // if there is an explicitly given name, use it. - case _ if Config.nameSingleContextBounds && !isMember - && cxbounds.tail.isEmpty && Feature.enabled(Feature.modularity) => - tdef.name.toTermName case _ => - freshName(bound) + if Config.nameSingleContextBounds + && !isMember + && ctxbounds.tail.isEmpty + && Feature.enabled(Feature.modularity) + then tdef.name.toTermName + else freshName(bound) evidenceNames += evidenceName val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(evidenceFlags) evidenceParam.pushAttachment(ContextBoundParam, ()) evidenceBuf += evidenceParam tbounds case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarRhs(body)) + cpy.LambdaTypeTree(rhs)(tparams, desugarRHS(body)) case _ => rhs - val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRhs(tdef.rhs)) + val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRHS(tdef.rhs)) // Under x.modularity, if there was a context bound, and `tdef`s name as a term name is // neither a name of an existing parameter nor a name of generated evidence for // the same method, add a WitnessAnnotation with all generated evidence names to `tdef`. 
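// Illustration: a small sketch of the evidence-naming cases handled above,
// mirroring f1/f2 of tests/pos/cbproxy-expansion.scala added later in this
// series; it assumes a Self-based type class `TC` and the
// `experimental.modularity` language import.

//> using options -language:experimental.modularity -source future

trait TC[T]:
  type Self

// Explicit name: `as tc` is the ownName carried by ContextBoundTypeTree, so the
// evidence parameter is called `tc` and can be referred to directly.
def f[S, T: TC[S] as tc](x: S, y: tc.Self) = ()

// No explicit name: a fresh evidence name (e.g. `evidence$1`) is invented, and
// the WitnessNames annotation added above makes the proxy `T` available, so
// `T.Self` still resolves.
def g[S, T: TC[S]](x: S, y: T.Self) = ()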
@@ -695,10 +697,10 @@ object desugar { case _ => false } - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + /** Is this a repeated argument x* (using a spread operator)? */ + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match case PostfixOp(_, Ident(tpnme.raw.STAR)) => true case _ => false - } def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { val targs = for (tparam <- tparams) yield { @@ -1218,7 +1220,7 @@ object desugar { /** Extract a synthesized given name from a type tree. This is used for * both anonymous givens and (under x.modularity) deferred givens. - * @param followArgs If true include argument types in the name + * @param followArgs if true include argument types in the name */ private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { private def extractArgs(args: List[Tree])(using Context): String = diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 11fb572b66c6..97de434ba9d5 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -394,22 +394,16 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => )) def unapply(tree: Tree)(using Context): Option[List[TermName]] = - def isWitnessNames(tp: Type) = tp match - case tp: TypeRef => - tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot - case _ => - false unsplice(tree) match - case Apply( - Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), - SeqLiteral(elems, _) :: Nil - ) if isWitnessNames(tpt.tpe) => - Some: - elems.map: - case Literal(Constant(str: String)) => - ContextBoundParamName.unmangle(str.toTermName.asSimpleName) - case _ => - None + case Apply(Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), SeqLiteral(elems, _) :: Nil) => + tpt.tpe match + case tp: TypeRef if tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot => + Some: + elems.map: + case Literal(Constant(str: String)) => + ContextBoundParamName.unmangle(str.toTermName.asSimpleName) + case _ => None + case _ => None end WitnessNamesAnnot } diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index e17834d61fdc..b1bf7a266c91 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -573,7 +573,7 @@ object Flags { val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without right-hand sides val DeferredOrTypeParam: FlagSet = Deferred | TypeParam // type symbols without right-hand sides - val DeferredGivenFlags = Deferred | Given | HasDefault + val DeferredGivenFlags: FlagSet = Deferred | Given | HasDefault val EnumValue: FlagSet = Enum | StableRealizable // A Scala enum value val FinalOrInline: FlagSet = Final | Inline val FinalOrModuleClass: FlagSet = Final | ModuleClass // A module class or a final class diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 5e76b09bbde6..07cb9292baa4 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -262,7 +262,7 @@ object NamerOps: * bound that defines a set of witnesses with names `witnessNames`. 
* * @param params If `tsym` is a type parameter, a list of parameter symbols - * that include all witnesses, otherwise the empty list. + * that includes all witnesses, otherwise the empty list. * * The context-bound companion has as name the name of `tsym` translated to * a term name. We create a synthetic val of the form diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index a92893678a17..eeffc41d4159 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1655,7 +1655,7 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Analogously, `P { val x: S} # x` is reduced tp `S` if `S` + * to just U. Analogously, `P { val x: S} # x` is reduced to `S` if `S` * is a singleton type. * * Does not perform the reduction if the resulting type would contain @@ -5050,14 +5050,14 @@ object Types extends TypeUtils { * or if it has as an upper bound a precise TypeVar. */ def isPrecise(using Context) = - precise - || { - val constr = ctx.typerState.constraint - constr.upper(origin).exists: tparam => - constr.typeVarOfParam(tparam) match - case tvar: TypeVar => tvar.precise - case _ => false - } + precise || hasPreciseUpperBound + + private def hasPreciseUpperBound(using Context) = + val constr = ctx.typerState.constraint + constr.upper(origin).exists: tparam => + constr.typeVarOfParam(tparam) match + case tvar: TypeVar => tvar.precise + case _ => false /** The policy used for widening singletons or unions when instantiating * this variable in the current context. diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index fe23d97d58c3..e28ba5fd669e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3552,9 +3552,10 @@ object Parsers { !impliedMods.is(Given) || startParamTokens.contains(in.token) || isIdent - && (in.name == nme.inline - || in.name == nme.tracked && in.featureEnabled(Feature.modularity) - || in.lookahead.isColon) + && (in.name == nme.inline // inline starts a name binding + || in.name == nme.tracked // tracked starts a name binding under x.modularity + && in.featureEnabled(Feature.modularity) + || in.lookahead.isColon) // a following `:` starts a name binding (mods, paramsAreNamed) val params = if paramsAreNamed then commaSeparated(() => param()) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 0588e27ea54f..83964417a6f1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1860,7 +1860,7 @@ class Namer { typer: Typer => for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do witnessNamesOfParam(tdef) = ws - /** Is each name in `wnames` defined spmewhere in the longest prefix of all `params` + /** Is each name in `wnames` defined somewhere in the longest prefix of all `params` * that have been typed ahead (i.e. that carry the TypedAhead attachment)? 
*/ def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = @@ -1919,7 +1919,7 @@ class Namer { typer: Typer => && param.hasAttachment(ContextBoundParam) && sym.info.memberNames(abstractTypeNameFilter).nonEmpty - /** Set every context bound evidence parameter of a class to be tracked, + /** Under x.modularity, set every context bound evidence parameter of a class to be tracked, * provided it has a type that has an abstract type member. Reset private and local flags * so that the parameter becomes a `val`. */ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2eeccb6e477d..2a69c948baae 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -888,7 +888,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case (tp1: TermRef, tp2: TermRef) => if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 else compare(tp1, tp2, preferGeneral = false) - case (tp1: TermRef, _) => 1 // should not happen, but prefer TermRefs over othersver others + case (tp1: TermRef, _) => 1 // should not happen, but prefer TermRefs over others case (_, tp2: TermRef) => -1 case _ => 0 @@ -4588,7 +4588,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = true) + val tycon = ctorResultType.underlyingClassRef(refinementOK = Feature.enabled(modularity)) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 05f89a344148..dd4a3af403ab 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -191,7 +191,7 @@ MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) -AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) +AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) SimpleType ::= SimpleLiteral SingletonTypeTree(l) | ‘?’ TypeBounds diff --git a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala index 80b8fea4a84d..3921c2083617 100644 --- a/library/src/scala/annotation/internal/WitnessNames.scala +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -11,7 +11,7 @@ package internal * * 2. During Namer or Unpickling, when encountering a type declaration A with * a WitnessNames(n_1, ... , n_k) annotation, create a CB companion `val A` with - * rtype ``[ref_1 | ... | ref_k] where ref_i is a TermRef + * type ``[ref_1 | ... | ref_k] where ref_i is a TermRef * with the same prefix as A and name n_i. Except, don't do this if the type in * question is a type parameter and there is already a term parameter with name A * defined for the same method. @@ -20,7 +20,7 @@ package internal * * type ``[-Refs] * - * The context bound companion's variance is negative, so that unons in the + * The context bound companion's variance is negative, so that unions in the * arguments are joined when encountering multiple definfitions and forming a glb. * * 3. 
Add a special case for typing a selection A.m on a value A of type From 0dddcb7fb9511acf8e8ca676c95768d8b445d7bd Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 May 2024 20:33:48 +0200 Subject: [PATCH 041/827] Adress review comments with changed docs and new tests --- .../reference/experimental/typeclasses.md | 5 +--- tests/neg/deferred-givens-2.check | 12 ++++++++++ tests/neg/deferred-givens-2.scala | 23 +++++++++++++++++++ tests/pending/pos/cbproxy-default.scala | 4 ++++ tests/pending/pos/singleton-infer.scala | 8 +++++++ tests/pos/cbproxy-expansion.scala | 16 +++++++++++++ 6 files changed, 64 insertions(+), 4 deletions(-) create mode 100644 tests/neg/deferred-givens-2.check create mode 100644 tests/neg/deferred-givens-2.scala create mode 100644 tests/pending/pos/cbproxy-default.scala create mode 100644 tests/pending/pos/singleton-infer.scala create mode 100644 tests/pos/cbproxy-expansion.scala diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index cf5f3220faa6..a78e764bbe7d 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -1,12 +1,9 @@ - --- layout: doc-page -title: "Type Classes" +title: "Better Support for Type Classes" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html --- -# Some Proposed Changes for Better Support of Type Classes - Martin Odersky, 8.1.2024, edited 5.4.2024 A type class in Scala is a pattern where we define diff --git a/tests/neg/deferred-givens-2.check b/tests/neg/deferred-givens-2.check new file mode 100644 index 000000000000..4a29141cc48b --- /dev/null +++ b/tests/neg/deferred-givens-2.check @@ -0,0 +1,12 @@ +-- [E172] Type Error: tests/neg/deferred-givens-2.scala:17:6 ----------------------------------------------------------- +17 |class SortedIntWrong1 extends Sorted: // error + |^ + |No given instance of type Ord{type Self = SortedIntWrong1.this.Element} was found for inferring the implementation of the deferred given instance given_Ord_Element in trait Sorted +18 | type Element = Int +19 | override given (Element is Ord)() +-- [E172] Type Error: tests/neg/deferred-givens-2.scala:21:6 ----------------------------------------------------------- +21 |class SortedIntWrong2 extends Sorted: // error + |^ + |No given instance of type Ord{type Self = SortedIntWrong2.this.Element} was found for inferring the implementation of the deferred given instance given_Ord_Element in trait Sorted +22 | type Element = Int +23 | override given (Int is Ord)() diff --git a/tests/neg/deferred-givens-2.scala b/tests/neg/deferred-givens-2.scala new file mode 100644 index 000000000000..4e75ceb08728 --- /dev/null +++ b/tests/neg/deferred-givens-2.scala @@ -0,0 +1,23 @@ +//> using options -language:experimental.modularity -source future +trait Ord: + type Self + +trait Sorted: + type Element: Ord + +object Scoped: + given (Int is Ord)() + class SortedIntCorrect extends Sorted: + type Element = Int + +class SortedIntCorrect2 extends Sorted: + type Element = Int + override given (Int is Ord)() as given_Ord_Element + +class SortedIntWrong1 extends Sorted: // error + type Element = Int + override given (Element is Ord)() + +class SortedIntWrong2 extends Sorted: // error + type Element = Int + override given (Int is Ord)() \ No newline at end of file diff --git a/tests/pending/pos/cbproxy-default.scala b/tests/pending/pos/cbproxy-default.scala new file mode 100644 index 000000000000..e8f12ceeae75 --- /dev/null +++ 
b/tests/pending/pos/cbproxy-default.scala @@ -0,0 +1,4 @@ +def f[S: Monad]( + initial: S.Self = S.unit // error +) = + S.unit // works \ No newline at end of file diff --git a/tests/pending/pos/singleton-infer.scala b/tests/pending/pos/singleton-infer.scala new file mode 100644 index 000000000000..72e00baf3aab --- /dev/null +++ b/tests/pending/pos/singleton-infer.scala @@ -0,0 +1,8 @@ +//> using options -Xprint:typer -language:experimental.modularity -source future + +def f1[S, T <: S : Singleton](x: S) = () +def f2[S, T >: S : Singleton](x: S) = () + +def Test = + f1(42) // f1[Int, Singleton & Int] // should infer (42 : Int) or throw an error? + f2(42) // f2[(42 : Int), (42 : Int)] \ No newline at end of file diff --git a/tests/pos/cbproxy-expansion.scala b/tests/pos/cbproxy-expansion.scala new file mode 100644 index 000000000000..ee145b62d4ed --- /dev/null +++ b/tests/pos/cbproxy-expansion.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +trait TC[T]: + type Self + +def f1[S, T: TC[S] as tc](x: S, y: tc.Self) = () +def f2[S, T: TC[S]](x: S, y: T.Self) = () +def f3[S, T: TC[S]](x: S, y: Int) = () + +given TC[String] with + type Self = Int + def unit = 42 + +def main = + f1("hello", 23) + f2("hello", 23) + f3("hello", 23) From 62e0244d0f77b4b9158da20d5a252e24e51e5db2 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 7 May 2024 12:43:03 +0200 Subject: [PATCH 042/827] Update warn check files Error number changed --- tests/warn/i16723.check | 2 +- tests/warn/i16723a.check | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/warn/i16723.check b/tests/warn/i16723.check index ed8e55502a80..6d55fa0a89d2 100644 --- a/tests/warn/i16723.check +++ b/tests/warn/i16723.check @@ -1,4 +1,4 @@ --- [E195] Potential Issue Warning: tests/warn/i16723.scala:3:2 --------------------------------------------------------- +-- [E197] Potential Issue Warning: tests/warn/i16723.scala:3:2 --------------------------------------------------------- 3 | new Object {} // warn | ^ | New anonymous class definition will be duplicated at each inline site diff --git a/tests/warn/i16723a.check b/tests/warn/i16723a.check index ba4794fac23e..ace11c5af1f9 100644 --- a/tests/warn/i16723a.check +++ b/tests/warn/i16723a.check @@ -1,4 +1,4 @@ --- [E195] Potential Issue Warning: tests/warn/i16723a.scala:5:38 ------------------------------------------------------- +-- [E197] Potential Issue Warning: tests/warn/i16723a.scala:5:38 ------------------------------------------------------- 5 |inline given Converter[Int, String] = new Converter { // warn | ^ | New anonymous class definition will be duplicated at each inline site From 9959f28ab5008d4a8deeb78f3764cec641f439db Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 7 May 2024 13:05:53 +0200 Subject: [PATCH 043/827] Update InlayHints --- .../test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala index e470f492657c..8ce7cdce4382 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala @@ -898,7 +898,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { | import quotes.reflect.* | Type.of[T] match | case '[f] => - | val fr/*: TypeRepr<>*/ = TypeRepr.of[T]/*(using 
evidence$1<<(3:21)>>)*/ + | val fr/*: TypeRepr<>*/ = TypeRepr.of[T]/*(using evidence$1<<(3:23)>>)*/ |""".stripMargin ) From 3c78ada957b8f77f6055ea280e09693f40d0e845 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 7 May 2024 15:15:39 +0200 Subject: [PATCH 044/827] Fix typo --- library/src/scala/runtime/stdLibPatches/language.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 76a3be1579a9..02c4a99bbbcf 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -96,7 +96,7 @@ object language: * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] */ @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") - object namedTupleas + object namedTuples /** Experimental support for new features for better modularity, including * - better tracking of dependencies through classes From c5659933ef58ddbb003ecc30694a9e3e77b20c57 Mon Sep 17 00:00:00 2001 From: Jan Chyb <48855024+jchyb@users.noreply.github.com> Date: Tue, 7 May 2024 18:46:27 +0200 Subject: [PATCH 045/827] Revert "Regression: fix compilation performance on Windows" --- compiler/src/dotty/tools/io/AbstractFile.scala | 6 ++++++ compiler/src/dotty/tools/io/NoAbstractFile.scala | 2 ++ compiler/src/dotty/tools/io/PlainFile.scala | 13 +++++++++++-- compiler/src/dotty/tools/io/VirtualDirectory.scala | 6 ++++++ compiler/src/dotty/tools/io/VirtualFile.scala | 6 ++++++ compiler/src/dotty/tools/io/ZipArchive.scala | 2 ++ 6 files changed, 33 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index ee72297c2a4f..233b1ca8fb62 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -136,6 +136,12 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Does this abstract file represent something which can contain classfiles? */ def isClassContainer: Boolean = isDirectory || (jpath != null && ext.isJarOrZip) + /** Create a file on disk, if one does not exist already. */ + def create(): Unit + + /** Delete the underlying file or directory (recursively). */ + def delete(): Unit + /** Is this abstract file a directory? */ def isDirectory: Boolean diff --git a/compiler/src/dotty/tools/io/NoAbstractFile.scala b/compiler/src/dotty/tools/io/NoAbstractFile.scala index bef045e290a5..13c2c6851d2b 100644 --- a/compiler/src/dotty/tools/io/NoAbstractFile.scala +++ b/compiler/src/dotty/tools/io/NoAbstractFile.scala @@ -17,6 +17,8 @@ import java.io.InputStream object NoAbstractFile extends AbstractFile { def absolute: AbstractFile = this def container: AbstractFile = this + def create(): Unit = ??? + def delete(): Unit = ??? 
def jpath: JPath = null def input: InputStream = null def isDirectory: Boolean = false diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index a6a39d9ff3eb..acef191d3072 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -13,8 +13,9 @@ import java.nio.file.{InvalidPathException, Paths} /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { - override val isDirectory: Boolean = true + override def isDirectory: Boolean = true override def iterator(): Iterator[PlainFile] = givenPath.list.filter(_.exists).map(new PlainFile(_)) + override def delete(): Unit = givenPath.deleteRecursively() } /** This class implements an abstract file backed by a File. @@ -77,7 +78,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { } /** Is this abstract file a directory? */ - val isDirectory: Boolean = givenPath.isDirectory // cached for performance on Windows + def isDirectory: Boolean = givenPath.isDirectory /** Returns the time that this abstract file was last modified. */ def lastModified: Long = givenPath.lastModified.toMillis @@ -112,6 +113,14 @@ class PlainFile(val givenPath: Path) extends AbstractFile { null } + /** Does this abstract file denote an existing file? */ + def create(): Unit = if (!exists) givenPath.createFile() + + /** Delete the underlying file or directory (recursively). */ + def delete(): Unit = + if (givenPath.isFile) givenPath.delete() + else if (givenPath.isDirectory) givenPath.toDirectory.deleteRecursively() + /** Returns a plain file with the given name. It does not * check that it exists. */ diff --git a/compiler/src/dotty/tools/io/VirtualDirectory.scala b/compiler/src/dotty/tools/io/VirtualDirectory.scala index 949f2d0e61dd..157f63a2ac1a 100644 --- a/compiler/src/dotty/tools/io/VirtualDirectory.scala +++ b/compiler/src/dotty/tools/io/VirtualDirectory.scala @@ -34,6 +34,12 @@ extends AbstractFile { override def input: InputStream = sys.error("directories cannot be read") override def output: OutputStream = sys.error("directories cannot be written") + /** Does this abstract file denote an existing file? */ + def create(): Unit = { unsupported() } + + /** Delete the underlying file or directory (recursively). */ + def delete(): Unit = { unsupported() } + /** Returns an abstract file with the given name. It does not * check that it exists. */ diff --git a/compiler/src/dotty/tools/io/VirtualFile.scala b/compiler/src/dotty/tools/io/VirtualFile.scala index 6fb9859503f2..9d290a9b0e6a 100644 --- a/compiler/src/dotty/tools/io/VirtualFile.scala +++ b/compiler/src/dotty/tools/io/VirtualFile.scala @@ -82,6 +82,12 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF Iterator.empty } + /** Does this abstract file denote an existing file? */ + def create(): Unit = unsupported() + + /** Delete the underlying file or directory (recursively). */ + def delete(): Unit = unsupported() + /** * Returns the abstract file in this abstract directory with the * specified name. If there is no such file, returns null. 
The diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index a23bde8faaed..9af935690ffc 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -61,6 +61,8 @@ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) ex def isDirectory: Boolean = true def lookupName(name: String, directory: Boolean): AbstractFile = unsupported() def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() + def create(): Unit = unsupported() + def delete(): Unit = unsupported() def output: OutputStream = unsupported() def container: AbstractFile = unsupported() def absolute: AbstractFile = unsupported() From b1635d4977be6a6f45f4c92a3ea5d76cf480d4aa Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Wed, 8 May 2024 10:27:53 +0200 Subject: [PATCH 046/827] Fix test i20317 --- tests/neg/i20317.scala | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 tests/neg/i20317.scala diff --git a/tests/neg/i20317.scala b/tests/neg/i20317.scala deleted file mode 100644 index e9d8599b9fc1..000000000000 --- a/tests/neg/i20317.scala +++ /dev/null @@ -1,3 +0,0 @@ -type Foo[A] = A - -def foo[A <: Foo[A]]: Unit = () // error // error From 1a235c6719f56e1597241dc38eeda49087b323e8 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 10 Apr 2024 14:13:27 +0200 Subject: [PATCH 047/827] Implement match type amendment: extractors follow aliases and singletons This implements the change proposed in https://github.com/scala/improvement-proposals/pull/84. The added pos test case presents motivating examples, the added neg test cases demonstrate that errors are correctly reported when cycles are present. The potential for cycle is no worse than with the existing extraction logic as demonstrated by the existing test in `tests/neg/mt-deskolemize.scala`. --- .../dotty/tools/dotc/core/TypeComparer.scala | 65 +++++++++++++++++-- tests/neg/mt-deskolemize.scala | 42 ++++++++++++ tests/pos/mt-deskolemize.scala | 55 ++++++++++++++++ 3 files changed, 157 insertions(+), 5 deletions(-) create mode 100644 tests/pos/mt-deskolemize.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index a849d28c81d6..cc7eaecfd9bd 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3518,20 +3518,75 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { false case MatchTypeCasePattern.TypeMemberExtractor(typeMemberName, capture) => + /** Try to remove references to `skolem` from a type in accordance with the spec. + * + * If any reference to `skolem` remains in the result type, + * `refersToSkolem` is set to true. 
+ */ + class DropSkolemMap(skolem: SkolemType) extends TypeMap: + var refersToSkolem = false + def apply(tp: Type): Type = + tp match + case `skolem` => + refersToSkolem = true + tp + case tp: NamedType => + var savedRefersToSkolem = refersToSkolem + refersToSkolem = false + try + val pre1 = apply(tp.prefix) + if refersToSkolem then + tp match + case tp: TermRef => tp.info.widenExpr.dealias match + case info: SingletonType => + refersToSkolem = false + apply(info) + case _ => + tp.derivedSelect(pre1) + case tp: TypeRef => tp.info match + case info: AliasingBounds => + refersToSkolem = false + apply(info.alias) + case _ => + tp.derivedSelect(pre1) + else + tp.derivedSelect(pre1) + finally + refersToSkolem |= savedRefersToSkolem + case tp: LazyRef => + // By default, TypeMap maps LazyRefs lazily. We need to + // force it for `refersToSkolem` to be correctly set. + apply(tp.ref) + case _ => + mapOver(tp) + end DropSkolemMap + /** Try to remove references to `skolem` from `u` in accordance with the spec. + * + * If any reference to `skolem` remains in the result type, return + * NoType instead. + */ + def dropSkolem(u: Type, skolem: SkolemType): Type = + val dmap = DropSkolemMap(skolem) + val res = dmap(u) + if dmap.refersToSkolem then NoType else res + val stableScrut: SingletonType = scrut match case scrut: SingletonType => scrut case _ => SkolemType(scrut) + stableScrut.member(typeMemberName) match case denot: SingleDenotation if denot.exists => val info = denot.info match case alias: AliasingBounds => alias.alias // Extract the alias case ClassInfo(prefix, cls, _, _, _) => prefix.select(cls) // Re-select the class from the prefix case info => info // Notably, RealTypeBounds, which will eventually give a MatchResult.NoInstances - val infoRefersToSkolem = stableScrut.isInstanceOf[SkolemType] && stableScrut.occursIn(info) - val info1 = info match - case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances - case _ if infoRefersToSkolem => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances - case _ => info // We have a match + val info1 = stableScrut match + case skolem: SkolemType => + dropSkolem(info, skolem).orElse: + info match + case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances + case _ => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances + case _ => info rec(capture, info1, variance = 0, scrutIsWidenedAbstract) case _ => false diff --git a/tests/neg/mt-deskolemize.scala b/tests/neg/mt-deskolemize.scala index 0a58d5db7bc4..505e47637ac4 100644 --- a/tests/neg/mt-deskolemize.scala +++ b/tests/neg/mt-deskolemize.scala @@ -14,3 +14,45 @@ class SimpleLoop2 extends Expr: object Test1: val x: ExtractValue[SimpleLoop1] = 1 // error + +trait Description: + type Elem <: Tuple + +class PrimBroken extends Expr: + type Value = Alias + type Alias = Value // error + +class Prim extends Expr: + type Value = BigInt + +class VecExpr[E <: Expr] extends Expr: + type Value = Vector[ExtractValue[E]] + +trait ProdExpr extends Expr: + val description: Description + type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] + + +class MyExpr1 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[Prim], MyExpr2) + +class MyExpr2 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) + +trait Constable[E <: Expr]: + def lit(v: ExtractValue[E]): E +object Constable: + given [E <: Expr]: Constable[E] = ??? 
+ +object Test2: + def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = + summon[Constable[E]].lit(v) + val x0: ExtractValue[Prim] = "" // error + val x1: ExtractValue[PrimBroken] = 1 // error + + val foo: MyExpr2 = new MyExpr2 + val v: foo.Value = (Vector(Vector()), 1) // error: Recursion limit exceeded + val c: MyExpr2 = fromLiteral: + (Vector(Vector()), 1) // error: Recursion limit exceeded diff --git a/tests/pos/mt-deskolemize.scala b/tests/pos/mt-deskolemize.scala new file mode 100644 index 000000000000..34f38289b24d --- /dev/null +++ b/tests/pos/mt-deskolemize.scala @@ -0,0 +1,55 @@ +trait Expr: + type Value + +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[F <: Expr] = F match + case Expr.Of[v] => v +import Expr.ExtractValue + +class Prim extends Expr: + type Value = Alias + type Alias = BigInt + +class VecExpr[E <: Expr] extends Expr: + type Value = Vector[ExtractValue[E]] + +trait Description: + type Elem <: Tuple + +trait ProdExpr extends Expr: + val description: Description + type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] + +class MyExpr1 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[Prim], Prim) + +class MyExpr2 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) + +trait ProdExprAlt[T <: Tuple] extends Expr: + type Value = Tuple.Map[T, [X] =>> ExtractValue[X & Expr]] + +class MyExpr3 extends ProdExprAlt[(Prim, VecExpr[Prim], Prim)] + +trait Constable[E <: Expr]: + def lit(v: ExtractValue[E]): E +object Constable: + given [E <: Expr]: Constable[E] = ??? + +object Test: + def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = + summon[Constable[E]].lit(v) + val a: Prim = fromLiteral(1) + val b: VecExpr[Prim] = fromLiteral(Vector(1)) + val c: MyExpr1 = fromLiteral((Vector(1), 1)) + val d: MyExpr2 = fromLiteral(Vector(Vector((Vector(1), 1))), 2) + val e: MyExpr3 = fromLiteral((1, Vector(1), 1)) + val f: ProdExprAlt[(MyExpr1, VecExpr[MyExpr3])] = fromLiteral: + ( + (Vector(1), 1), + Vector((1, Vector(1), 1), (2, Vector(1), 2)) + ) + val g: Expr { type Alias = Int; type Value = Alias } = fromLiteral(1) From 61b5a7b6a52f32c68a4f3aa8842f6c4850349b87 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Sun, 5 May 2024 18:54:30 +0200 Subject: [PATCH 048/827] Move logic under feature.experimental.betterMatchTypesExtractors This way we can merge this PR without waiting for the SIP committee to approve it. 
--- .../src/dotty/tools/dotc/config/Feature.scala | 3 + .../dotty/tools/dotc/core/TypeComparer.scala | 11 +++- .../runtime/stdLibPatches/language.scala | 7 +++ tests/neg/mt-deskolemize-2.scala | 60 +++++++++++++++++++ tests/neg/mt-deskolemize.scala | 42 ------------- tests/pos/mt-deskolemize.scala | 2 + 6 files changed, 80 insertions(+), 45 deletions(-) create mode 100644 tests/neg/mt-deskolemize-2.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index d2bfdcb550dc..0d551094da4d 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -35,6 +35,7 @@ object Feature: val into = experimental("into") val namedTuples = experimental("namedTuples") val modularity = experimental("modularity") + val betterMatchTypeExtractors = experimental("betterMatchTypeExtractors") def experimentalAutoEnableFeatures(using Context): List[TermName] = defn.languageExperimentalFeatures @@ -89,6 +90,8 @@ object Feature: def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + def betterMatchTypeExtractorsEnabled(using Context) = enabled(betterMatchTypeExtractors) + /** Is pureFunctions enabled for this compilation unit? */ def pureFunsEnabled(using Context) = enabledBySetting(pureFunctions) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index cc7eaecfd9bd..2481f17ffdad 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -10,7 +10,7 @@ import TypeOps.refineUsingParent import collection.mutable import util.{Stats, NoSourcePosition, EqHashMap} import config.Config -import config.Feature.{migrateTo3, sourceVersion} +import config.Feature.{betterMatchTypeExtractorsEnabled, migrateTo3, sourceVersion} import config.Printers.{subtyping, gadts, matchTypes, noPrinter} import config.SourceVersion import TypeErasure.{erasedLub, erasedGlb} @@ -3519,6 +3519,11 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { case MatchTypeCasePattern.TypeMemberExtractor(typeMemberName, capture) => /** Try to remove references to `skolem` from a type in accordance with the spec. + * + * If `betterMatchTypeExtractorsEnabled` is enabled then references + * to `skolem` occuring are avoided by following aliases and + * singletons, otherwise no attempt made to avoid references to + * `skolem`. * * If any reference to `skolem` remains in the result type, * `refersToSkolem` is set to true. @@ -3530,7 +3535,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { case `skolem` => refersToSkolem = true tp - case tp: NamedType => + case tp: NamedType if betterMatchTypeExtractorsEnabled => var savedRefersToSkolem = refersToSkolem refersToSkolem = false try @@ -3553,7 +3558,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { tp.derivedSelect(pre1) finally refersToSkolem |= savedRefersToSkolem - case tp: LazyRef => + case tp: LazyRef if betterMatchTypeExtractorsEnabled => // By default, TypeMap maps LazyRefs lazily. We need to // force it for `refersToSkolem` to be correctly set. 
apply(tp.ref) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 02c4a99bbbcf..1171c62602fb 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -117,6 +117,13 @@ object language: @compileTimeOnly("`relaxedExtensionImports` can only be used at compile time in import statements") @deprecated("The experimental.relaxedExtensionImports language import is no longer needed since the feature is now standard", since = "3.4") object relaxedExtensionImports + + /** Enhance match type extractors to follow aliases and singletons. + * + * @see [[https://github.com/scala/improvement-proposals/pull/84]] + */ + @compileTimeOnly("`betterMatchTypeExtractors` can only be used at compile time in import statements") + object betterMatchTypeExtractors end experimental /** The deprecated object contains features that are no longer officially suypported in Scala. diff --git a/tests/neg/mt-deskolemize-2.scala b/tests/neg/mt-deskolemize-2.scala new file mode 100644 index 000000000000..90d506a42e6f --- /dev/null +++ b/tests/neg/mt-deskolemize-2.scala @@ -0,0 +1,60 @@ +//> using options -language:experimental.betterMatchTypeExtractors + +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[F <: Expr] = F match + case Expr.Of[v] => v +import Expr.ExtractValue + +class SimpleLoop1 extends Expr: + type Value = ExtractValue[SimpleLoop2] + +class SimpleLoop2 extends Expr: + type Value = ExtractValue[SimpleLoop1] + +object Test1: + val x: ExtractValue[SimpleLoop1] = 1 // error + +trait Description: + type Elem <: Tuple + +class PrimBroken extends Expr: + type Value = Alias + type Alias = Value // error + +class Prim extends Expr: + type Value = BigInt + +class VecExpr[E <: Expr] extends Expr: + type Value = Vector[ExtractValue[E]] + +trait ProdExpr extends Expr: + val description: Description + type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] + + +class MyExpr1 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[Prim], MyExpr2) + +class MyExpr2 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) + +trait Constable[E <: Expr]: + def lit(v: ExtractValue[E]): E +object Constable: + given [E <: Expr]: Constable[E] = ??? 
+ +object Test2: + def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = + summon[Constable[E]].lit(v) + val x0: ExtractValue[Prim] = "" // error + val x1: ExtractValue[PrimBroken] = 1 // error + + val foo: MyExpr2 = new MyExpr2 + val v: foo.Value = (Vector(Vector()), 1) // error: Recursion limit exceeded + val c: MyExpr2 = fromLiteral: + (Vector(Vector()), 1) // error: Recursion limit exceeded diff --git a/tests/neg/mt-deskolemize.scala b/tests/neg/mt-deskolemize.scala index 505e47637ac4..0a58d5db7bc4 100644 --- a/tests/neg/mt-deskolemize.scala +++ b/tests/neg/mt-deskolemize.scala @@ -14,45 +14,3 @@ class SimpleLoop2 extends Expr: object Test1: val x: ExtractValue[SimpleLoop1] = 1 // error - -trait Description: - type Elem <: Tuple - -class PrimBroken extends Expr: - type Value = Alias - type Alias = Value // error - -class Prim extends Expr: - type Value = BigInt - -class VecExpr[E <: Expr] extends Expr: - type Value = Vector[ExtractValue[E]] - -trait ProdExpr extends Expr: - val description: Description - type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] - - -class MyExpr1 extends ProdExpr: - final val description = new Description: - type Elem = (VecExpr[Prim], MyExpr2) - -class MyExpr2 extends ProdExpr: - final val description = new Description: - type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) - -trait Constable[E <: Expr]: - def lit(v: ExtractValue[E]): E -object Constable: - given [E <: Expr]: Constable[E] = ??? - -object Test2: - def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = - summon[Constable[E]].lit(v) - val x0: ExtractValue[Prim] = "" // error - val x1: ExtractValue[PrimBroken] = 1 // error - - val foo: MyExpr2 = new MyExpr2 - val v: foo.Value = (Vector(Vector()), 1) // error: Recursion limit exceeded - val c: MyExpr2 = fromLiteral: - (Vector(Vector()), 1) // error: Recursion limit exceeded diff --git a/tests/pos/mt-deskolemize.scala b/tests/pos/mt-deskolemize.scala index 34f38289b24d..abd61d9d55e6 100644 --- a/tests/pos/mt-deskolemize.scala +++ b/tests/pos/mt-deskolemize.scala @@ -1,3 +1,5 @@ +//> using options -language:experimental.betterMatchTypeExtractors + trait Expr: type Value From a1930c4ca38673885a4ebc2ce95689e9e65d08be Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 7 May 2024 12:34:30 +0200 Subject: [PATCH 049/827] DropSkolemMap: simplify logic No need to save the value of `refersToSkolem`: if it's true before we enter `NamedType` it will be true after and `dropSkolem` will return `NoType`. The previous logic could still be useful if we want to give more easily actionable error messages in the future by only keeping in the type the skolems we couldn't remove. 
--- .../dotty/tools/dotc/core/TypeComparer.scala | 41 +++++++++---------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 2481f17ffdad..c2c502a984c4 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3531,33 +3531,30 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { class DropSkolemMap(skolem: SkolemType) extends TypeMap: var refersToSkolem = false def apply(tp: Type): Type = + if refersToSkolem then + return tp tp match case `skolem` => refersToSkolem = true tp case tp: NamedType if betterMatchTypeExtractorsEnabled => - var savedRefersToSkolem = refersToSkolem - refersToSkolem = false - try - val pre1 = apply(tp.prefix) - if refersToSkolem then - tp match - case tp: TermRef => tp.info.widenExpr.dealias match - case info: SingletonType => - refersToSkolem = false - apply(info) - case _ => - tp.derivedSelect(pre1) - case tp: TypeRef => tp.info match - case info: AliasingBounds => - refersToSkolem = false - apply(info.alias) - case _ => - tp.derivedSelect(pre1) - else - tp.derivedSelect(pre1) - finally - refersToSkolem |= savedRefersToSkolem + val pre1 = apply(tp.prefix) + if refersToSkolem then + tp match + case tp: TermRef => tp.info.widenExpr.dealias match + case info: SingletonType => + refersToSkolem = false + apply(info) + case _ => + tp.derivedSelect(pre1) + case tp: TypeRef => tp.info match + case info: AliasingBounds => + refersToSkolem = false + apply(info.alias) + case _ => + tp.derivedSelect(pre1) + else + tp.derivedSelect(pre1) case tp: LazyRef if betterMatchTypeExtractorsEnabled => // By default, TypeMap maps LazyRefs lazily. We need to // force it for `refersToSkolem` to be correctly set. From 1276034e48114b9422ae5c5f1b25708e62517d45 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 8 May 2024 14:57:35 +0200 Subject: [PATCH 050/827] Deprecate `StandardPlugin.init` in favor of `initialize` method taking implicit Context (#20330) We do deprecate `StandardPlugin.init` in favour of `StandardPlugin.initialize` method tak takes additional `Context` parameter - it would e.g. allow to use reporting mechanism when parsing compiler plugin options. 
Introduces changes to akka/akka fork used in Community Build --- community-build/community-projects/akka | 2 +- .../src/dotty/tools/dotc/plugins/Plugin.scala | 16 +++++++++++++++- .../src/dotty/tools/dotc/plugins/Plugins.scala | 2 +- .../changed-features/compiler-plugins.md | 4 ++-- .../changed-features/compiler-plugins.md | 4 ++-- .../analyzer-plugin/plugin/Analyzer.scala | 2 +- .../compiler-plugin/plugin/DivideZero.scala | 3 ++- tests/plugins/custom/analyzer/Analyzer_1.scala | 2 +- tests/plugins/neg/divideZero/plugin_1.scala | 2 +- 9 files changed, 26 insertions(+), 11 deletions(-) diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka index 7f5115ebc9cd..79b294048f89 160000 --- a/community-build/community-projects/akka +++ b/community-build/community-projects/akka @@ -1 +1 @@ -Subproject commit 7f5115ebc9cde408433040f11834f5218b4a3357 +Subproject commit 79b294048f893d9d6b9332618f7aebedce9a5340 diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index ce77a5b9d97a..fdb41fc56689 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -13,6 +13,7 @@ import java.io.InputStream import java.util.Properties import scala.util.{ Try, Success, Failure } +import scala.annotation.nowarn trait PluginPhase extends MiniPhase { def runsBefore: Set[String] = Set.empty @@ -50,7 +51,20 @@ trait StandardPlugin extends Plugin { * @param options commandline options to the plugin. * @return a list of phases to be added to the phase plan */ - def init(options: List[String]): List[PluginPhase] + @deprecatedOverriding("Method 'init' does not allow to access 'Context', use 'initialize' instead.", since = "Scala 3.5.0") + @deprecated("Use 'initialize' instead.", since = "Scala 3.5.0") + def init(options: List[String]): List[PluginPhase] = Nil + + /** Non-research plugins should override this method to return the phases + * + * The phases returned must be freshly constructed (not reused + * and returned again on subsequent calls). + * + * @param options commandline options to the plugin. 
+ * @return a list of phases to be added to the phase plan + */ + @nowarn("cat=deprecation") + def initialize(options: List[String])(using Context): List[PluginPhase] = init(options) } /** A research plugin may customize the compilation pipeline freely diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 31176bb2fb2c..a6672d475129 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -125,7 +125,7 @@ trait Plugins { } // schedule plugins according to ordering constraints - val pluginPhases = plugins.collect { case p: StandardPlugin => p }.flatMap { plug => plug.init(options(plug)) } + val pluginPhases = plugins.collect { case p: StandardPlugin => p }.flatMap { plug => plug.initialize(options(plug)) } val updatedPlan = Plugins.schedule(plan, pluginPhases) // add research plugins diff --git a/docs/_docs/reference/changed-features/compiler-plugins.md b/docs/_docs/reference/changed-features/compiler-plugins.md index 6be8a62c7ac4..c0bfccec8172 100644 --- a/docs/_docs/reference/changed-features/compiler-plugins.md +++ b/docs/_docs/reference/changed-features/compiler-plugins.md @@ -67,7 +67,7 @@ class DivideZero extends StandardPlugin: val name: String = "divideZero" override val description: String = "divide zero check" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new DivideZeroPhase) :: Nil class DivideZeroPhase extends PluginPhase: @@ -90,7 +90,7 @@ end DivideZeroPhase ``` The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` -and implement the method `init` that takes the plugin's options as argument +and implement the method `initialize` that takes the plugin's options as argument and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. Our plugin adds one compiler phase to the pipeline. A compiler phase must extend diff --git a/docs/_spec/TODOreference/changed-features/compiler-plugins.md b/docs/_spec/TODOreference/changed-features/compiler-plugins.md index 20bdb7f49836..719e204fc803 100644 --- a/docs/_spec/TODOreference/changed-features/compiler-plugins.md +++ b/docs/_spec/TODOreference/changed-features/compiler-plugins.md @@ -67,7 +67,7 @@ class DivideZero extends StandardPlugin: val name: String = "divideZero" override val description: String = "divide zero check" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new DivideZeroPhase) :: Nil class DivideZeroPhase extends PluginPhase: @@ -90,7 +90,7 @@ end DivideZeroPhase ``` The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` -and implement the method `init` that takes the plugin's options as argument +and implement the method `initialize` that takes the plugin's options as argument and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. Our plugin adds one compiler phase to the pipeline. 
A compiler phase must extend diff --git a/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala b/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala index c1fab5c13f42..01aa57d7a971 100644 --- a/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala +++ b/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala @@ -21,7 +21,7 @@ class InitPlugin extends StandardPlugin { val name: String = "initPlugin" override val description: String = "checks that under -Yretain-trees we may get tree for all symbols" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new SetDefTree) :: (new InitChecker) :: Nil } diff --git a/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala b/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala index c6fac6b796c0..3d1698250e5d 100644 --- a/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala +++ b/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala @@ -22,7 +22,8 @@ class DivideZero extends PluginPhase with StandardPlugin { override val runsAfter = Set(Pickler.name) override val runsBefore = Set(Staging.name) - def init(options: List[String]): List[PluginPhase] = this :: Nil + // We keep using deprecated variant here just to ensure it still works correctly + override def init(options: List[String]): List[PluginPhase] = this :: Nil private def isNumericDivide(sym: Symbol)(implicit ctx: Context): Boolean = { def test(tpe: String): Boolean = diff --git a/tests/plugins/custom/analyzer/Analyzer_1.scala b/tests/plugins/custom/analyzer/Analyzer_1.scala index 0e1cc53290d0..d611972e0e48 100644 --- a/tests/plugins/custom/analyzer/Analyzer_1.scala +++ b/tests/plugins/custom/analyzer/Analyzer_1.scala @@ -52,7 +52,7 @@ class InitChecker extends PluginPhase with StandardPlugin { override val runsAfter = Set(SetDefTree.name) override val runsBefore = Set(FirstTransform.name) - def init(options: List[String]): List[PluginPhase] = this :: (new SetDefTree) :: Nil + override def initialize(options: List[String])(using Context): List[PluginPhase] = this :: (new SetDefTree) :: Nil private def checkDef(tree: Tree)(implicit ctx: Context): Tree = { if (tree.symbol.defTree.isEmpty) diff --git a/tests/plugins/neg/divideZero/plugin_1.scala b/tests/plugins/neg/divideZero/plugin_1.scala index ef8e077fd14d..68b2a8eae478 100644 --- a/tests/plugins/neg/divideZero/plugin_1.scala +++ b/tests/plugins/neg/divideZero/plugin_1.scala @@ -20,7 +20,7 @@ class DivideZero extends PluginPhase with StandardPlugin { override val runsAfter = Set(Pickler.name) override val runsBefore = Set(PickleQuotes.name) - override def init(options: List[String]): List[PluginPhase] = this :: Nil + override def initialize(options: List[String])(using Context): List[PluginPhase] = this :: Nil private def isNumericDivide(sym: Symbol)(implicit ctx: Context): Boolean = { def test(tpe: String): Boolean = From 863077c8ec76d7e3ecc57744b8584ccd8d2c241b Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 8 May 2024 19:48:38 +0200 Subject: [PATCH 051/827] Bring back ambiguity filter when we report an implicit not found error This reverts one part of #20261. When we fail with both an ambiguity on one implicit argument and another error on another argument we prefer the other error. I added a comment why this is needed. 
Fixes #20344 --- .../src/dotty/tools/dotc/typer/Typer.scala | 9 +++++- tests/pos/i20344.scala | 28 +++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i20344.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2a69c948baae..9bf8071ba2f4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4113,7 +4113,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * `SearchFailureType`. */ def issueErrors(fun: Tree, args: List[Tree]): Tree = - def firstFailure = args.tpes.find(_.isInstanceOf[SearchFailureType]).getOrElse(NoType) + // Prefer other errors over ambiguities. If nested in outer searches a missing + // implicit can be healed by simply dropping this alternative and tryng something + // else. But an ambiguity is sticky and propagates outwards. If we have both + // a missing implicit on one argument and an ambiguity on another the whole + // branch should be classified as a missing implicit. + val firstNonAmbiguous = args.tpes.find(tp => tp.isError && !tp.isInstanceOf[AmbiguousImplicits]) + def firstError = args.tpes.find(_.isInstanceOf[SearchFailureType]).getOrElse(NoType) + def firstFailure = firstNonAmbiguous.getOrElse(firstError) val errorType = firstFailure match case tp: AmbiguousImplicits => diff --git a/tests/pos/i20344.scala b/tests/pos/i20344.scala new file mode 100644 index 000000000000..d3b2a060d6e2 --- /dev/null +++ b/tests/pos/i20344.scala @@ -0,0 +1,28 @@ +trait Monad[F[_]] extends Invariant[F] + +trait Invariant[F[_]] +object Invariant: + implicit def catsInstancesForList: Monad[List] = ??? + implicit def catsInstancesForVector: Monad[Vector] = ??? + +trait Shrink[T] +object Shrink extends ShrinkLowPriorityImplicits: + trait Buildable[T,C] + implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T], b: Buildable[T,C[T]]): Shrink[C[T]] = ??? +trait ShrinkLowPriorityImplicits: + implicit def shrinkAny[T]: Shrink[T] = ??? + +trait Distribution[F[_], -P, X] extends (P => F[X]) +type GenBeta[A, B, X] = [F[_]] =>> Distribution[F, Beta.Params[A, B], X] +type Beta[R] = [F[_]] =>> GenBeta[R, R, R][F] + +object Beta: + trait Params[+A, +B] +trait BetaInstances: + given schrodingerRandomBetaForDouble[F[_]: Monad]: Beta[Double][F] = ??? + +object all extends BetaInstances + +@main def Test = + import all.given + summon[Shrink[Beta.Params[Double, Double]]] \ No newline at end of file From 783b7bddc3bbbccb2b8e2611e52e17135a600924 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 8 May 2024 22:51:44 +0200 Subject: [PATCH 052/827] Update compiler/src/dotty/tools/dotc/typer/Typer.scala Co-authored-by: Matt Bovel --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 9bf8071ba2f4..ae50d626cb1f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4114,7 +4114,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer */ def issueErrors(fun: Tree, args: List[Tree]): Tree = // Prefer other errors over ambiguities. If nested in outer searches a missing - // implicit can be healed by simply dropping this alternative and tryng something + // implicit can be healed by simply dropping this alternative and trying something // else. 
But an ambiguity is sticky and propagates outwards. If we have both // a missing implicit on one argument and an ambiguity on another the whole // branch should be classified as a missing implicit. From 455dba4fb5cce2f68f345b7fc3bbc46ac159572f Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Tue, 7 May 2024 18:57:54 +0200 Subject: [PATCH 053/827] Add test --- tests/init-global/warn/ScalaCheck.scala | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 tests/init-global/warn/ScalaCheck.scala diff --git a/tests/init-global/warn/ScalaCheck.scala b/tests/init-global/warn/ScalaCheck.scala new file mode 100644 index 000000000000..0e7036c80a8f --- /dev/null +++ b/tests/init-global/warn/ScalaCheck.scala @@ -0,0 +1,22 @@ +trait CmdLineParser: + outer => + + val a: String + + trait Opt[+T]: + val default: T + val names: Set[String] + val help: String + + trait IntOpt extends Opt[Int]: + println("outer = " + outer) + println("outer.a = " + outer.a) + +object FirstParser extends CmdLineParser: + object OptMinSuccess extends IntOpt: + val default = 100 + val names = Set("bla") + val help = "bla" + + val opts = List(OptMinSuccess) // warn + val a = "FirstParser" From fd61dfa57a3de2189e1bd879a31fe45a9d330c1a Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Tue, 7 May 2024 19:02:48 +0200 Subject: [PATCH 054/827] Remove duplicate test due to races in merge --- tests/init-global/neg/TypeCast.scala | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 tests/init-global/neg/TypeCast.scala diff --git a/tests/init-global/neg/TypeCast.scala b/tests/init-global/neg/TypeCast.scala deleted file mode 100644 index 55447e9df4e2..000000000000 --- a/tests/init-global/neg/TypeCast.scala +++ /dev/null @@ -1,18 +0,0 @@ -object A { - val f: Int = 10 - def m() = f -} -object B { - val f: Int = g() - def g(): Int = f // error -} -object C { - val a: A.type | B.type = if ??? 
then A else B - def cast[T](a: Any): T = a.asInstanceOf[T] - val c: A.type = cast[A.type](a) // abstraction for c is {A, B} - val d = c.f // treat as c.asInstanceOf[owner of f].f - val e = c.m() // treat as c.asInstanceOf[owner of f].m() - val c2: B.type = cast[B.type](a) - val g = c2.f // no error here -} - From 96559761995ed62f9ae11315478d563687b31f26 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Tue, 7 May 2024 19:03:05 +0200 Subject: [PATCH 055/827] Cleanup code --- compiler/src/dotty/tools/dotc/transform/init/Objects.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 52e90c0857ed..bfa684eef8b4 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -29,7 +29,6 @@ import scala.collection.mutable import scala.annotation.tailrec import scala.annotation.constructorOnly import dotty.tools.dotc.core.Flags.AbstractOrTrait -import Decorators.* /** Check initialization safety of static objects * From 67c68b81e0a8c7d53fcbd5011fb6918851eb394a Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Thu, 9 May 2024 06:38:41 +0200 Subject: [PATCH 056/827] Fix error line --- tests/init-global/warn/ScalaCheck.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/init-global/warn/ScalaCheck.scala b/tests/init-global/warn/ScalaCheck.scala index 0e7036c80a8f..574db37e8585 100644 --- a/tests/init-global/warn/ScalaCheck.scala +++ b/tests/init-global/warn/ScalaCheck.scala @@ -13,10 +13,10 @@ trait CmdLineParser: println("outer.a = " + outer.a) object FirstParser extends CmdLineParser: - object OptMinSuccess extends IntOpt: + object OptMinSuccess extends IntOpt: // warn val default = 100 val names = Set("bla") val help = "bla" - val opts = List(OptMinSuccess) // warn + val opts = List(OptMinSuccess) val a = "FirstParser" From 402907fb58d43e7dce9dca17b091b783b8a0a7e8 Mon Sep 17 00:00:00 2001 From: Pascal Weisenburger Date: Tue, 7 May 2024 17:00:18 +0200 Subject: [PATCH 057/827] anonymous functions are not macro dependencies --- .../dotty/tools/dotc/inlines/Inliner.scala | 2 ++ tests/pos-macros/i20353/Macro_1.scala | 22 +++++++++++++++++++ tests/pos-macros/i20353/Test_2.scala | 17 ++++++++++++++ 3 files changed, 41 insertions(+) create mode 100644 tests/pos-macros/i20353/Macro_1.scala create mode 100644 tests/pos-macros/i20353/Test_2.scala diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 7c79e972c126..01b539d04690 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -1104,6 +1104,8 @@ class Inliner(val call: tpd.Tree)(using Context): new TreeAccumulator[List[Symbol]] { override def apply(syms: List[Symbol], tree: tpd.Tree)(using Context): List[Symbol] = tree match { + case Closure(env, meth, tpt) if meth.symbol.isAnonymousFunction => + this(syms, tpt :: env) case tree: RefTree if tree.isTerm && level == -1 && tree.symbol.isDefinedInCurrentRun && !tree.symbol.isLocal => foldOver(tree.symbol :: syms, tree) case _: This if level == -1 && tree.symbol.isDefinedInCurrentRun => diff --git a/tests/pos-macros/i20353/Macro_1.scala b/tests/pos-macros/i20353/Macro_1.scala new file mode 100644 index 000000000000..7f1a914b89c6 --- /dev/null +++ b/tests/pos-macros/i20353/Macro_1.scala @@ -0,0 +1,22 @@ +//> using options -experimental 
-Yno-experimental + +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted.* + +class ImplicitValue + +object ImplicitValue: + inline given ImplicitValue = + ${ makeImplicitValue } + + def makeImplicitValue(using Quotes) = + import quotes.reflect.* + '{ ImplicitValue() } +end ImplicitValue + +@experimental +class Test extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition) = + import quotes.reflect.* + Implicits.search(TypeRepr.of[ImplicitValue]) + List(tree) diff --git a/tests/pos-macros/i20353/Test_2.scala b/tests/pos-macros/i20353/Test_2.scala new file mode 100644 index 000000000000..ebe2bb7af4b3 --- /dev/null +++ b/tests/pos-macros/i20353/Test_2.scala @@ -0,0 +1,17 @@ +//> using options -experimental -Yno-experimental + +class OuterClass: + @Test + class InnerClass + + @Test + object InnerObject +end OuterClass + +object OuterObject: + @Test + class InnerClass + + @Test + object InnerObject +end OuterObject From 9c14610802bb24031401eeca0e7641a0c84cd731 Mon Sep 17 00:00:00 2001 From: Pascal Weisenburger Date: Tue, 7 May 2024 22:07:01 +0200 Subject: [PATCH 058/827] update for #19677 --- tests/pos-macros/i20353/Macro_1.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pos-macros/i20353/Macro_1.scala b/tests/pos-macros/i20353/Macro_1.scala index 7f1a914b89c6..ba1e1620c1d6 100644 --- a/tests/pos-macros/i20353/Macro_1.scala +++ b/tests/pos-macros/i20353/Macro_1.scala @@ -16,7 +16,7 @@ end ImplicitValue @experimental class Test extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition) = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]) = import quotes.reflect.* Implicits.search(TypeRepr.of[ImplicitValue]) - List(tree) + List(definition) From 6195874de19bd7d1fdcd6130ef51d3cb622b7a7f Mon Sep 17 00:00:00 2001 From: Nafer Sanabria Date: Tue, 7 May 2024 14:45:17 -0500 Subject: [PATCH 059/827] Fix duplicate word in comments --- compiler/src/dotty/tools/dotc/ast/tpd.scala | 2 +- compiler/src/dotty/tools/dotc/transform/CheckUnused.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Synthesizer.scala | 2 +- compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala | 2 +- compiler/test/dotty/tools/scripting/BashExitCodeTests.scala | 2 +- .../src/scala/collection/mutable/ArrayBuffer.scala | 2 +- staging/src/scala/quoted/staging/Compiler.scala | 2 +- tests/neg-macros/tasty-macro-error/quoted_1.scala | 2 +- tests/neg-macros/tasty-macro-positions/quoted_1.scala | 4 ++-- tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index faace26de84d..514ac46170e1 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -1274,7 +1274,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless } - /** A tree traverser that generates the the same import contexts as original typer for statements. + /** A tree traverser that generates the same import contexts as original typer for statements. * TODO: Should we align TreeMapWithPreciseStatContexts and also keep track of exprOwners? 
*/ abstract class TreeTraverserWithPreciseImportContexts extends TreeTraverser: diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index bd4ef73d6eea..d420fe78107e 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -624,7 +624,7 @@ object CheckUnused: symbol.name.mangledString.contains("$") /** - * Is the the constructor of synthetic package object + * Is the constructor of synthetic package object * Should be ignored as it is always imported/used in package * Trigger false negative on used import * diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 6b18540b6551..7f6be8f89314 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -536,7 +536,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): else refineAtPrefix(childPre, childClass, childClass.primaryConstructor.info) match case info: PolyType => - // Compute the the full child type by solving the subtype constraint + // Compute the full child type by solving the subtype constraint // `C[X1, ..., Xn] <: P`, where // // - P is the current `mirroredType` diff --git a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala index 44886d59ac12..ac1cbbfb6bb5 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala @@ -26,7 +26,7 @@ import dotty.tools.dotc.util.optional * - `isClosedUnder(x1, .., xn)('{e})` returns true if and only if all the references in `e` to names defined in the pattern are contained in the set `{x1, ... xn}`. * - `lift(x1, .., xn)('{e})` returns `(y1, ..., yn) => [xi = $yi]'{e}` where `yi` is an `Expr` of the type of `xi`. * - `withEnv(x1 -> y1, ..., xn -> yn)(matching)` evaluates matching recording that `xi` is equivalent to `yi`. - * - `matched` denotes that the the match succeeded and `matched('{e})` denotes that a match succeeded and extracts `'{e}` + * - `matched` denotes that the match succeeded and `matched('{e})` denotes that a match succeeded and extracts `'{e}` * - `&&&` matches if both sides match. Concatenates the extracted expressions of both sides. 
* * Note: that not all quoted terms bellow are valid expressions diff --git a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala index cc53447cd64b..9b65522fc549 100644 --- a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala +++ b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala @@ -28,7 +28,7 @@ class BashExitCodeTests: s"expected $expectedExitCode but got $exitCode${pp("out", stdout)}${pp("err", stderr)}" }, expectedExitCode, exitCode) - // Helpers for running scala, scalac, and scalac without the the output directory ("raw") + // Helpers for running scala, scalac, and scalac without the output directory ("raw") def scala(args: String*) = verifyExit(scalaPath, args*) def scalacRaw(args: String*) = verifyExit(scalacPath, args*) def scalac(args: String*) = scalacRaw(("-d" +: tmpDir +: args)*) diff --git a/scala2-library-cc/src/scala/collection/mutable/ArrayBuffer.scala b/scala2-library-cc/src/scala/collection/mutable/ArrayBuffer.scala index 85a045c34423..b47b25f9529f 100644 --- a/scala2-library-cc/src/scala/collection/mutable/ArrayBuffer.scala +++ b/scala2-library-cc/src/scala/collection/mutable/ArrayBuffer.scala @@ -197,7 +197,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) // the previous line // - `copyElemsToArray` will call `System.arraycopy` // - `System.arraycopy` will effectively "read" all the values before - // overwriting any of them when two arrays are the the same reference + // overwriting any of them when two arrays are the same reference val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength") size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy diff --git a/staging/src/scala/quoted/staging/Compiler.scala b/staging/src/scala/quoted/staging/Compiler.scala index b37e8d4f70f2..2cc3aa6555c1 100644 --- a/staging/src/scala/quoted/staging/Compiler.scala +++ b/staging/src/scala/quoted/staging/Compiler.scala @@ -11,7 +11,7 @@ trait Compiler: object Compiler: - /** Create a new instance of the compiler using the the classloader of the application. + /** Create a new instance of the compiler using the classloader of the application. 
* * Usage: * ``` diff --git a/tests/neg-macros/tasty-macro-error/quoted_1.scala b/tests/neg-macros/tasty-macro-error/quoted_1.scala index b395ec4c240b..8a4c45e46c89 100644 --- a/tests/neg-macros/tasty-macro-error/quoted_1.scala +++ b/tests/neg-macros/tasty-macro-error/quoted_1.scala @@ -6,7 +6,7 @@ object Macros { def impl(x: Expr[Any])(using Quotes) : Expr[Unit] = { import quotes.reflect.* - report.error("here is the the argument is " + x.asTerm.underlyingArgument.show, x.asTerm.underlyingArgument.pos) + report.error("here is the argument is " + x.asTerm.underlyingArgument.show, x.asTerm.underlyingArgument.pos) '{} } diff --git a/tests/neg-macros/tasty-macro-positions/quoted_1.scala b/tests/neg-macros/tasty-macro-positions/quoted_1.scala index b77373baa21c..a64e575a8d4d 100644 --- a/tests/neg-macros/tasty-macro-positions/quoted_1.scala +++ b/tests/neg-macros/tasty-macro-positions/quoted_1.scala @@ -7,8 +7,8 @@ object Macros { def impl(x: Expr[Any])(using Quotes) : Expr[Unit] = { import quotes.reflect.* val pos = x.asTerm.underlyingArgument.pos - report.error("here is the the argument is " + x.asTerm.underlyingArgument.show, pos) - report.error("here (+5) is the the argument is " + x.asTerm.underlyingArgument.show, Position(pos.sourceFile, pos.start + 5, pos.end + 5)) + report.error("here is the argument is " + x.asTerm.underlyingArgument.show, pos) + report.error("here (+5) is the argument is " + x.asTerm.underlyingArgument.show, Position(pos.sourceFile, pos.start + 5, pos.end + 5)) '{} } diff --git a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala index 6ead3134235a..d1a017d67f20 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala @@ -498,7 +498,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): else refineAtPrefix(childPre, childClass, childClass.primaryConstructor.info) match case info: PolyType => - // Compute the the full child type by solving the subtype constraint + // Compute the full child type by solving the subtype constraint // `C[X1, ..., Xn] <: P`, where // // - P is the current `mirroredType` From 8bffc9e92020bced2d1f7418bde58562cf1e3393 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Fri, 10 May 2024 15:06:23 +0200 Subject: [PATCH 060/827] Add regression test for i20309 --- tests/pos-macros/i20309/Macro_1.scala | 24 ++++++++++++++++++++++++ tests/pos-macros/i20309/Test_2.scala | 10 ++++++++++ 2 files changed, 34 insertions(+) create mode 100644 tests/pos-macros/i20309/Macro_1.scala create mode 100644 tests/pos-macros/i20309/Test_2.scala diff --git a/tests/pos-macros/i20309/Macro_1.scala b/tests/pos-macros/i20309/Macro_1.scala new file mode 100644 index 000000000000..e92e623ea775 --- /dev/null +++ b/tests/pos-macros/i20309/Macro_1.scala @@ -0,0 +1,24 @@ +import scala.quoted.* +import scala.compiletime.* + +trait Context +object Scope: + def spawn[A](f: Context ?=> A): A = ??? + +type Contextual[T] = Context ?=> T + +object Macros { + inline def transformContextLambda[T](inline expr: Context ?=> T): Context => T = + ${ transformContextLambdaImpl[T]('expr) } + + def transformContextLambdaImpl[T: Type]( + cexpr: Expr[Context ?=> T] + )(using Quotes): Expr[Context => T] = { + import quotes.reflect.* + val tree = asTerm(cexpr) + val traverse = new TreeMap() {} + println(tree.show) + traverse.transformTree(tree)(tree.symbol) + '{ _ => ??? 
} + } +} diff --git a/tests/pos-macros/i20309/Test_2.scala b/tests/pos-macros/i20309/Test_2.scala new file mode 100644 index 000000000000..6b01708d7ae0 --- /dev/null +++ b/tests/pos-macros/i20309/Test_2.scala @@ -0,0 +1,10 @@ + +transparent inline def inScope[T](inline expr: Context ?=> T): T = + val fn = Macros.transformContextLambda[T](expr) + fn(new Context {}) + +@main def Test = { + inScope { + Scope.spawn[Unit] { () } + } +} From d00cd3d1778d7e78cc9d9c10dc5536c702d3722f Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sat, 11 May 2024 19:13:06 +0200 Subject: [PATCH 061/827] Add checkfile for ScalaCheck --- tests/init-global/warn/ScalaCheck.check | 10 ++++++++++ tests/init-global/warn/ScalaCheck.scala | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 tests/init-global/warn/ScalaCheck.check diff --git a/tests/init-global/warn/ScalaCheck.check b/tests/init-global/warn/ScalaCheck.check new file mode 100644 index 000000000000..32fad69cfc57 --- /dev/null +++ b/tests/init-global/warn/ScalaCheck.check @@ -0,0 +1,10 @@ +-- Warning: tests/init-global/warn/ScalaCheck.scala:16:9 --------------------------------------------------------------- +16 | object OptMinSuccess extends IntOpt: // warn + | ^ + | Cyclic initialization: object OptMinSuccess -> object FirstParser -> object OptMinSuccess. Calling trace: + | ├── object OptMinSuccess extends IntOpt: // warn [ ScalaCheck.scala:16 ] + | │ ^ + | ├── object FirstParser extends CmdLineParser: [ ScalaCheck.scala:15 ] + | │ ^ + | └── val opts = Some(OptMinSuccess) [ ScalaCheck.scala:21 ] + | ^^^^^^^^^^^^^ diff --git a/tests/init-global/warn/ScalaCheck.scala b/tests/init-global/warn/ScalaCheck.scala index 574db37e8585..34b248bcfd68 100644 --- a/tests/init-global/warn/ScalaCheck.scala +++ b/tests/init-global/warn/ScalaCheck.scala @@ -18,5 +18,5 @@ object FirstParser extends CmdLineParser: val names = Set("bla") val help = "bla" - val opts = List(OptMinSuccess) + val opts = Some(OptMinSuccess) val a = "FirstParser" From 4112544573782339512b22c32ec8a98cc876698b Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sat, 11 May 2024 23:22:19 +0200 Subject: [PATCH 062/827] Disable ScalaCheck.scala test --- .../test/dotc/neg-init-global-scala2-library-tasty.blacklist | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist index f435867fcaab..93e6cd5b4ebc 100644 --- a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist +++ b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist @@ -4,3 +4,4 @@ t9312.scala unapplySeq-implicit-arg.scala unapplySeq-implicit-arg2.scala unapplySeq-implicit-arg3.scala +ScalaCheck.scala From accf42d555a6754a620b56fbaec604df1d89a469 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sun, 28 Apr 2024 00:07:47 -0400 Subject: [PATCH 063/827] Adjust the API name entry for nested classes **Problem** Some build pipelining tests fail on the latest sbt RC. ``` Error: (sbt-test / scripted) Failed tests: Error: pipelining/Yjava-tasty-fromjavaobject Error: pipelining/Yjava-tasty-paths ``` This is likely caused by inconsistent capturing of APIs from Java sources in ExtractAPI vs AnalyzingJavaCompiler in Zinc. **Solution** This adjusts the API name entry for Java nested classes. 
--- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 2 +- .../tools/dotc/sbt/ExtractDependencies.scala | 18 +++++++++++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 75f04908ac55..75e859111932 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -295,7 +295,7 @@ private class ExtractAPICollector(nonLocalClassSymbols: mutable.HashSet[Symbol]) val selfType = apiType(sym.givenSelfType) - val name = sym.fullName.stripModuleClassSuffix.toString + val name = ExtractDependencies.classNameAsString(sym) // We strip module class suffix. Zinc relies on a class and its companion having the same name val tparams = sym.typeParams.map(apiTypeParameter).toArray diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index dfff5971889e..154d50f8ebc2 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -105,8 +105,24 @@ object ExtractDependencies { val name: String = "sbt-deps" val description: String = "sends information on classes' dependencies to sbt" + /** Construct String name for the given sym. + * See https://github.com/sbt/zinc/blob/v1.9.6/internal/zinc-apiinfo/src/main/scala/sbt/internal/inc/ClassToAPI.scala#L86-L99 + * + * For a Java nested class M of a class C returns C's canonical name + "." + M's simple name. + */ def classNameAsString(sym: Symbol)(using Context): String = - sym.fullName.stripModuleClassSuffix.toString + def isJava(sym: Symbol)(using Context): Boolean = + Option(sym.source) match + case Some(src) => src.toString.endsWith(".java") + case None => false + def classNameAsString0(sym: Symbol)(using Context): String = + sym.fullName.stripModuleClassSuffix.toString + def javaClassNameAsString(sym: Symbol)(using Context): String = + if sym.owner.isClass && !sym.owner.isRoot then + javaClassNameAsString(sym.owner) + "." + sym.name.stripModuleClassSuffix.toString + else classNameAsString0(sym) + if isJava(sym) then javaClassNameAsString(sym) + else classNameAsString0(sym) /** Report an internal error in incremental compilation. */ def internalError(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = From e165a21fda71b6d592934e6e606be64077d77987 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Mon, 13 May 2024 14:37:42 +0200 Subject: [PATCH 064/827] Set baseVersion to 3.5.1-RC1 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 350471cc3e12..3e9e4f6b04f8 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -85,7 +85,7 @@ object Build { val referenceVersion = "3.4.2-RC1" - val baseVersion = "3.5.0-RC1" + val baseVersion = "3.5.1-RC1" // LTS or Next val versionLine = "Next" From f5a081164d1f3eb52a519d3c1671a82801c43ac8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 May 2024 13:06:43 +0000 Subject: [PATCH 065/827] Bump VirtusLab/scala-cli-setup from 1.3.0 to 1.3.1 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.3.0 to 1.3.1. 
- [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.3.0...v1.3.1) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 9c3405235b31..26bfb9a5d28c 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.3.0 + - uses: VirtusLab/scala-cli-setup@v1.3.1 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From c175842211d87d0f865f4b3145841797dc96dec4 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Tue, 14 May 2024 09:17:25 +0200 Subject: [PATCH 066/827] Revert "Set baseVersion to 3.5.1-RC1" --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 3e9e4f6b04f8..350471cc3e12 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -85,7 +85,7 @@ object Build { val referenceVersion = "3.4.2-RC1" - val baseVersion = "3.5.1-RC1" + val baseVersion = "3.5.0-RC1" // LTS or Next val versionLine = "Next" From 6c070b6ee53aa22612b797ed0e7d8c17bcb85af4 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 14 May 2024 11:33:04 +0200 Subject: [PATCH 067/827] Bump from 3.4.0 to 3.5.0-RC1 --- project/Build.scala | 2 +- project/MiMaFilters.scala | 39 +++++++++++++++++++++++++-------------- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 3e9e4f6b04f8..9f08c9d16112 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -104,7 +104,7 @@ object Build { * - `3.M.0` if `P > 0` * - `3.(M-1).0` if `P = 0` */ - val mimaPreviousDottyVersion = "3.4.0" + val mimaPreviousDottyVersion = "3.5.0-RC1" /** LTS version against which we check binary compatibility. 
* diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 18d2e985f844..bf652cb0ee33 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -8,20 +8,7 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of the library Build.mimaPreviousDottyVersion -> Seq( - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), - ProblemFilters.exclude[FinalClassProblem]("scala.annotation.experimental"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Tuple.fromArray"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Tuple.fromIArray"), - ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple.helpers"), - ProblemFilters.exclude[MissingClassProblem]("scala.Tuple$helpers$"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromArray"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromIArray"), - ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), - ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), - ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.WitnessNames"), + ), // Additions since last LTS @@ -53,6 +40,30 @@ object MiMaFilters { ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.5"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.clauseInterleaving"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.relaxedExtensionImports"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E6$minusmigration$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.6-migration"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E6$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.6"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.MethodTypeKind"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleTypeTypeTest"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleType"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleTypeMethods"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.Quotes$reflectModule$FlexibleTypeMethods"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.Quotes$reflectModule$FlexibleTypeModule"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.Quotes$reflectModule$MethodTypeKind"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.Quotes$reflectModule$MethodTypeKind$"), + 
ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.isContextual"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.methodTypeKind"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeModule.apply"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#SymbolMethods.isSuperAccessor"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.betterMatchTypeExtractors"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$betterMatchTypeExtractors$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), ), ) From f6345c6202b69a4603cb61ca029d60aa2ac80599 Mon Sep 17 00:00:00 2001 From: noti0na1 <8036790+noti0na1@users.noreply.github.com> Date: Mon, 22 Apr 2024 13:54:13 +0000 Subject: [PATCH 068/827] Enhance help message for language flag --- compiler/src/dotty/tools/dotc/config/CliCommand.scala | 7 +++++-- .../src/dotty/tools/dotc/config/CompilerCommand.scala | 2 +- compiler/src/dotty/tools/dotc/config/Feature.scala | 9 +++++++++ compiler/src/dotty/tools/dotc/config/ScalaSettings.scala | 2 +- .../tools/dotc/config/ScalaSettingsProperties.scala | 3 +++ compiler/src/dotty/tools/dotc/config/Settings.scala | 2 +- compiler/src/dotty/tools/dotc/core/StdNames.scala | 1 + 7 files changed, 21 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index 5ac6b772df95..b0046ee49cd1 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -53,7 +53,7 @@ trait CliCommand: end distill /** Creates a help message for a subset of options based on cond */ - protected def availableOptionsMsg(p: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = + protected def availableOptionsMsg(p: Setting[?] => Boolean, showArgFileMsg: Boolean = true)(using settings: ConcreteSettings)(using SettingsState): String = // result is (Option Name, descrption\ndefault: value\nchoices: x, y, z def help(s: Setting[?]): (String, String) = // For now, skip the default values that do not make sense for the end user, such as 'false' for the version command. 
@@ -68,7 +68,10 @@ trait CliCommand: val ss = settings.allSettings.filter(p).toList.sortBy(_.name) val formatter = Columnator("", "", maxField = 30) val fresh = ContextBase().initialCtx.fresh.setSettings(summon[SettingsState]) - formatter(List(ss.map(help) :+ ("@", "A text file containing compiler arguments (options and source files).")))(using fresh) + var msg = ss.map(help) + if showArgFileMsg then + msg = msg :+ ("@", "A text file containing compiler arguments (options and source files).") + formatter(List(msg))(using fresh) end availableOptionsMsg protected def shortUsage: String = s"Usage: $cmdName " diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala index 587f94dad7b3..43f3ed63f969 100644 --- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala @@ -9,7 +9,7 @@ abstract class CompilerCommand extends CliCommand: final def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String = settings.allSettings.find(isHelping) match - case Some(s) => s.description + case Some(s) => availableOptionsMsg(_ == s, showArgFileMsg = false) case _ => if (settings.help.value) usageMessage else if (settings.Vhelp.value) vusageMessage diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 0d551094da4d..0538c421813c 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -41,6 +41,15 @@ object Feature: defn.languageExperimentalFeatures .map(sym => experimental(sym.name)) .filterNot(_ == captureChecking) // TODO is this correct? + + val values = List( + nme.help, + nme.noAutoTupling, nme.dynamics, nme.unsafeNulls, nme.postfixOps, nme.strictEquality, + nme.implicitConversions, nme.adhocExtensions, + namedTypeArguments, genericNumberLiterals, scala2macros, + dependent, erasedDefinitions, symbolLiterals, fewerBraces, saferExceptions, + clauseInterleaving, pureFunctions, captureChecking, into + ) /** Is `feature` enabled by by a command-line setting? 
The enabling setting is * diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 86b657ddf00d..15cfb31489f7 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -114,7 +114,7 @@ trait CommonScalaSettings: val explainTypes: Setting[Boolean] = BooleanSetting(RootSetting, "explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) val explainCyclic: Setting[Boolean] = BooleanSetting(RootSetting, "explain-cyclic", "Explain cyclic reference errors in more detail.", aliases = List("--explain-cyclic")) val unchecked: Setting[Boolean] = BooleanSetting(RootSetting, "unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) - val language: Setting[List[String]] = MultiStringSetting(RootSetting, "language", "feature", "Enable one or more language features.", aliases = List("--language")) + val language: Setting[List[String]] = MultiChoiceSetting(RootSetting, "language", "feature", "Enable one or more language features.", choices = ScalaSettingsProperties.supportedLanguageFeatures, aliases = List("--language")) val experimental: Setting[Boolean] = BooleanSetting(RootSetting, "experimental", "Annotate all top-level definitions with @experimental. This enables the use of experimental features anywhere in the project.") /* Coverage settings */ diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala index e8a55dc6e737..95c6237bcae3 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala @@ -26,6 +26,9 @@ object ScalaSettingsProperties: def supportedSourceVersions: List[String] = SourceVersion.values.toList.map(_.toString) + def supportedLanguageFeatures: List[String] = + Feature.values.toList.map(_.toString) + def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") def defaultPageWidth: Int = { diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 1e2ced4d65a7..5042737c30cb 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -380,7 +380,7 @@ object Settings: def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs, deprecation = deprecation)) - def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String] = Nil, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, deprecation 
= deprecation)) def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[ChoiceWithHelp[String]]] = diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index b935488695e0..3753d1688399 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -509,6 +509,7 @@ object StdNames { val _hashCode_ : N = "_hashCode" val hash_ : N = "hash" val head: N = "head" + val help: N = "help" val higherKinds: N = "higherKinds" val idx: N = "idx" val identity: N = "identity" From e3a5f153e7ba88a3d4b384241622b3a2cc312ab0 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 23 Apr 2024 14:32:20 +0200 Subject: [PATCH 069/827] Add help to choices --- .../src/dotty/tools/dotc/config/Feature.scala | 34 ++++++++++++++----- .../tools/dotc/config/ScalaSettings.scala | 3 +- .../dotc/config/ScalaSettingsProperties.scala | 5 +-- 3 files changed, 31 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 0538c421813c..02dd40b3516a 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -11,6 +11,7 @@ import SourceVersion.* import reporting.Message import NameKinds.QualifiedName import Annotations.ExperimentalAnnotation +import Settings.Setting.ChoiceWithHelp object Feature: @@ -41,16 +42,33 @@ object Feature: defn.languageExperimentalFeatures .map(sym => experimental(sym.name)) .filterNot(_ == captureChecking) // TODO is this correct? - + val values = List( - nme.help, - nme.noAutoTupling, nme.dynamics, nme.unsafeNulls, nme.postfixOps, nme.strictEquality, - nme.implicitConversions, nme.adhocExtensions, - namedTypeArguments, genericNumberLiterals, scala2macros, - dependent, erasedDefinitions, symbolLiterals, fewerBraces, saferExceptions, - clauseInterleaving, pureFunctions, captureChecking, into + (nme.help, "Display all available features"), + (nme.noAutoTupling, "Disable automatic tupling"), + (nme.dynamics, "Allow direct or indirect subclasses of scala.Dynamic"), + (nme.unsafeNulls, "Enable unsafe nulls for explicit nulls"), + (nme.postfixOps, "Allow postfix operator notation"), + (nme.strictEquality, "Enable strict equality (=== and !==)"), + (nme.implicitConversions, "Allow implicit conversions without warnings"), + (nme.adhocExtensions, "Allow ad-hoc extension methods"), + (namedTypeArguments, "Allow named type arguments"), + (genericNumberLiterals, "Allow generic number literals"), + (scala2macros, "Allow Scala 2 macros"), + (dependent, "Allow dependent method types"), + (erasedDefinitions, "Allow erased definitions"), + (symbolLiterals, "Allow symbol literals"), + (fewerBraces, "Allow fewer braces"), + (saferExceptions, "Enable safer exceptions"), + (clauseInterleaving, "Enable clause interleaving"), + (pureFunctions, "Enable pure functions"), + (captureChecking, "Enable experimental capture checking"), + (into, "Allow into clauses in pattern matches") ) + private def enabledLanguageFeaturesBySetting(using Context): List[String] = + ctx.settings.language.value.asInstanceOf + /** Is `feature` enabled by by a command-line setting? 
The enabling setting is * * -language:feature @@ -59,7 +77,7 @@ object Feature: * but subtracting the prefix `scala.language.` at the front. */ def enabledBySetting(feature: TermName)(using Context): Boolean = - ctx.base.settings.language.value.contains(feature.toString) + enabledLanguageFeaturesBySetting.contains(feature.toString) /** Is `feature` enabled by by an import? This is the case if the feature * is imported by a named import diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 15cfb31489f7..bb28e06150fe 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -114,7 +114,7 @@ trait CommonScalaSettings: val explainTypes: Setting[Boolean] = BooleanSetting(RootSetting, "explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) val explainCyclic: Setting[Boolean] = BooleanSetting(RootSetting, "explain-cyclic", "Explain cyclic reference errors in more detail.", aliases = List("--explain-cyclic")) val unchecked: Setting[Boolean] = BooleanSetting(RootSetting, "unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) - val language: Setting[List[String]] = MultiChoiceSetting(RootSetting, "language", "feature", "Enable one or more language features.", choices = ScalaSettingsProperties.supportedLanguageFeatures, aliases = List("--language")) + val language: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting(RootSetting, "language", "feature", "Enable one or more language features.", choices = ScalaSettingsProperties.supportedLanguageFeatures, default = Nil, aliases = List("--language")) val experimental: Setting[Boolean] = BooleanSetting(RootSetting, "experimental", "Annotate all top-level definitions with @experimental. 
This enables the use of experimental features anywhere in the project.") /* Coverage settings */ @@ -492,3 +492,4 @@ private sealed trait YSettings: @deprecated(message = "Scheduled for removal.", since = "3.5.0") val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles", deprecation = Deprecation.removed()) end YSettings + diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala index 95c6237bcae3..a839d3e3be19 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala @@ -1,6 +1,7 @@ package dotty.tools.dotc package config +import Settings.Setting.ChoiceWithHelp import dotty.tools.backend.jvm.BackendUtils.classfileVersionMap import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory, NoAbstractFile} import scala.language.unsafeNulls @@ -26,8 +27,8 @@ object ScalaSettingsProperties: def supportedSourceVersions: List[String] = SourceVersion.values.toList.map(_.toString) - def supportedLanguageFeatures: List[String] = - Feature.values.toList.map(_.toString) + def supportedLanguageFeatures: List[ChoiceWithHelp[String]] = + Feature.values.map((n, d) => ChoiceWithHelp(n.toString, d)) def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") From 20ab4aa1824156a1ca51eb98fe2a07443e450fe3 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 23 Apr 2024 16:57:40 +0200 Subject: [PATCH 070/827] Fix tests --- community-build/community-projects/scala-xml | 2 +- compiler/test/dotty/tools/DottyTest.scala | 2 +- compiler/test/dotty/tools/dotc/CompilationTests.scala | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/community-build/community-projects/scala-xml b/community-build/community-projects/scala-xml index 105c3dac8835..0605c07e298c 160000 --- a/community-build/community-projects/scala-xml +++ b/community-build/community-projects/scala-xml @@ -1 +1 @@ -Subproject commit 105c3dac883549eca1182b04fc5a18fe4f5ad51a +Subproject commit 0605c07e298c1bd8758f79d3c790f89db986a6bc diff --git a/compiler/test/dotty/tools/DottyTest.scala b/compiler/test/dotty/tools/DottyTest.scala index 7ccbc09a4c92..2b94801b67d7 100644 --- a/compiler/test/dotty/tools/DottyTest.scala +++ b/compiler/test/dotty/tools/DottyTest.scala @@ -40,7 +40,7 @@ trait DottyTest extends ContextEscapeDetection { protected def initializeCtx(fc: FreshContext): Unit = { fc.setSetting(fc.settings.encoding, "UTF8") fc.setSetting(fc.settings.classpath, TestConfiguration.basicClasspath) - fc.setSetting(fc.settings.language, List("experimental.erasedDefinitions")) + fc.setSetting(fc.settings.language, List("experimental.erasedDefinitions").asInstanceOf) fc.setProperty(ContextDoc, new ContextDocstrings) } diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index de3bd02bba6e..2b9ebd2c69d1 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -143,7 +143,7 @@ class CompilationTests { "tests/neg-custom-args/toplevel-samesource/S.scala", "tests/neg-custom-args/toplevel-samesource/nested/S.scala"), defaultOptions), - compileFile("tests/neg/i7575.scala", defaultOptions.withoutLanguageFeatures.and("-language:_")), + compileFile("tests/neg/i7575.scala", defaultOptions.withoutLanguageFeatures), 
).checkExpectedErrors() } From d5f1695e01278b673fe6e3cba270f8fd57237f19 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 2 May 2024 19:24:22 +0200 Subject: [PATCH 071/827] Update some helps --- compiler/src/dotty/tools/dotc/config/Feature.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 02dd40b3516a..6c51d0812eb6 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -49,7 +49,7 @@ object Feature: (nme.dynamics, "Allow direct or indirect subclasses of scala.Dynamic"), (nme.unsafeNulls, "Enable unsafe nulls for explicit nulls"), (nme.postfixOps, "Allow postfix operator notation"), - (nme.strictEquality, "Enable strict equality (=== and !==)"), + (nme.strictEquality, "Enable strict equality (disable canEqualAny)"), (nme.implicitConversions, "Allow implicit conversions without warnings"), (nme.adhocExtensions, "Allow ad-hoc extension methods"), (namedTypeArguments, "Allow named type arguments"), @@ -58,10 +58,10 @@ object Feature: (dependent, "Allow dependent method types"), (erasedDefinitions, "Allow erased definitions"), (symbolLiterals, "Allow symbol literals"), - (fewerBraces, "Allow fewer braces"), + (fewerBraces, "Enable support for using indentation for arguments"), (saferExceptions, "Enable safer exceptions"), (clauseInterleaving, "Enable clause interleaving"), - (pureFunctions, "Enable pure functions"), + (pureFunctions, "Enable pure functions for capture checking"), (captureChecking, "Enable experimental capture checking"), (into, "Allow into clauses in pattern matches") ) From 326408188d073d79aabb0320cea99290196ed5ce Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Fri, 3 May 2024 15:45:11 +0200 Subject: [PATCH 072/827] Update community projects --- community-build/community-projects/izumi-reflect | 2 +- community-build/community-projects/parboiled2 | 2 +- community-build/community-projects/scala-collection-compat | 2 +- compiler/src/dotty/tools/dotc/config/Feature.scala | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/community-build/community-projects/izumi-reflect b/community-build/community-projects/izumi-reflect index c0756faa7311..bd4ae213f81e 160000 --- a/community-build/community-projects/izumi-reflect +++ b/community-build/community-projects/izumi-reflect @@ -1 +1 @@ -Subproject commit c0756faa7311f70c6da6af29b8cb25506634bf09 +Subproject commit bd4ae213f81e63c330b22cf5f73f68641814b195 diff --git a/community-build/community-projects/parboiled2 b/community-build/community-projects/parboiled2 index 628127744bde..3fb32f833f8c 160000 --- a/community-build/community-projects/parboiled2 +++ b/community-build/community-projects/parboiled2 @@ -1 +1 @@ -Subproject commit 628127744bde8dc2e01432badd68886a5f722f71 +Subproject commit 3fb32f833f8c6a2fca25474c189efd91ffb65557 diff --git a/community-build/community-projects/scala-collection-compat b/community-build/community-projects/scala-collection-compat index b39b4b64732d..2bf3fea914b2 160000 --- a/community-build/community-projects/scala-collection-compat +++ b/community-build/community-projects/scala-collection-compat @@ -1 +1 @@ -Subproject commit b39b4b64732d9dd5e0f065e4180f656237ac4444 +Subproject commit 2bf3fea914b2f13e4805b3e7b519bdf0e595e4c9 diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 6c51d0812eb6..f5fa383c5636 100644 --- 
a/compiler/src/dotty/tools/dotc/config/Feature.scala
+++ b/compiler/src/dotty/tools/dotc/config/Feature.scala
@@ -48,7 +48,7 @@ object Feature:
     (nme.noAutoTupling, "Disable automatic tupling"),
     (nme.dynamics, "Allow direct or indirect subclasses of scala.Dynamic"),
     (nme.unsafeNulls, "Enable unsafe nulls for explicit nulls"),
-    (nme.postfixOps, "Allow postfix operator notation"),
+    (nme.postfixOps, "Allow postfix operators (not recommended)"),
     (nme.strictEquality, "Enable strict equality (disable canEqualAny)"),
     (nme.implicitConversions, "Allow implicit conversions without warnings"),
     (nme.adhocExtensions, "Allow ad-hoc extension methods"),

From b7fc5da279948259acc0e9b933c79788c1710981 Mon Sep 17 00:00:00 2001
From: noti0na1
Date: Mon, 6 May 2024 14:06:37 +0200
Subject: [PATCH 073/827] Update remaining community projects

---
 community-build/community-projects/Monocle | 2 +-
 community-build/community-projects/akka | 2 +-
 community-build/community-projects/endpoints4s | 2 +-
 community-build/community-projects/scalaz | 2 +-
 compiler/src/dotty/tools/dotc/config/Feature.scala | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/community-build/community-projects/Monocle b/community-build/community-projects/Monocle
index a0e70744e9b3..a9a12a13a48c 160000
--- a/community-build/community-projects/Monocle
+++ b/community-build/community-projects/Monocle
@@ -1 +1 @@
-Subproject commit a0e70744e9b3bfb0f12e4ea292151c49c3302cd1
+Subproject commit a9a12a13a48c957535ddd6850ed8c6b0db2dc4fe
diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka
index 79b294048f89..2dffb6504005 160000
--- a/community-build/community-projects/akka
+++ b/community-build/community-projects/akka
@@ -1 +1 @@
-Subproject commit 79b294048f893d9d6b9332618f7aebedce9a5340
+Subproject commit 2dffb6504005a6144561c4e3ba7b185639a8ad48
diff --git a/community-build/community-projects/endpoints4s b/community-build/community-projects/endpoints4s
index 3a667a3608ff..cc03ddf1c4a0 160000
--- a/community-build/community-projects/endpoints4s
+++ b/community-build/community-projects/endpoints4s
@@ -1 +1 @@
-Subproject commit 3a667a3608ff9950c24e9b2b5038c71c1690a21d
+Subproject commit cc03ddf1c4a03391c8031784e48c057bdc9394db
diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz
index 97cccf3b3fcb..4919bdce732f 160000
--- a/community-build/community-projects/scalaz
+++ b/community-build/community-projects/scalaz
@@ -1 +1 @@
-Subproject commit 97cccf3b3fcb71885a32b2e567171c0f70b06104
+Subproject commit 4919bdce732f53a3316d5e12d9c853fc2141ddfb
diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala
index f5fa383c5636..ad458214058f 100644
--- a/compiler/src/dotty/tools/dotc/config/Feature.scala
+++ b/compiler/src/dotty/tools/dotc/config/Feature.scala
@@ -63,7 +63,7 @@ object Feature:
     (clauseInterleaving, "Enable clause interleaving"),
     (pureFunctions, "Enable pure functions for capture checking"),
     (captureChecking, "Enable experimental capture checking"),
-    (into, "Allow into clauses in pattern matches")
+    (into, "Allow into modifier on parameter types")

From 6a07b0a456d6560bc1de21e08d7b4dbb4009b439 Mon Sep 17 00:00:00 2001
From: noti0na1
Date: Tue, 14 May 2024 14:45:33 +0200
Subject: [PATCH 074/827] Add notes for new features

---
 compiler/src/dotty/tools/dotc/config/Feature.scala | 5 ++++-
 1 file changed, 4 
insertions(+), 1 deletion(-)

diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala
index ad458214058f..91100627981b 100644
--- a/compiler/src/dotty/tools/dotc/config/Feature.scala
+++ b/compiler/src/dotty/tools/dotc/config/Feature.scala
@@ -63,7 +63,10 @@ object Feature:
     (clauseInterleaving, "Enable clause interleaving"),
     (pureFunctions, "Enable pure functions for capture checking"),
     (captureChecking, "Enable experimental capture checking"),
-    (into, "Allow into modifier on parameter types")
+    (into, "Allow into modifier on parameter types"),
+    (namedTuples, "Allow named tuples"),
+    (modularity, "Enable experimental modularity features"),
+    (betterMatchTypeExtractors, "Enable better match type extractors")
   )

From c8764bac0f555b2d3c9f42403d1ae216960d28ad Mon Sep 17 00:00:00 2001
From: Guillaume Martres
Date: Tue, 14 May 2024 22:23:10 +0200
Subject: [PATCH 075/827] Avoid forcing whole package when using `-experimental`

In https://github.com/scala/scala3/pull/19807, the behavior of
`-experimental` was changed to mark all top-level definitions as
experimental. To do so, the implementation traverses the whole package
and checks every symbol to see if it should be transformed or not.
The problem was that the first check we do is `sym.isExperimental`
which ends up forcing the symbol. Besides being a performance issue,
this could also lead to a crash if the current package is the empty
package, because we could end up forcing the magic `module-info.class`
that Java modules place there. For some reason, this appears to only
happen when building with sbt, hence the additional scripted test.
This PR fixes this issue by reordering the checks (and adding a
preliminary `isDefinedInCurrentRun` check for good measure). We should
also investigate whether we can avoid creating a symbol for
`module-info.class`, but this PR is intentionally minimal so we can
backport it to 3.5.0-RC2 without risks.
--- compiler/src/dotty/tools/dotc/typer/Checking.scala | 3 ++- sbt-test/java-compat/moduleInfo/A.scala | 2 ++ sbt-test/java-compat/moduleInfo/build.sbt | 5 +++++ sbt-test/java-compat/moduleInfo/test | 1 + 4 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 sbt-test/java-compat/moduleInfo/A.scala create mode 100644 sbt-test/java-compat/moduleInfo/build.sbt create mode 100644 sbt-test/java-compat/moduleInfo/test diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 073055ba5b58..1f82b9ddc084 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -806,10 +806,11 @@ object Checking { def checkAndAdaptExperimentalImports(trees: List[Tree])(using Context): Unit = def nonExperimentalTopLevelDefs(pack: Symbol): Iterator[Symbol] = def isNonExperimentalTopLevelDefinition(sym: Symbol) = - !sym.isExperimental + sym.isDefinedInCurrentRun && sym.source == ctx.compilationUnit.source && !sym.isConstructor // not constructor of package object && !sym.is(Package) && !sym.name.isPackageObjectName + && !sym.isExperimental pack.info.decls.toList.iterator.flatMap: sym => if sym.isClass && (sym.is(Package) || sym.isPackageObject) then diff --git a/sbt-test/java-compat/moduleInfo/A.scala b/sbt-test/java-compat/moduleInfo/A.scala new file mode 100644 index 000000000000..4b46ae7047d6 --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/A.scala @@ -0,0 +1,2 @@ +// Previously, we crashed trying to parse module-info.class in the empty package. +class A diff --git a/sbt-test/java-compat/moduleInfo/build.sbt b/sbt-test/java-compat/moduleInfo/build.sbt new file mode 100644 index 000000000000..a0308b6cb83a --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/build.sbt @@ -0,0 +1,5 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +scalacOptions ++= Seq( + "-experimental" +) diff --git a/sbt-test/java-compat/moduleInfo/test b/sbt-test/java-compat/moduleInfo/test new file mode 100644 index 000000000000..5df2af1f3956 --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/test @@ -0,0 +1 @@ +> compile From 3cbc15e0dbd0d88faee3acdeeb993473cf32183d Mon Sep 17 00:00:00 2001 From: Derek Wickern Date: Tue, 14 May 2024 22:04:07 -0700 Subject: [PATCH 076/827] Emit switch bytecode when matching unions of a switchable type --- .../tools/dotc/transform/PatternMatcher.scala | 7 +- .../backend/jvm/DottyBytecodeTests.scala | 98 +++++++++++++++++++ 2 files changed, 102 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 0b8507f3b6c7..1e95ca1618b2 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -868,7 +868,7 @@ object PatternMatcher { (Nil, plan) :: Nil } - if (isSwitchableType(scrutinee.tpe.widen)) recur(plan) + if (isSwitchableType(scrutinee.tpe.widen.widenSingletons())) recur(plan) else Nil } @@ -889,8 +889,9 @@ object PatternMatcher { */ val (primScrutinee, scrutineeTpe) = - if (scrutinee.tpe.widen.isRef(defn.IntClass)) (scrutinee, defn.IntType) - else if (scrutinee.tpe.widen.isRef(defn.StringClass)) (scrutinee, defn.StringType) + val tpe = scrutinee.tpe.widen.widenSingletons() + if (tpe.isRef(defn.IntClass)) (scrutinee, defn.IntType) + else if (tpe.isRef(defn.StringClass)) (scrutinee, defn.StringType) else (scrutinee.select(nme.toInt), defn.IntType) def primLiteral(lit: 
Tree): Tree = diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index f446913d7964..e4e485478804 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -158,6 +158,104 @@ class DottyBytecodeTests extends DottyBytecodeTest { } } + @Test def switchOnUnionOfInts = { + val source = + """ + |object Foo { + | def foo(x: 1 | 2 | 3 | 4 | 5) = x match { + | case 1 => println(3) + | case 2 | 3 => println(2) + | case 4 => println(1) + | case 5 => println(0) + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + + @Test def switchOnUnionOfStrings = { + val source = + """ + |object Foo { + | def foo(s: "one" | "two" | "three" | "four" | "five") = s match { + | case "one" => println(3) + | case "two" | "three" => println(2) + | case "four" | "five" => println(1) + | case _ => println(0) + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + + @Test def switchOnUnionOfIntSingletons = { + val source = + """ + |object Foo { + | final val One = 1 + | final val Two = 2 + | final val Three = 3 + | final val Four = 4 + | final val Five = 5 + | type Values = One.type | Two.type | Three.type | Four.type | Five.type + | + | def foo(s: Values) = s match { + | case One => println(3) + | case Two | Three => println(2) + | case Four => println(1) + | case Five => println(0) + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + + @Test def switchOnUnionOfStringSingletons = { + val source = + """ + |object Foo { + | final val One = "one" + | final val Two = "two" + | final val Three = "three" + | final val Four = "four" + | final val Five = "five" + | type Values = One.type | Two.type | Three.type | Four.type | Five.type + | + | def foo(s: Values) = s match { + | case One => println(3) + | case Two | Three => println(2) + | case Four => println(1) + | case Five => println(0) + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + @Test def matchWithDefaultNoThrowMatchError = { val source = """class Test { From 36d2205284b9042304204da0b4a16f13392c4a04 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Tue, 14 May 2024 12:25:27 +0200 Subject: [PATCH 077/827] Check pattern match exhaustivity in inlined code --- .../dotty/tools/dotc/transform/PatternMatcher.scala | 4 ++++ .../src/dotty/tools/dotc/transform/patmat/Space.scala | 5 ++++- tests/warn/i20372.check | 8 ++++++++ tests/warn/i20372.scala | 10 ++++++++++ 4 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 tests/warn/i20372.check create mode 100644 tests/warn/i20372.scala diff --git 
a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 0b8507f3b6c7..c586c836e305 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -56,6 +56,10 @@ class PatternMatcher extends MiniPhase { if !inInlinedCode then // check exhaustivity and unreachability SpaceEngine.checkMatch(tree) + else + // only check exhaustivity, as inlining may generate unreachable code + // like in i19157.scala + SpaceEngine.checkMatchExhaustivityOnly(tree) translated.ensureConforms(matchType) } diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index e1603761f08b..3ad13ec011b5 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -901,6 +901,9 @@ object SpaceEngine { } def checkMatch(m: Match)(using Context): Unit = - if exhaustivityCheckable(m.selector) then checkExhaustivity(m) + checkMatchExhaustivityOnly(m) if reachabilityCheckable(m.selector) then checkReachability(m) + + def checkMatchExhaustivityOnly(m: Match)(using Context): Unit = + if exhaustivityCheckable(m.selector) then checkExhaustivity(m) } diff --git a/tests/warn/i20372.check b/tests/warn/i20372.check new file mode 100644 index 000000000000..7946c424df0c --- /dev/null +++ b/tests/warn/i20372.check @@ -0,0 +1,8 @@ +-- [E029] Pattern Match Exhaustivity Warning: tests/warn/i20372.scala:8:5 ---------------------------------------------- +8 | id(foo match { // warn + | ^^^ + | match may not be exhaustive. + | + | It would fail on pattern case: Baz + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i20372.scala b/tests/warn/i20372.scala new file mode 100644 index 000000000000..2886bd11b09f --- /dev/null +++ b/tests/warn/i20372.scala @@ -0,0 +1,10 @@ +sealed trait Foo +case object Bar extends Foo +case object Baz extends Foo + +inline def id[A](a: A): A = a + +def shouldThrowAWarning(foo: Foo) = + id(foo match { // warn + case Bar => "Bar" + }) From f70ed41248eb073af736c78599257d14849025be Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Mon, 13 May 2024 19:05:04 +0200 Subject: [PATCH 078/827] Fix erasure crash for Inlined rhs of a ctxfun closure This is achieved by safely removing Inlined nodes at that point, in the same manner they would be removed later in `typedInlined`` in the same phase. 
--- .../src/dotty/tools/dotc/transform/Erasure.scala | 2 ++ tests/pos-macros/i16963/Macro_1.scala | 14 ++++++++++++++ tests/pos-macros/i16963/Test_2.scala | 1 + 3 files changed, 17 insertions(+) create mode 100644 tests/pos-macros/i16963/Macro_1.scala create mode 100644 tests/pos-macros/i16963/Test_2.scala diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index a25a2fcb5c6d..be00d952566c 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -945,6 +945,8 @@ object Erasure { vparams = vparams :+ param if crCount == 1 then meth.rhs.changeOwnerAfter(meth.symbol, sym, erasurePhase) else skipContextClosures(meth.rhs, crCount - 1) + case inlined: Inlined => + skipContextClosures(Inlines.dropInlined(inlined), crCount) var rhs1 = skipContextClosures(ddef.rhs.asInstanceOf[Tree], contextResultCount(sym)) diff --git a/tests/pos-macros/i16963/Macro_1.scala b/tests/pos-macros/i16963/Macro_1.scala new file mode 100644 index 000000000000..317d8947abd3 --- /dev/null +++ b/tests/pos-macros/i16963/Macro_1.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +inline def myMacro = ${ myMacroExpr } + +def myMacroExpr(using Quotes) = + import quotes.reflect.* + + '{ def innerMethod = (_: String) ?=> ???; () }.asTerm match + case block @ Inlined(_, _, Block(List(defdef: DefDef), _)) => + val rhs = + given Quotes = defdef.symbol.asQuotes + '{ (x: String) ?=> ??? }.asTerm + + Block(List(DefDef(defdef.symbol, _ => Some(rhs))), '{}.asTerm).asExprOf[Unit] diff --git a/tests/pos-macros/i16963/Test_2.scala b/tests/pos-macros/i16963/Test_2.scala new file mode 100644 index 000000000000..389f9e3233a2 --- /dev/null +++ b/tests/pos-macros/i16963/Test_2.scala @@ -0,0 +1 @@ +def method: Unit = myMacro From d79bbf0800889f1f510459b56d3cf9b7d257d012 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 16 May 2024 22:02:49 +0200 Subject: [PATCH 079/827] Revert "Revert "Regression: fix compilation performance on Windows"" This reverts commit c5659933ef58ddbb003ecc30694a9e3e77b20c57, which was sound, but originally reverted due to merge conflicts on main. --- compiler/src/dotty/tools/io/AbstractFile.scala | 6 ------ compiler/src/dotty/tools/io/NoAbstractFile.scala | 2 -- compiler/src/dotty/tools/io/PlainFile.scala | 13 ++----------- compiler/src/dotty/tools/io/VirtualDirectory.scala | 6 ------ compiler/src/dotty/tools/io/VirtualFile.scala | 6 ------ compiler/src/dotty/tools/io/ZipArchive.scala | 2 -- 6 files changed, 2 insertions(+), 33 deletions(-) diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index 233b1ca8fb62..ee72297c2a4f 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -136,12 +136,6 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Does this abstract file represent something which can contain classfiles? */ def isClassContainer: Boolean = isDirectory || (jpath != null && ext.isJarOrZip) - /** Create a file on disk, if one does not exist already. */ - def create(): Unit - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit - /** Is this abstract file a directory? 
*/ def isDirectory: Boolean diff --git a/compiler/src/dotty/tools/io/NoAbstractFile.scala b/compiler/src/dotty/tools/io/NoAbstractFile.scala index 13c2c6851d2b..bef045e290a5 100644 --- a/compiler/src/dotty/tools/io/NoAbstractFile.scala +++ b/compiler/src/dotty/tools/io/NoAbstractFile.scala @@ -17,8 +17,6 @@ import java.io.InputStream object NoAbstractFile extends AbstractFile { def absolute: AbstractFile = this def container: AbstractFile = this - def create(): Unit = ??? - def delete(): Unit = ??? def jpath: JPath = null def input: InputStream = null def isDirectory: Boolean = false diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index acef191d3072..a6a39d9ff3eb 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -13,9 +13,8 @@ import java.nio.file.{InvalidPathException, Paths} /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { - override def isDirectory: Boolean = true + override val isDirectory: Boolean = true override def iterator(): Iterator[PlainFile] = givenPath.list.filter(_.exists).map(new PlainFile(_)) - override def delete(): Unit = givenPath.deleteRecursively() } /** This class implements an abstract file backed by a File. @@ -78,7 +77,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { } /** Is this abstract file a directory? */ - def isDirectory: Boolean = givenPath.isDirectory + val isDirectory: Boolean = givenPath.isDirectory // cached for performance on Windows /** Returns the time that this abstract file was last modified. */ def lastModified: Long = givenPath.lastModified.toMillis @@ -113,14 +112,6 @@ class PlainFile(val givenPath: Path) extends AbstractFile { null } - /** Does this abstract file denote an existing file? */ - def create(): Unit = if (!exists) givenPath.createFile() - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = - if (givenPath.isFile) givenPath.delete() - else if (givenPath.isDirectory) givenPath.toDirectory.deleteRecursively() - /** Returns a plain file with the given name. It does not * check that it exists. */ diff --git a/compiler/src/dotty/tools/io/VirtualDirectory.scala b/compiler/src/dotty/tools/io/VirtualDirectory.scala index 157f63a2ac1a..949f2d0e61dd 100644 --- a/compiler/src/dotty/tools/io/VirtualDirectory.scala +++ b/compiler/src/dotty/tools/io/VirtualDirectory.scala @@ -34,12 +34,6 @@ extends AbstractFile { override def input: InputStream = sys.error("directories cannot be read") override def output: OutputStream = sys.error("directories cannot be written") - /** Does this abstract file denote an existing file? */ - def create(): Unit = { unsupported() } - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = { unsupported() } - /** Returns an abstract file with the given name. It does not * check that it exists. */ diff --git a/compiler/src/dotty/tools/io/VirtualFile.scala b/compiler/src/dotty/tools/io/VirtualFile.scala index 9d290a9b0e6a..6fb9859503f2 100644 --- a/compiler/src/dotty/tools/io/VirtualFile.scala +++ b/compiler/src/dotty/tools/io/VirtualFile.scala @@ -82,12 +82,6 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF Iterator.empty } - /** Does this abstract file denote an existing file? 
*/ - def create(): Unit = unsupported() - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = unsupported() - /** * Returns the abstract file in this abstract directory with the * specified name. If there is no such file, returns null. The diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index 9af935690ffc..a23bde8faaed 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -61,8 +61,6 @@ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) ex def isDirectory: Boolean = true def lookupName(name: String, directory: Boolean): AbstractFile = unsupported() def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() - def create(): Unit = unsupported() - def delete(): Unit = unsupported() def output: OutputStream = unsupported() def container: AbstractFile = unsupported() def absolute: AbstractFile = unsupported() From c6086f6724b64cfdaaa43071a3847b2dbab930c9 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 16 May 2024 22:20:44 +0200 Subject: [PATCH 080/827] Replace removed `PlainFile.delete()` API method --- .../dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala b/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala index 9cdfb042b8fb..13a6a274ed96 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala @@ -18,8 +18,8 @@ object BestEffortTastyWriter: unit.pickled.foreach { (clz, binary) => val parts = clz.fullName.mangledString.split('.') val outPath = outputPath(parts.toList, dir) - val outTastyFile = new PlainFile(new File(outPath)) - val outstream = new DataOutputStream(outTastyFile.bufferedOutput) + val outTastyFile = new File(outPath) + val outstream = new DataOutputStream(new PlainFile(outTastyFile).bufferedOutput) try outstream.write(binary()) catch case ex: ClosedByInterruptException => try From c60817704ca44b777722fbfdb35b016a1b58d97f Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 16 May 2024 23:19:00 +0200 Subject: [PATCH 081/827] Do `constraint.replace` when `addOneBound` produces equal bounds as an optimization In #20120, we reach constraints with equal bounds that are intersection types, they are formed from multiple successive calls to `addOneBound`. We miss the `replace` optimization in this case because the bounds only become equal progressively, and we are only checking for equality with the constraint being added. Additionally, we recheck for equal bounds after `constraint.updateEntry` as checking `isSub` can have narrowed the bounds further. #19955 is an example where this second optimization applies. 
Fix #20120 Close #20208 the original implementation --- .../tools/dotc/core/ConstraintHandling.scala | 56 +++++++++++-------- 1 file changed, 33 insertions(+), 23 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 06711ec97abf..e63911a6a883 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -120,7 +120,7 @@ trait ConstraintHandling { */ private var myTrustBounds = true - inline def withUntrustedBounds(op: => Type): Type = + transparent inline def withUntrustedBounds(op: => Type): Type = val saved = myTrustBounds myTrustBounds = false try op finally myTrustBounds = saved @@ -301,34 +301,44 @@ trait ConstraintHandling { // so we shouldn't allow them as constraints either. false else - val bound = legalBound(param, rawBound, isUpper) - val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) - val equalBounds = (if isUpper then lo else hi) eq bound - if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then - // The narrowed bounds are equal and not recursive, - // so we can remove `param` from the constraint. - constraint = constraint.replace(param, bound) - true - else - // Narrow one of the bounds of type parameter `param` - // If `isUpper` is true, ensure that `param <: `bound`, otherwise ensure - // that `param >: bound`. - val narrowedBounds = - val saved = homogenizeArgs - homogenizeArgs = Config.alignArgsInAnd - try - withUntrustedBounds( - if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) - else oldBounds.derivedTypeBounds(lo | bound, hi)) - finally - homogenizeArgs = saved + + // Narrow one of the bounds of type parameter `param` + // If `isUpper` is true, ensure that `param <: `bound`, + // otherwise ensure that `param >: bound`. + val narrowedBounds: TypeBounds = + val bound = legalBound(param, rawBound, isUpper) + val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) + + val saved = homogenizeArgs + homogenizeArgs = Config.alignArgsInAnd + try + withUntrustedBounds( + if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) + else oldBounds.derivedTypeBounds(lo | bound, hi)) + finally + homogenizeArgs = saved + end narrowedBounds + + // If the narrowed bounds are equal and not recursive, + // we can remove `param` from the constraint. 
+ def tryReplace(newBounds: TypeBounds): Boolean = + val TypeBounds(lo, hi) = newBounds + val canReplace = (lo eq hi) && !newBounds.existsPart(_ eq param, StopAt.Static) + if canReplace then constraint = constraint.replace(param, lo) + canReplace + + tryReplace(narrowedBounds) || locally: //println(i"narrow bounds for $param from $oldBounds to $narrowedBounds") val c1 = constraint.updateEntry(param, narrowedBounds) (c1 eq constraint) || { constraint = c1 val TypeBounds(lo, hi) = constraint.entry(param): @unchecked - isSub(lo, hi) + val isSat = isSub(lo, hi) + if isSat then + // isSub may have narrowed the bounds further + tryReplace(constraint.nonParamBounds(param)) + isSat } end addOneBound From 2c349c13ba69ffc13ced832c4e71e316e53b3d63 Mon Sep 17 00:00:00 2001 From: Derek Wickern Date: Thu, 16 May 2024 21:01:28 -0700 Subject: [PATCH 082/827] add test for union of Char --- .../backend/jvm/DottyBytecodeTests.scala | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index e4e485478804..f80336646dfd 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -200,6 +200,28 @@ class DottyBytecodeTests extends DottyBytecodeTest { } } + @Test def switchOnUnionOfChars = { + val source = + """ + |object Foo { + | def foo(ch: 'a' | 'b' | 'c' | 'd' | 'e'): Int = ch match { + | case 'a' => 1 + | case 'b' => 2 + | case 'c' => 3 + | case 'd' => 4 + | case 'e' => 5 + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + @Test def switchOnUnionOfIntSingletons = { val source = """ From 7279bf7cb0368b6abef68a8664e63505b22dc7ce Mon Sep 17 00:00:00 2001 From: Derek Wickern Date: Thu, 16 May 2024 21:03:57 -0700 Subject: [PATCH 083/827] replace widen with <:< --- .../tools/dotc/transform/PatternMatcher.scala | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 1e95ca1618b2..1e16897081dd 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -814,11 +814,11 @@ object PatternMatcher { */ private def collectSwitchCases(scrutinee: Tree, plan: SeqPlan): List[(List[Tree], Plan)] = { def isSwitchableType(tpe: Type): Boolean = - (tpe isRef defn.IntClass) || - (tpe isRef defn.ByteClass) || - (tpe isRef defn.ShortClass) || - (tpe isRef defn.CharClass) || - (tpe isRef defn.StringClass) + (tpe <:< defn.IntType) || + (tpe <:< defn.ByteType) || + (tpe <:< defn.ShortType) || + (tpe <:< defn.CharType) || + (tpe <:< defn.StringType) val seen = mutable.Set[Any]() @@ -868,7 +868,7 @@ object PatternMatcher { (Nil, plan) :: Nil } - if (isSwitchableType(scrutinee.tpe.widen.widenSingletons())) recur(plan) + if (isSwitchableType(scrutinee.tpe)) recur(plan) else Nil } @@ -889,9 +889,8 @@ object PatternMatcher { */ val (primScrutinee, scrutineeTpe) = - val tpe = scrutinee.tpe.widen.widenSingletons() - if (tpe.isRef(defn.IntClass)) (scrutinee, defn.IntType) - else if (tpe.isRef(defn.StringClass)) (scrutinee, defn.StringType) + if (scrutinee.tpe <:< 
defn.IntType) (scrutinee, defn.IntType) + else if (scrutinee.tpe <:< defn.StringType) (scrutinee, defn.StringType) else (scrutinee.select(nme.toInt), defn.IntType) def primLiteral(lit: Tree): Tree = From 4d8bf775893a020d10042a1e326d293d4bea6f57 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Sat, 18 May 2024 00:00:35 +0200 Subject: [PATCH 084/827] Use SyntheticUnit attachment to detect if has else branch --- compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index db52a74300ef..46a62c73aa77 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -24,6 +24,7 @@ import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Phases.* import dotty.tools.dotc.core.Decorators.em import dotty.tools.dotc.report +import dotty.tools.dotc.ast.Trees.SyntheticUnit /* * @@ -218,10 +219,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val success = new asm.Label val failure = new asm.Label - val hasElse = !elsep.isEmpty && (elsep match { - case Literal(value) if value.tag == UnitTag => false - case _ => true - }) + val hasElse = !elsep.hasAttachment(SyntheticUnit) genCond(condp, success, failure, targetIfNoJump = success) markProgramPoint(success) From 4afb8c79de92463fda855520c11593fd276657b5 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Sat, 18 May 2024 00:03:33 +0200 Subject: [PATCH 085/827] Emit explicit line position for synthetic unit pointing to if condition line --- compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index 46a62c73aa77..565ad72c0d9d 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -248,6 +248,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { if hasElse then genLoadTo(elsep, expectedType, dest) else + lineNumber(tree.cond) genAdaptAndSendToDest(UNIT, expectedType, dest) expectedType end if From 8c52866c98823bb9f4030ef3ffac8cc32e1426cb Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Sat, 18 May 2024 00:05:31 +0200 Subject: [PATCH 086/827] Introduce `untpd.syntheticUnitLiteral` to allow detection of explicit unit provided by user from synthetic unit introduced by compiler --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 8 ++++---- compiler/src/dotty/tools/dotc/ast/untpd.scala | 6 +++++- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 8 ++++---- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- compiler/src/dotty/tools/repl/ReplCompiler.scala | 2 +- 5 files changed, 15 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index b1b771bc7512..b1c70d0d3d36 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -188,7 +188,7 @@ object desugar { if isSetterNeeded(vdef) then val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) - val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral + val setterRhs = 
if (vdef.rhs.isEmpty) EmptyTree else syntheticUnitLiteral val setter = cpy.DefDef(vdef)( name = valName.setterName, paramss = (setterParam :: Nil) :: Nil, @@ -1489,7 +1489,7 @@ object desugar { def block(tree: Block)(using Context): Block = tree.expr match { case EmptyTree => cpy.Block(tree)(tree.stats, - unitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) + syntheticUnitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) case _ => tree } @@ -2013,7 +2013,7 @@ object desugar { case ts: Thicket => ts.trees.tail case t => Nil } map { - case Block(Nil, EmptyTree) => unitLiteral // for s"... ${} ..." + case Block(Nil, EmptyTree) => syntheticUnitLiteral // for s"... ${} ..." case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala case t => t } @@ -2046,7 +2046,7 @@ object desugar { val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) flatTree(pats1 map (makePatDef(tree, mods, _, rhs))) case ext: ExtMethods => - Block(List(ext), unitLiteral.withSpan(ext.span)) + Block(List(ext), syntheticUnitLiteral.withSpan(ext.span)) case f: FunctionWithMods if f.hasErasedParams => makeFunctionWithValDefs(f, pt) } desugared.withSpan(tree.span) diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 64f9fb4df95e..c42e8f71246d 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -495,7 +495,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def InferredTypeTree(tpe: Type)(using Context): TypedSplice = TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe)) - def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())).withAttachment(SyntheticUnit, ()) + def unitLiteral(implicit src: SourceFile): Literal = + Literal(Constant(())) + + def syntheticUnitLiteral(implicit src: SourceFile): Literal = + unitLiteral.withAttachment(SyntheticUnit, ()) def ref(tp: NamedType)(using Context): Tree = TypedSplice(tpd.ref(tp)) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index e28ba5fd669e..4c13934f3473 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1725,7 +1725,7 @@ object Parsers { case arg => arg val args1 = args.mapConserve(sanitize) - + if in.isArrow || isPureArrow || erasedArgs.contains(true) then functionRest(args) else @@ -2424,7 +2424,7 @@ object Parsers { in.nextToken(); val expr = subExpr() if expr.span.exists then expr - else unitLiteral // finally without an expression + else syntheticUnitLiteral // finally without an expression } else { if handler.isEmpty then @@ -3921,10 +3921,10 @@ object Parsers { val stats = selfInvocation() :: ( if (isStatSep) { in.nextToken(); blockStatSeq() } else Nil) - Block(stats, unitLiteral) + Block(stats, syntheticUnitLiteral) } } - else Block(selfInvocation() :: Nil, unitLiteral) + else Block(selfInvocation() :: Nil, syntheticUnitLiteral) /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs} */ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ae50d626cb1f..bccf7b952b0c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2221,7 +2221,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // because we do not know the internal 
type params and method params. // Hence no adaptation is possible, and we assume WildcardType as prototype. (from, proto) - val expr1 = typedExpr(tree.expr orElse untpd.unitLiteral.withSpan(tree.span), proto) + val expr1 = typedExpr(tree.expr orElse untpd.syntheticUnitLiteral.withSpan(tree.span), proto) assignType(cpy.Return(tree)(expr1, from)) end typedReturn diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala index d69173cb6d88..f909abfc129a 100644 --- a/compiler/src/dotty/tools/repl/ReplCompiler.scala +++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala @@ -159,7 +159,7 @@ class ReplCompiler extends Compiler: def wrap(trees: List[untpd.Tree]): untpd.PackageDef = { import untpd.* - val valdef = ValDef("expr".toTermName, TypeTree(), Block(trees, unitLiteral).withSpan(Span(0, expr.length))) + val valdef = ValDef("expr".toTermName, TypeTree(), Block(trees, syntheticUnitLiteral).withSpan(Span(0, expr.length))) val tmpl = Template(emptyConstructor, Nil, Nil, EmptyValDef, List(valdef)) val wrapper = TypeDef("$wrapper".toTypeName, tmpl) .withMods(Modifiers(Final)) From af75f3b266f8ddca74e99c73c67e59995c4729e3 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Sat, 18 May 2024 00:06:31 +0200 Subject: [PATCH 087/827] Add unit tests for issue 18238 - ensure we generate correct line positions for if conditions --- .../backend/jvm/SourcePositionsTest.scala | 116 ++++++++++++++++++ 1 file changed, 116 insertions(+) create mode 100644 compiler/test/dotty/tools/backend/jvm/SourcePositionsTest.scala diff --git a/compiler/test/dotty/tools/backend/jvm/SourcePositionsTest.scala b/compiler/test/dotty/tools/backend/jvm/SourcePositionsTest.scala new file mode 100644 index 000000000000..7bb52260c366 --- /dev/null +++ b/compiler/test/dotty/tools/backend/jvm/SourcePositionsTest.scala @@ -0,0 +1,116 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import org.junit.Assert._ +import org.junit.Test + +class SourcePositionsTest extends DottyBytecodeTest: + import ASMConverters._ + + @Test def issue18238_a(): Unit = { + val code = + """ + |class Test { + | def test(): Unit = { + | var x = 3 + | var y = 2 + | while(true) { + | if (x < y) + | if (x >= y) + | x += 1 + | else + | y -= 1 + | } + | } + |}""".stripMargin + + checkBCode(code) { dir => + val testClass = loadClassNode(dir.lookupName("Test.class", directory = false).input, skipDebugInfo = false) + val testMethod = getMethod(testClass, "test") + val lineNumbers = instructionsFromMethod(testMethod).collect{case ln: LineNumber => ln} + val expected = List( + LineNumber(4, Label(0)), // var x + LineNumber(5, Label(4)), // var y + LineNumber(6, Label(8)), // while(true) + LineNumber(7, Label(13)), // if (x < y) + LineNumber(8, Label(18)), // if (x >= y) + LineNumber(9, Label(23)), // x += 1 + LineNumber(11, Label(27)), // y -= 1 + LineNumber(7, Label(32)) // point back to `if (x < y) + ) + assertEquals(expected, lineNumbers) + } + } + + @Test def issue18238_b(): Unit = { + val code = + """ + |class Test { + | def test(): Unit = { + | var x = 3 + | var y = 2 + | while(true) { + | if (x < y) + | if (x >= y) + | x += 1 + | else + | y -= 1 + | else () + | } + | } + |}""".stripMargin + + checkBCode(code) { dir => + val testClass = loadClassNode(dir.lookupName("Test.class", directory = false).input, skipDebugInfo = false) + val testMethod = getMethod(testClass, "test") + val lineNumbers = instructionsFromMethod(testMethod).collect{case ln: LineNumber => ln} + val expected = List( + 
LineNumber(4, Label(0)), // var x + LineNumber(5, Label(4)), // var y + LineNumber(6, Label(8)), // while(true) + LineNumber(7, Label(13)), // if (x < y) + LineNumber(8, Label(18)), // if (x >= y) + LineNumber(9, Label(23)), // x += 1 + LineNumber(11, Label(27)), // y -= 1 + LineNumber(12, Label(32)) // else () + ) + assertEquals(expected, lineNumbers) + } + } + + @Test def issue18238_c(): Unit = { + val code = + """ + |class Test { + | def test(): Unit = { + | var x = 3 + | var y = 2 + | while(true) { + | if (x < y) + | if (x >= y) + | x += 1 + | else + | y -= 1 + | println() + | } + | } + |}""".stripMargin + + checkBCode(code) { dir => + val testClass = loadClassNode(dir.lookupName("Test.class", directory = false).input, skipDebugInfo = false) + val testMethod = getMethod(testClass, "test") + val lineNumbers = instructionsFromMethod(testMethod).collect{case ln: LineNumber => ln} + val expected = List( + LineNumber(4, Label(0)), // var x + LineNumber(5, Label(4)), // var y + LineNumber(6, Label(8)), // while(true) + LineNumber(7, Label(13)), // if (x < y) + LineNumber(8, Label(18)), // if (x >= y) + LineNumber(9, Label(23)), // x += 1 + LineNumber(11, Label(27)), // y -= 1 + LineNumber(12, Label(31)) // println() + ) + assertEquals(expected, lineNumbers) + } + } From f0560b208cdef274263858cd612e528fbe29bb5b Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 13 May 2024 21:24:40 +0200 Subject: [PATCH 088/827] Don't re-balance AndTypes arising from supertypes #20284 started breaking since we now balance AndTypes to avoid performance drops. But (re-)balancing an AndType interferes with the logic that determines which symbol is referred by a super select. This is fixed by two changes: - Form types of super with `AndType` instead of `&` - Don't simplify types of super since that would rebalance the underlying AndTypes. Fixes #20284 --- .../src/dotty/tools/dotc/core/TypeOps.scala | 4 ++ .../dotty/tools/dotc/typer/TypeAssigner.scala | 2 +- tests/run/i20284.check | 15 ++++++ tests/run/i20284.scala | 54 +++++++++++++++++++ 4 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 tests/run/i20284.check create mode 100644 tests/run/i20284.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 1282b77f013e..3bc7a7223abb 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -190,6 +190,10 @@ object TypeOps: // Mapping over a skolem creates a new skolem which by definition won't // be =:= to the original one. tp + case tp: SuperType => + // Mapping a supertype might re-balance an AndType which is not permitted since + // we need the original order of parents for current super resolution. 
+ tp case _ => mapOver } diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 64722d51708c..2be81a4222cd 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -261,7 +261,7 @@ trait TypeAssigner { else if (ctx.erasedTypes) cls.info.firstParent.typeConstructor else { val ps = cls.classInfo.parents - if (ps.isEmpty) defn.AnyType else ps.reduceLeft((x: Type, y: Type) => x & y) + if ps.isEmpty then defn.AnyType else ps.reduceLeft(AndType(_, _)) } SuperType(cls.thisType, owntype) diff --git a/tests/run/i20284.check b/tests/run/i20284.check new file mode 100644 index 000000000000..69e714e43e38 --- /dev/null +++ b/tests/run/i20284.check @@ -0,0 +1,15 @@ +Test 1 +D +B +C +A +Test 2 +D +B +C +A +Test 3 +D +B +C +A diff --git a/tests/run/i20284.scala b/tests/run/i20284.scala new file mode 100644 index 000000000000..a47fda6e1457 --- /dev/null +++ b/tests/run/i20284.scala @@ -0,0 +1,54 @@ +trait A { + def print: Unit = println("A") +} + +trait B extends A { + override def print: Unit = { + println("B") + super.print + } +} + +trait C extends A { + override def print: Unit = { + println("C") + super.print + } +} + +trait D extends B { + override def print: Unit = { + println("D") + super.print + } +} + +trait BB extends B + +trait X +trait Y +trait Z + +class Test1 extends C with B with BB with D with X with Y with Z: + override def print: Unit = { + println("Test 1") + super.print + } + +class Test2 extends C with B with BB with D with X with Y { + override def print: Unit = { + println("Test 2") + super.print + } +} + +class Test3 extends X with Y with Z with C with B with BB with D { + override def print: Unit = { + println("Test 3") + super.print + } +} +@main def Test = + new Test1().print + new Test2().print + new Test3().print From a7ac03eb636e683eeb4d6a0e7c4532c34b7ba245 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 17 May 2024 15:47:20 +0200 Subject: [PATCH 089/827] Adapt toInstantiate in interpolateTypeVars to consider c6081770 constrainIfDependentParamRef can now not only instantiate the tvar being constrained, but also tvars having already been added to buf. We simply re-filter buf at the end as this should be a rare occurrence. 
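As a minimal sketch of the re-filtering step (the `TVar` class and `demo` below are invented for illustration; the real code works on dotc type variables collected in `buf`), entries gathered earlier can become instantiated as a side effect of constraining later ones, so the buffer is filtered once more before being returned:

    // Illustration only: stands in for a type variable that may get
    // instantiated while later entries are still being processed.
    final class TVar(val name: String):
      var instantiated: Boolean = false

    @main def demo(): Unit =
      val a = TVar("A")
      val b = TVar("B")
      val buf = List(a, b)
      // constraining `b` happens to instantiate `a` as a side effect
      a.instantiated = true
      // hence the final re-filter before instantiation
      println(buf.filterNot(_.instantiated).map(_.name)) // List(B)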
[test_scala2_library_tasty] --- compiler/src/dotty/tools/dotc/typer/Inferencing.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index ed37a869d612..92be3130c99d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -706,7 +706,9 @@ trait Inferencing { this: Typer => else typr.println(i"no interpolation for nonvariant $tvar in $state") ) - buf.toList + // constrainIfDependentParamRef could also have instantiated tvars added to buf before the check + buf.filterNot(_._1.isInstantiated).toList + end toInstantiate def typeVarsIn(xs: ToInstantiate): TypeVars = xs.foldLeft(SimpleIdentitySet.empty: TypeVars)((tvs, tvi) => tvs + tvi._1) From 81322889ffa41e2508aaaf06125223d8b444f319 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 19 May 2024 17:56:44 +0200 Subject: [PATCH 090/827] Treat 3.5-migration the same as 3.5 for a warning Treat 3.5-migration the same as 3.5 for a warning about implicit priority change Fixes #20420 --- .../dotty/tools/dotc/typer/Implicits.scala | 4 +-- tests/warn/i20420.scala | 27 +++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 tests/warn/i20420.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index fd22f0ec5529..54821444aed6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1311,14 +1311,14 @@ trait Implicits: else var cmp = comp(using searchContext()) val sv = Feature.sourceVersion - if sv == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then + if sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if cmp != prev then def choice(c: Int) = c match case -1 => "the second alternative" case 1 => "the first alternative" case _ => "none - it's ambiguous" - if sv == SourceVersion.`3.5` then + if sv.stable == SourceVersion.`3.5` then report.warning( em"""Given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} will change |Current choice : ${choice(prev)} diff --git a/tests/warn/i20420.scala b/tests/warn/i20420.scala new file mode 100644 index 000000000000..d28270509f91 --- /dev/null +++ b/tests/warn/i20420.scala @@ -0,0 +1,27 @@ +//> using options -source 3.5-migration + +final class StrictEqual[V] +final class Less[V] +type LessEqual[V] = Less[V] | StrictEqual[V] + +object TapirCodecIron: + trait ValidatorForPredicate[Value, Predicate] + trait PrimitiveValidatorForPredicate[Value, Predicate] + extends ValidatorForPredicate[Value, Predicate] + + given validatorForLessEqual[N: Numeric, NM <: N](using + ValueOf[NM] + ): PrimitiveValidatorForPredicate[N, LessEqual[NM]] = ??? + given validatorForDescribedOr[N, P](using + IsDescription[P] + ): ValidatorForPredicate[N, P] = ??? + + trait IsDescription[A] + object IsDescription: + given derived[A]: IsDescription[A] = ??? 
+ +@main def Test = { + import TapirCodecIron.{*, given} + type IntConstraint = LessEqual[3] + summon[ValidatorForPredicate[Int, IntConstraint]] // warn +} \ No newline at end of file From fb4687cfbe46e734f82cf9a516dbec29594b31f6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 May 2024 13:34:36 +0000 Subject: [PATCH 091/827] Bump scalacenter/sbt-dependency-submission from 2 to 3 Bumps [scalacenter/sbt-dependency-submission](https://github.com/scalacenter/sbt-dependency-submission) from 2 to 3. - [Release notes](https://github.com/scalacenter/sbt-dependency-submission/releases) - [Commits](https://github.com/scalacenter/sbt-dependency-submission/compare/v2...v3) --- updated-dependencies: - dependency-name: scalacenter/sbt-dependency-submission dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/dependency-graph.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/dependency-graph.yml b/.github/workflows/dependency-graph.yml index e96c3efbc8aa..d4be398148c7 100644 --- a/.github/workflows/dependency-graph.yml +++ b/.github/workflows/dependency-graph.yml @@ -9,4 +9,4 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: scalacenter/sbt-dependency-submission@v2 + - uses: scalacenter/sbt-dependency-submission@v3 From bfd0b4f3c77baf3bab1b9a46926c84291ee596c4 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 20 May 2024 17:25:34 +0200 Subject: [PATCH 092/827] Bump default source version to 3.5 --- compiler/src/dotty/tools/dotc/config/SourceVersion.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 3a44021af2df..935b95003729 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -28,7 +28,7 @@ enum SourceVersion: def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.4` + def defaultSourceVersion = `3.5` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. 
*/ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) From d2eea139abd2fe36c8a066216f6ad16badda762c Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 20 May 2024 17:37:46 +0200 Subject: [PATCH 093/827] Address compatibility issues in scala2-library-bootstrapped --- .../src/scala/Array.scala | 690 +++++++ .../src/scala/collection/ArrayOps.scala | 1664 +++++++++++++++++ .../src/scala/collection/Factory.scala | 784 ++++++++ .../src/scala/collection/Iterable.scala | 1043 +++++++++++ .../src/scala/collection/SortedMap.scala | 220 +++ .../StrictOptimizedSortedMapOps.scala | 46 + .../generic/DefaultSerializationProxy.scala | 87 + .../scala/collection/mutable/ArraySeq.scala | 354 ++++ .../mutable/CollisionProofHashMap.scala | 888 +++++++++ 9 files changed, 5776 insertions(+) create mode 100644 scala2-library-bootstrapped/src/scala/Array.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/Factory.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/Iterable.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/SortedMap.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala diff --git a/scala2-library-bootstrapped/src/scala/Array.scala b/scala2-library-bootstrapped/src/scala/Array.scala new file mode 100644 index 000000000000..d2098a76f32f --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/Array.scala @@ -0,0 +1,690 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +//import scala.collection.generic._ +import scala.collection.{Factory, immutable, mutable} +import mutable.ArrayBuilder +import immutable.ArraySeq +import scala.language.implicitConversions +import scala.reflect.{ClassTag, classTag} +import scala.runtime.BoxedUnit +import scala.runtime.ScalaRunTime +import scala.runtime.ScalaRunTime.{array_apply, array_update} + +/** Utility methods for operating on arrays. + * For example: + * {{{ + * val a = Array(1, 2) + * val b = Array.ofDim[Int](2) + * val c = Array.concat(a, b) + * }}} + * where the array objects `a`, `b` and `c` have respectively the values + * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. 
+ */ +object Array { + val emptyBooleanArray = new Array[Boolean](0) + val emptyByteArray = new Array[Byte](0) + val emptyCharArray = new Array[Char](0) + val emptyDoubleArray = new Array[Double](0) + val emptyFloatArray = new Array[Float](0) + val emptyIntArray = new Array[Int](0) + val emptyLongArray = new Array[Long](0) + val emptyShortArray = new Array[Short](0) + val emptyObjectArray = new Array[Object](0) + + /** Provides an implicit conversion from the Array object to a collection Factory */ + implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) + @SerialVersionUID(3L) + private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) + def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] + } + + /** + * Returns a new [[scala.collection.mutable.ArrayBuilder]]. + */ + def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](using t) + + /** Build an array from the iterable collection. + * + * {{{ + * scala> val a = Array.from(Seq(1, 5)) + * val a: Array[Int] = Array(1, 5) + * + * scala> val b = Array.from(Range(1, 5)) + * val b: Array[Int] = Array(1, 2, 3, 4) + * }}} + * + * @param it the iterable collection + * @return an array consisting of elements of the iterable collection + */ + def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { + case it: Iterable[A] => it.toArray[A] + case _ => it.iterator.toArray[A] + } + + private def slowcopy(src : AnyRef, + srcPos : Int, + dest : AnyRef, + destPos : Int, + length : Int): Unit = { + var i = srcPos + var j = destPos + val srcUntil = srcPos + length + while (i < srcUntil) { + array_update(dest, j, array_apply(src, i)) + i += 1 + j += 1 + } + } + + /** Copy one array to another. + * Equivalent to Java's + * `System.arraycopy(src, srcPos, dest, destPos, length)`, + * except that this also works for polymorphic and boxed arrays. + * + * Note that the passed-in `dest` array will be modified by this call. + * + * @param src the source array. + * @param srcPos starting position in the source array. + * @param dest destination array. + * @param destPos starting position in the destination array. + * @param length the number of array elements to be copied. + * + * @see `java.lang.System#arraycopy` + */ + def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { + val srcClass = src.getClass + if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) + java.lang.System.arraycopy(src, srcPos, dest, destPos, length) + else + slowcopy(src, srcPos, dest, destPos, length) + } + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength)`, + * except that this works for primitive and object arrays in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyOf[A](original: Array[A], newLength: Int): Array[A] = ((original: @unchecked) match { + case x: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] + case x: Array[AnyRef] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Int] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Double] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Long] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Float] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Char] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Byte] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Short] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Boolean] => java.util.Arrays.copyOf(x, newLength) + }).asInstanceOf[Array[A]] + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. The new array can have + * a different type than the original one as long as the values are + * assignment-compatible. When copying between primitive and object arrays, + * boxing and unboxing are supported. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength, newType)`, + * except that this works for all combinations of primitive and object arrays + * in a single method. + * + * @see `java.util.Arrays#copyOf` + */ + def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { + val runtimeClass = ct.runtimeClass + if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] + else { + val destClass = runtimeClass.asInstanceOf[Class[A]] + if (destClass.isAssignableFrom(original.getClass.getComponentType)) { + if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) + else { + val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] + java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] + } + } else { + val dest = new Array[A](newLength) + Array.copy(original, 0, dest, 0, original.length) + dest + } + } + } + + private def newUnitArray(len: Int): Array[Unit] = { + val result = new Array[Unit](len) + java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) + result + } + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = new Array[T](0) + + /** Creates an array with given elements. + * + * @param xs the elements to put in the array + * @return an array containing all elements from xs. + */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } + def apply[T: ClassTag](xs: T*): Array[T] = { + val len = xs.length + xs match { + case wa: immutable.ArraySeq[_] if wa.unsafeArray.getClass.getComponentType == classTag[T].runtimeClass => + // We get here in test/files/run/sd760a.scala, `Array[T](t)` for + // a specialized type parameter `T`. While we still pay for two + // copies of the array it is better than before when we also boxed + // each element when populating the result. + ScalaRunTime.array_clone(wa.unsafeArray).asInstanceOf[Array[T]] + case _ => + val array = new Array[T](len) + val iterator = xs.iterator + var i = 0 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + } + + /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { + val array = new Array[Boolean](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Byte, xs: Byte*): Array[Byte] = { + val array = new Array[Byte](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Short, xs: Short*): Array[Short] = { + val array = new Array[Short](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Char, xs: Char*): Array[Char] = { + val array = new Array[Char](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Int, xs: Int*): Array[Int] = { + val array = new Array[Int](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Long, xs: Long*): Array[Long] = { + val array = new Array[Long](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Float, xs: Float*): Array[Float] = { + val array = new Array[Float](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Double, xs: Double*): Array[Double] = { + val array = new Array[Double](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Unit` objects */ + def apply(x: Unit, xs: Unit*): Array[Unit] = { + val array = new Array[Unit](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = + new Array[T](n1) + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { + val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) + for (i <- 0 until n1) arr(i) = new Array[T](n2) + arr + // tabulate(n1)(_ => ofDim[T](n2)) + } + /** Creates a 3-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = + tabulate(n1)(_ => ofDim[T](n2, n3)) + /** Creates a 4-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4)) + /** Creates a 5-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) + + /** Concatenates all arrays into a single array. + * + * @param xss the given arrays + * @return the array created from concatenating `xss` + */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = { + val b = newBuilder[T] + b.sizeHint(xss.map(_.length).sum) + for (xs <- xss) b ++= xs + b.result() + } + + /** Returns an array that contains the results of some element computation a number + * of times. + * + * Note that this means that `elem` is computed a total of n times: + * {{{ + * scala> Array.fill(3){ math.random } + * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) + * }}} + * + * @param n the number of elements desired + * @param elem the element computation + * @return an Array of size n, where each element contains the result of computing + * `elem`. + */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = elem + i += 1 + } + array + } + } + + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Returns a three-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Returns a four-dimensional array that contains the results of some element + * computation a number of times. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Returns a five-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. + * + * @param n The number of elements in the array + * @param f The function computing element values + * @return An `Array` consisting of elements `f(0),f(1), ..., f(n - 1)` + */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = f(i) + i += 1 + } + array + } + } + + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Returns a three-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Returns a four-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Returns a five-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Returns an array containing a sequence of increasing integers in a range. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @return the array with values in range `start, start + 1, ..., end - 1` + * up to, but excluding, `end`. + */ + def range(start: Int, end: Int): Array[Int] = range(start, end, 1) + + /** Returns an array containing equally spaced values in some integer interval. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @param step the increment value of the array (may not be zero) + * @return the array with values in `start, start + step, ...` up to, but excluding `end` + */ + def range(start: Int, end: Int, step: Int): Array[Int] = { + if (step == 0) throw new IllegalArgumentException("zero step") + val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) + + var n = 0 + var i = start + while (if (step < 0) end < i else i < end) { + array(n) = i + i += step + n += 1 + } + array + } + + /** Returns an array containing repeated applications of a function to a start value. + * + * @param start the start value of the array + * @param len the number of elements returned by the array + * @param f the function that is repeatedly applied + * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { + if (len > 0) { + val array = new Array[T](len) + var acc = start + var i = 1 + array(0) = acc + + while (i < len) { + acc = f(acc) + array(i) = acc + i += 1 + } + array + } else { + empty[T] + } + } + + /** Compare two arrays per element. + * + * A more efficient version of `xs.sameElements(ys)`. + * + * Note that arrays are invariant in Scala, but it may + * be sound to cast an array of arbitrary reference type + * to `Array[AnyRef]`. Arrays on the JVM are covariant + * in their element type. + * + * `Array.equals(xs.asInstanceOf[Array[AnyRef]], ys.asInstanceOf[Array[AnyRef]])` + * + * @param xs an array of AnyRef + * @param ys an array of AnyRef + * @return true if corresponding elements are equal + */ + def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = + (xs eq ys) || + (xs.length == ys.length) && { + var i = 0 + while (i < xs.length && xs(i) == ys(i)) i += 1 + i >= xs.length + } + + /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. 
+ * + * @param x the selector value + * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` + */ + def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) + + final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[T] = this + def lengthCompare(len: Int): Int = a.lengthCompare(len) + def apply(i: Int): T = a(i) + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def toSeq: scala.Seq[T] = a.toSeq // clones the array + } +} + +/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation + * for Java's `T[]`. + * + * {{{ + * val numbers = Array(1, 2, 3, 4) + * val first = numbers(0) // read the first element + * numbers(3) = 100 // replace the 4th array element with 100 + * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two + * }}} + * + * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above + * example code. + * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to + * `update(Int, T)`. + * + * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion + * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion + * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). + * Both types make available many of the standard operations found in the Scala collections API. + * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, + * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. + * + * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. For instance, + * consider the following code: + * + * {{{ + * val arr = Array(1, 2, 3) + * val arrReversed = arr.reverse + * val seqReversed : collection.Seq[Int] = arr.reverse + * }}} + * + * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring + * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed + * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another + * `ArraySeq`. + * + * @see [[https://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[https://docs.scala-lang.org/sips/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. 
+ * @hideImplicitConversion scala.Predef.booleanArrayOps + * @hideImplicitConversion scala.Predef.byteArrayOps + * @hideImplicitConversion scala.Predef.charArrayOps + * @hideImplicitConversion scala.Predef.doubleArrayOps + * @hideImplicitConversion scala.Predef.floatArrayOps + * @hideImplicitConversion scala.Predef.intArrayOps + * @hideImplicitConversion scala.Predef.longArrayOps + * @hideImplicitConversion scala.Predef.refArrayOps + * @hideImplicitConversion scala.Predef.shortArrayOps + * @hideImplicitConversion scala.Predef.unitArrayOps + * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray + * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray + * @define coll array + * @define Coll `Array` + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define collectExample + * @define undefinedorder + */ +final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { + + /** The length of the array */ + def length: Int = throw new Error() + + /** The element at given index. + * + * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. + * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. + * + * @param i the index + * @return the element at the given index + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def apply(i: Int): T = throw new Error() + + /** Update the element at given index. + * + * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. + * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. + * + * @param i the index + * @param x the value to be written at index `i` + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def update(i: Int, x: T): Unit = { throw new Error() } + + /** Clone the Array. + * + * @return A clone of the Array. + */ + override def clone(): Array[T] = throw new Error() +} diff --git a/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala b/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala new file mode 100644 index 000000000000..d4659bbb0dba --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala @@ -0,0 +1,1664 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import java.lang.Math.{max, min} +import java.util.Arrays + +import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + booleanArrayOps => _, + byteArrayOps => _, + charArrayOps => _, + doubleArrayOps => _, + floatArrayOps => _, + intArrayOps => _, + longArrayOps => _, + refArrayOps => _, + shortArrayOps => _, + unitArrayOps => _, + genericWrapArray => _, + wrapRefArray => _, + wrapIntArray => _, + wrapDoubleArray => _, + wrapLongArray => _, + wrapFloatArray => _, + wrapCharArray => _, + wrapByteArray => _, + wrapShortArray => _, + wrapBooleanArray => _, + wrapUnitArray => _, + wrapString => _, + copyArrayToImmutableIndexedSeq => _, + _ +} +import scala.collection.Stepper.EfficientSplit +import scala.collection.immutable.Range +import scala.collection.mutable.ArrayBuilder +import scala.math.Ordering +import scala.reflect.ClassTag +import scala.util.Sorting + +object ArrayOps { + + @SerialVersionUID(3L) + private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + def length = xs.length + def apply(n: Int) = xs(n) + override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + } + + /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter[A](p: A => Boolean, xs: Array[A]) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + while(i < len) { + val x = xs(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B: ClassTag](f: A => B): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val x = xs(i) + if(p(x)) b += f(x) + i = i + 1 + } + b.result() + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Creates a new non-strict filter which combines this filter with the given predicate. 
*/ + def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + } + + @SerialVersionUID(3L) + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = 0 + private[this] val len = xs.length + override def knownSize: Int = len - pos + def hasNext: Boolean = pos < len + def next(): A = { + if (pos >= xs.length) Iterator.empty.next() + val r = xs(pos) + pos += 1 + r + } + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } + this + } + } + + @SerialVersionUID(3L) + private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = xs.length-1 + def hasNext: Boolean = pos >= 0 + def next(): A = { + if (pos < 0) Iterator.empty.next() + val r = xs(pos) + pos -= 1 + r + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) pos = Math.max( -1, pos - n) + this + } + } + + @SerialVersionUID(3L) + private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private[this] var pos = 0 + def hasNext: Boolean = pos < xs.length + def next(): Array[A] = { + if(pos >= xs.length) throw new NoSuchElementException + val r = new ArrayOps(xs).slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to + * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. + */ + private final val MaxStableSortLength = 300 + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** This class serves as a wrapper for `Array`s with many of the operations found in + * indexed sequences. Where needed, instances of arrays are implicitly converted + * into this class. There is generally no reason to create an instance explicitly or use + * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on + * the implicit conversion to `ArrayOps` when calling a method (which does not actually + * allocate an instance of `ArrayOps` because it is a value class). + * + * Neither `Array` nor `ArrayOps` are proper collection types + * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and + * `immutable.ArraySeq` serve this purpose. + * + * The difference between this class and `ArraySeq`s is that calling transformer methods such as + * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. + * + * @tparam A type of the elements contained in this array. + */ +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + + @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def size: Int = xs.length + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def knownSize: Int = xs.length + + /** Tests whether the array is empty. + * + * @return `true` if the array contains no elements, `false` otherwise. + */ + @`inline` def isEmpty: Boolean = xs.length == 0 + + /** Tests whether the array is not empty. 
+ * + * @return `true` if the array contains at least one element, `false` otherwise. + */ + @`inline` def nonEmpty: Boolean = xs.length != 0 + + /** Selects the first element of this array. + * + * @return the first element of this array. + * @throws NoSuchElementException if the array is empty. + */ + def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") + + /** Selects the last element. + * + * @return The last element of this array. + * @throws NoSuchElementException If the array is empty. + */ + def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array") + + /** Optionally selects the first element. + * + * @return the first element of this array if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = if(isEmpty) None else Some(head) + + /** Optionally selects the last element. + * + * @return the last element of this array$ if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if(isEmpty) None else Some(last) + + /** Compares the size of this array to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + */ + def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize) + + /** Compares the length of this array to a test value. + * + * @param len the test value that gets compared with the length. + * @return A value `x` where + * {{{ + * x < 0 if this.length < len + * x == 0 if this.length == len + * x > 0 if this.length > len + * }}} + */ + def lengthCompare(len: Int): Int = Integer.compare(xs.length, len) + + /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int` + * because `size` is known and comparison is constant-time. + * + * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + def sizeIs: Int = xs.length + + /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` + * because `length` is known and comparison is constant-time. + * + * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.lengthIs < len // this.lengthCompare(len) < 0 + * this.lengthIs <= len // this.lengthCompare(len) <= 0 + * this.lengthIs == len // this.lengthCompare(len) == 0 + * this.lengthIs != len // this.lengthCompare(len) != 0 + * this.lengthIs >= len // this.lengthCompare(len) >= 0 + * this.lengthIs > len // this.lengthCompare(len) > 0 + * }}} + */ + def lengthIs: Int = xs.length + + /** Selects an interval of elements. The returned array is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this array. + * @param until the lowest index to EXCLUDE from this array. 
+ * @return an array containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this array. + */ + def slice(from: Int, until: Int): Array[A] = { + import java.util.Arrays.copyOfRange + val lo = max(from, 0) + val hi = min(until, xs.length) + if (hi > lo) { + (((xs: Array[_]): @unchecked) match { + case x: Array[AnyRef] => copyOfRange(x, lo, hi) + case x: Array[Int] => copyOfRange(x, lo, hi) + case x: Array[Double] => copyOfRange(x, lo, hi) + case x: Array[Long] => copyOfRange(x, lo, hi) + case x: Array[Float] => copyOfRange(x, lo, hi) + case x: Array[Char] => copyOfRange(x, lo, hi) + case x: Array[Byte] => copyOfRange(x, lo, hi) + case x: Array[Short] => copyOfRange(x, lo, hi) + case x: Array[Boolean] => copyOfRange(x, lo, hi) + }).asInstanceOf[Array[A]] + } else new Array[A](0) + } + + /** The rest of the array without its first element. */ + def tail: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) + + /** The initial part of the array without its last element. */ + def init: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) + + /** Iterates over the tails of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this array + */ + def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) + + /** Iterates over the inits of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this array + */ + def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) + + /** An array containing the first `n` elements of this array. */ + def take(n: Int): Array[A] = slice(0, n) + + /** The rest of the array without its `n` first elements. */ + def drop(n: Int): Array[A] = slice(n, xs.length) + + /** An array containing the last `n` elements of this array. */ + def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) + + /** The rest of the array without its `n` last elements. */ + def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) + + /** Takes longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest prefix of this array whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val hi = if(i < 0) xs.length else i + slice(0, hi) + } + + /** Drops longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this array whose first element + * does not satisfy the predicate `p`. 
+ */ + def dropWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val lo = if(i < 0) xs.length else i + slice(lo, xs.length) + } + + def iterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = (shape.shape: @unchecked) match { + case StepperShape.ReferenceShape => (xs: Any) match { + case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) + case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) + } + case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) + case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) + case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) + case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) + case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) + case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) + case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length) + } + s.asInstanceOf[S with EfficientSplit] + } + + /** Partitions elements in fixed size arrays. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing arrays of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) + + /** Splits this array into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this array whose + * elements all satisfy `p`, and the rest of this array. + */ + def span(p: A => Boolean): (Array[A], Array[A]) = { + val i = indexWhere(x => !p(x)) + val idx = if(i < 0) xs.length else i + (slice(0, idx), slice(idx, xs.length)) + } + + /** Splits this array into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of arrays consisting of the first `n` + * elements of this array, and the other elements. + */ + def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. 
*/ + def partition(p: A => Boolean): (Array[A], Array[A]) = { + val res1, res2 = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + (if(p(x)) res1 else res2) += x + i += 1 + } + (res1.result(), res2.result()) + } + + /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. + * + * Example: + * {{{ + * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == (Array(1, 2, 3), + * // Array(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] + * + * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. */ + def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + val res1 = ArrayBuilder.make[A1] + val res2 = ArrayBuilder.make[A2] + var i = 0 + while(i < xs.length) { + f(xs(i)) match { + case Left(x) => res1 += x + case Right(x) => res2 += x + } + i += 1 + } + (res1.result(), res2.result()) + } + + /** Returns a new array with the elements in reversed order. */ + @inline def reverse: Array[A] = { + val len = xs.length + val res = new Array[A](len) + var i = 0 + while(i < len) { + res(len-i-1) = xs(i) + i += 1 + } + res + } + + /** An iterator yielding elements in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. + * + * @return an iterator yielding the elements of this array in reversed order + */ + def reverseIterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Int] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Double] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Long] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Float] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Char] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Short] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + /** Selects all elements of this array which satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that satisfy the given predicate `p`. + */ + def filter(p: A => Boolean): Array[A] = { + val res = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) res += x + i += 1 + } + res.result() + } + + /** Selects all elements of this array which do not satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`. + */ + def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x)) + + /** Sorts this array according to an Ordering. + * + * The sort is stable. 
That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * + * @param ord the ordering to be used to compare elements. + * @return an array consisting of the elements of this array + * sorted according to the ordering `ord`. + */ + def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = { + val len = xs.length + def boxed = if(len < ArrayOps.MaxStableSortLength) { + val a = xs.clone() + Sorting.stableSort(a)(using ord.asInstanceOf[Ordering[A]]) + a + } else { + val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + Array.copyAs[A](a, len) + } + if(len <= 1) xs.clone() + else ((xs: Array[_]) match { + case xs: Array[AnyRef] => + val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a + case xs: Array[Int] => + if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Long] => + if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Char] => + if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Byte] => + if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Short] => + if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Boolean] => + if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a } + else boxed + case xs => boxed + }).asInstanceOf[Array[A]] + } + + /** Sorts this array according to a comparison function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @param lt the comparison function which tests whether + * its first argument precedes its second argument in + * the desired ordering. + * @return an array consisting of the elements of this array + * sorted according to the comparison function `lt`. + */ + def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt)) + + /** Sorts this array according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return an array consisting of the elements of this array + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + */ + def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f) + + /** Creates a non-strict filter of this array. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new array, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * + * @param p the predicate used to test elements. + * @return an object of class `ArrayOps.WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this array + * which satisfy the predicate `p`. 
+ */ + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + + /** Finds index of first occurrence of some value in this array after or at some start index. + * + * @param elem the element value to search for. + * @param from the start index + * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf(elem: A, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(elem == xs(i)) return i + i += 1 + } + -1 + } + + /** Finds index of the first element satisfying some predicate after or at some start index. + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(p(xs(i))) return i + i += 1 + } + -1 + } + + /** Finds index of last occurrence of some value in this array before or at a given end index. + * + * @param elem the element value to search for. + * @param end the end index. + * @return the index `<= end` of the last element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(elem == xs(i)) return i + i -= 1 + } + -1 + } + + /** Finds index of last element satisfying some predicate before or at given end index. + * + * @param p the predicate used to test elements. + * @return the index `<= end` of the last element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(p(xs(i))) return i + i -= 1 + } + -1 + } + + /** Finds the first element of the array satisfying a predicate, if any. + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the array + * that satisfies `p`, or `None` if none exists. + */ + def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = { + val idx = indexWhere(p) + if(idx == -1) None else Some(xs(idx)) + } + + /** Tests whether a predicate holds for at least one element of this array. + * + * @param p the predicate used to test elements. + * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` + */ + def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 + + /** Tests whether a predicate holds for all elements of this array. + * + * @param p the predicate used to test elements. + * @return `true` if this array is empty or the given predicate `p` + * holds for all elements of this array, otherwise `false`. + */ + def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { + var i = 0 + while(i < xs.length) { + if(!p(xs(i))) return false + i += 1 + } + true + } + + /** Applies a binary operator to a start value and all elements of this array, + * going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. 
+ * @return the result of inserting `op` between consecutive elements of this array, + * going left to right with the start value `z` on the left: + * {{{ + * op(...op(z, x_1), x_2, ..., x_n) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. + */ + def foldLeft[B](z: B)(op: (B, A) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + val length = xs.length + var v: Any = z + var i = 0 + while(i < length) { + v = op(v, xs(i)) + i += 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException // null-check first helps static analysis of instanceOf + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + } + + /** Produces an array containing cumulative results of applying the binary + * operator going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) + * }}} + * + */ + def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + var v = z + var i = 0 + val res = new Array[B](xs.length + 1) + while(i < xs.length) { + res(i) = v + v = op(v, xs(i)) + i += 1 + } + res(i) = v + res + } + + /** Computes a prefix scan of the elements of the array. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting array + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new array containing the prefix scan of the elements in this array + */ + def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + + /** Produces an array containing cumulative results of applying the binary + * operator going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) + * }}} + * + */ + def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + var v = z + var i = xs.length - 1 + val res = new Array[B](xs.length + 1) + res(xs.length) = z + while(i >= 0) { + v = op(xs(i), v) + res(i) = v + i -= 1 + } + res + } + + /** Applies a binary operator to all elements of this array and a start value, + * going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. 
+ * @return the result of inserting `op` between consecutive elements of this array, + * going right to left with the start value `z` on the right: + * {{{ + * op(x_1, op(x_2, ... op(x_n, z)...)) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. + */ + def foldRight[B](z: B)(op: (A, B) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + var v = z + var i = xs.length - 1 + while(i >= 0) { + v = op(xs(i), v) + i -= 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + + } + + /** Folds the elements of this array using the specified associative binary operator. + * + * @tparam A1 a type parameter for the binary operator, a supertype of `A`. + * @param z a neutral element for the fold operation; may be added to the result + * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. + * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. 
+ */ + def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + val len = xs.length + val ys = new Array[B](len) + if(len > 0) { + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + ys + } + + def mapInPlace(f: A => A): Array[A] = { + var i = 0 + while (i < xs.length) { + xs.update(i, f(xs(i))) + i = i + 1 + } + xs + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Flattens a two-dimensional array by concatenating all its rows + * into a single array. + * + * @tparam B Type of row elements. + * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. + * @return An array obtained by concatenating rows of this array. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val len = xs.length + var size = 0 + var i = 0 + while(i < len) { + xs(i) match { + case it: IterableOnce[_] => + val k = it.knownSize + if(k > 0) size += k + case a: Array[_] => size += a.length + case _ => + } + i += 1 + } + if(size > 0) b.sizeHint(size) + i = 0 + while(i < len) { + b ++= asIterable(xs(i)) + i += 1 + } + b.result() + } + + /** Builds a new array by applying a partial function to all elements of this array + * on which the function is defined. + * + * @param pf the partial function which filters and maps the array. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + val fallback: Any => Any = ArrayOps.fallback + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Finds the first element of the array for which the given partial function is defined, and applies the + * partial function to it. 
*/ + def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { + val fallback: Any => Any = ArrayOps.fallback + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) + i += 1 + } + None + } + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the minimum of the lengths of this array and `that`. + */ + def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + val b = new ArrayBuilder.ofRef[(A, B)]() + val k = that.knownSize + b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + b.result() + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the maximum of the lengths of this array and `that`. + * If this array is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this array, `thatElem` values are used to pad the result. 
+ */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + val b = new ArrayBuilder.ofRef[(A1, B)]() + val k = that.knownSize + b.sizeHint(max(k, xs.length)) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + while(it.hasNext) { + b += ((thisElem, it.next())) + i += 1 + } + while(i < xs.length) { + b += ((xs(i), thatElem)) + i += 1 + } + b.result() + } + + /** Zips this array with its indices. + * + * @return A new array containing pairs consisting of all elements of this array paired with their index. + * Indices start at `0`. + */ + def zipWithIndex: Array[(A, Int)] = { + val b = new Array[(A, Int)](xs.length) + var i = 0 + while(i < xs.length) { + b(i) = ((xs(i), i)) + i += 1 + } + b + } + + /** A copy of this array with an element appended. */ + def appended[B >: A : ClassTag](x: B): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+1) + dest(xs.length) = x + dest + } + + @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + + /** A copy of this array with an element prepended. */ + def prepended[B >: A : ClassTag](x: B): Array[B] = { + val dest = new Array[B](xs.length + 1) + dest(0) = x + Array.copy(xs, 0, dest, 1, xs.length) + dest + } + + @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + + /** A copy of this array with all elements of a collection prepended. */ + def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = prefix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + xs.length) + b.addAll(xs) + b.result() + } + + /** A copy of this array with all elements of an array prepended. */ + def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](prefix, prefix.length+xs.length) + Array.copy(xs, 0, dest, prefix.length, xs.length) + dest + } + + @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + + @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + + /** A copy of this array with all elements of a collection appended. */ + def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = suffix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(xs) + b.addAll(suffix) + b.result() + } + + /** A copy of this array with all elements of an array appended. */ + def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+suffix.length) + Array.copy(suffix, 0, dest, xs.length, suffix.length) + dest + } + + @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + + @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + + /** Tests whether this array contains a given value as an element. + * + * @param elem the element to test. 
+ * @return `true` if this array has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains(elem: A): Boolean = exists (_ == elem) + + /** Returns a copy of this array with patched values. + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original array appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from The start index from which to patch + * @param other The patch values + * @param replaced The number of values in the original array that are replaced by the patch. + */ + def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + val b = ArrayBuilder.make[B] + val k = other.knownSize + val r = if(replaced < 0) 0 else replaced + if(k >= 0) b.sizeHint(xs.length + k - r) + val chunk1 = if(from > 0) min(from, xs.length) else 0 + if(chunk1 > 0) b.addAll(xs, 0, chunk1) + b ++= other + val remaining = xs.length - chunk1 - r + if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining) + b.result() + } + + /** Converts an array of pairs into an array of first elements and an array of second elements. + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this Array is a pair. + * @param ct1 a class tag for `A1` type parameter that is required to create an instance + * of `Array[A1]` + * @param ct2 a class tag for `A2` type parameter that is required to create an instance + * of `Array[A2]` + * @return a pair of Arrays, containing, respectively, the first and second half + * of each element pair of this Array. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + var i = 0 + while (i < xs.length) { + val e = asPair(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + i += 1 + } + (a1, a2) + } + + /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. + * + * @tparam A1 the type of the first of three elements in the triple + * @tparam A2 the type of the second of three elements in the triple + * @tparam A3 the type of the third of three elements in the triple + * @param asTriple an implicit conversion which asserts that the element type + * of this Array is a triple. + * @param ct1 a class tag for T1 type parameter that is required to create an instance + * of Array[T1] + * @param ct2 a class tag for T2 type parameter that is required to create an instance + * of Array[T2] + * @param ct3 a class tag for T3 type parameter that is required to create an instance + * of Array[T3] + * @return a triple of Arrays, containing, respectively, the first, second, and third + * elements from each element triple of this Array. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], + ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + val a3 = new Array[A3](xs.length) + var i = 0 + while (i < xs.length) { + val e = asTriple(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + a3(i) = e._3 + i += 1 + } + (a1, a2, a3) + } + + /** Transposes a two dimensional array. + * + * @tparam B Type of row elements. 
+ * @param asArray A function that converts elements of this array to rows - arrays of type `B`. + * @return An array obtained by replacing elements of this arrays with rows the represent. + */ + def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](using ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- this) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + + /** Selects all the elements of this array ignoring the duplicates. + * + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinct: Array[A] = distinctBy(identity) + + /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying + * the transforming function `f`. + * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinctBy[B](f: A => B): Array[A] = + ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() + + /** A copy of this array with an element value appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @tparam B the element type of the returned array. + * @return a new array consisting of + * all elements of this array followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. + */ + def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + var i = xs.length + val newlen = max(i, len) + val dest = Array.copyAs[B](xs, newlen) + while(i < newlen) { + dest(i) = elem + i += 1 + } + dest + } + + /** Produces the range of all indices of this sequence. + * + * @return a `Range` value from `0` to one less than the length of this array. + */ + def indices: Range = Range(0, xs.length) + + /** Partitions this array into a map of arrays according to some discriminator function. + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. 
+ * @return A map from keys to arrays such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to an array of those elements `x` + * for which `f(x)` equals `k`. + */ + def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { + val m = mutable.Map.empty[K, ArrayBuilder[A]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val key = f(elem) + val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) + bldr += elem + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + /** + * Partitions this array into a map of arrays according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Array[User]): Map[Int, Array[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + val m = mutable.Map.empty[K, ArrayBuilder[B]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val k = key(elem) + val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) + bldr += f(elem) + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq + + def toIndexedSeq: immutable.IndexedSeq[A] = + immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start`. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start` with at most `len` values. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) + if (copied > 0) { + Array.copy(this.xs, 0, xs, start, copied) + } + copied + } + + /** Create a copy of this array with the specified element type. 
*/ + def toArray[B >: A: ClassTag]: Array[B] = { + val destination = new Array[B](xs.length) + @annotation.unused val copied = copyToArray(destination, 0) + //assert(copied == xs.length) + destination + } + + /** Counts the number of elements in this array which satisfy a predicate */ + def count(p: A => Boolean): Int = { + var i, res = 0 + val len = xs.length + while(i < len) { + if(p(xs(i))) res += 1 + i += 1 + } + res + } + + // can't use a default arg because we already have another overload with a default arg + /** Tests whether this array starts with the given array. */ + @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + + /** Tests whether this array contains the given array at a given index. + * + * @param that the array to test + * @param offset the index where the array is searched. + * @return `true` if the array `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + val safeOffset = offset.max(0) + val thatl = that.length + if(thatl > xs.length-safeOffset) thatl == 0 + else { + var i = 0 + while(i < thatl) { + if(xs(i+safeOffset) != that(i)) return false + i += 1 + } + true + } + } + + /** Tests whether this array ends with the given array. + * + * @param that the array to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Array[B]): Boolean = { + val thatl = that.length + val off = xs.length - thatl + if(off < 0) false + else { + var i = 0 + while(i < thatl) { + if(xs(i+off) != that(i)) return false + i += 1 + } + true + } + } + + /** A copy of this array with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + */ + def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") + val dest = toArray[B] + dest(index) = elem + dest + } + + @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which + may not provide the best possible performance. We need them in `ArrayOps` because their return type + mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this array and another sequence. + * + * @param that the sequence of elements to remove + * @return a new array which contains all elements of this array + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] + + /** Computes the multiset intersection between this array and another sequence. 
+ * + * @param that the sequence of elements to intersect with. + * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive groups + * @return An iterator producing arrays of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. + */ + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. + * + * @return An Iterator which traverses the n-element combinations of this array + * @example {{{ + * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b) + * // Array(a, c) + * // Array(b, b) + * // Array(b, c) + * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(b, b) + * // Array(b, a) + * }}} + */ + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this array. 
+ * @example {{{ + * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b, b) + * // Array(b, a, b) + * // Array(b, b, a) + * }}} + */ + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array contains the given sequence at a given index. + * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array ends with the given sequence. + * + * @param that the sequence to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) +} diff --git a/scala2-library-bootstrapped/src/scala/collection/Factory.scala b/scala2-library-bootstrapped/src/scala/collection/Factory.scala new file mode 100644 index 000000000000..6006f292bb19 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/Factory.scala @@ -0,0 +1,784 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.collection.immutable.NumericRange +import scala.language.implicitConversions +import scala.collection.mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.reflect.ClassTag + +/** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * This is a general form of any factory ([[IterableFactory]], + * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose + * element type is fixed. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait Factory[-A, +C] extends Any { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ + def newBuilder: Builder[A, C] +} + +object Factory { + + implicit val stringFactory: Factory[Char, String] = new StringFactory + @SerialVersionUID(3L) + private class StringFactory extends Factory[Char, String] with Serializable { + def fromSpecific(it: IterableOnce[Char]): String = { + val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[Char, String] = new mutable.StringBuilder() + } + + implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + @SerialVersionUID(3L) + private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = { + val b = newBuilder + b.sizeHint(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A] + } + +} + +/** Base trait for companion objects of unconstrained collection types that may require + * multiple traversals of a source collection to build a target collection `CC`. + * + * @tparam CC Collection type constructor (e.g. `List`) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait IterableFactory[+CC[_]] extends Serializable { + + /** Creates a target $coll from an existing source collection + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new $coll with the elements of `source` + */ + def from[A](source: IterableOnce[A]): CC[A] + + /** An empty collection + * @tparam A the type of the ${coll}'s elements + */ + def empty[A]: CC[A] + + /** Creates a $coll with the specified elements. + * @tparam A the type of the ${coll}'s elements + * @param elems the elements of the created $coll + * @return a new $coll with elements `elems` + */ + def apply[A](elems: A*): CC[A] = from(elems) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. 
+ * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** + * @return A builder for $Coll objects. + * @tparam A the type of the ${coll}’s elements + */ + def newBuilder[A]: Builder[A, CC[A]] + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. 
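+   *
+   *  For example (illustrative; `Vector` stands in for the concrete $coll):
+   *  {{{
+   *  Vector.tabulate(5)(i => i * i)   // Vector(0, 1, 4, 9, 16)
+   *  }}}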
+ * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. 
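+   *  @example A sketch with `List` standing in for the target $coll:
+   *           {{{
+   *           List.concat(List(1, 2), Vector(3), Seq(4))   // List(1, 2, 3, 4)
+   *           }}}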
+ */ + def concat[A](xss: Iterable[A]*): CC[A] = { + from(xss.foldLeft(View.empty[A])(_ ++ _)) + } + + implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) +} + +object IterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. `Seq`, `List`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = + new BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder + } + + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `List`) + */ +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? +} + +object SeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } + + final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[A] = this + def lengthCompare(len: Int): Int = c.lengthCompare(len) + def apply(i: Int): A = c(i) + def drop(n: Int): scala.Seq[A] = c match { + case seq: scala.Seq[A] => seq.drop(n) + case _ => c.view.drop(n).toSeq + } + def toSeq: scala.Seq[A] = c.toSeq + } +} + +trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { + + override def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + override def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + val knownSizes = xss.view.map(_.knownSize) + if (knownSizes forall (_ >= 0)) { + b.sizeHint(knownSizes.sum) + } + for (xs <- xss) b ++= xs + b.result() + } + +} + +/** + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) 
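+ *
+ * @note As an illustration, `immutable.BitSet`'s companion object is a
+ *       `SpecificIterableFactory[Int, immutable.BitSet]`, so one can write:
+ *       {{{
+ *       immutable.BitSet.fromSpecific(List(1, 2, 3))   // BitSet(1, 2, 3)
+ *       }}}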
+ * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + def empty: C + def apply(xs: A*): C = fromSpecific(xs) + def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) + def newBuilder: Builder[A, C] + + implicit def specificIterableFactory: Factory[A, C] = this +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait MapFactory[+CC[_, _]] extends Serializable { + + /** + * An empty Map + */ + def empty[K, V]: CC[K, V] + + /** + * A collection of type Map generated from given iterable object. + */ + def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + + /** + * A collection of type Map that contains given key/value bindings. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) + + /** + * The default builder for Map objects. + */ + def newBuilder[K, V]: Builder[(K, V), CC[K, V]] + + /** + * The default Factory instance for maps. + */ + implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) +} + +object MapFactory { + + /** + * Fixes the key and value types of `factory` to `K` and `V`, respectively + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` + * and values of type `V` + */ + implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = + new BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { + override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) + def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def empty[K, V]: C[K, V] = delegate.empty + def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder + } +} + +/** Base trait for companion objects of collections that require an implicit evidence. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + * @tparam Ev Unary type constructor for the implicit evidence required for an element type + * (typically `Ordering` or `ClassTag`) + * + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { + + def from[E : Ev](it: IterableOnce[E]): CC[E] + + def empty[A : Ev]: CC[A] + + def apply[A : Ev](xs: A*): CC[A] = from(xs) + + /** Produces a $coll containing the results of some element computation a number of times. 
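+   *
+   *  For instance (illustrative; `immutable.ArraySeq` uses `ClassTag` as its evidence `Ev`):
+   *  {{{
+   *  immutable.ArraySeq.fill(3)("x")   // ArraySeq(x, x, x)
+   *  }}}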
+ * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + def newBuilder[A : Ev]: Builder[A, CC[A]] + + implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this) +} + +object EvidenceIterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`TreeSet`) + * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) + private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] + } + + @SerialVersionUID(3L) + class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { + override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) + def empty[A : Ev]: CC[A] = delegate.empty + def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** Base trait for companion objects of collections that require an implicit `Ordering`. + * @tparam CC Collection type constructor (e.g. `SortedSet`) + */ +trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering] + +object SortedIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering]) + extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC] +} + +/** Base trait for companion objects of collections that require an implicit `ClassTag`. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] { + + @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] = + ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. 
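+   *
+   *  Illustrative sketch (with `immutable.ArraySeq`, whose factory is `ClassTag`-based):
+   *  {{{
+   *  immutable.ArraySeq.fill(2, 3)(0)   // ArraySeq(ArraySeq(0, 0, 0), ArraySeq(0, 0, 0))
+   *  }}}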
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
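+   *
+   *  A minimal illustration (with `immutable.ArraySeq`; most dimensions kept at size 1 for brevity):
+   *  {{{
+   *  immutable.ArraySeq.tabulate(2, 1, 1, 2)((i1, i2, i3, i4) => i1 * 10 + i4)
+   *  // ArraySeq(ArraySeq(ArraySeq(ArraySeq(0, 1))), ArraySeq(ArraySeq(ArraySeq(10, 11))))
+   *  }}}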
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) +} + +object ClassTagIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) + extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] + + /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { + def empty[A]: CC[A] = delegate.empty(using ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(using ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(using ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(using ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(using ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(using ClassTag.Any).asInstanceOf[CC[A]] + } +} + +/** + * @tparam CC Collection type constructor (e.g. 
`ArraySeq`) + */ +trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? +} + +object ClassTagSeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] + + /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] +} + +trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { + + override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SortedMapFactory[+CC[_, _]] extends Serializable { + + def empty[K : Ordering, V]: CC[K, V] + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + + def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) + + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] + + implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) + +} + +object SortedMapFactory { + + /** + * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, + * respectively. + * + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. 
`TreeMap`) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of + * type `K` and values of type `V` + */ + implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) + private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { + override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def empty[K : Ordering, V]: CC[K, V] = delegate.empty + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/Iterable.scala b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala new file mode 100644 index 000000000000..8f9142583b29 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala @@ -0,0 +1,1043 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder +import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} + +/** Base trait for generic collections. + * + * @tparam A the element type of the collection + * + * @define Coll `Iterable` + * @define coll iterable collection + */ +trait Iterable[+A] extends IterableOnce[A] + with IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { + + // The collection itself + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + final def toIterable: this.type = this + + final protected def coll: this.type = this + + def iterableFactory: IterableFactory[Iterable] = Iterable + + @deprecated("Iterable.seq always returns the iterable itself", "2.13.0") + def seq: this.type = this + + /** Defines the prefix of this object's `toString` representation. + * + * It is recommended to return the name of the concrete collection type, but + * not implementation subclasses. For example, for `ListMap` this method should + * return `"ListMap"`, not `"Map"` (the supertype) or `"Node"` (an implementation + * subclass). + * + * The default implementation returns "Iterable". 
It is overridden for the basic + * collection kinds "Seq", "IndexedSeq", "LinearSeq", "Buffer", "Set", "Map", + * "SortedSet", "SortedMap" and "View". + * + * @return a string representation which starts the result of `toString` + * applied to this $coll. By default the string prefix is the + * simple name of the collection class $coll. + */ + protected[this] def className: String = stringPrefix + + /** Forwarder to `className` for use in `scala.runtime.ScalaRunTime`. + * + * This allows the proper visibility for `className` to be + * published, but provides the exclusive access needed by + * `scala.runtime.ScalaRunTime.stringOf` (and a few tests in + * the test suite). + */ + private[scala] final def collectionClassName: String = className + + @deprecatedOverriding("Override className instead", "2.13.0") + protected[this] def stringPrefix: String = "Iterable" + + /** Converts this $coll to a string. + * + * @return a string representation of this collection. By default this + * string consists of the `className` of this $coll, followed + * by all elements separated by commas and enclosed in parentheses. + */ + override def toString = mkString(className + "(", ", ", ")") + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) +} + +/** Base trait for Iterable operations + * + * =VarianceNote= + * + * We require that for all child classes of Iterable the variance of + * the child class and the variance of the `C` parameter passed to `IterableOps` + * are the same. We cannot express this since we lack variance polymorphism. That's + * why we have to resort at some places to write `C[A @uncheckedVariance]`. + * + * @tparam CC type constructor of the collection (e.g. `List`, `Set`). Operations returning a collection + * with a different type of element `B` (e.g. `map`) return a `CC[B]`. + * @tparam C type of the collection (e.g. `List[Int]`, `String`, `BitSet`). Operations returning a collection + * with the same type of element (e.g. `drop`, `filter`) return a `C`. + * + * @define Coll Iterable + * @define coll iterable collection + * @define orderDependent + * + * Note: might return different results for different runs, unless the underlying collection type is ordered. + * @define orderDependentFold + * + * Note: might return different results for different runs, unless the + * underlying collection type is ordered or the operator is associative + * and commutative. + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. 
+ * @define undefinedorder + * The order in which operations are performed on elements is unspecified + * and may be nondeterministic. + */ +trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { + /** + * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. + */ + // Should be `protected def asIterable`, or maybe removed altogether if it's not needed + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + def toIterable: Iterable[A] + + /** Converts this $coll to an unspecified Iterable. Will return + * the same collection if this instance is already Iterable. + * @return An Iterable containing all elements of this $coll. + */ + @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") + final def toTraversable: Traversable[A] = toIterable + + override def isTraversableAgain: Boolean = true + + /** + * @return This collection as a `C`. + */ + protected def coll: C + + @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") + final def repr: C = coll + + /** + * Defines how to turn a given `Iterable[A]` into a collection of type `C`. + * + * This process can be done in a strict way or a non-strict way (ie. without evaluating + * the elements of the resulting collections). In other words, this methods defines + * the evaluation model of the collection. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method + * might be unsound. However, as long as it is called with an + * `Iterable[A]` obtained from `this` collection (as it is the case in the + * implementations of operations where we use a `View[A]`), it is safe. + */ + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): C + + /** The companion object of this ${coll}, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def iterableFactory: IterableFactory[CC] + + @deprecated("Use iterableFactory instead", "2.13.0") + @deprecatedOverriding("Use iterableFactory instead", "2.13.0") + @`inline` def companion: IterableFactory[CC] = iterableFactory + + /** + * @return a strict builder for the same collection type. + * + * Note that in the case of lazy collections (e.g. [[scala.collection.View]] or [[scala.collection.immutable.LazyList]]), + * it is possible to implement this method but the resulting `Builder` will break laziness. + * As a consequence, operations should preferably be implemented with `fromSpecific` + * instead of this method. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). 
In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method might + * be unsound. However, as long as the returned builder is only fed + * with `A` values taken from `this` instance, it is safe. + */ + protected def newSpecificBuilder: Builder[A @uncheckedVariance, C] + + /** The empty iterable of the same type as this iterable + * + * @return an empty iterable of type `C`. + */ + def empty: C = fromSpecific(Nil) + + /** Selects the first element of this $coll. + * $orderDependent + * @return the first element of this $coll. + * @throws NoSuchElementException if the $coll is empty. + */ + def head: A = iterator.next() + + /** Optionally selects the first element. + * $orderDependent + * @return the first element of this $coll if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = { + val it = iterator + if (it.hasNext) Some(it.next()) else None + } + + /** Selects the last element. + * $orderDependent + * @return The last element of this $coll. + * @throws NoSuchElementException If the $coll is empty. + */ + def last: A = { + val it = iterator + var lst = it.next() + while (it.hasNext) lst = it.next() + lst + } + + /** Optionally selects the last element. + * $orderDependent + * @return the last element of this $coll$ if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if (isEmpty) None else Some(last) + + /** A view over the elements of this collection. */ + def view: View[A] = View.fromIteratorProvider(() => iterator) + + /** Compares the size of this $coll to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(size min otherSize)` instead of `O(size)`. The method should be overridden + * if computing `size` is cheap and `knownSize` returns `-1`. + * + * @see [[sizeIs]] + */ + def sizeCompare(otherSize: Int): Int = { + if (otherSize < 0) 1 + else { + val known = knownSize + if (known >= 0) Integer.compare(known, otherSize) + else { + var i = 0 + val it = iterator + while (it.hasNext) { + if (i == otherSize) return 1 + it.next() + i += 1 + } + i - otherSize + } + } + } + + /** Returns a value class containing operations for comparing the size of this $coll to a test value. + * + * These operations are implemented in terms of [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + @inline final def sizeIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) + + /** Compares the size of this $coll to the size of another `Iterable`. + * + * @param that the `Iterable` whose size is compared with this $coll's size. 
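+   *  @example Illustrative comparisons (assuming the standard strict and lazy collections):
+   *           {{{
+   *           List(1, 2, 3).sizeCompare(Vector(1, 2)) > 0           // true
+   *           LazyList.continually(0).sizeCompare(List(1, 2)) > 0   // true, without forcing the whole LazyList
+   *           }}}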
+   *  @return A value `x` where
+   *          {{{
+   *          x <  0       if this.size <  that.size
+   *          x == 0       if this.size == that.size
+   *          x >  0       if this.size >  that.size
+   *          }}}
+   *
+   *  The method as implemented here does not call `size` directly; its running time
+   *  is `O(this.size min that.size)` instead of `O(this.size + that.size)`.
+   *  The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`.
+   */
+  def sizeCompare(that: Iterable[_]): Int = {
+    val thatKnownSize = that.knownSize
+
+    if (thatKnownSize >= 0) this sizeCompare thatKnownSize
+    else {
+      val thisKnownSize = this.knownSize
+
+      if (thisKnownSize >= 0) {
+        val res = that sizeCompare thisKnownSize
+        // can't just invert the result, because `-Int.MinValue == Int.MinValue`
+        if (res == Int.MinValue) 1 else -res
+      } else {
+        val thisIt = this.iterator
+        val thatIt = that.iterator
+        while (thisIt.hasNext && thatIt.hasNext) {
+          thisIt.next()
+          thatIt.next()
+        }
+        java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext)
+      }
+    }
+  }
+
+  /** A view over a slice of the elements of this collection. */
+  @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0")
+  def view(from: Int, until: Int): View[A] = view.slice(from, until)
+
+  /** Transposes this $coll of iterable collections into
+   *  a $coll of ${coll}s.
+   *
+   *  The resulting collection's type will be guided by the
+   *  static type of $coll. For example:
+   *
+   *  {{{
+   *  val xs = List(
+   *             Set(1, 2, 3),
+   *             Set(4, 5, 6)).transpose
+   *  // xs == List(
+   *  //         List(1, 4),
+   *  //         List(2, 5),
+   *  //         List(3, 6))
+   *
+   *  val ys = Vector(
+   *             List(1, 2, 3),
+   *             List(4, 5, 6)).transpose
+   *  // ys == Vector(
+   *  //         Vector(1, 4),
+   *  //         Vector(2, 5),
+   *  //         Vector(3, 6))
+   *  }}}
+   *
+   *  $willForceEvaluation
+   *
+   *  @tparam B the type of the elements of each iterable collection.
+   *  @param asIterable an implicit conversion which asserts that the
+   *                    element type of this $coll is an `Iterable`.
+   *  @return a two-dimensional $coll of ${coll}s which has as ''n''th row
+   *          the ''n''th column of this $coll.
+   *  @throws IllegalArgumentException if all collections in this $coll
+   *                                   are not of the same size.
+   */
+  def transpose[B](implicit asIterable: A => /*<:<!!!*/ Iterable[B]): CC[CC[B] @uncheckedVariance] = {
+    if (isEmpty)
+      return iterableFactory.empty[CC[B]]
+
+    def fail = throw new IllegalArgumentException("transpose requires all collections have the same size")
+
+    val headSize = asIterable(head).size
+    val bs: immutable.IndexedSeq[Builder[B, CC[B]]] = immutable.IndexedSeq.fill(headSize)(iterableFactory.newBuilder[B])
+    for (xs <- this) {
+      var i = 0
+      for (x <- asIterable(xs)) {
+        if (i >= headSize) fail
+        bs(i) += x
+        i += 1
+      }
+      if (i != headSize)
+        fail
+    }
+    iterableFactory.from(bs.map(_.result()))
+  }
+
+  def filter(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = false))
+
+  def filterNot(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = true))
+
+  /** Creates a non-strict filter of this $coll.
+   *
+   *  Note: the difference between `c filter p` and `c withFilter p` is that
+   *  the former creates a new collection, whereas the latter only
+   *  restricts the domain of subsequent `map`, `flatMap`, `foreach`,
+   *  and `withFilter` operations.
+   *  $orderDependent
+   *
+   *  @param p the predicate used to test elements.
+   *  @return an object of class `WithFilter`, which supports
+   *          `map`, `flatMap`, `foreach`, and `withFilter` operations.
+   *          All these operations apply to those elements of this $coll
+   *          which satisfy the predicate `p`.
+   */
+  def withFilter(p: A => Boolean): collection.WithFilter[A, CC] = new IterableOps.WithFilter(this, p)
+
+  /** A pair of, first, all elements that satisfy predicate `p` and, second,
+   *  all elements that do not. Interesting because it splits a collection in two.
+   *
+   *  The default implementation provided here needs to traverse the collection twice.
+ * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, + * which requires only a single traversal. + */ + def partition(p: A => Boolean): (C, C) = { + val first = new View.Filter(this, p, false) + val second = new View.Filter(this, p, true) + (fromSpecific(first), fromSpecific(second)) + } + + override def splitAt(n: Int): (C, C) = (take(n), drop(n)) + + def take(n: Int): C = fromSpecific(new View.Take(this, n)) + + /** Selects the last ''n'' elements. + * $orderDependent + * @param n the number of elements to take from this $coll. + * @return a $coll consisting only of the last `n` elements of this $coll, + * or else the whole $coll, if it has less than `n` elements. + * If `n` is negative, returns an empty $coll. + */ + def takeRight(n: Int): C = fromSpecific(new View.TakeRight(this, n)) + + /** Takes longest prefix of elements that satisfy a predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest prefix of this $coll whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): C = fromSpecific(new View.TakeWhile(this, p)) + + def span(p: A => Boolean): (C, C) = (takeWhile(p), dropWhile(p)) + + def drop(n: Int): C = fromSpecific(new View.Drop(this, n)) + + /** Selects all elements except last ''n'' ones. + * $orderDependent + * @param n the number of elements to drop from this $coll. + * @return a $coll consisting of all elements of this $coll except the last `n` ones, or else the + * empty $coll, if this $coll has less than `n` elements. + * If `n` is negative, don't drop any elements. + */ + def dropRight(n: Int): C = fromSpecific(new View.DropRight(this, n)) + + def dropWhile(p: A => Boolean): C = fromSpecific(new View.DropWhile(this, p)) + + /** Partitions elements in fixed size ${coll}s. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[C] = + iterator.grouped(size).map(fromSpecific) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in `grouped`.) + * + * An empty collection returns an empty iterator, and a non-empty + * collection containing fewer elements than the window size returns + * an iterator that will produce the original collection as its only + * element. + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except for a + * non-empty collection with less than `size` elements, which + * returns an iterator that produces the source collection itself + * as its only element. + * @example `List().sliding(2) = empty iterator` + * @example `List(1).sliding(2) = Iterator(List(1))` + * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` + * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` + */ + def sliding(size: Int): Iterator[C] = sliding(size, 1) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * + * The returned iterator will be empty when called on an empty collection. 
+ * The last element the iterator produces may be smaller than the window + * size when the original collection isn't exhausted by the window before + * it and its last element isn't skipped by the step before it. + * + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive + * groups + * @return An iterator producing ${coll}s of size `size`, except the last + * element (which may be the only element) will be smaller + * if there are fewer than `size` elements remaining to be grouped. + * @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + */ + def sliding(size: Int, step: Int): Iterator[C] = + iterator.sliding(size, step).map(fromSpecific) + + /** The rest of the collection without its first element. */ + def tail: C = { + if (isEmpty) throw new UnsupportedOperationException + drop(1) + } + + /** The initial part of the collection without its last element. + * $willForceEvaluation + */ + def init: C = { + if (isEmpty) throw new UnsupportedOperationException + dropRight(1) + } + + def slice(from: Int, until: Int): C = + fromSpecific(new View.Drop(new View.Take(this, until), from)) + + /** Partitions this $coll into a map of ${coll}s according to some discriminator function. + * + * $willForceEvaluation + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to ${coll}s such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to a $coll of those elements `x` + * for which `f(x)` equals `k`. + * + */ + def groupBy[K](f: A => K): immutable.Map[K, C] = { + val m = mutable.Map.empty[K, Builder[A, C]] + val it = iterator + while (it.hasNext) { + val elem = it.next() + val key = f(elem) + val bldr = m.getOrElseUpdate(key, newSpecificBuilder) + bldr += elem + } + var result = immutable.HashMap.empty[K, C] + val mapIt = m.iterator + while (mapIt.hasNext) { + val (k, v) = mapIt.next() + result = result.updated(k, v.result()) + } + result + } + + /** + * Partitions this $coll into a map of ${coll}s according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. 
+ * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Seq[User]): Map[Int, Seq[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * $willForceEvaluation + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B](key: A => K)(f: A => B): immutable.Map[K, CC[B]] = { + val m = mutable.Map.empty[K, Builder[B, CC[B]]] + for (elem <- this) { + val k = key(elem) + val bldr = m.getOrElseUpdate(k, iterableFactory.newBuilder[B]) + bldr += f(elem) + } + class Result extends runtime.AbstractFunction1[(K, Builder[B, CC[B]]), Unit] { + var built = immutable.Map.empty[K, CC[B]] + def apply(kv: (K, Builder[B, CC[B]])) = + built = built.updated(kv._1, kv._2.result()) + } + val result = new Result + m.foreach(result) + result.built + } + + /** + * Partitions this $coll into a map according to a discriminator function `key`. All the values that + * have the same discriminator are then transformed by the `f` function and then reduced into a + * single value with the `reduce` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f).reduce(reduce))`, but more efficient. + * + * {{{ + * def occurrences[A](as: Seq[A]): Map[A, Int] = + * as.groupMapReduce(identity)(_ => 1)(_ + _) + * }}} + * + * $willForceEvaluation + */ + def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): immutable.Map[K, B] = { + val m = mutable.Map.empty[K, B] + for (elem <- this) { + val k = key(elem) + val v = + m.get(k) match { + case Some(b) => reduce(b, f(elem)) + case None => f(elem) + } + m.put(k, v) + } + m.to(immutable.Map) + } + + /** Computes a prefix scan of the elements of the collection. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting collection + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new $coll containing the prefix scan of the elements in this $coll + */ + def scan[B >: A](z: B)(op: (B, B) => B): CC[B] = scanLeft(z)(op) + + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = iterableFactory.from(new View.ScanLeft(this, z, op)) + + /** Produces a collection containing cumulative results of applying the operator going right to left. + * The head of the collection is the last cumulative result. 
+ * $willNotTerminateInf + * $orderDependent + * $willForceEvaluation + * + * Example: + * {{{ + * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0) + * }}} + * + * @tparam B the type of the elements in the resulting collection + * @param z the initial value + * @param op the binary operator applied to the intermediate result and the element + * @return collection with intermediate results + */ + def scanRight[B](z: B)(op: (A, B) => B): CC[B] = { + class Scanner extends runtime.AbstractFunction1[A, Unit] { + var acc = z + var scanned = acc :: immutable.Nil + def apply(x: A) = { + acc = op(x, acc) + scanned ::= acc + } + } + val scanner = new Scanner + reversed.foreach(scanner) + iterableFactory.from(scanner.scanned) + } + + def map[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(this, f)) + + def flatMap[B](f: A => IterableOnce[B]): CC[B] = iterableFactory.from(new View.FlatMap(this, f)) + + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] = flatMap(asIterable) + + def collect[B](pf: PartialFunction[A, B]): CC[B] = + iterableFactory.from(new View.Collect(this, pf)) + + /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. + * + * Example: + * {{{ + * val xs = $Coll(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] + * + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. + */ + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { + val left: View[A1] = new LeftPartitionMapped(this, f) + val right: View[A2] = new RightPartitionMapped(this, f) + (iterableFactory.from(left), iterableFactory.from(right)) + } + + /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the + * right hand operand. The element type of the $coll is the most specific superclass encompassing + * the element types of the two operands. + * + * @param suffix the iterable to append. + * @tparam B the element type of the returned collection. + * @return a new $coll which contains all elements + * of this $coll followed by all elements of `suffix`. + */ + def concat[B >: A](suffix: IterableOnce[B]): CC[B] = iterableFactory.from(suffix match { + case xs: Iterable[B] => new View.Concat(this, xs) + case xs => iterator ++ suffix.iterator + }) + + /** Alias for `concat` */ + @`inline` final def ++ [B >: A](suffix: IterableOnce[B]): CC[B] = concat(suffix) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. 
+ * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = iterableFactory.from(that match { // sound bcs of VarianceNote + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + def zipWithIndex: CC[(A @uncheckedVariance, Int)] = iterableFactory.from(new View.ZipWithIndex(this)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. + * @return a new collection of type `That` containing pairs consisting of + * corresponding elements of this $coll and `that`. The length + * of the returned collection is the maximum of the lengths of this $coll and `that`. + * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. + */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): CC[(A1, B)] = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. + * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val first: View[A1] = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asPair(_)._2) + (iterableFactory.from(first), iterableFactory.from(second)) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. + * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll. 
+ */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val first: View[A1] = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3] = new View.Map[A, A3](this, asTriple(_)._3) + (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) + } + + /** Iterates over the tails of this $coll. The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this $coll + * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` + */ + def tails: Iterator[C] = iterateUntilEmpty(_.tail) + + /** Iterates over the inits of this $coll. The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `init`. + * + * $willForceEvaluation + * + * @return an iterator over all the inits of this $coll + * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` + */ + def inits: Iterator[C] = iterateUntilEmpty(_.init) + + override def tapEach[U](f: A => U): C = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { + // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` + // `this.tail.tail` doesn't compile as `C` is unbounded + // `Iterable.from(this)` would eagerly copy non-immutable collections + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) + (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) + } + + @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") + def ++:[B >: A](that: IterableOnce[B]): CC[B] = iterableFactory.from(that match { + case xs: Iterable[B] => new View.Concat(xs, this) + case _ => that.iterator ++ iterator + }) +} + +object IterableOps { + + /** Operations for comparing the size of a collection to a test value. + * + * These operations are implemented in terms of + * [[scala.collection.IterableOps.sizeCompare(Int) `sizeCompare(Int)`]]. + */ + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]) extends AnyVal { + /** Tests if the size of the collection is less than some value. */ + @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 + /** Tests if the size of the collection is less than or equal to some value. */ + @inline def <=(size: Int): Boolean = it.sizeCompare(size) <= 0 + /** Tests if the size of the collection is equal to some value. */ + @inline def ==(size: Int): Boolean = it.sizeCompare(size) == 0 + /** Tests if the size of the collection is not equal to some value. */ + @inline def !=(size: Int): Boolean = it.sizeCompare(size) != 0 + /** Tests if the size of the collection is greater than or equal to some value. */ + @inline def >=(size: Int): Boolean = it.sizeCompare(size) >= 0 + /** Tests if the size of the collection is greater than some value. */ + @inline def >(size: Int): Boolean = it.sizeCompare(size) > 0 + } + + /** A trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. 
`List`) + * + * @define coll collection + */ + @SerialVersionUID(3L) + class WithFilter[+A, +CC[_]]( + self: IterableOps[A, CC, _], + p: A => Boolean + ) extends collection.WithFilter[A, CC] with Serializable { + + protected def filtered: Iterable[A] = + new View.Filter(self, p, isFlipped = false) + + def map[B](f: A => B): CC[B] = + self.iterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B](f: A => IterableOnce[B]): CC[B] = + self.iterableFactory.from(new View.FlatMap(filtered, f)) + + def foreach[U](f: A => U): Unit = filtered.foreach(f) + + def withFilter(q: A => Boolean): WithFilter[A, CC] = + new WithFilter(self, (a: A) => p(a) && q(a)) + + } + +} + +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](immutable.Iterable) { + + def single[A](a: A): Iterable[A] = new AbstractIterable[A] { + override def iterator = Iterator.single(a) + override def knownSize = 1 + override def head = a + override def headOption: Some[A] = Some(a) + override def last = a + override def lastOption: Some[A] = Some(a) + override def view: View.Single[A] = new View.Single(a) + override def take(n: Int) = if (n > 0) this else Iterable.empty + override def takeRight(n: Int) = if (n > 0) this else Iterable.empty + override def drop(n: Int) = if (n > 0) Iterable.empty else this + override def dropRight(n: Int) = if (n > 0) Iterable.empty else this + override def tail: Iterable[Nothing] = Iterable.empty + override def init: Iterable[Nothing] = Iterable.empty + } +} + +/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ +abstract class AbstractIterable[+A] extends Iterable[A] + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) + protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] + + // overridden for efficiency, since we know CC[A] =:= C + override def empty: CC[A @uncheckedVariance] = iterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for collections that have an additional constraint, + * expressed by the `evidenceIterableFactory` method. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. 
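+ *
+ * As an illustration, a hypothetical collection whose factory requires a `ClassTag` evidence
+ * could wire up this trait as follows (members unrelated to the factory methods are omitted):
+ * {{{
+ *   class Packed[A](implicit val tag: ClassTag[A])
+ *     extends Iterable[A]
+ *       with IterableOps[A, Packed, Packed[A]]
+ *       with EvidenceIterableFactoryDefaults[A, Packed, ClassTag] {
+ *     protected def evidenceIterableFactory: EvidenceIterableFactory[Packed, ClassTag] = Packed
+ *     implicit protected def iterableEvidence: ClassTag[A] = tag
+ *     // ...
+ *   }
+ * }}}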
+ */ +trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] + implicit protected def iterableEvidence: Ev[A @uncheckedVariance] + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = evidenceIterableFactory.from(coll) + override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] + override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for sorted sets. + * + * Note that in sorted sets, the `CC` type of the set is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Set` in [[SortedSetOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait SortedSetFactoryDefaults[+A, + +CC[X] <: SortedSet[X] with SortedSetOps[X, CC, CC[X]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { + self: IterableOps[A, WithFilterCC, _] => + + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = sortedIterableFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](using ordering) + override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(using ordering) + + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC] = + new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) +} + + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for maps. + * + * Note that in maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Map` in [[MapOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. 
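+ *
+ * For reference, `scala.collection.Map` itself wires this trait up roughly as follows:
+ * {{{
+ *   trait Map[K, +V]
+ *     extends Iterable[(K, V)]
+ *       with MapOps[K, V, Map, Map[K, V]]
+ *       with MapFactoryDefaults[K, V, Map, Iterable]
+ * }}}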
+ */ +trait MapFactoryDefaults[K, +V, + +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = mapFactory.from(coll) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] + override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { + // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) + case self: immutable.TreeSeqMap[_, _] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] + case _ => mapFactory.empty + } + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC] = + new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for sorted maps. + * + * Note that in sorted maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying map (which is fixed to `Map` in [[SortedMapOps]]). This trait has therefore + * three type parameters `CC`, `WithFilterCC` and `UnsortedCC`. The `withFilter` method inherited + * from `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait SortedMapFactoryDefaults[K, +V, + +CC[x, y] <: Map[x, y] with SortedMapOps[x, y, CC, CC[x, y]] with UnsortedCC[x, y], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x], + +UnsortedCC[x, y] <: Map[x, y]] extends SortedMapOps[K, V, CC, CC[K, V @uncheckedVariance]] with MapOps[K, V, UnsortedCC, CC[K, V @uncheckedVariance]] { + self: IterableOps[(K, V), WithFilterCC, _] => + + override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(using ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = sortedMapFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](using ordering) + + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC] = + new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) +} diff --git a/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala b/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala new file mode 100644 index 000000000000..5beb811ed0b2 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala @@ -0,0 +1,220 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.{implicitNotFound, nowarn} + +/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ +trait SortedMap[K, +V] + extends Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ + + def unsorted: Map[K, V] = this + + def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedMap" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => + (sm canEqual this) && + (this.size == sm.size) && { + val i1 = this.iterator + val i2 = sm.iterator + var allEqual = true + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } + allEqual + } + case _ => super.equals(that) + } +} + +trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] + with SortedOps[K, C] { + + /** The companion object of this sorted map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedMapFactory: SortedMapFactory[CC] + + /** Similar to `mapFromIterable`, but returns a SortedMap collection type. + * Note that the return type is now `CC[K2, V2]`. + */ + @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) + + def unsorted: Map[K, V] + + /** + * Creates an iterator over all the key/value pairs + * contained in this map having a key greater than or + * equal to `start` according to the ordering of + * this map. x.iteratorFrom(y) is equivalent + * to but often more efficient than x.from(y).iterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def iteratorFrom(start: K): Iterator[(K, V)] + + /** + * Creates an iterator over all the keys(or elements) contained in this + * collection greater than or equal to `start` + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than + * x.from(y).keysIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def keysIteratorFrom(start: K): Iterator[K] + + /** + * Creates an iterator over all the values contained in this + * map that are associated with a key greater than or equal to `start` + * according to the ordering of this map. x.valuesIteratorFrom(y) is + * equivalent to but often more efficient than + * x.from(y).valuesIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) + + def firstKey: K = head._1 + def lastKey: K = last._1 + + /** Find the element with smallest key larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. 
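+    *
+    *  For example:
+    *  {{{
+    *    val m = SortedMap(1 -> "a", 3 -> "c")
+    *    m.minAfter(2)   // Some((3, "c"))
+    *    m.minAfter(4)   // None
+    *  }}}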
+ */ + def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption + + /** Find the element with largest key less than a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption + + def rangeTo(to: K): C = { + val i = keySet.rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + override def keySet: SortedSet[K] = new KeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { + def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = SortedMapOps.this.rangeImpl(from, until) + new map.KeySortedSet + } + } + + /** A generic trait that is reused by sorted keyset implementations */ + protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => + implicit def ordering: Ordering[K] = SortedMapOps.this.ordering + def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) + } + + // And finally, we add new overloads taking an ordering + /** Builds a new sorted map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new sorted map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
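+    *
+    * For example:
+    * {{{
+    *   SortedMap("a" -> 1, "bb" -> 2).collect { case (k, v) if v > 1 => (v, k) }
+    *   // contains 2 -> "bb", ordered by the implicit `Ordering[Int]` on the new keys
+    * }}}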
+ */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + })(using ordering) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(using ordering) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(using ordering) +} + +object SortedMapOps { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + /** Specializes `MapWithFilter` for sorted Map collections + * + * @define coll sorted map collection + */ + class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( + self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { + + def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.sortedMapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + self.sortedMapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] = + new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala b/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala new file mode 100644 index 000000000000..ad5d67a64635 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound + +/** + * Trait that overrides sorted map operations to take advantage of strict builders. 
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedMap(sortedMapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) + + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(using ordering)) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedCollect(sortedMapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] + if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala b/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala new file mode 100644 index 000000000000..e794044a1af9 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala @@ -0,0 +1,87 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.generic + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.collection.{Factory, Iterable} +import scala.collection.mutable.Builder + +/** The default serialization proxy for collection implementations. + * + * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` + * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed + * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any + * additional state required to create the proper `Builder` needs to be captured by the `factory`. 
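+ *
+ * Collection classes usually do not instantiate this proxy directly: they mix in the
+ * `DefaultSerializable` trait below, which picks a suitable factory and returns this proxy
+ * from `writeReplace`. A custom collection could follow the same pattern, for example:
+ * {{{
+ *   class MyColl[A](elems: List[A]) extends Iterable[A] with DefaultSerializable {
+ *     def iterator: Iterator[A] = elems.iterator
+ *   }
+ * }}}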
+ */ +@SerialVersionUID(3L) +final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { + + @transient protected var builder: Builder[A, Any] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val k = coll.knownSize + out.writeInt(k) + var count = 0 + coll.foreach { x => + out.writeObject(x) + count += 1 + } + if(k >= 0) { + if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") + } else out.writeObject(SerializeEnd) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + builder = factory.newBuilder + val k = in.readInt() + if(k >= 0) { + builder.sizeHint(k) + var count = 0 + while(count < k) { + builder += in.readObject().asInstanceOf[A] + count += 1 + } + } else { + while (true) in.readObject match { + case SerializeEnd => return + case a => builder += a.asInstanceOf[A] + } + } + } + + protected[this] def readResolve(): Any = builder.result() +} + +@SerialVersionUID(3L) +private[collection] case object SerializeEnd + +/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type + * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or + * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement + * it directly without using this trait if you need a non-standard factory or if you want to use a different + * serialization scheme. + */ +trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => + protected[this] def writeReplace(): AnyRef = { + val f: Factory[Any, Any] = this match { + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](using it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](using it.ordering.asInstanceOf[Ordering[Any]]) + case it => it.iterableFactory.iterableFactory + } + new DefaultSerializationProxy(f, this) + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala b/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala new file mode 100644 index 000000000000..ebefa4c3c17a --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala @@ -0,0 +1,354 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable +import java.util.Arrays +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 + +/** + * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same + * underlying `Array`, therefore it is not growable or shrinkable. + * + * @tparam T type of the elements in this wrapped array. 
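+ *
+ *  For example:
+ *  {{{
+ *    val xs  = Array(1, 2, 3)
+ *    val seq = ArraySeq.make(xs)   // wraps xs without copying
+ *    seq(0) = 42                   // writes through to xs(0)
+ *  }}}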
+ * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed abstract class ArraySeq[T] + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] + with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] + with Serializable { + + override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged + + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + val b = ArrayBuilder.make(using elemTag).asInstanceOf[ArrayBuilder[T]] + val s = coll.knownSize + if(s > 0) b.sizeHint(s) + b ++= coll + ArraySeq.make(b.result()) + } + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(using elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(using elemTag.asInstanceOf[ClassTag[T]]) + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def elemTag: ClassTag[_] + + /** Update element at given index */ + def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit + + /** The underlying array. Its element type does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def array: Array[_] + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit + + override protected[this] def className = "ArraySeq" + + /** Clones this object, including the underlying Array. */ + override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + + override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + override def equals(other: Any): Boolean = other match { + case that: ArraySeq[_] if this.array.length != that.array.length => + false + case _ => + super.equals(other) + } + + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = + ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + + override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + this + } +} + +/** A companion object used to create instances of `ArraySeq`. + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + // This is reused for all calls to empty. 
+ private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) + def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] + + def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) + + /** + * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type + * without copying. + * + * Note that an array containing boxed primitives can be converted to a `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` + * at runtime. + */ + def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { + def elemTag: ClassTag[T] = ClassTag[T](array.getClass.getComponentType) + def length: Int = array.length + def apply(index: Int): T = array(index) + def update(index: Int, elem: T): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofRef[_] => + Array.equals( + this.array.asInstanceOf[Array[AnyRef]], + that.array.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(array, 0, array.length) + else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { + // Type erases to `ManifestFactory.ByteManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Byte.type = ClassTag.Byte + def length: Int = array.length + def apply(index: Int): Byte = array(index) + def update(index: Int, elem: Byte): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) + else new WidenedByteArrayStepper(array, 0, array.length) + 
).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { + // Type erases to `ManifestFactory.ShortManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Short.type = ClassTag.Short + def length: Int = array.length + def apply(index: Int): Short = array(index) + def update(index: Int, elem: Short): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) + else new WidenedShortArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { + // Type erases to `ManifestFactory.CharManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Char.type = ClassTag.Char + def length: Int = array.length + def apply(index: Int): Char = array(index) + def update(index: Int, elem: Char): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) + else new WidenedCharArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + val jsb = sb.underlying + if (start.length != 0) jsb.append(start) + val len = array.length + if (len != 0) { + if (sep.isEmpty) jsb.append(array) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(array(0)) + var i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(array(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + sb + } + } + + @SerialVersionUID(3L) + final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { + // Type erases to `ManifestFactory.IntManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Int.type = ClassTag.Int + def length: Int = array.length + def apply(index: Int): Int = array(index) + def update(index: Int, elem: Int): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) + else new IntArrayStepper(array, 0, array.length) + ).asInstanceOf[S 
with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { + // Type erases to `ManifestFactory.LongManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Long.type = ClassTag.Long + def length: Int = array.length + def apply(index: Int): Long = array(index) + def update(index: Int, elem: Long): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length)) + else new LongArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { + // Type erases to `ManifestFactory.FloatManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Float.type = ClassTag.Float + def length: Int = array.length + def apply(index: Int): Float = array(index) + def update(index: Int, elem: Float): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) + else new WidenedFloatArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { + // Type erases to `ManifestFactory.DoubleManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Double.type = ClassTag.Double + def length: Int = array.length + def apply(index: Int): Double = array(index) + def update(index: Int, elem: Double): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) + else new DoubleArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { + // Type erases to `ManifestFactory.BooleanManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Boolean.type = ClassTag.Boolean + def length: Int = array.length + def apply(index: Int): Boolean = array(index) + def update(index: Int, elem: Boolean): Unit = { array(index) = elem } + override def hashCode = 
MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { + // Type erases to `ManifestFactory.UnitManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Unit.type = ClassTag.Unit + def length: Int = array.length + def apply(index: Int): Unit = array(index) + def update(index: Int, elem: Unit): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofUnit => array.length == that.array.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit] + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala b/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala new file mode 100644 index 000000000000..36b53d1e433b --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala @@ -0,0 +1,888 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.{unchecked => uc} +import scala.annotation.{implicitNotFound, tailrec, unused} +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializationProxy +import scala.runtime.Statics + +/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good + * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality + * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality + * of numeric types is not supported (similar to `AnyRefMap`). + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. 
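+ *
+ *  For example:
+ *  {{{
+ *    val m = CollisionProofHashMap.empty[String, Int]
+ *    m("scala") = 3
+ *    m.get("scala")   // Some(3)
+ *  }}}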
+ * + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) + extends AbstractMap[K, V] + with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- + with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- + + private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap + + def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) + + import CollisionProofHashMap.Node + private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] + private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] + + /** The actual hash table. */ + private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + @`inline` private[this] final def computeHash(o: K): Int = { + val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode + h ^ (h >>> 16) + } + + @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) + + override protected def fromSpecific(coll: IterableOnce[(K, V)] @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] + + override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + override def contains(key: K): Boolean = findNode(key) ne null + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + }) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + } + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val nd = findNode(key) + if (nd eq null) default else nd match { + case nd: LLNode @uc => nd.value + case n => n.asInstanceOf[RBNode].value + } + } + + @`inline` private[this] def findNode(elem: K): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case n: LLNode @uc => n.getNode(elem, hash) + case n => n.asInstanceOf[RBNode].getNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) { + if(size == 0) reallocTable(target) + else growTable(target) + } + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val 
idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + val res = table(idx) match { + case n: RBNode @uc => + insert(n, idx, key, hash, value) + case _old => + val old: LLNode = _old.asInstanceOf[LLNode] + if(old eq null) { + table(idx) = new LLNode(key, hash, value, null) + } else { + var remaining = CollisionProofHashMap.treeifyThreshold + var prev: LLNode = null + var n = old + while((n ne null) && n.hash <= hash && remaining > 0) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return (if(getOld) Some(old) else null) + } + prev = n + n = n.next + remaining -= 1 + } + if(remaining == 0) { + treeify(old, idx) + return put0(key, value, getOld, hash, idx) + } + if(prev eq null) table(idx) = new LLNode(key, hash, value, old) + else prev.next = new LLNode(key, hash, value, prev.next) + } + true + } + if(res) contentSize += 1 + if(res) Some(null.asInstanceOf[V]) else null //TODO + } + + private[this] def treeify(old: LLNode, idx: Int): Unit = { + table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) + var n: LLNode = old.next + while(n ne null) { + val root = table(idx).asInstanceOf[RBNode] + insertIntoExisting(root, idx, n.key, n.hash, n.value, root) + n = n.next + } + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + val k = xs.knownSize + if(k > 0) sizeHint(contentSize + k) + super.addAll(xs) + } + + // returns the old value or Statics.pfMarker if not found + private[this] def remove0(elem: K) : Any = { + val hash = computeHash(elem) + val idx = index(hash) + table(idx) match { + case null => Statics.pfMarker + case t: RBNode @uc => + val v = delete(t, idx, elem, hash) + if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 + v + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd.value + case nd: LLNode @uc => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next.value + } + prev = next + next = next.next + } + Statics.pfMarker + } + } + + private[this] abstract class MapIterator[R] extends AbstractIterator[R] { + protected[this] def extract(node: LLNode): R + protected[this] def extract(node: RBNode): R + + private[this] var i = 0 + private[this] var node: Node = null + private[this] val len = table.length + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + n match { + case null => + case n: RBNode @uc => + node = CollisionProofHashMap.minNodeNonNull(n) + return true + case n: LLNode @uc => + node = n + return true + } + } + false + } + } + + def next(): R = + if(!hasNext) Iterator.empty.next() + else node match { + case n: RBNode @uc => + val r = extract(n) + node = CollisionProofHashMap.successor(n ) + r + case n: LLNode @uc => + val r = extract(n) + node = n.next + r + } + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapIterator[K] { + protected[this] def extract(node: LLNode) = node.key + protected[this] def extract(node: RBNode) = node.key + } + } + + override def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapIterator[(K, V)] { + protected[this] def extract(node: LLNode) = (node.key, node.value) + protected[this] 
def extract(node: RBNode) = (node.key, node.value) + } + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + table = java.util.Arrays.copyOf(table, newlen) + threshold = newThreshold(table.length) + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + i += 1 + } + oldlen *= 2 + } + } + + @`inline` private[this] def reallocTable(newlen: Int) = { + table = new Array(newlen) + threshold = newThreshold(table.length) + } + + @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + } + + private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + //preLow.next = null + //preHigh.next = null + var lastLow: LLNode = preLow + var lastHigh: LLNode = preHigh + var n = list + while(n ne null) { + val next = n.next + if((n.hash & mask) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(list ne preLow.next) table(lowBucket) = preLow.next + if(preHigh.next ne null) { + table(highBucket) = preHigh.next + lastHigh.next = null + } + } + + private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + var lowCount, highCount = 0 + tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) + if(highCount != 0) { + if(lowCount == 0) { + table(lowBucket) = null + table(highBucket) = tree + } else { + table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), lowCount) + table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def remove(key: K): Option[V] = { + val v = remove0(key) + if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) + } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) + } + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) + } + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) + + override protected[this] def className = 
"CollisionProofHashMap" + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val hash = computeHash(key) + val idx = index(hash) + table(idx) match { + case null => () + case n: LLNode @uc => + val nd = n.getNode(key, hash) + if(nd != null) return nd.value + case n => + val nd = n.asInstanceOf[RBNode].getNode(key, hash) + if(nd != null) return nd.value + } + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + + ///////////////////// Overrides code from SortedMapOps + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
+ */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: + + @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red + @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + + @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { + val i = hash - node.hash + if(i != 0) i else ordering.compare(key, node.key) + } + + @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ordering.compare(key, node.key) + } + + // ---- insertion ---- + + @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { + val cmp = compare(key, hash, x) + if(cmp == 0) { + x.value = value + false + } else { + val next = if(cmp < 0) x.left else x.right + if(next eq null) { + val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) + if (cmp < 0) x.left = z else x.right = z + table(bucket) = fixAfterInsert(_root, z) + return true + } + else insertIntoExisting(_root, bucket, key, hash, value, next) + } + } + + private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + if(tree eq null) { + table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) + true + } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } + + private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { + var root = _root + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + root = rotateLeft(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateRight(root, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + root = rotateRight(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateLeft(root, z.parent.parent) + } + } + } + root.red = false + root + } + + // ---- deletion ---- + + // returns the old value or 
Statics.pfMarker if not found + private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { + var root = _root + val z = root.getNode(key, hash: Int) + if (z ne null) { + val oldValue = z.value + var y = z + var yIsRed = y.red + var x: RBNode = null + var xParent: RBNode = null + + if (z.left eq null) { + x = z.right + root = transplant(root, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + root = transplant(root, z, z.left) + xParent = z.parent + } + else { + y = CollisionProofHashMap.minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + root = transplant(root, y, y.right) + y.right = z.right + y.right.parent = y + } + root = transplant(root, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) root = fixAfterDelete(root, x, xParent) + if(root ne _root) table(bucket) = root + oldValue + } else Statics.pfMarker + } + + private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + var root = _root + var x = node + var xParent = parent + while ((x ne root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateLeft(root, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + root = rotateRight(root, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + root = rotateLeft(root, xParent) + x = root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateRight(root, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + root = rotateLeft(root, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + root = rotateRight(root, xParent) + x = root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + root + } + + // ---- helpers ---- + + @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.right + x.right = y.left + + val xp = x.parent + if (y.left ne null) y.left.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.left) xp.left = y + else xp.right = y + + y.left = x + x.parent = y + root + } + + @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.left + x.left = y.right + + val xp = x.parent + if (y.right ne null) y.right.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.right) xp.right = y + else xp.left = y + + y.right = x + x.parent = y + root + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. 
+ */ + private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { + var root = _root + if (to.parent eq null) root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + if (from ne null) from.parent = to.parent + root + } + + // building + + def fromNodes(xs: Iterator[Node], size: Int): RBNode = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): RBNode = size match { + case 0 => null + case 1 => + val nn = xs.next() + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val nn = xs.next() + val right = f(level+1, size-1-leftSize) + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + val n = new RBNode(key, hash, value, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + f(1, size) + } +} + +/** + * $factoryInfo + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + */ +@SerialVersionUID(3L) +object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it + } + + def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(using ordering) + } + + @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { + val i = hash - node.hash + if(i != 0) i else ord.compare(key, node.key) + } + + @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { + /*val i = hash - 
node.hash + if(i != 0) i else*/ ord.compare(key, node.key) + } + + private final val treeifyThreshold = 8 + + // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. + // Keeping calls monomorphic where possible and dispatching manually where needed is faster. + sealed abstract class Node + + /////////////////////////// Red-Black Tree Node + + final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + val cmp = compare(k, h, this) + if (cmp < 0) { + if(left ne null) left.getNode(k, h) else null + } else if (cmp > 0) { + if(right ne null) right.getNode(k, h) else null + } else this + } + + def foreach[U](f: ((K, V)) => U): Unit = { + if(left ne null) left.foreach(f) + f((key, value)) + if(right ne null) right.foreach(f) + } + + def foreachEntry[U](f: (K, V) => U): Unit = { + if(left ne null) left.foreachEntry(f) + f(key, value) + if(right ne null) right.foreachEntry(f) + } + + def foreachNode[U](f: RBNode[K, V] => U): Unit = { + if(left ne null) left.foreachNode(f) + f(this) + if(right ne null) right.foreachNode(f) + } + } + + @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + new RBNode(key, hash, value, red, null, null, parent) + + @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + if (node.left eq null) node else minNodeNonNull(node.left) + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. 
+ */ + private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): RBNode[A, B] = nextNode match { + case null => Iterator.empty.next() + case node => + nextNode = successor(node) + node + } + } + + /////////////////////////// Linked List Node + + private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + override def toString = s"LLNode($key, $value, $hash) -> $next" + + private[this] def eq(a: Any, b: Any): Boolean = + if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this + else if((next eq null) || (hash > h)) null + else next.getNode(k, h) + } + + @tailrec def foreach[U](f: ((K, V)) => U): Unit = { + f((key, value)) + if(next ne null) next.foreach(f) + } + + @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { + f(key, value) + if(next ne null) next.foreachEntry(f) + } + + @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { + f(this) + if(next ne null) next.foreachNode(f) + } + } +} From c796bcfc1586ec8029afae9fafbcfa74d5fb003d Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 20 May 2024 17:48:40 +0200 Subject: [PATCH 094/827] Address compatibility issues in scala2-library-cc --- scala2-library-cc/src/scala/Array.scala | 690 ++++++++++++++++++ .../src/scala/collection/ArrayOps.scala | 4 +- .../src/scala/collection/Factory.scala | 20 +- .../src/scala/collection/Iterable.scala | 12 +- .../src/scala/collection/SortedMap.scala | 6 +- .../StrictOptimizedSortedMapOps.scala | 2 +- .../generic/DefaultSerializationProxy.scala | 4 +- .../scala/collection/mutable/ArraySeq.scala | 6 +- .../mutable/CollisionProofHashMap.scala | 2 +- 9 files changed, 718 insertions(+), 28 deletions(-) create mode 100644 scala2-library-cc/src/scala/Array.scala diff --git a/scala2-library-cc/src/scala/Array.scala b/scala2-library-cc/src/scala/Array.scala new file mode 100644 index 000000000000..d2098a76f32f --- /dev/null +++ b/scala2-library-cc/src/scala/Array.scala @@ -0,0 +1,690 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +//import scala.collection.generic._ +import scala.collection.{Factory, immutable, mutable} +import mutable.ArrayBuilder +import immutable.ArraySeq +import scala.language.implicitConversions +import scala.reflect.{ClassTag, classTag} +import scala.runtime.BoxedUnit +import scala.runtime.ScalaRunTime +import scala.runtime.ScalaRunTime.{array_apply, array_update} + +/** Utility methods for operating on arrays. 
+ * For example: + * {{{ + * val a = Array(1, 2) + * val b = Array.ofDim[Int](2) + * val c = Array.concat(a, b) + * }}} + * where the array objects `a`, `b` and `c` have respectively the values + * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. + */ +object Array { + val emptyBooleanArray = new Array[Boolean](0) + val emptyByteArray = new Array[Byte](0) + val emptyCharArray = new Array[Char](0) + val emptyDoubleArray = new Array[Double](0) + val emptyFloatArray = new Array[Float](0) + val emptyIntArray = new Array[Int](0) + val emptyLongArray = new Array[Long](0) + val emptyShortArray = new Array[Short](0) + val emptyObjectArray = new Array[Object](0) + + /** Provides an implicit conversion from the Array object to a collection Factory */ + implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) + @SerialVersionUID(3L) + private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) + def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] + } + + /** + * Returns a new [[scala.collection.mutable.ArrayBuilder]]. + */ + def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](using t) + + /** Build an array from the iterable collection. + * + * {{{ + * scala> val a = Array.from(Seq(1, 5)) + * val a: Array[Int] = Array(1, 5) + * + * scala> val b = Array.from(Range(1, 5)) + * val b: Array[Int] = Array(1, 2, 3, 4) + * }}} + * + * @param it the iterable collection + * @return an array consisting of elements of the iterable collection + */ + def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { + case it: Iterable[A] => it.toArray[A] + case _ => it.iterator.toArray[A] + } + + private def slowcopy(src : AnyRef, + srcPos : Int, + dest : AnyRef, + destPos : Int, + length : Int): Unit = { + var i = srcPos + var j = destPos + val srcUntil = srcPos + length + while (i < srcUntil) { + array_update(dest, j, array_apply(src, i)) + i += 1 + j += 1 + } + } + + /** Copy one array to another. + * Equivalent to Java's + * `System.arraycopy(src, srcPos, dest, destPos, length)`, + * except that this also works for polymorphic and boxed arrays. + * + * Note that the passed-in `dest` array will be modified by this call. + * + * @param src the source array. + * @param srcPos starting position in the source array. + * @param dest destination array. + * @param destPos starting position in the destination array. + * @param length the number of array elements to be copied. + * + * @see `java.lang.System#arraycopy` + */ + def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { + val srcClass = src.getClass + if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) + java.lang.System.arraycopy(src, srcPos, dest, destPos, length) + else + slowcopy(src, srcPos, dest, destPos, length) + } + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength)`, + * except that this works for primitive and object arrays in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyOf[A](original: Array[A], newLength: Int): Array[A] = ((original: @unchecked) match { + case x: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] + case x: Array[AnyRef] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Int] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Double] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Long] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Float] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Char] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Byte] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Short] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Boolean] => java.util.Arrays.copyOf(x, newLength) + }).asInstanceOf[Array[A]] + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. The new array can have + * a different type than the original one as long as the values are + * assignment-compatible. When copying between primitive and object arrays, + * boxing and unboxing are supported. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength, newType)`, + * except that this works for all combinations of primitive and object arrays + * in a single method. + * + * @see `java.util.Arrays#copyOf` + */ + def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { + val runtimeClass = ct.runtimeClass + if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] + else { + val destClass = runtimeClass.asInstanceOf[Class[A]] + if (destClass.isAssignableFrom(original.getClass.getComponentType)) { + if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) + else { + val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] + java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] + } + } else { + val dest = new Array[A](newLength) + Array.copy(original, 0, dest, 0, original.length) + dest + } + } + } + + private def newUnitArray(len: Int): Array[Unit] = { + val result = new Array[Unit](len) + java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) + result + } + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = new Array[T](0) + + /** Creates an array with given elements. + * + * @param xs the elements to put in the array + * @return an array containing all elements from xs. + */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } + def apply[T: ClassTag](xs: T*): Array[T] = { + val len = xs.length + xs match { + case wa: immutable.ArraySeq[_] if wa.unsafeArray.getClass.getComponentType == classTag[T].runtimeClass => + // We get here in test/files/run/sd760a.scala, `Array[T](t)` for + // a specialized type parameter `T`. While we still pay for two + // copies of the array it is better than before when we also boxed + // each element when populating the result. + ScalaRunTime.array_clone(wa.unsafeArray).asInstanceOf[Array[T]] + case _ => + val array = new Array[T](len) + val iterator = xs.iterator + var i = 0 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + } + + /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { + val array = new Array[Boolean](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Byte, xs: Byte*): Array[Byte] = { + val array = new Array[Byte](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Short, xs: Short*): Array[Short] = { + val array = new Array[Short](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Char, xs: Char*): Array[Char] = { + val array = new Array[Char](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Int, xs: Int*): Array[Int] = { + val array = new Array[Int](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Long, xs: Long*): Array[Long] = { + val array = new Array[Long](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Float, xs: Float*): Array[Float] = { + val array = new Array[Float](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Double, xs: Double*): Array[Double] = { + val array = new Array[Double](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Unit` objects */ + def apply(x: Unit, xs: Unit*): Array[Unit] = { + val array = new Array[Unit](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = + new Array[T](n1) + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { + val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) + for (i <- 0 until n1) arr(i) = new Array[T](n2) + arr + // tabulate(n1)(_ => ofDim[T](n2)) + } + /** Creates a 3-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = + tabulate(n1)(_ => ofDim[T](n2, n3)) + /** Creates a 4-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4)) + /** Creates a 5-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) + + /** Concatenates all arrays into a single array. + * + * @param xss the given arrays + * @return the array created from concatenating `xss` + */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = { + val b = newBuilder[T] + b.sizeHint(xss.map(_.length).sum) + for (xs <- xss) b ++= xs + b.result() + } + + /** Returns an array that contains the results of some element computation a number + * of times. + * + * Note that this means that `elem` is computed a total of n times: + * {{{ + * scala> Array.fill(3){ math.random } + * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) + * }}} + * + * @param n the number of elements desired + * @param elem the element computation + * @return an Array of size n, where each element contains the result of computing + * `elem`. + */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = elem + i += 1 + } + array + } + } + + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Returns a three-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Returns a four-dimensional array that contains the results of some element + * computation a number of times. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Returns a five-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. + * + * @param n The number of elements in the array + * @param f The function computing element values + * @return An `Array` consisting of elements `f(0),f(1), ..., f(n - 1)` + */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = f(i) + i += 1 + } + array + } + } + + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Returns a three-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Returns a four-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Returns a five-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Returns an array containing a sequence of increasing integers in a range. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @return the array with values in range `start, start + 1, ..., end - 1` + * up to, but excluding, `end`. + */ + def range(start: Int, end: Int): Array[Int] = range(start, end, 1) + + /** Returns an array containing equally spaced values in some integer interval. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @param step the increment value of the array (may not be zero) + * @return the array with values in `start, start + step, ...` up to, but excluding `end` + */ + def range(start: Int, end: Int, step: Int): Array[Int] = { + if (step == 0) throw new IllegalArgumentException("zero step") + val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) + + var n = 0 + var i = start + while (if (step < 0) end < i else i < end) { + array(n) = i + i += step + n += 1 + } + array + } + + /** Returns an array containing repeated applications of a function to a start value. + * + * @param start the start value of the array + * @param len the number of elements returned by the array + * @param f the function that is repeatedly applied + * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { + if (len > 0) { + val array = new Array[T](len) + var acc = start + var i = 1 + array(0) = acc + + while (i < len) { + acc = f(acc) + array(i) = acc + i += 1 + } + array + } else { + empty[T] + } + } + + /** Compare two arrays per element. + * + * A more efficient version of `xs.sameElements(ys)`. + * + * Note that arrays are invariant in Scala, but it may + * be sound to cast an array of arbitrary reference type + * to `Array[AnyRef]`. Arrays on the JVM are covariant + * in their element type. + * + * `Array.equals(xs.asInstanceOf[Array[AnyRef]], ys.asInstanceOf[Array[AnyRef]])` + * + * @param xs an array of AnyRef + * @param ys an array of AnyRef + * @return true if corresponding elements are equal + */ + def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = + (xs eq ys) || + (xs.length == ys.length) && { + var i = 0 + while (i < xs.length && xs(i) == ys(i)) i += 1 + i >= xs.length + } + + /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. 
+ * + * @param x the selector value + * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` + */ + def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) + + final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[T] = this + def lengthCompare(len: Int): Int = a.lengthCompare(len) + def apply(i: Int): T = a(i) + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def toSeq: scala.Seq[T] = a.toSeq // clones the array + } +} + +/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation + * for Java's `T[]`. + * + * {{{ + * val numbers = Array(1, 2, 3, 4) + * val first = numbers(0) // read the first element + * numbers(3) = 100 // replace the 4th array element with 100 + * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two + * }}} + * + * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above + * example code. + * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to + * `update(Int, T)`. + * + * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion + * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion + * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). + * Both types make available many of the standard operations found in the Scala collections API. + * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, + * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. + * + * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. For instance, + * consider the following code: + * + * {{{ + * val arr = Array(1, 2, 3) + * val arrReversed = arr.reverse + * val seqReversed : collection.Seq[Int] = arr.reverse + * }}} + * + * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring + * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed + * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another + * `ArraySeq`. + * + * @see [[https://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[https://docs.scala-lang.org/sips/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. 
+ * @hideImplicitConversion scala.Predef.booleanArrayOps + * @hideImplicitConversion scala.Predef.byteArrayOps + * @hideImplicitConversion scala.Predef.charArrayOps + * @hideImplicitConversion scala.Predef.doubleArrayOps + * @hideImplicitConversion scala.Predef.floatArrayOps + * @hideImplicitConversion scala.Predef.intArrayOps + * @hideImplicitConversion scala.Predef.longArrayOps + * @hideImplicitConversion scala.Predef.refArrayOps + * @hideImplicitConversion scala.Predef.shortArrayOps + * @hideImplicitConversion scala.Predef.unitArrayOps + * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray + * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray + * @define coll array + * @define Coll `Array` + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define collectExample + * @define undefinedorder + */ +final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { + + /** The length of the array */ + def length: Int = throw new Error() + + /** The element at given index. + * + * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. + * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. + * + * @param i the index + * @return the element at the given index + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def apply(i: Int): T = throw new Error() + + /** Update the element at given index. + * + * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. + * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. + * + * @param i the index + * @param x the value to be written at index `i` + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def update(i: Int, x: T): Unit = { throw new Error() } + + /** Clone the Array. + * + * @return A clone of the Array. 
+ */ + override def clone(): Array[T] = throw new Error() +} diff --git a/scala2-library-cc/src/scala/collection/ArrayOps.scala b/scala2-library-cc/src/scala/collection/ArrayOps.scala index e8548c12751f..72ec66a0bc86 100644 --- a/scala2-library-cc/src/scala/collection/ArrayOps.scala +++ b/scala2-library-cc/src/scala/collection/ArrayOps.scala @@ -590,7 +590,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { val len = xs.length def boxed = if(len < ArrayOps.MaxStableSortLength) { val a = xs.clone() - Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]]) + Sorting.stableSort(a)(using ord.asInstanceOf[Ordering[A]]) a } else { val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) @@ -1300,7 +1300,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) if (xs.length == 0) bb.result() else { - def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + def mkRowBuilder() = ArrayBuilder.make[B](using ClassTag[B](aClass.getComponentType)) val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) for (xs <- this) { var i = 0 diff --git a/scala2-library-cc/src/scala/collection/Factory.scala b/scala2-library-cc/src/scala/collection/Factory.scala index 99f584b972fc..96f39bafc905 100644 --- a/scala2-library-cc/src/scala/collection/Factory.scala +++ b/scala2-library-cc/src/scala/collection/Factory.scala @@ -675,16 +675,16 @@ object ClassTagIterableFactory { * sound depending on the use of the `ClassTag` by the collection implementation. */ @SerialVersionUID(3L) class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { - def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] - def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] - override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] - override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] - override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] + def empty[A]: CC[A] = delegate.empty(using ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(using ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(using ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(using ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, 
S)]): CC[A] = delegate.unfold[A, S](init)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(using ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(using ClassTag.Any).asInstanceOf[CC[A]] } } diff --git a/scala2-library-cc/src/scala/collection/Iterable.scala b/scala2-library-cc/src/scala/collection/Iterable.scala index 5afc14f4ceef..6556f31d378d 100644 --- a/scala2-library-cc/src/scala/collection/Iterable.scala +++ b/scala2-library-cc/src/scala/collection/Iterable.scala @@ -985,9 +985,9 @@ trait SortedSetFactoryDefaults[+A, +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { self: IterableOps[A, WithFilterCC, _] => - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(ordering) - override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](ordering) - override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(ordering) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](using ordering) + override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(using ordering) override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC]^{p} = new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) @@ -1040,9 +1040,9 @@ trait SortedMapFactoryDefaults[K, +V, +UnsortedCC[x, y] <: Map[x, y]] extends SortedMapOps[K, V, CC, CC[K, V @uncheckedVariance]] with MapOps[K, V, UnsortedCC, CC[K, V @uncheckedVariance]] { self: IterableOps[(K, V), WithFilterCC, _] => - override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(ordering) - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(ordering) - override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](ordering) + override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(using ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](using ordering) override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC]^{p} = new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) diff --git a/scala2-library-cc/src/scala/collection/SortedMap.scala 
b/scala2-library-cc/src/scala/collection/SortedMap.scala index 7b9381ebb078..876a83b2709c 100644 --- a/scala2-library-cc/src/scala/collection/SortedMap.scala +++ b/scala2-library-cc/src/scala/collection/SortedMap.scala @@ -181,16 +181,16 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) - })(ordering) + })(using ordering) /** Alias for `concat` */ @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(using ordering) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(using ordering) } object SortedMapOps { diff --git a/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala b/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala index 9a9e6e367922..411a86c7cc5c 100644 --- a/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala +++ b/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala @@ -34,7 +34,7 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOp strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = - strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(using ordering)) override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = strictOptimizedCollect(sortedMapFactory.newBuilder, pf) diff --git a/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala b/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala index e36bb77ebdb8..1f0e6164731c 100644 --- a/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala +++ b/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala @@ -78,9 +78,9 @@ private[collection] case object SerializeEnd trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => protected[this] def writeReplace(): AnyRef = { val f: Factory[Any, Any] = this match { - case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](using it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] - case it: 
scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]]) + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](using it.ordering.asInstanceOf[Ordering[Any]]) case it => it.iterableFactory.iterableFactory } new DefaultSerializationProxy(f, this) diff --git a/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala b/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala index 70762e5b340d..d1c5b5c9ce72 100644 --- a/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala +++ b/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala @@ -46,15 +46,15 @@ sealed abstract class ArraySeq[T] override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = { - val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] + val b = ArrayBuilder.make(using elemTag).asInstanceOf[ArrayBuilder[T]] val s = coll.knownSize if(s > 0) b.sizeHint(s) b ++= coll ArraySeq.make(b.result()) } override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = - ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] - override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) + ArraySeq.newBuilder[T](using elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(using elemTag.asInstanceOf[ClassTag[T]]) /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype diff --git a/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala b/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala index ff3bab1dd818..05c3124a3323 100644 --- a/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala +++ b/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala @@ -768,7 +768,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { @SerialVersionUID(3L) private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it - def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(using ordering) } @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { From 695d2a917ab848504962ad28461abed97350a54c Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Tue, 21 May 2024 14:23:49 +0200 Subject: [PATCH 095/827] chore: Bump mtags to latest stable We should avoid using non stable versions since this can cause issues like https://github.com/scalameta/metals/issues/6402 Closes https://github.com/scalameta/metals/issues/6402 --- project/Build.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 
56c0beeec98b..bddbab5568a9 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1339,10 +1339,8 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings def presentationCompilerSettings(implicit mode: Mode) = { - val mtagsVersion = "1.3.0+56-a06a024d-SNAPSHOT" - + val mtagsVersion = "1.3.1" Seq( - resolvers ++= Resolver.sonatypeOssRepos("snapshots"), libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", "io.get-coursier" % "interface" % "1.0.18", From de46c7d752d07d251f4ce7c7d4641e89186f33c3 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 21 May 2024 10:05:10 +0200 Subject: [PATCH 096/827] Adapt the tests to Scala 3.5 --- library/src/scala/quoted/ToExpr.scala | 2 +- tests/neg/given-loop-prevention.check | 14 ++++++ tests/neg/given-loop-prevention.scala | 12 +++++ tests/neg/i6716.check | 6 +-- tests/neg/i6716.scala | 4 +- tests/neg/i7294-a.check | 27 ----------- tests/neg/i7294-a.scala | 14 ------ tests/neg/i7294-b.scala | 12 ----- tests/neg/i7294.check | 25 ++++++++++ tests/neg/i7294.scala | 10 ++++ tests/neg/looping-givens.check | 48 +++++++++++++++++++ tests/neg/looping-givens.scala | 11 +++++ .../CollectionStrawMan6.scala | 4 +- tests/pos/extmethods.scala | 2 +- tests/pos/given-loop-prevention.scala | 14 ------ tests/pos/i17245.scala | 2 +- tests/pos/i9967.scala | 2 +- tests/pos/t5643.scala | 2 +- .../run/colltest6/CollectionStrawMan6_1.scala | 4 +- tests/run/i502.scala | 6 +-- tests/run/t2029.scala | 2 +- tests/run/t3326.scala | 8 ++-- tests/warn/context-bounds-migration.scala | 9 ---- tests/warn/i15474.scala | 2 +- tests/warn/looping-givens.check | 45 +++++++++++++++++ tests/warn/looping-givens.scala | 2 + 26 files changed, 188 insertions(+), 101 deletions(-) create mode 100644 tests/neg/given-loop-prevention.check create mode 100644 tests/neg/given-loop-prevention.scala delete mode 100644 tests/neg/i7294-a.check delete mode 100644 tests/neg/i7294-a.scala delete mode 100644 tests/neg/i7294-b.scala create mode 100644 tests/neg/i7294.check create mode 100644 tests/neg/i7294.scala create mode 100644 tests/neg/looping-givens.check create mode 100644 tests/neg/looping-givens.scala delete mode 100644 tests/pos/given-loop-prevention.scala delete mode 100644 tests/warn/context-bounds-migration.scala create mode 100644 tests/warn/looping-givens.check diff --git a/library/src/scala/quoted/ToExpr.scala b/library/src/scala/quoted/ToExpr.scala index 042c8ff37a52..6c167c353d87 100644 --- a/library/src/scala/quoted/ToExpr.scala +++ b/library/src/scala/quoted/ToExpr.scala @@ -97,7 +97,7 @@ object ToExpr { /** Default implementation of `ToExpr[Array[T]]` */ given ArrayToExpr[T: Type: ToExpr: ClassTag]: ToExpr[Array[T]] with { def apply(arr: Array[T])(using Quotes): Expr[Array[T]] = - '{ Array[T](${Expr(arr.toSeq)}*)(${Expr(summon[ClassTag[T]])}) } + '{ Array[T](${Expr(arr.toSeq)}*)(using ${Expr(summon[ClassTag[T]])}) } } /** Default implementation of `ToExpr[Array[Boolean]]` */ diff --git a/tests/neg/given-loop-prevention.check b/tests/neg/given-loop-prevention.check new file mode 100644 index 000000000000..460adf03be49 --- /dev/null +++ b/tests/neg/given-loop-prevention.check @@ -0,0 +1,14 @@ +-- Error: tests/neg/given-loop-prevention.scala:10:36 ------------------------------------------------------------------ +10 | given List[Foo] = List(summon[Foo]) // error + | ^ + | Result of implicit search for Foo will change. + | Current result Baz.given_Foo will be no longer eligible + | because it is not defined before the search position. 
+ | Result with new rules: No Matching Implicit. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that Baz.given_Foo comes earlier, + | - use an explicit argument. diff --git a/tests/neg/given-loop-prevention.scala b/tests/neg/given-loop-prevention.scala new file mode 100644 index 000000000000..9d404b8c6d8e --- /dev/null +++ b/tests/neg/given-loop-prevention.scala @@ -0,0 +1,12 @@ + +class Foo + +object Bar { + given Foo with {} + given List[Foo] = List(summon[Foo]) // ok +} + +object Baz { + given List[Foo] = List(summon[Foo]) // error + given Foo with {} +} diff --git a/tests/neg/i6716.check b/tests/neg/i6716.check index 4684842e73fe..0144f539f53c 100644 --- a/tests/neg/i6716.check +++ b/tests/neg/i6716.check @@ -1,5 +1,5 @@ --- Warning: tests/neg/i6716.scala:12:39 -------------------------------------------------------------------------------- -12 | given Monad[Bar] = summon[Monad[Foo]] // warn +-- Error: tests/neg/i6716.scala:11:39 ---------------------------------------------------------------------------------- +11 | given Monad[Bar] = summon[Monad[Foo]] // error | ^ | Result of implicit search for Monad[Foo] will change. | Current result Bar.given_Monad_Bar will be no longer eligible @@ -12,5 +12,3 @@ | - use a `given ... with` clause as the enclosing given, | - rearrange definitions so that Bar.given_Monad_Bar comes earlier, | - use an explicit argument. - | This will be an error in Scala 3.5 and later. -No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i6716.scala b/tests/neg/i6716.scala index 311209fd9006..8b37d4e223ac 100644 --- a/tests/neg/i6716.scala +++ b/tests/neg/i6716.scala @@ -1,4 +1,3 @@ -//> using options -Xfatal-warnings trait Monad[T]: def id: String @@ -9,11 +8,10 @@ object Foo { opaque type Bar = Foo object Bar { - given Monad[Bar] = summon[Monad[Foo]] // warn + given Monad[Bar] = summon[Monad[Foo]] // error } object Test extends App { println(summon[Monad[Foo]].id) println(summon[Monad[Bar]].id) } -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) \ No newline at end of file diff --git a/tests/neg/i7294-a.check b/tests/neg/i7294-a.check deleted file mode 100644 index c33735258ad0..000000000000 --- a/tests/neg/i7294-a.check +++ /dev/null @@ -1,27 +0,0 @@ --- [E007] Type Mismatch Error: tests/neg/i7294-a.scala:10:20 ----------------------------------------------------------- -10 | case x: T => x.g(10) // error - | ^^^^^^^ - | Found: Any - | Required: T - | - | where: T is a type in given instance f with bounds <: foo.Foo - | - | longer explanation available when compiling with `-explain` --- Warning: tests/neg/i7294-a.scala:10:12 ------------------------------------------------------------------------------ -10 | case x: T => x.g(10) // error - | ^ - | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. - | Current result foo.Test.f will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: No Matching Implicit. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... 
with` clause as the enclosing given, - | - rearrange definitions so that foo.Test.f comes earlier, - | - use an explicit argument. - | This will be an error in Scala 3.5 and later. - | - | where: T is a type in given instance f with bounds <: foo.Foo -No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7294-a.scala b/tests/neg/i7294-a.scala deleted file mode 100644 index a5193097e941..000000000000 --- a/tests/neg/i7294-a.scala +++ /dev/null @@ -1,14 +0,0 @@ -//> using options -Xfatal-warnings - -package foo - -trait Foo { def g(x: Int): Any } - -object Test: - - inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error - } - - @main def Test = f -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7294-b.scala b/tests/neg/i7294-b.scala deleted file mode 100644 index 17cd7f07c3f7..000000000000 --- a/tests/neg/i7294-b.scala +++ /dev/null @@ -1,12 +0,0 @@ -//> using options -Xfatal-warnings - -package foo - -trait Foo { def g(x: Any): Any } - -inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error -} - -@main def Test = f -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7294.check b/tests/neg/i7294.check new file mode 100644 index 000000000000..d6e559997f78 --- /dev/null +++ b/tests/neg/i7294.check @@ -0,0 +1,25 @@ +-- Error: tests/neg/i7294.scala:7:10 ----------------------------------------------------------------------------------- +7 | case x: T => x.g(10) // error // error + | ^ + | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. + | Current result foo.f will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: No Matching Implicit. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that foo.f comes earlier, + | - use an explicit argument. + | + | where: T is a type in given instance f with bounds <: foo.Foo +-- [E007] Type Mismatch Error: tests/neg/i7294.scala:7:18 -------------------------------------------------------------- +7 | case x: T => x.g(10) // error // error + | ^^^^^^^ + | Found: Any + | Required: T + | + | where: T is a type in given instance f with bounds <: foo.Foo + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i7294.scala b/tests/neg/i7294.scala new file mode 100644 index 000000000000..fbb00f9b7e89 --- /dev/null +++ b/tests/neg/i7294.scala @@ -0,0 +1,10 @@ + +package foo + +trait Foo { def g(x: Any): Any } + +inline given f[T <: Foo]: T = ??? match { + case x: T => x.g(10) // error // error +} + +@main def Test = f diff --git a/tests/neg/looping-givens.check b/tests/neg/looping-givens.check new file mode 100644 index 000000000000..1e7ee08d79df --- /dev/null +++ b/tests/neg/looping-givens.check @@ -0,0 +1,48 @@ +-- Error: tests/neg/looping-givens.scala:9:22 -------------------------------------------------------------------------- +9 | given aa: A = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: a. 
+ | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | + | where: T is a type variable with constraint <: A +-- Error: tests/neg/looping-givens.scala:10:22 ------------------------------------------------------------------------- +10 | given bb: B = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: b. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | + | where: T is a type variable with constraint <: B +-- Error: tests/neg/looping-givens.scala:11:28 ------------------------------------------------------------------------- +11 | given ab: (A & B) = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: Search Failure: joint(ab, ab). + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | + | where: T is a type variable with constraint <: A & B diff --git a/tests/neg/looping-givens.scala b/tests/neg/looping-givens.scala new file mode 100644 index 000000000000..57dc95f99aab --- /dev/null +++ b/tests/neg/looping-givens.scala @@ -0,0 +1,11 @@ +//> options -source 3.4 + +class A +class B + +given joint(using a: A, b: B): (A & B) = ??? 
+ +def foo(using a: A, b: B) = + given aa: A = summon // error + given bb: B = summon // error + given ab: (A & B) = summon // error diff --git a/tests/pos-deep-subtype/CollectionStrawMan6.scala b/tests/pos-deep-subtype/CollectionStrawMan6.scala index 9f189afbcf3a..99f634a66622 100644 --- a/tests/pos-deep-subtype/CollectionStrawMan6.scala +++ b/tests/pos-deep-subtype/CollectionStrawMan6.scala @@ -754,11 +754,11 @@ object CollectionStrawMan6 extends LowPriority { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](elemTag) + protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](using elemTag) def fromIterable[B: ClassTag](coll: Iterable[B]): Array[B] = coll.toArray[B] - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(using elemTag)) override def knownSize = xs.length diff --git a/tests/pos/extmethods.scala b/tests/pos/extmethods.scala index 368b4f439916..40683c56c694 100644 --- a/tests/pos/extmethods.scala +++ b/tests/pos/extmethods.scala @@ -17,7 +17,7 @@ object CollectionStrawMan { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(using elemTag)) } } diff --git a/tests/pos/given-loop-prevention.scala b/tests/pos/given-loop-prevention.scala deleted file mode 100644 index 0bae0bb24fed..000000000000 --- a/tests/pos/given-loop-prevention.scala +++ /dev/null @@ -1,14 +0,0 @@ -//> using options -Xfatal-warnings - -class Foo - -object Bar { - given Foo with {} - given List[Foo] = List(summon[Foo]) // ok -} - -object Baz { - @annotation.nowarn - given List[Foo] = List(summon[Foo]) // gives a warning, which is suppressed - given Foo with {} -} diff --git a/tests/pos/i17245.scala b/tests/pos/i17245.scala index 3b5b3a74108d..8609a8293670 100644 --- a/tests/pos/i17245.scala +++ b/tests/pos/i17245.scala @@ -14,7 +14,7 @@ type OnChannel = Channel => Any val case1: OnChannel = Mockito.mock[OnChannel] val case2: OnChannel = Mockito.mock val case3 = Mockito.mock[OnChannel] - val case4: OnChannel = Mockito.mock[OnChannel](summon[ClassTag[OnChannel]]) + val case4: OnChannel = Mockito.mock[OnChannel](using summon[ClassTag[OnChannel]]) // not a regressive case, but an added improvement with the fix for the above val case5: Channel => Any = Mockito.mock[Channel => Any] diff --git a/tests/pos/i9967.scala b/tests/pos/i9967.scala index 4e915a27bfbf..d8cbf99b9d6e 100644 --- a/tests/pos/i9967.scala +++ b/tests/pos/i9967.scala @@ -1,6 +1,6 @@ import collection.mutable class MaxSizeMap[K, V](maxSize: Int)(using o: Ordering[K]): - val sortedMap: mutable.TreeMap[K, V] = mutable.TreeMap.empty[K, V](o) + val sortedMap: mutable.TreeMap[K, V] = mutable.TreeMap.empty[K, V](using o) export sortedMap._ diff --git a/tests/pos/t5643.scala b/tests/pos/t5643.scala index 1ce34ba36226..9866f8d399c2 100644 --- a/tests/pos/t5643.scala +++ b/tests/pos/t5643.scala @@ -13,7 +13,7 @@ object TupledEvidenceTest { def f[T : GetResult] = "" - f[(String,String)](getTuple[(String, String)]) + f[(String,String)](using getTuple[(String, String)]) f[(String,String)] } diff --git a/tests/run/colltest6/CollectionStrawMan6_1.scala b/tests/run/colltest6/CollectionStrawMan6_1.scala index bed5c476b96d..0bf0cbddffc9 100644 --- 
a/tests/run/colltest6/CollectionStrawMan6_1.scala +++ b/tests/run/colltest6/CollectionStrawMan6_1.scala @@ -755,11 +755,11 @@ object CollectionStrawMan6 extends LowPriority { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](elemTag) + protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](using elemTag) def fromIterable[B: ClassTag](coll: Iterable[B]): Array[B] = coll.toArray[B] - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(using elemTag)) override def knownSize = xs.length diff --git a/tests/run/i502.scala b/tests/run/i502.scala index 71176d9660cd..20ed1f43b840 100644 --- a/tests/run/i502.scala +++ b/tests/run/i502.scala @@ -6,13 +6,13 @@ object Test extends App { Array[Int](1, 2) try { - Array[Int](1, 2)(null) + Array[Int](1, 2)(using null) ??? } catch { case _: NullPointerException => println("Ok") } - Array[Int](1, 2)({println("foo"); summon[ClassTag[Int]]}) + Array[Int](1, 2)(using {println("foo"); summon[ClassTag[Int]]}) - Array[Int](1, 2)(ClassTag.apply({ println("bar"); classOf[Int]})) + Array[Int](1, 2)(using ClassTag.apply({ println("bar"); classOf[Int]})) } diff --git a/tests/run/t2029.scala b/tests/run/t2029.scala index d4ab0f02b67f..d5bc478fa0b3 100644 --- a/tests/run/t2029.scala +++ b/tests/run/t2029.scala @@ -5,7 +5,7 @@ object Test{ val mainSet = TreeSet(1 to 5 :_*) var compareCalled = false; - val smallerSet = TreeSet(2 to 4 :_*)(Ordering[Int].reverse) + val smallerSet = TreeSet(2 to 4 :_*)(using Ordering[Int].reverse) println(mainSet.mkString(",")) println(smallerSet.mkString(",")) diff --git a/tests/run/t3326.scala b/tests/run/t3326.scala index 3d7d83068f92..1f8c04394682 100644 --- a/tests/run/t3326.scala +++ b/tests/run/t3326.scala @@ -28,8 +28,8 @@ object Test { def testCollectionSorted(): Unit = { import collection.* val order = implicitly[Ordering[Int]].reverse - var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order) - var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order) + var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) + var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) m1 ++= List(1 -> "World") m1 ++= List(2 -> "Hello") @@ -49,8 +49,8 @@ object Test { def testImmutableSorted(): Unit = { import collection.immutable.* val order = implicitly[Ordering[Int]].reverse - var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order) - var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order) + var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) + var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) m1 += (1 -> "World") m1 += (2 -> "Hello") diff --git a/tests/warn/context-bounds-migration.scala b/tests/warn/context-bounds-migration.scala deleted file mode 100644 index cdd3eca62b5c..000000000000 --- a/tests/warn/context-bounds-migration.scala +++ /dev/null @@ -1,9 +0,0 @@ - -class C[T] -def foo[X: C] = () - -given [T]: C[T] = C[T]() - -def Test = - foo(C[Int]()) // warning - foo(using C[Int]()) // ok diff --git a/tests/warn/i15474.scala b/tests/warn/i15474.scala index d7c41130a1bb..0d8fc111ac6a 100644 --- a/tests/warn/i15474.scala +++ b/tests/warn/i15474.scala @@ -1,4 +1,4 @@ - +//> using options -source 3.4 import scala.language.implicitConversions diff --git 
a/tests/warn/looping-givens.check b/tests/warn/looping-givens.check new file mode 100644 index 000000000000..eec348c19d11 --- /dev/null +++ b/tests/warn/looping-givens.check @@ -0,0 +1,45 @@ +-- Warning: tests/warn/looping-givens.scala:9:22 ----------------------------------------------------------------------- +9 | given aa: A = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: a. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. +-- Warning: tests/warn/looping-givens.scala:10:22 ---------------------------------------------------------------------- +10 | given bb: B = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: b. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. +-- Warning: tests/warn/looping-givens.scala:11:28 ---------------------------------------------------------------------- +11 | given ab: (A & B) = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: joint. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. 
diff --git a/tests/warn/looping-givens.scala b/tests/warn/looping-givens.scala index 6b6a32002331..2f737206f64e 100644 --- a/tests/warn/looping-givens.scala +++ b/tests/warn/looping-givens.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 + class A class B From 9b2f624071bb307e975bd6c3e8b4f39137629b78 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 21 May 2024 17:43:11 +0100 Subject: [PATCH 097/827] Fix trailing comma Ident's span --- .../dotty/tools/dotc/parsing/Scanners.scala | 5 ++- tests/neg/i16872.check | 36 +++++++++++++++++++ tests/neg/i16872.scala | 19 ++++++++++ 3 files changed, 59 insertions(+), 1 deletion(-) create mode 100644 tests/neg/i16872.check create mode 100644 tests/neg/i16872.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 831d31d6fa6e..bdb8ae47e407 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -736,7 +736,10 @@ object Scanners { && currentRegion.commasExpected && (token == RPAREN || token == RBRACKET || token == RBRACE || token == OUTDENT) then - () /* skip the trailing comma */ + // encountered a trailing comma + // reset only the lastOffset + // so that the tree's span is correct + lastOffset = prev.lastOffset else reset() case END => diff --git a/tests/neg/i16872.check b/tests/neg/i16872.check new file mode 100644 index 000000000000..2e0f9cf81eda --- /dev/null +++ b/tests/neg/i16872.check @@ -0,0 +1,36 @@ +-- [E006] Not Found Error: tests/neg/i16872.scala:8:6 ------------------------------------------------------------------ +8 | aa, // error + | ^^ + | Not found: aa + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:9:6 ------------------------------------------------------------------ +9 | bb, // error + | ^^ + | Not found: bb + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:10:6 ----------------------------------------------------------------- +10 | cc, // error + | ^^ + | Not found: cc + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:16:6 ----------------------------------------------------------------- +16 | dd, // error + | ^^ + | Not found: dd + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:17:6 ----------------------------------------------------------------- +17 | ee, // error + | ^^ + | Not found: ee - did you mean eq? or perhaps ne? + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:18:6 ----------------------------------------------------------------- +18 | ff, // error + | ^^ + | Not found: ff + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16872.scala b/tests/neg/i16872.scala new file mode 100644 index 000000000000..931ea57e1bec --- /dev/null +++ b/tests/neg/i16872.scala @@ -0,0 +1,19 @@ +// Using a checkfile to verify where the carets point to. 
+// Originally they were pointing to "cc," and "ff," +// including the trailing comma + +class Test: + def t1 = + ( + aa, // error + bb, // error + cc, // error + ) + + def meth(a: Int, b: Int, c: Int) = a + b + c + def t2 = + meth( + dd, // error + ee, // error + ff, // error + ) From 4a74a97314c4d2a59413db96ecca205918218ebd Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Tue, 21 May 2024 20:30:29 +0200 Subject: [PATCH 098/827] Revert "Revert "Set baseVersion to 3.5.1-RC1"" --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index abe2851e4668..921fbcd80b90 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -85,7 +85,7 @@ object Build { val referenceVersion = "3.4.2-RC1" - val baseVersion = "3.5.0-RC1" + val baseVersion = "3.5.1-RC1" // LTS or Next val versionLine = "Next" From 5c761fd86abdefd66f337d8691d35f46ecebd78b Mon Sep 17 00:00:00 2001 From: Natsu Kagami Date: Wed, 22 May 2024 12:28:27 +0200 Subject: [PATCH 099/827] Add pattern completion for unapply contexts (#20274) Fixes #19972. Add pattern completion for `Unapply` tree contexts. A typical example would be ```scala optionList match case List(S@@) ``` which should be prompted `Some(value)`, due to `List.unapplySeq` expecting `Option[T]` patterns as arguments. --- .../tools/pc/completions/Completions.scala | 32 +++++++++++++ .../pc/completions/MatchCaseCompletions.scala | 19 +++++++- .../pc/tests/completion/CompletionSuite.scala | 46 ++++++++++++++++++- 3 files changed, 95 insertions(+), 2 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index fb39102399ba..db578e32663f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -70,6 +70,8 @@ class Completions( false case (_: (Import | Export)) :: _ => false case _ :: (_: (Import | Export)) :: _ => false + // UnApply has patterns included in MatchCaseCompletions + case _ :: (_: UnApply) :: _ => false case _ => true private lazy val isNew: Boolean = Completion.isInNewContext(adjustedPath) @@ -405,6 +407,36 @@ class Completions( true, ) + // unapply pattern + case Ident(name) :: (unapp : UnApply) :: _ => + ( + CaseKeywordCompletion.contribute( + EmptyTree, // no selector + completionPos, + indexedContext, + config, + search, + parent = unapp, + autoImports, + patternOnly = Some(name.decoded) + ), + false, + ) + case Select(_, name) :: (unapp : UnApply) :: _ => + ( + CaseKeywordCompletion.contribute( + EmptyTree, // no selector + completionPos, + indexedContext, + config, + search, + parent = unapp, + autoImports, + patternOnly = Some(name.decoded) + ), + false, + ) + // class FooImpl extends Foo: // def x| case OverrideExtractor(td, completing, start, exhaustive, fallbackName) => diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala index 48c6bcfe8317..2efcba48e82d 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala @@ -27,6 +27,8 @@ import dotty.tools.dotc.core.Types.NoType import dotty.tools.dotc.core.Types.OrType import dotty.tools.dotc.core.Types.Type import 
dotty.tools.dotc.core.Types.TypeRef +import dotty.tools.dotc.core.Types.AppliedType +import dotty.tools.dotc.typer.Applications.UnapplyArgs import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.AutoImports.SymbolImport @@ -75,10 +77,24 @@ object CaseKeywordCompletion: patternOnly, hasBind ) + val printer = ShortenedTypePrinter(search, IncludeDefaultParam.Never)(using indexedContext) val selTpe = selector match case EmptyTree => parent match + /* Parent is an unapply pattern */ + case UnApply(fn, implicits, patterns) if !fn.tpe.isErroneous => + patternOnly match + case None => None + case Some(value) => + val argPts = UnapplyArgs(fn.tpe.widen.finalResultType, fn, patterns, parent.srcPos).argTypes + patterns.zipWithIndex + .find: + case (Ident(v), tpe) => v.decoded == value + case (Select(_, v), tpe) => v.decoded == value + case t => false + .map((_, id) => argPts(id).widen.deepDealias) + /* Parent is a function expecting a case match expression */ case TreeApply(fun, _) if !fun.tpe.isErroneous => fun.tpe.paramInfoss match case (head :: Nil) :: _ @@ -105,7 +121,8 @@ object CaseKeywordCompletion: if patternOnly.isEmpty then val selectorTpe = selTpe.show val tpeLabel = - if !selectorTpe.contains("x$1") then selectorTpe + if !selectorTpe.contains("x$1") /* selector of a function type? */ then + selectorTpe else selector.symbol.info.show val label = s"case ${tpeLabel} =>" List( diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index b5db258601bc..03c4fa2bc5bc 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -634,6 +634,49 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin ) + @Test def patRecursive = + check( + s"""|object Main { + | Option(List(Option(1))) match { + | case Some(List(None, Som@@)) + |} + |""".stripMargin, + """|Some(value) scala + |Some scala + |""".stripMargin + ) + check( + s"""|object Main { + | (null: Option[Option[Option[Option[Int]]]]) match + | case Some(Some(Some(Som@@)))) + |} + |""".stripMargin, + """|Some(value) scala + |Some scala + |""".stripMargin + ) + check( + s"""|object Main { + | Option(Option(1)) match { + | case Some(Som@@) + |} + |""".stripMargin, + """|Some(value) scala + |Some scala + |""".stripMargin + ) + check( + s"""|object Test: + | case class NestedClass(x: Int) + |object TestRun: + | Option(Test.NestedClass(5)) match + | case Some(Test.Neste@@) + |""".stripMargin, + """|NestedClass(x) test.Test + |NestedClass test.Test + |""".stripMargin + ) + @Test def pat1 = check( s"""|object Main { @@ -641,7 +684,8 @@ class CompletionSuite extends BaseCompletionSuite: | case List(Som@@) |} |""".stripMargin, - """|Some[A](value: A): Some[A] + """|Some(value) scala + |Some scala |Some scala |""".stripMargin ) From a308ac800a236cc6d1447cbc56776b304c95e316 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 22 May 2024 18:15:17 +0200 Subject: [PATCH 100/827] Accept legacy language features --- .../src/dotty/tools/dotc/config/Feature.scala | 7 ++++++ .../tools/dotc/config/ScalaSettings.scala | 2 +- .../dotc/config/ScalaSettingsProperties.scala | 3 +++ .../dotty/tools/dotc/config/Settings.scala | 23 ++++++++++++------- .../test/dotty/tools/dotc/SettingsTests.scala | 8 +++---- 5 files changed, 30 insertions(+), 13 deletions(-) 
diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 91100627981b..c04c58b419c9 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -69,6 +69,13 @@ object Feature: (betterMatchTypeExtractors, "Enable better match type extractors") ) + // legacy language features from Scala 2 that are no longer supported. + val legacyFeatures = List( + "higherKinds", + "existentials", + "reflectiveCalls" + ) + private def enabledLanguageFeaturesBySetting(using Context): List[String] = ctx.settings.language.value.asInstanceOf diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index bb28e06150fe..c64521ec74e1 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -114,7 +114,7 @@ trait CommonScalaSettings: val explainTypes: Setting[Boolean] = BooleanSetting(RootSetting, "explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) val explainCyclic: Setting[Boolean] = BooleanSetting(RootSetting, "explain-cyclic", "Explain cyclic reference errors in more detail.", aliases = List("--explain-cyclic")) val unchecked: Setting[Boolean] = BooleanSetting(RootSetting, "unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) - val language: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting(RootSetting, "language", "feature", "Enable one or more language features.", choices = ScalaSettingsProperties.supportedLanguageFeatures, default = Nil, aliases = List("--language")) + val language: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting(RootSetting, "language", "feature", "Enable one or more language features.", choices = ScalaSettingsProperties.supportedLanguageFeatures, legacyChoices = ScalaSettingsProperties.legacyLanguageFeatures, default = Nil, aliases = List("--language")) val experimental: Setting[Boolean] = BooleanSetting(RootSetting, "experimental", "Annotate all top-level definitions with @experimental. 
This enables the use of experimental features anywhere in the project.") /* Coverage settings */ diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala index a839d3e3be19..022916cc9f53 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala @@ -30,6 +30,9 @@ object ScalaSettingsProperties: def supportedLanguageFeatures: List[ChoiceWithHelp[String]] = Feature.values.map((n, d) => ChoiceWithHelp(n.toString, d)) + val legacyLanguageFeatures: List[String] = + Feature.legacyFeatures + def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") def defaultPageWidth: Int = { diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 5042737c30cb..9250303e8cc8 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -85,7 +85,9 @@ object Settings: propertyClass: Option[Class[?]] = None, deprecation: Option[Deprecation] = None, // kept only for -Xkind-projector option compatibility - legacyArgs: Boolean = false)(private[Settings] val idx: Int): + legacyArgs: Boolean = false, + // accept legacy choices (for example, valid in Scala 2 but no longer supported) + legacyChoices: Option[Seq[?]] = None)(private[Settings] val idx: Int): validateSettingString(prefix.getOrElse(name)) aliases.foreach(validateSettingString) @@ -206,9 +208,14 @@ object Settings: def appendList(strings: List[String], argValue: String, args: List[String]) = choices match - case Some(valid) => strings.filterNot(valid.contains) match - case Nil => update(strings, argValue, args) - case invalid => invalidChoices(invalid) + case Some(valid) => strings.partition(valid.contains) match + case (_, Nil) => update(strings, argValue, args) + case (validStrs, invalidStrs) => legacyChoices match + case Some(validBefore) => + invalidStrs.filterNot(validBefore.contains) match + case Nil => update(validStrs, argValue, args) + case realInvalidStrs => invalidChoices(realInvalidStrs) + case _ => invalidChoices(invalidStrs) case _ => update(strings, argValue, args) def doSet(argRest: String) = @@ -380,11 +387,11 @@ object Settings: def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs, deprecation = deprecation)) - def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String] = Nil, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, deprecation = deprecation)) + def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String] = Nil, legacyChoices: List[String] = Nil, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), legacyChoices = Some(legacyChoices), aliases = aliases, 
deprecation = deprecation)) - def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[ChoiceWithHelp[String]]] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, deprecation = deprecation)) + def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], legacyChoices: List[String] = Nil, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[ChoiceWithHelp[String]]] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), legacyChoices = Some(legacyChoices), aliases = aliases, deprecation = deprecation)) def IntSetting(category: SettingCategory, name: String, descr: String, default: Int, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[Int] = publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala index 301dc10ab54e..996ab22f67b1 100644 --- a/compiler/test/dotty/tools/dotc/SettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala @@ -272,8 +272,8 @@ class SettingsTests { val booleanSetting = BooleanSetting(RootSetting, "booleanSetting", "booleanSetting", false) val stringSetting = StringSetting(RootSetting, "stringSetting", "stringSetting", "", "test") val choiceSetting = ChoiceSetting(RootSetting, "choiceSetting", "choiceSetting", "", List("a", "b"), "a") - val multiChoiceSetting= MultiChoiceSetting(RootSetting, "multiChoiceSetting", "multiChoiceSetting", "", List("a", "b"), List()) - val multiChoiceHelpSetting= MultiChoiceHelpSetting(RootSetting, "multiChoiceHelpSetting", "multiChoiceHelpSetting", "", List(ChoiceWithHelp("a", "a"), ChoiceWithHelp("b", "b")), List()) + val multiChoiceSetting= MultiChoiceSetting(RootSetting, "multiChoiceSetting", "multiChoiceSetting", "", List("a", "b"), List(), legacyChoices = List("c")) + val multiChoiceHelpSetting= MultiChoiceHelpSetting(RootSetting, "multiChoiceHelpSetting", "multiChoiceHelpSetting", "", List(ChoiceWithHelp("a", "a"), ChoiceWithHelp("b", "b")), List(), legacyChoices = List("c")) val intSetting = IntSetting(RootSetting, "intSetting", "intSetting", 0) val intChoiceSetting = IntChoiceSetting(RootSetting, "intChoiceSetting", "intChoiceSetting", List(1,2,3), 1) val multiStringSetting = MultiStringSetting(RootSetting, "multiStringSetting", "multiStringSetting", "", List("a", "b"), List()) @@ -289,8 +289,8 @@ class SettingsTests { List("-booleanSetting", "true"), List("-stringSetting", "newTest"), List("-choiceSetting", "b"), - List("-multiChoiceSetting", "a,b"), - List("-multiChoiceHelpSetting", "a,b"), + List("-multiChoiceSetting", "a,b,c"), + List("-multiChoiceHelpSetting", "a,b,c"), List("-intSetting", "42"), List("-intChoiceSetting", "2"), List("-multiStringSetting", "a,b"), From f8798d87668f039701ed528caa743c6bda2ed8a1 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 22 May 2024 18:21:09 +0200 Subject: [PATCH 101/827] Fix NamedTuple selection on an unstable prefix Without a stable prefix, asSeenFrom could end up widening `Fields` to `>: Nothing <: Any`. 
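A minimal sketch of the shape that used to fail (it mirrors the test added
in this patch; the `Foo`/`myfield` names are purely illustrative):

```scala
import scala.language.experimental.namedTuples

trait Foo extends Selectable:
  val f: Any
  type Fields = (myfield: f.type)
  def selectDynamic(name: String): Any

def elem: Foo { val f: Int } = ???   // a def, so the prefix is not stable
val x: Int = elem.myfield            // previously: "value myfield is not a member of Foo {...}"
```

Skolemizing the unstable prefix before selecting `Fields` keeps the field
types precise instead of widening them to `>: Nothing <: Any`.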
--- compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 ++- tests/pos/named-tuple-unstable.scala | 15 +++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 tests/pos/named-tuple-unstable.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ae50d626cb1f..4bcf4f6632c8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -826,7 +826,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto then - val fieldsType = qual.tpe.select(tpnme.Fields).dealias.simplified + val pre = if !TypeOps.isLegalPrefix(qual.tpe) then SkolemType(qual.tpe) else qual.tpe + val fieldsType = pre.select(tpnme.Fields).dealias.simplified val fields = fieldsType.namedTupleElementTypes typr.println(i"try dyn select $qual, $selName, $fields") fields.find(_._1 == selName) match diff --git a/tests/pos/named-tuple-unstable.scala b/tests/pos/named-tuple-unstable.scala new file mode 100644 index 000000000000..6a6a36732a14 --- /dev/null +++ b/tests/pos/named-tuple-unstable.scala @@ -0,0 +1,15 @@ +import scala.language.experimental.namedTuples +import NamedTuple.{AnyNamedTuple, NamedTuple} + +trait Foo extends Selectable: + val f: Any + type Fields = (myfield: f.type) + def selectDynamic(name: String): Any + +object Test: + val elem1: Foo { val f: Int } = ??? + def elem2: Foo { val f: Int } = ??? + + def test: Unit = + val a: Int = elem1.myfield // OK + val b: Int = elem2.myfield // error: value myfield is not a member of Foo { val f: Int } From 17d5c3d5cf41ad651d8c4b940aadaa5adb51f925 Mon Sep 17 00:00:00 2001 From: Lucas Nouguier Date: Tue, 21 May 2024 18:27:29 +0200 Subject: [PATCH 102/827] fix error message on setter with wrong type Co-authored-by: Matt Bovel Co-authored-by: Nicolas Stucki<> Co-authored-by: Hamza Remmal<> Co-authored-by: Mehdi Alaoui<> fix check file --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 5 +---- tests/neg/i20338a.check | 7 +++++++ tests/neg/i20338a.scala | 10 ++++++++++ tests/neg/i20338b.check | 7 +++++++ tests/neg/i20338b.scala | 10 ++++++++++ tests/neg/i20338c.check | 6 ++++++ tests/neg/i20338c.scala | 9 +++++++++ 7 files changed, 50 insertions(+), 4 deletions(-) create mode 100644 tests/neg/i20338a.check create mode 100644 tests/neg/i20338a.scala create mode 100644 tests/neg/i20338b.check create mode 100644 tests/neg/i20338b.scala create mode 100644 tests/neg/i20338c.check create mode 100644 tests/neg/i20338c.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ae50d626cb1f..28bd4b6c1b6f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1334,12 +1334,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val setter = toSetter(lhsCore) if setter.isEmpty then reassignmentToVal - else tryEither { + else val assign = untpd.Apply(setter, tree.rhs :: Nil) typed(assign, IgnoredProto(pt)) - } { - (_, _) => reassignmentToVal - } case _ => lhsCore.tpe match { case ref: TermRef => val lhsVal = lhsCore.denot.suchThat(!_.is(Method)) diff --git a/tests/neg/i20338a.check b/tests/neg/i20338a.check new file mode 100644 index 000000000000..a329492bd990 --- /dev/null +++ b/tests/neg/i20338a.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: 
tests/neg/i20338a.scala:10:15 ----------------------------------------------------------- +10 | test.field = "hello" // error + | ^^^^^^^ + | Found: ("hello" : String) + | Required: Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20338a.scala b/tests/neg/i20338a.scala new file mode 100644 index 000000000000..b91982297d78 --- /dev/null +++ b/tests/neg/i20338a.scala @@ -0,0 +1,10 @@ +object types: + opaque type Struct = Int + val test: Struct = 25 + extension (s: Struct) + def field: Int = s + def field_=(other: Int) = () + +@main def hello = + import types.* + test.field = "hello" // error \ No newline at end of file diff --git a/tests/neg/i20338b.check b/tests/neg/i20338b.check new file mode 100644 index 000000000000..382d68a0911c --- /dev/null +++ b/tests/neg/i20338b.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i20338b.scala:10:8 ------------------------------------------------------------ +10 | f.x = 42 // error + | ^^ + | Found: (42 : Int) + | Required: String + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20338b.scala b/tests/neg/i20338b.scala new file mode 100644 index 000000000000..b8a3463862e0 --- /dev/null +++ b/tests/neg/i20338b.scala @@ -0,0 +1,10 @@ +class Foo(_x: Int) + +extension (s: Foo) + def x_=(x: String): Unit = () + def x: Int = ??? + +@main +def Test = + val f = Foo(42) + f.x = 42 // error diff --git a/tests/neg/i20338c.check b/tests/neg/i20338c.check new file mode 100644 index 000000000000..1d19ec0b3042 --- /dev/null +++ b/tests/neg/i20338c.check @@ -0,0 +1,6 @@ +-- [E052] Type Error: tests/neg/i20338c.scala:9:6 ---------------------------------------------------------------------- +9 | f.x = 42 // error + | ^^^^^^^^ + | Reassignment to val x + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20338c.scala b/tests/neg/i20338c.scala new file mode 100644 index 000000000000..cfdf38e73b11 --- /dev/null +++ b/tests/neg/i20338c.scala @@ -0,0 +1,9 @@ +class Foo(val x: Int) + +extension (s: Foo) + def x: Int = 43 + +@main +def Test = + val f = Foo(42) + f.x = 42 // error \ No newline at end of file From dabbd8ba7e4b1cf864d032d7fde4912f2d6ba8af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Marks?= Date: Thu, 23 May 2024 13:47:51 +0200 Subject: [PATCH 103/827] Update addToBackportingProject.scala --- project/scripts/addToBackportingProject.scala | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/project/scripts/addToBackportingProject.scala b/project/scripts/addToBackportingProject.scala index 2c1929972791..0ef2ea553a74 100644 --- a/project/scripts/addToBackportingProject.scala +++ b/project/scripts/addToBackportingProject.scala @@ -1,6 +1,6 @@ -//> using scala 3.3.1 -//> using toolkit 0.2.1 -//> using lib pro.kordyjan::pytanie:0.1.7 +//> using scala 3.lts +//> using toolkit 0.4.0 +//> using lib pro.kordyjan::pytanie:0.1.9 import pytanie.* import sttp.client4.* @@ -10,8 +10,29 @@ lazy val apiToken = case class ID(value: String) derives WrapperVariable -val PROJECT_ID = ID("PVT_kwDOACj3ec4AWSoi") -val FIELD_ID = ID("PVTF_lADOACj3ec4AWSoizgO7uJ4") +// Obtained with: +// query { +// organization(login: "scala") { +// projectV2(number: 2) { +// id +// } +// } +// } +val PROJECT_ID = ID("PVT_kwDN3uPOAHewkg") + +// Obtained with: +// query { +// organization(login: "scala") { +// projectV2(number: 2) { +// field(name: "Merged at") { +// ... 
on ProjectV2FieldCommon { +// id +// } +// } +// } +// } +// } +val FIELD_ID = ID("PVTF_lADN3uPOAHewks4E3B1I") @main def run(commitSha: String) = val (id, date) = getPrData(commitSha) From f88a97d0110d505ea8a0922fb008faca2aee72b6 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Wed, 22 May 2024 10:09:18 +0200 Subject: [PATCH 104/827] Adapt community-build projects to Scala 3.5.0 --- community-build/community-projects/Lucre | 2 +- community-build/community-projects/Monocle | 2 +- community-build/community-projects/akka | 2 +- community-build/community-projects/cask | 2 +- community-build/community-projects/endpoints4s | 2 +- community-build/community-projects/izumi-reflect | 2 +- community-build/community-projects/os-lib | 2 +- community-build/community-projects/scalatest | 2 +- community-build/community-projects/scalaz | 2 +- community-build/community-projects/scas | 2 +- community-build/community-projects/spire | 2 +- community-build/community-projects/upickle | 2 +- community-build/community-projects/utest | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/community-build/community-projects/Lucre b/community-build/community-projects/Lucre index 1008f0b7f513..21a27a294ac7 160000 --- a/community-build/community-projects/Lucre +++ b/community-build/community-projects/Lucre @@ -1 +1 @@ -Subproject commit 1008f0b7f51374ddbc947e677c505fa97677b7d4 +Subproject commit 21a27a294ac7c413f80839d96a02942b2c6d021c diff --git a/community-build/community-projects/Monocle b/community-build/community-projects/Monocle index a9a12a13a48c..b303aa3b98d9 160000 --- a/community-build/community-projects/Monocle +++ b/community-build/community-projects/Monocle @@ -1 +1 @@ -Subproject commit a9a12a13a48c957535ddd6850ed8c6b0db2dc4fe +Subproject commit b303aa3b98d9a10c3f77a56765ca5be2f3cc51f7 diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka index 2dffb6504005..ee0ac854f36f 160000 --- a/community-build/community-projects/akka +++ b/community-build/community-projects/akka @@ -1 +1 @@ -Subproject commit 2dffb6504005a6144561c4e3ba7b185639a8ad48 +Subproject commit ee0ac854f36f537bf3062fd4e9d9f2ff5c1de4c9 diff --git a/community-build/community-projects/cask b/community-build/community-projects/cask index d5fa6d47da5e..2db6020a2d11 160000 --- a/community-build/community-projects/cask +++ b/community-build/community-projects/cask @@ -1 +1 @@ -Subproject commit d5fa6d47da5ea99d94887fafd555696ba07aa205 +Subproject commit 2db6020a2d11566d504ae9af4de28c7a6e20b7ed diff --git a/community-build/community-projects/endpoints4s b/community-build/community-projects/endpoints4s index cc03ddf1c4a0..b004d1388872 160000 --- a/community-build/community-projects/endpoints4s +++ b/community-build/community-projects/endpoints4s @@ -1 +1 @@ -Subproject commit cc03ddf1c4a03391c8031784e48c057bdc9394db +Subproject commit b004d13888723de9f6a86f560137fc31e22edcb6 diff --git a/community-build/community-projects/izumi-reflect b/community-build/community-projects/izumi-reflect index bd4ae213f81e..2c7e4a69c386 160000 --- a/community-build/community-projects/izumi-reflect +++ b/community-build/community-projects/izumi-reflect @@ -1 +1 @@ -Subproject commit bd4ae213f81e63c330b22cf5f73f68641814b195 +Subproject commit 2c7e4a69c386201e479584333a84ce018fef1795 diff --git a/community-build/community-projects/os-lib b/community-build/community-projects/os-lib index a4400deb3bec..4c8c82b23d76 160000 --- a/community-build/community-projects/os-lib +++ b/community-build/community-projects/os-lib 
@@ -1 +1 @@ -Subproject commit a4400deb3bec415fd82d331fc1f8b749f3d64e60 +Subproject commit 4c8c82b23d767bc927290829514b8de7148052d9 diff --git a/community-build/community-projects/scalatest b/community-build/community-projects/scalatest index d430625d9621..d6eeedbfc1e0 160000 --- a/community-build/community-projects/scalatest +++ b/community-build/community-projects/scalatest @@ -1 +1 @@ -Subproject commit d430625d96218c9031b1434cc0c2110f3740fa1c +Subproject commit d6eeedbfc1e04f2eff55506f07f93f448cc21407 diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz index 4919bdce732f..868749fdb951 160000 --- a/community-build/community-projects/scalaz +++ b/community-build/community-projects/scalaz @@ -1 +1 @@ -Subproject commit 4919bdce732f53a3316d5e12d9c853fc2141ddfb +Subproject commit 868749fdb951909bb04bd6dd7ad2cd89295fd439 diff --git a/community-build/community-projects/scas b/community-build/community-projects/scas index fbccb263207b..acaad1055738 160000 --- a/community-build/community-projects/scas +++ b/community-build/community-projects/scas @@ -1 +1 @@ -Subproject commit fbccb263207b3a7b735b8a9dc312acf7368a0816 +Subproject commit acaad1055738dbbcae7b18e6c6c2fc95f06eb7d6 diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire index bc524eeea735..d60fe2c38848 160000 --- a/community-build/community-projects/spire +++ b/community-build/community-projects/spire @@ -1 +1 @@ -Subproject commit bc524eeea735a3cf4d5108039f95950b024a14e4 +Subproject commit d60fe2c38848ef193031c18eab3a14d3306b3761 diff --git a/community-build/community-projects/upickle b/community-build/community-projects/upickle index aa3bc0e43ec7..0c09bbcabc66 160000 --- a/community-build/community-projects/upickle +++ b/community-build/community-projects/upickle @@ -1 +1 @@ -Subproject commit aa3bc0e43ec7b618eb087753878f3d845e58277a +Subproject commit 0c09bbcabc664abf98462022fc9036a366135e70 diff --git a/community-build/community-projects/utest b/community-build/community-projects/utest index eae17c7a4d0d..f4a9789e2750 160000 --- a/community-build/community-projects/utest +++ b/community-build/community-projects/utest @@ -1 +1 @@ -Subproject commit eae17c7a4d0d63bab1406ca75791d3cb6394233d +Subproject commit f4a9789e2750523feee4a3477efb42eb15424fc7 From 5a6ca9d23559e9a4c8e06aace3649cdb3c1eaef4 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Fri, 24 May 2024 14:00:28 +0200 Subject: [PATCH 105/827] Remove diagnositcs messages from semanticdb --- tests/semanticdb/expect/InventedNames.expect.scala | 2 +- tests/semanticdb/expect/InventedNames.scala | 2 +- tests/semanticdb/metac.expect | 12 ++++-------- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/tests/semanticdb/expect/InventedNames.expect.scala b/tests/semanticdb/expect/InventedNames.expect.scala index 7c5b008209c2..b92e9aa940a7 100644 --- a/tests/semanticdb/expect/InventedNames.expect.scala +++ b/tests/semanticdb/expect/InventedNames.expect.scala @@ -32,7 +32,7 @@ given [T/*<-givens::InventedNames$package.given_Z_T#[T]*/]: Z/*->givens::Z#*/[T/ val a/*<-givens::InventedNames$package.a.*/ = intValue/*->givens::InventedNames$package.intValue.*/ val b/*<-givens::InventedNames$package.b.*/ = given_String/*->givens::InventedNames$package.given_String.*/ -val c/*<-givens::InventedNames$package.c.*/ = given_Double/*->givens::InventedNames$package.given_Double().*/ +//val c = given_Double val d/*<-givens::InventedNames$package.d.*/ = 
given_List_T/*->givens::InventedNames$package.given_List_T().*/[Int/*->scala::Int#*/] val e/*<-givens::InventedNames$package.e.*/ = given_Char/*->givens::InventedNames$package.given_Char.*/ val f/*<-givens::InventedNames$package.f.*/ = given_Float/*->givens::InventedNames$package.given_Float.*/ diff --git a/tests/semanticdb/expect/InventedNames.scala b/tests/semanticdb/expect/InventedNames.scala index 42c14c90e370..61baae46c832 100644 --- a/tests/semanticdb/expect/InventedNames.scala +++ b/tests/semanticdb/expect/InventedNames.scala @@ -32,7 +32,7 @@ given [T]: Z[T] with val a = intValue val b = given_String -val c = given_Double +//val c = given_Double val d = given_List_T[Int] val e = given_Char val f = given_Float diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 84c3e7c6a110..98657f122255 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2093,16 +2093,15 @@ Schema => SemanticDB v4 Uri => InventedNames.scala Text => empty Language => Scala -Symbols => 45 entries -Occurrences => 66 entries -Synthetics => 3 entries +Symbols => 44 entries +Occurrences => 64 entries +Synthetics => 2 entries Symbols: -givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +24 decls } +givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +23 decls } givens/InventedNames$package.`* *`. => final implicit lazy val given method * * Long givens/InventedNames$package.a. => val method a Int givens/InventedNames$package.b. => val method b String -givens/InventedNames$package.c. => val method c Double givens/InventedNames$package.d. => val method d List[Int] givens/InventedNames$package.e. => val method e Char givens/InventedNames$package.f. => val method f Float @@ -2193,8 +2192,6 @@ Occurrences: [32:8..32:16): intValue -> givens/InventedNames$package.intValue. [33:4..33:5): b <- givens/InventedNames$package.b. [33:8..33:20): given_String -> givens/InventedNames$package.given_String. -[34:4..34:5): c <- givens/InventedNames$package.c. -[34:8..34:20): given_Double -> givens/InventedNames$package.given_Double(). [35:4..35:5): d <- givens/InventedNames$package.d. [35:8..35:20): given_List_T -> givens/InventedNames$package.given_List_T(). [35:21..35:24): Int -> scala/Int# @@ -2214,7 +2211,6 @@ Occurrences: Synthetics: [24:0..24:0): => *(x$1) -[34:8..34:20):given_Double => *(intValue) [40:8..40:15):given_Y => *(given_X) expect/Issue1749.scala From 500906592c7298f54d6696cf7b7173cf30e43ac1 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Fri, 24 May 2024 16:12:19 +0200 Subject: [PATCH 106/827] Do not expose ClassInfo in widenTermRefByName Previously ClassInfo could be easily be exposed with calls like `TypeRepr.of[T].termRef.widenTermRefByName`. 
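A rough sketch of a macro where this showed up (adapted from the regression
test added in this patch; `check`/`checkImpl` and the use of `Any` are only
illustrative):

```scala
import scala.quoted.*

inline def check[T]: Unit = ${ checkImpl[T] }

private def checkImpl[T: Type](using Quotes): Expr[Unit] =
  import quotes.reflect.*
  val sym = TypeRepr.of[T].typeSymbol
  // For e.g. T = Any, the widened type used to be a raw ClassInfo
  // (its toString contained "ClassInfo"); now it is the prefix-selected
  // class type, so `asType` matching works as expected.
  sym.termRef.widenTermRefByName.asType match
    case '[t] => '{ () }
```

With this change, a `ClassInfo` result is rewrapped as `prefix.select(sym)`
instead of leaking the compiler-internal type.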
--- .../src/scala/quoted/runtime/impl/QuotesImpl.scala | 5 ++++- tests/pos-macros/i20458/Macro_1.scala | 12 ++++++++++++ tests/pos-macros/i20458/Test_2.scala | 1 + 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 tests/pos-macros/i20458/Macro_1.scala create mode 100644 tests/pos-macros/i20458/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 517adff17991..ce8d19aae46a 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -1811,7 +1811,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def =:=(that: TypeRepr): Boolean = self =:= that def <:<(that: TypeRepr): Boolean = self <:< that def widen: TypeRepr = self.widen - def widenTermRefByName: TypeRepr = self.widenTermRefExpr + def widenTermRefByName: TypeRepr = + self.widenTermRefExpr match + case dotc.core.Types.ClassInfo(prefix, sym, _, _, _) => prefix.select(sym) + case other => other def widenByName: TypeRepr = self.widenExpr def dealias: TypeRepr = self.dealias def dealiasKeepOpaques: TypeRepr = self.dealiasKeepOpaques diff --git a/tests/pos-macros/i20458/Macro_1.scala b/tests/pos-macros/i20458/Macro_1.scala new file mode 100644 index 000000000000..803eff68062a --- /dev/null +++ b/tests/pos-macros/i20458/Macro_1.scala @@ -0,0 +1,12 @@ +import scala.quoted._ + +inline def matchCustom[F](): Unit = ${ matchCustomImpl[F] } + +private def matchCustomImpl[F: Type](using q: Quotes): Expr[Unit] = { + import q.reflect.* + val any = TypeRepr.of[Any].typeSymbol + assert(!any.termRef.widenTermRefByName.toString.contains("ClassInfo")) + any.termRef.widenTermRefByName.asType match + case '[t] => () + '{ () } +} diff --git a/tests/pos-macros/i20458/Test_2.scala b/tests/pos-macros/i20458/Test_2.scala new file mode 100644 index 000000000000..1118f4483e23 --- /dev/null +++ b/tests/pos-macros/i20458/Test_2.scala @@ -0,0 +1 @@ +def main() = matchCustom() From 1f9a61ef6711176de5fea09671699a9a2f74a6b5 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 26 May 2024 21:57:20 +0200 Subject: [PATCH 107/827] Add special handling for comparisons with Singleton types Fixes #15030 --- .../src/dotty/tools/dotc/core/TypeComparer.scala | 13 ++++++++----- compiler/src/dotty/tools/dotc/core/Types.scala | 10 ++++++++++ tests/pos/i15030.scala | 16 ++++++++++++++++ 3 files changed, 34 insertions(+), 5 deletions(-) create mode 100644 tests/pos/i15030.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index c2c502a984c4..e0ab30907314 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -970,12 +970,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling compareAppliedType1(tp1, tycon1, args1) case tp1: SingletonType => def comparePaths = tp2 match - case tp2: TermRef => + case tp2: (TermRef | ThisType) => compareAtoms(tp1, tp2, knownSingletons = true).getOrElse(false) - || { // needed to make from-tasty work. test cases: pos/i1753.scala, pos/t839.scala - tp2.info.widenExpr.dealias match - case tp2i: SingletonType => recur(tp1, tp2i) - case _ => false + || { + // If tp2's underlying type tp2super is also effectively a singleton, compare + // against that. 
The idea is that if tp1 <: tp2super and tp2 <: tp2super and + // tp2super is also singleton, then tp1 and tp2 must be the same singleton. + // Needed to make from-tasty work. test cases: pos/i1753.scala, pos/t839.scala + val tp2super = tp2.superType.widenExpr + tp2super.isEffectivelySingleton && recur(tp1, tp2super) } case _ => false diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index eeffc41d4159..cad06e973741 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -328,6 +328,16 @@ object Types extends TypeUtils { /** Is this type a (possibly aliased) singleton type? */ def isSingleton(using Context): Boolean = dealias.isInstanceOf[SingletonType] + /** Is this type a (possibly aliased) singleton type or a type proxy + * or Or/And type known to be a singleton type? + */ + def isEffectivelySingleton(using Context): Boolean = dealias match + case tp: SingletonType => true + case tp: TypeProxy => tp.superType.isEffectivelySingleton + case AndType(tpL, tpR) => tpL.isEffectivelySingleton || tpR.isEffectivelySingleton + case OrType(tpL, tpR) => tpL.isEffectivelySingleton && tpR.isEffectivelySingleton + case _ => false + /** Is this upper-bounded by a (possibly aliased) singleton type? * Overridden in TypeVar */ diff --git a/tests/pos/i15030.scala b/tests/pos/i15030.scala new file mode 100644 index 000000000000..f0983140120c --- /dev/null +++ b/tests/pos/i15030.scala @@ -0,0 +1,16 @@ +sealed trait Schema[A] + +object Schema extends RecordInstances: + case class Field[A]() + +sealed trait RecordInstances: + self: Schema.type => + + case class Record[A](field: Field[A]) extends Schema[A] + +import Schema._ + +val field: Field[Int] = Field() + +// Uh oh Found Playground.Schema.Field[Int] but Requried RecordInstances.this.Field[Int] +val record = Record[Int](field) \ No newline at end of file From e8dcffcb981771ebc9b8940238c9b45c24f6f0c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 May 2024 13:27:23 +0000 Subject: [PATCH 108/827] Bump VirtusLab/scala-cli-setup from 1.3.1 to 1.3.2 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.3.1 to 1.3.2. - [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.3.1...v1.3.2) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 26bfb9a5d28c..108c412df5a3 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.3.1 + - uses: VirtusLab/scala-cli-setup@v1.3.2 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From 1f4a5de2dfd1820ac264cfb0a0e6035ea929450a Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 19 May 2024 17:39:02 +0200 Subject: [PATCH 109/827] Add some more tests for version-specific behavior --- tests/neg/i20415.scala | 2 ++ tests/neg/i6716-source-3.4.scala | 19 +++++++++++++++++++ tests/pos/given-loop-prevention.scala | 14 ++++++++++++++ 3 files changed, 35 insertions(+) create mode 100644 tests/neg/i20415.scala create mode 100644 tests/neg/i6716-source-3.4.scala create mode 100644 tests/pos/given-loop-prevention.scala diff --git a/tests/neg/i20415.scala b/tests/neg/i20415.scala new file mode 100644 index 000000000000..14582e40aa9d --- /dev/null +++ b/tests/neg/i20415.scala @@ -0,0 +1,2 @@ +class Foo: + given ord: Ordering[Int] = summon[Ordering[Int]] // error diff --git a/tests/neg/i6716-source-3.4.scala b/tests/neg/i6716-source-3.4.scala new file mode 100644 index 000000000000..f6f1961b67a4 --- /dev/null +++ b/tests/neg/i6716-source-3.4.scala @@ -0,0 +1,19 @@ +//> using options -Xfatal-warnings -source 3.4 + +trait Monad[T]: + def id: String +class Foo +object Foo { + given Monad[Foo] with { def id = "Foo" } +} + +opaque type Bar = Foo +object Bar { + given Monad[Bar] = summon[Monad[Foo]] // warn +} + +object Test extends App { + println(summon[Monad[Foo]].id) + println(summon[Monad[Bar]].id) +} +// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) \ No newline at end of file diff --git a/tests/pos/given-loop-prevention.scala b/tests/pos/given-loop-prevention.scala new file mode 100644 index 000000000000..f02559af1e82 --- /dev/null +++ b/tests/pos/given-loop-prevention.scala @@ -0,0 +1,14 @@ +//> using options -Xfatal-warnings -source 3.4 + +class Foo + +object Bar { + given Foo with {} + given List[Foo] = List(summon[Foo]) // ok +} + +object Baz { + @annotation.nowarn + given List[Foo] = List(summon[Foo]) // gives a warning, which is suppressed + given Foo with {} +} From 345c1e86c2294a9d87a9effdb6ba9ab9e518229c Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 19 May 2024 17:40:30 +0200 Subject: [PATCH 110/827] Enable new implicit search mode that avoids loops from 3.6 --- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 54821444aed6..3fe8d6fae8a3 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1718,7 +1718,7 @@ trait Implicits: SearchSuccess(tpd.ref(ref).withSpan(span.startPos), ref, 0)(ctx.typerState, ctx.gadt) case _ => searchImplicit(ctx.implicits, - if sourceVersion.isAtLeast(SourceVersion.future) then SearchMode.New + if sourceVersion.isAtLeast(SourceVersion.`3.6`) then SearchMode.New else if sourceVersion.isAtLeast(SourceVersion.`3.5`) then 
SearchMode.CompareErr else if sourceVersion.isAtLeast(SourceVersion.`3.4`) then SearchMode.CompareWarn else SearchMode.Old) From 026818592dcd86382559677901568fb43173f459 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 11 May 2024 13:08:22 +0200 Subject: [PATCH 111/827] Distinguish maximal from root capabilities A maximal capability is one that derives from `caps.Cap`. Also: drop the given caps.Cap. It's not clear why there needs to be a given for it. --- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 10 +++++----- compiler/src/dotty/tools/dotc/core/Types.scala | 9 ++++++++- library/src/scala/caps.scala | 2 -- tests/neg-custom-args/captures/filevar.scala | 2 +- tests/neg-custom-args/captures/i15923.scala | 2 +- tests/neg-custom-args/captures/stack-alloc.scala | 2 +- tests/neg-custom-args/captures/usingLogFile.scala | 2 +- 7 files changed, 17 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index d1a5a07f6a0f..5a60506cc49a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -115,7 +115,7 @@ sealed abstract class CaptureSet extends Showable: * capture set. */ protected final def addNewElem(elem: CaptureRef)(using Context, VarState): CompareResult = - if elem.isRootCapability || summon[VarState] == FrozenState then + if elem.isMaxCapability || summon[VarState] == FrozenState then addThisElem(elem) else addThisElem(elem).orElse: @@ -167,11 +167,11 @@ sealed abstract class CaptureSet extends Showable: if comparer.isInstanceOf[ExplainingTypeComparer] then // !!! DEBUG reporting.trace.force(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): elems.exists(_.subsumes(x)) - || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + || !x.isMaxCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK else reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): elems.exists(_.subsumes(x)) - || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + || !x.isMaxCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK /** A more optimistic version of accountsFor, which does not take variable supersets * of the `x` reference into account. 
A set might account for `x` if it accounts @@ -183,7 +183,7 @@ sealed abstract class CaptureSet extends Showable: def mightAccountFor(x: CaptureRef)(using Context): Boolean = reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { elems.exists(_.subsumes(x)) - || !x.isRootCapability + || !x.isMaxCapability && { val elems = x.captureSetOfInfo.elems !elems.isEmpty && elems.forall(mightAccountFor) @@ -1032,7 +1032,7 @@ object CaptureSet: /** The capture set of the type underlying CaptureRef */ def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match - case ref: TermRef if ref.isRootCapability => ref.singletonCaptureSet + case ref: (TermRef | TermParamRef) if ref.isMaxCapability => ref.singletonCaptureSet case ReachCapability(ref1) => deepCaptureSet(ref1.widen) .showing(i"Deep capture set of $ref: ${ref1.widen} = $result", capt) case _ => ofType(ref.underlying, followResult = true) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index eeffc41d4159..63f49b6ee8ef 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2259,7 +2259,7 @@ object Types extends TypeUtils { * set of the underlying type is not always empty. */ final def isTracked(using Context): Boolean = - isTrackableRef && (isRootCapability || !captureSetOfInfo.isAlwaysEmpty) + isTrackableRef && (isMaxCapability || !captureSetOfInfo.isAlwaysEmpty) /** Is this a reach reference of the form `x*`? */ def isReach(using Context): Boolean = false // overridden in AnnotatedType @@ -2273,6 +2273,9 @@ object Types extends TypeUtils { /** Is this reference the generic root capability `cap` ? */ def isRootCapability(using Context): Boolean = false + /** Is this reference capability that does not derive from another capability ? */ + def isMaxCapability(using Context): Boolean = false + /** Normalize reference so that it can be compared with `eq` for equality */ def normalizedRef(using Context): CaptureRef = this @@ -3010,6 +3013,9 @@ object Types extends TypeUtils { override def isRootCapability(using Context): Boolean = name == nme.CAPTURE_ROOT && symbol == defn.captureRoot + override def isMaxCapability(using Context): Boolean = + widen.derivesFrom(defn.Caps_Cap) && symbol.isStableMember + override def normalizedRef(using Context): CaptureRef = if isTrackableRef then symbol.termRef else this } @@ -4809,6 +4815,7 @@ object Types extends TypeUtils { def kindString: String = "Term" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) override def isTrackableRef(using Context) = true + override def isMaxCapability(using Context) = widen.derivesFrom(defn.Caps_Cap) } private final class TermParamRefImpl(binder: TermLambda, paramNum: Int) extends TermParamRef(binder, paramNum) diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index c7fc8e7ba584..9e21b4af1f1e 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -13,8 +13,6 @@ import annotation.experimental /** The universal capture reference */ val cap: Cap = Cap() - given Cap = cap - /** Reach capabilities x* which appear as terms in @retains annotations are encoded * as `caps.reachCapability(x)`. When converted to CaptureRef types in capture sets * they are represented as `x.type @annotation.internal.reachCapability`. 
diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index 59b8415d6e0f..bcf2d7beccbf 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -9,7 +9,7 @@ class Service: def log = file.write("log") def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = - op(new File) + op(using caps.cap)(new File) def test = withFile: f => diff --git a/tests/neg-custom-args/captures/i15923.scala b/tests/neg-custom-args/captures/i15923.scala index 754fd0687037..89688449bdac 100644 --- a/tests/neg-custom-args/captures/i15923.scala +++ b/tests/neg-custom-args/captures/i15923.scala @@ -5,7 +5,7 @@ def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) def bar() = { def withCap[X](op: (lcap: caps.Cap) ?-> Cap^{lcap} => X): X = { val cap: Cap = new Cap { def use() = { println("cap is used"); 0 } } - val result = op(cap) + val result = op(using caps.cap)(cap) result } diff --git a/tests/neg-custom-args/captures/stack-alloc.scala b/tests/neg-custom-args/captures/stack-alloc.scala index befafbf13003..1b93d2e5129d 100644 --- a/tests/neg-custom-args/captures/stack-alloc.scala +++ b/tests/neg-custom-args/captures/stack-alloc.scala @@ -9,7 +9,7 @@ def withFreshPooled[T](op: (lcap: caps.Cap) ?-> Pooled^{lcap} => T): T = if nextFree >= stack.size then stack.append(new Pooled) val pooled = stack(nextFree) nextFree = nextFree + 1 - val ret = op(pooled) + val ret = op(using caps.cap)(pooled) nextFree = nextFree - 1 ret diff --git a/tests/neg-custom-args/captures/usingLogFile.scala b/tests/neg-custom-args/captures/usingLogFile.scala index 67e6f841e7ce..25b853913af9 100644 --- a/tests/neg-custom-args/captures/usingLogFile.scala +++ b/tests/neg-custom-args/captures/usingLogFile.scala @@ -5,7 +5,7 @@ object Test1: def usingLogFile[T](op: (local: caps.Cap) ?-> FileOutputStream => T): T = val logFile = FileOutputStream("log") - val result = op(logFile) + val result = op(using caps.cap)(logFile) logFile.close() result From 0c1394ffef07d6ae61506137b54ebba4298b3262 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 11 May 2024 23:08:05 +0200 Subject: [PATCH 112/827] Handle tracked class parameters The current handling of class type refinements is unsound. We cannot simply use a variable for the capture set of a class argument. What we need to do instead is treat class arguments as tracked. In this commit we at least allow explicitly declared tracked arguments. This needed two modifications: - Don't additionally add a capture set for tracked arguments - Handle the case where a capture reference is of a singleton type which is another capture reference. As a next step we should treat all class arguments as implicitly tracked. --- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 5 +++++ compiler/src/dotty/tools/dotc/cc/Setup.scala | 4 +++- compiler/src/dotty/tools/dotc/core/Types.scala | 11 +++++++++-- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 5a60506cc49a..3c2dab7c46e0 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -147,6 +147,7 @@ sealed abstract class CaptureSet extends Showable: * this subsumes this.f * x subsumes y ==> x* subsumes y, x subsumes y? * x subsumes y ==> x* subsumes y*, x? subsumes y? 
+ * x: x1.type /\ x1 subsumes y ==> x subsumes y */ extension (x: CaptureRef) private def subsumes(y: CaptureRef)(using Context): Boolean = @@ -158,6 +159,10 @@ sealed abstract class CaptureSet extends Showable: case _ => false || x.match case ReachCapability(x1) => x1.subsumes(y.stripReach) + case x: TermRef => + x.info match + case x1: CaptureRef => x1.subsumes(y) + case _ => false case _ => false /** {x} <:< this where <:< is subcapturing, but treating all variables diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index e6953dbf67b7..d909bccc8070 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -196,7 +196,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case cls: ClassSymbol if !defn.isFunctionClass(cls) && cls.is(CaptureChecked) => cls.paramGetters.foldLeft(tp) { (core, getter) => - if atPhase(thisPhase.next)(getter.termRef.isTracked) then + if atPhase(thisPhase.next)(getter.termRef.isTracked) + && !getter.is(Tracked) + then val getterType = mapInferred(refine = false)(tp.memberInfo(getter)).strippedDealias RefinedType(core, getter.name, diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 63f49b6ee8ef..a007842c937c 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -6162,8 +6162,15 @@ object Types extends TypeUtils { def inverse: BiTypeMap /** A restriction of this map to a function on tracked CaptureRefs */ - def forward(ref: CaptureRef): CaptureRef = this(ref) match - case result: CaptureRef if result.isTrackableRef => result + def forward(ref: CaptureRef): CaptureRef = + val result = this(ref) + def ensureTrackable(tp: Type): CaptureRef = tp match + case tp: CaptureRef => + if tp.isTrackableRef then tp + else ensureTrackable(tp.underlying) + case _ => + assert(false, i"not a trackable captureRef ref: $result, ${result.underlyingIterator.toList}") + ensureTrackable(result) /** A restriction of the inverse to a function on tracked CaptureRefs */ def backward(ref: CaptureRef): CaptureRef = inverse(ref) match From b76bc3ef2aa70a5f927cb3d6958cca9080dc6ff5 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 11 May 2024 23:08:50 +0200 Subject: [PATCH 113/827] Drop @capability annotations Replace with references that inherit trait `Capability`. 
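In user-facing terms the migration looks as follows (a small sketch using a
`Cap` class, mirroring the updated tests below):

    // before: capability status via the (now deprecated) annotation
    import annotation.capability
    @capability class Cap

    // after: capability status via inheritance
    class Cap extends caps.Capability

Instances of classes deriving from `caps.Capability` are treated as maximal
capabilities, i.e. their capture set is the universal set, just as it was for
`@capability`-annotated classes.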
--- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 19 ++++- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 3 +- .../dotty/tools/dotc/cc/CheckCaptures.scala | 10 ++- compiler/src/dotty/tools/dotc/cc/Setup.scala | 6 +- .../dotty/tools/dotc/core/Definitions.scala | 5 +- .../src/dotty/tools/dotc/core/Types.scala | 7 +- library/src/scala/CanThrow.scala | 4 +- library/src/scala/annotation/capability.scala | 4 +- library/src/scala/caps.scala | 11 ++- tests/disabled/pos/lazylist.scala | 2 +- .../captures/box-unsoundness.scala | 1 - tests/neg-custom-args/captures/byname.check | 2 +- tests/neg-custom-args/captures/byname.scala | 2 +- .../captures/capt-box-env.scala | 2 +- tests/neg-custom-args/captures/capt-box.scala | 2 +- tests/neg-custom-args/captures/capt-wf2.scala | 2 +- .../captures/caseclass/Test_2.scala | 2 +- tests/neg-custom-args/captures/cc-this.scala | 2 +- tests/neg-custom-args/captures/cc-this2.check | 2 +- tests/neg-custom-args/captures/cc-this3.scala | 2 +- tests/neg-custom-args/captures/cc-this5.check | 2 +- tests/neg-custom-args/captures/cc-this5.scala | 4 +- .../captures/class-constr.scala | 2 +- .../captures/effect-swaps-explicit.check | 29 +++++++ .../captures/effect-swaps-explicit.scala | 76 +++++++++++++++++++ .../captures/effect-swaps.check | 15 ++-- .../captures/effect-swaps.scala | 14 +++- .../captures/exception-definitions.check | 2 +- .../captures/extending-cap-classes.scala | 15 ---- tests/neg-custom-args/captures/filevar.scala | 2 +- tests/neg-custom-args/captures/i15923.scala | 2 +- tests/neg-custom-args/captures/i16725.scala | 7 +- .../captures/inner-classes.scala | 2 +- .../captures/stack-alloc.scala | 2 +- tests/neg-custom-args/captures/try3.scala | 2 +- .../captures/usingLogFile.scala | 2 +- tests/neg/unsound-reach.check | 10 +-- tests/neg/unsound-reach.scala | 16 ++-- tests/pos-custom-args/captures/boxed1.scala | 2 +- .../captures/capt-capability.scala | 16 ++-- .../pos-custom-args/captures/caseclass.scala | 2 +- tests/pos-custom-args/captures/cc-this.scala | 2 +- .../captures/eta-expansions.scala | 2 +- .../captures/filevar-tracked.scala | 38 ++++++++++ tests/pos-custom-args/captures/filevar.scala | 13 ++-- tests/pos-custom-args/captures/i16116.scala | 3 +- tests/pos-custom-args/captures/i16226.scala | 2 +- tests/pos-custom-args/captures/i19751.scala | 2 +- tests/pos-custom-args/captures/lazyref.scala | 2 +- tests/pos-custom-args/captures/lists.scala | 4 +- .../captures/logger-tracked.scala | 68 +++++++++++++++++ tests/pos-custom-args/captures/logger.scala | 9 ++- .../captures/nested-classes-tracked.scala | 22 ++++++ .../captures/nested-classes.scala | 9 ++- .../captures/null-logger.scala | 2 +- tests/pos-custom-args/captures/pairs.scala | 6 +- tests/pos-custom-args/captures/reaches.scala | 2 +- tests/pos-custom-args/captures/try3.scala | 2 +- tests/pos-custom-args/captures/vars.scala | 2 +- tests/pos/dotty-experimental.scala | 2 +- tests/pos/i20237.scala | 2 +- tests/pos/into-bigint.scala | 4 +- 62 files changed, 382 insertions(+), 130 deletions(-) create mode 100644 tests/neg-custom-args/captures/effect-swaps-explicit.check create mode 100644 tests/neg-custom-args/captures/effect-swaps-explicit.scala delete mode 100644 tests/neg-custom-args/captures/extending-cap-classes.scala create mode 100644 tests/pos-custom-args/captures/filevar-tracked.scala create mode 100644 tests/pos-custom-args/captures/logger-tracked.scala create mode 100644 tests/pos-custom-args/captures/nested-classes-tracked.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala 
b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 5c9946f6134a..9b899030bc02 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -203,6 +203,23 @@ extension (tp: Type) case _ => false + /** Does type derive from caps.Capability?, which means it references of this + * type are maximal capabilities? + */ + def derivesFromCapability(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.derivesFrom(defn.Caps_Capability) + else tp.superType.derivesFromCapability + case tp: TypeProxy => + tp.superType.derivesFromCapability + case tp: AndType => + tp.tp1.derivesFromCapability || tp.tp2.derivesFromCapability + case tp: OrType => + tp.tp1.derivesFromCapability && tp.tp2.derivesFromCapability + case _ => + false + /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: @@ -408,7 +425,7 @@ extension (sym: Symbol) /** The owner of the current level. Qualifying owners are * - methods other than constructors and anonymous functions * - anonymous functions, provided they either define a local - * root of type caps.Cap, or they are the rhs of a val definition. + * root of type caps.Capability, or they are the rhs of a val definition. * - classes, if they are not staticOwners * - _root_ */ diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 3c2dab7c46e0..06ba36bd5e24 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -1051,7 +1051,8 @@ object CaptureSet: case tp: TermParamRef => tp.captureSet case tp: TypeRef => - if tp.typeSymbol == defn.Caps_Cap then universal else empty + if tp.derivesFromCapability then universal // TODO: maybe return another value that indicates that the underltinf ref is maximal? 
+ else empty case _: TypeParamRef => empty case CapturingType(parent, refs) => diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index a5bb8792af2c..cd0f21129ac9 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -537,8 +537,8 @@ class CheckCaptures extends Recheck, SymTransformer: */ def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = var refined: Type = core - var allCaptures: CaptureSet = if setup.isCapabilityClassRef(core) - then CaptureSet.universal else initCs + var allCaptures: CaptureSet = + if core.derivesFromCapability then CaptureSet.universal else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol if getter.termRef.isTracked && !getter.is(Private) then @@ -572,8 +572,10 @@ class CheckCaptures extends Recheck, SymTransformer: val TypeApply(fn, args) = tree val polyType = atPhase(thisPhase.prev): fn.tpe.widen.asInstanceOf[TypeLambda] + def isExempt(sym: Symbol) = + sym.isTypeTestOrCast || sym == defn.Compiletime_erasedValue for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do - if !tree.symbol.isTypeTestOrCast then + if !isExempt(tree.symbol) then def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" disallowRootCapabilitiesIn(arg.knownType, NoSymbol, i"Sealed type variable $pname", "be instantiated to", @@ -1305,7 +1307,7 @@ class CheckCaptures extends Recheck, SymTransformer: case ref: TermParamRef if !allowed.contains(ref) && !seen.contains(ref) => seen += ref - if ref.underlying.isRef(defn.Caps_Cap) then + if ref.underlying.isRef(defn.Caps_Capability) then report.error(i"escaping local reference $ref", tree.srcPos) else val widened = ref.captureSetOfInfo diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index d909bccc8070..1a8c65c89a43 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -77,9 +77,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def isCapabilityClassRef(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match case _: TypeRef | _: AppliedType => val sym = tp.classSymbol - def checkSym: Boolean = - sym.hasAnnotation(defn.CapabilityAnnot) - || sym.info.parents.exists(hasUniversalCapability) + def checkSym: Boolean = sym.info.parents.exists(hasUniversalCapability) sym.isClass && capabilityClassMap.getOrElseUpdate(sym, checkSym) case _ => false @@ -594,7 +592,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if sym.isClass then !sym.isPureClass else - sym != defn.Caps_Cap && instanceCanBeImpure(tp.superType) + sym != defn.Caps_Capability && instanceCanBeImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => instanceCanBeImpure(tp.underlying) case tp: AndType => diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 11a4a8473e79..52535f26c692 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -991,7 +991,7 @@ class Definitions { @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") - @tu lazy val Caps_Cap: TypeSymbol = 
CapsModule.requiredType("Cap") + @tu lazy val Caps_Capability: TypeSymbol = CapsModule.requiredType("Capability") @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") @@ -1014,7 +1014,6 @@ class Definitions { @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") - @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability") @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child") @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount") @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") @@ -2033,7 +2032,7 @@ class Definitions { */ @tu lazy val ccExperimental: Set[Symbol] = Set( CapsModule, CapsModule.moduleClass, PureClass, - CapabilityAnnot, RequiresCapabilityAnnot, + RequiresCapabilityAnnot, RetainsAnnot, RetainsCapAnnot, RetainsByNameAnnot) /** Experimental language features defined in `scala.runtime.stdLibPatches.language.experimental`. diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index a007842c937c..5be6774d0ff0 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3014,7 +3014,8 @@ object Types extends TypeUtils { name == nme.CAPTURE_ROOT && symbol == defn.captureRoot override def isMaxCapability(using Context): Boolean = - widen.derivesFrom(defn.Caps_Cap) && symbol.isStableMember + import cc.* + this.derivesFromCapability && symbol.isStableMember override def normalizedRef(using Context): CaptureRef = if isTrackableRef then symbol.termRef else this @@ -4815,7 +4816,9 @@ object Types extends TypeUtils { def kindString: String = "Term" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) override def isTrackableRef(using Context) = true - override def isMaxCapability(using Context) = widen.derivesFrom(defn.Caps_Cap) + override def isMaxCapability(using Context) = + import cc.* + this.derivesFromCapability } private final class TermParamRefImpl(binder: TermLambda, paramNum: Int) extends TermParamRef(binder, paramNum) diff --git a/library/src/scala/CanThrow.scala b/library/src/scala/CanThrow.scala index c7f23a393715..91c94229c43c 100644 --- a/library/src/scala/CanThrow.scala +++ b/library/src/scala/CanThrow.scala @@ -6,9 +6,9 @@ import annotation.{implicitNotFound, experimental, capability} * experimental.saferExceptions feature, a `throw Ex()` expression will require * a given of class `CanThrow[Ex]` to be available. 
*/ -@experimental @capability +@experimental @implicitNotFound("The capability to throw exception ${E} is missing.\nThe capability can be provided by one of the following:\n - Adding a using clause `(using CanThrow[${E}])` to the definition of the enclosing method\n - Adding `throws ${E}` clause after the result type of the enclosing method\n - Wrapping this piece of code with a `try` block that catches ${E}") -erased class CanThrow[-E <: Exception] +erased class CanThrow[-E <: Exception] extends caps.Capability @experimental object unsafeExceptions: diff --git a/library/src/scala/annotation/capability.scala b/library/src/scala/annotation/capability.scala index 4696ed6a015e..d3453e3c8168 100644 --- a/library/src/scala/annotation/capability.scala +++ b/library/src/scala/annotation/capability.scala @@ -11,4 +11,6 @@ import annotation.experimental * THere, the capture set of any instance of `CanThrow` is assumed to be * `{*}`. */ -@experimental final class capability extends StaticAnnotation +@experimental +@deprecated("To make a class a capability, let it derive from the `Capability` trait instead") +final class capability extends StaticAnnotation diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 9e21b4af1f1e..215ad2cb5697 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -4,14 +4,17 @@ import annotation.experimental @experimental object caps: - class Cap // should be @erased + trait Capability // should be @erased + + /** The universal capture reference */ + val cap: Capability = new Capability() {} /** The universal capture reference (deprecated) */ @deprecated("Use `cap` instead") - val `*`: Cap = cap + val `*`: Capability = cap - /** The universal capture reference */ - val cap: Cap = Cap() + @deprecated("Use `Capability` instead") + type Cap = Capability /** Reach capabilities x* which appear as terms in @retains annotations are encoded * as `caps.reachCapability(x)`. 
When converted to CaptureRef types in capture sets diff --git a/tests/disabled/pos/lazylist.scala b/tests/disabled/pos/lazylist.scala index c24f8677b91f..e56eb484894c 100644 --- a/tests/disabled/pos/lazylist.scala +++ b/tests/disabled/pos/lazylist.scala @@ -34,7 +34,7 @@ object LazyNil extends LazyList[Nothing]: def map[A, B](xs: {*} LazyList[A], f: {*} A => B): {f, xs} LazyList[B] = xs.map(f) -@annotation.capability class Cap +class Cap extends caps.Capability def test(cap1: Cap, cap2: Cap, cap3: Cap) = def f[T](x: LazyList[T]): LazyList[T] = if cap1 == cap1 then x else LazyNil diff --git a/tests/neg-custom-args/captures/box-unsoundness.scala b/tests/neg-custom-args/captures/box-unsoundness.scala index d1331f16df1f..8c1c22bc7fa6 100644 --- a/tests/neg-custom-args/captures/box-unsoundness.scala +++ b/tests/neg-custom-args/captures/box-unsoundness.scala @@ -1,4 +1,3 @@ -//@annotation.capability class CanIO { def use(): Unit = () } def use[X](x: X): (op: X -> Unit) -> Unit = op => op(x) def test(io: CanIO^): Unit = diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index e06a3a1f8268..2b48428e97bc 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/byname.scala:19:5 ------------------------------------------------------------- 19 | h(g()) // error | ^^^ - | reference (cap2 : Cap^) is not included in the allowed capture set {cap1} + | reference (cap2 : Cap) is not included in the allowed capture set {cap1} | of an enclosing function literal with expected type () ?->{cap1} I -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:4:2 ----------------------------------------- 4 | def f() = if cap1 == cap1 then g else g // error diff --git a/tests/neg-custom-args/captures/byname.scala b/tests/neg-custom-args/captures/byname.scala index 279122f54735..0ed3a09cb414 100644 --- a/tests/neg-custom-args/captures/byname.scala +++ b/tests/neg-custom-args/captures/byname.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def test(cap1: Cap, cap2: Cap) = def f() = if cap1 == cap1 then g else g // error diff --git a/tests/neg-custom-args/captures/capt-box-env.scala b/tests/neg-custom-args/captures/capt-box-env.scala index 605b446d5262..bfe1874d073b 100644 --- a/tests/neg-custom-args/captures/capt-box-env.scala +++ b/tests/neg-custom-args/captures/capt-box-env.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability class Pair[+A, +B](x: A, y: B): def fst: A = x diff --git a/tests/neg-custom-args/captures/capt-box.scala b/tests/neg-custom-args/captures/capt-box.scala index 634470704fc5..291882bed36d 100644 --- a/tests/neg-custom-args/captures/capt-box.scala +++ b/tests/neg-custom-args/captures/capt-box.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def test(x: Cap) = diff --git a/tests/neg-custom-args/captures/capt-wf2.scala b/tests/neg-custom-args/captures/capt-wf2.scala index 6c65e0dc77f7..8bb04a230fdd 100644 --- a/tests/neg-custom-args/captures/capt-wf2.scala +++ b/tests/neg-custom-args/captures/capt-wf2.scala @@ -1,4 +1,4 @@ -@annotation.capability class C +class C extends caps.Capability def test(c: C) = var x: Any^{c} = ??? 
diff --git a/tests/neg-custom-args/captures/caseclass/Test_2.scala b/tests/neg-custom-args/captures/caseclass/Test_2.scala index bffc0a295bdc..9d97d5537c72 100644 --- a/tests/neg-custom-args/captures/caseclass/Test_2.scala +++ b/tests/neg-custom-args/captures/caseclass/Test_2.scala @@ -1,4 +1,4 @@ -@annotation.capability class C +class C extends caps.Capability def test(c: C) = val pure: () -> Unit = () => () val impure: () => Unit = pure diff --git a/tests/neg-custom-args/captures/cc-this.scala b/tests/neg-custom-args/captures/cc-this.scala index 4c05be702c51..e4336ed457af 100644 --- a/tests/neg-custom-args/captures/cc-this.scala +++ b/tests/neg-custom-args/captures/cc-this.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def eff(using Cap): Unit = () diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index bd9a1085d262..6cb3010d6174 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -2,7 +2,7 @@ -- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 -------------------------------------------------------- 3 | this: D^ => // error | ^^ - |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class C + |reference (caps.cap : caps.Capability) captured by this self type is not included in the allowed capture set {} of pure base class class C -- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- 2 |class D extends C: // error | ^ diff --git a/tests/neg-custom-args/captures/cc-this3.scala b/tests/neg-custom-args/captures/cc-this3.scala index 25af19dd6c4a..0a36cde8173b 100644 --- a/tests/neg-custom-args/captures/cc-this3.scala +++ b/tests/neg-custom-args/captures/cc-this3.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def eff(using Cap): Unit = () diff --git a/tests/neg-custom-args/captures/cc-this5.check b/tests/neg-custom-args/captures/cc-this5.check index 8affe7005e2e..1329734ce37d 100644 --- a/tests/neg-custom-args/captures/cc-this5.check +++ b/tests/neg-custom-args/captures/cc-this5.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/cc-this5.scala:16:20 ---------------------------------------------------------- 16 | def f = println(c) // error | ^ - | (c : Cap^) cannot be referenced here; it is not included in the allowed capture set {} + | (c : Cap) cannot be referenced here; it is not included in the allowed capture set {} | of the enclosing class A -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/cc-this5.scala:21:15 ------------------------------------- 21 | val x: A = this // error diff --git a/tests/neg-custom-args/captures/cc-this5.scala b/tests/neg-custom-args/captures/cc-this5.scala index e84c2a41f55c..4c9a8a706670 100644 --- a/tests/neg-custom-args/captures/cc-this5.scala +++ b/tests/neg-custom-args/captures/cc-this5.scala @@ -1,7 +1,7 @@ class C: val x: C = this -@annotation.capability class Cap +class Cap extends caps.Capability def foo(c: Cap) = object D extends C: // error @@ -17,5 +17,5 @@ def test(c: Cap) = def test2(c: Cap) = class A: - def f = println(c) + def f = println(c) val x: A = this // error diff --git a/tests/neg-custom-args/captures/class-constr.scala b/tests/neg-custom-args/captures/class-constr.scala index 9afb6972ccfa..619fa9fa0341 100644 --- 
a/tests/neg-custom-args/captures/class-constr.scala +++ b/tests/neg-custom-args/captures/class-constr.scala @@ -1,6 +1,6 @@ import annotation.{capability, constructorOnly} -@capability class Cap +class Cap extends caps.Capability class C(x: Cap, @constructorOnly y: Cap) diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.check b/tests/neg-custom-args/captures/effect-swaps-explicit.check new file mode 100644 index 000000000000..47559ab97568 --- /dev/null +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.check @@ -0,0 +1,29 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:64:8 ------------------------- +63 | Result: +64 | Future: // error, type mismatch + | ^ + | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] + | Required: Result[Future[T], Nothing] +65 | fr.await.ok + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from effect-swaps-explicit.scala:41 +41 | boundary(Ok(body)) + | ^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:74:10 ------------------------ +74 | Future: fut ?=> // error: type mismatch + | ^ + | Found: Future[box T^?]^{fr, lbl} + | Required: Future[box T^?]^? +75 | fr.await.ok + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:68:15 --------------------------------------------- +68 | Result.make: //lbl ?=> // error, escaping label from Result + | ^^^^^^^^^^^ + |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): + | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.scala b/tests/neg-custom-args/captures/effect-swaps-explicit.scala new file mode 100644 index 000000000000..814199706721 --- /dev/null +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.scala @@ -0,0 +1,76 @@ +import annotation.capability + +object boundary: + + final class Label[-T] // extends caps.Capability + + /** Abort current computation and instead return `value` as the value of + * the enclosing `boundary` call that created `label`. + */ + def break[T](value: T)(using label: Label[T]^): Nothing = ??? + + def apply[T](body: Label[T]^ ?=> T): T = ??? +end boundary + +import boundary.{Label, break} + +trait Async extends caps.Capability +object Async: + def blocking[T](body: Async ?=> T): T = ??? + +class Future[+T]: + this: Future[T]^ => + def await(using Async): T = ??? +object Future: + def apply[T](op: Async ?=> T)(using Async): Future[T]^{op} = ??? 
+ +enum Result[+T, +E]: + case Ok[+T](value: T) extends Result[T, Nothing] + case Err[+E](error: E) extends Result[Nothing, E] + + +object Result: + extension [T, E](r: Result[T, E]^)(using Label[Err[E]]^) + + /** `_.ok` propagates Err to current Label */ + def ok: T = r match + case Ok(value) => value + case Err(value) => break[Err[E]](Err(value)) + + transparent inline def apply[T, E](inline body: Label[Result[T, E]]^ ?=> T): Result[T, E] = + boundary(Ok(body)) + + // same as apply, but not an inline method + def make[T, E](body: Label[Result[T, E]]^ ?=> T): Result[T, E] = + boundary(Ok(body)) + +end Result + +def test[T, E](using Async) = + import Result.* + Async.blocking: async ?=> + val good1: List[Future[Result[T, E]]] => Future[Result[List[T], E]] = frs => + Future: + Result: + frs.map(_.await.ok) // OK + + val good2: Result[Future[T], E] => Future[Result[T, E]] = rf => + Future: + Result: + rf.ok.await // OK, Future argument has type Result[T] + + def fail3(fr: Future[Result[T, E]]^) = + Result: + Future: // error, type mismatch + fr.await.ok + + def fail4[T, E](fr: Future[Result[T, E]]^) = + Result.make: //lbl ?=> // error, escaping label from Result + Future: fut ?=> + fr.await.ok + + def fail5[T, E](fr: Future[Result[T, E]]^) = + Result.make[Future[T], E]: lbl ?=> + Future: fut ?=> // error: type mismatch + fr.await.ok + diff --git a/tests/neg-custom-args/captures/effect-swaps.check b/tests/neg-custom-args/captures/effect-swaps.check index bda3509645d1..f16019c15513 100644 --- a/tests/neg-custom-args/captures/effect-swaps.check +++ b/tests/neg-custom-args/captures/effect-swaps.check @@ -1,6 +1,6 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:64:8 ---------------------------------- 63 | Result: -64 | Future: // error, escaping label from Result +64 | Future: // error, type mismatch | ^ | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] | Required: Result[Future[T], Nothing] @@ -14,8 +14,11 @@ -------------------------------------------------------------------------------------------------------------------- | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/effect-swaps.scala:68:15 ------------------------------------------------------ -68 | Result.make: //lbl ?=> // error, escaping label from Result - | ^^^^^^^^^^^ - |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): - | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:74:10 --------------------------------- +74 | Future: fut ?=> // error: type mismatch + | ^ + | Found: Future[box T^?]^{fr, lbl} + | Required: Future[box T^?]^? +75 | fr.await.ok + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/effect-swaps.scala b/tests/neg-custom-args/captures/effect-swaps.scala index 1d72077bb8da..af44501371a9 100644 --- a/tests/neg-custom-args/captures/effect-swaps.scala +++ b/tests/neg-custom-args/captures/effect-swaps.scala @@ -2,7 +2,7 @@ import annotation.capability object boundary: - @capability final class Label[-T] + final class Label[-T] extends caps.Capability /** Abort current computation and instead return `value` as the value of * the enclosing `boundary` call that created `label`. 
@@ -14,7 +14,7 @@ end boundary import boundary.{Label, break} -@capability trait Async +trait Async extends caps.Capability object Async: def blocking[T](body: Async ?=> T): T = ??? @@ -61,10 +61,16 @@ def test[T, E](using Async) = def fail3(fr: Future[Result[T, E]]^) = Result: - Future: // error, escaping label from Result + Future: // error, type mismatch fr.await.ok def fail4[T, E](fr: Future[Result[T, E]]^) = - Result.make: //lbl ?=> // error, escaping label from Result + Result.make: //lbl ?=> // should be error, escaping label from Result but infers Result[Any, Any] Future: fut ?=> fr.await.ok + + def fail5[T, E](fr: Future[Result[T, E]]^) = + Result.make[Future[T], E]: lbl ?=> + Future: fut ?=> // error: type mismatch + fr.await.ok + diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 72b88f252e59..7f915ebd9833 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 ----------------------------------------------- 3 | self: Err^ => // error | ^^^^ - |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable + |reference (caps.cap : caps.Capability) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ diff --git a/tests/neg-custom-args/captures/extending-cap-classes.scala b/tests/neg-custom-args/captures/extending-cap-classes.scala deleted file mode 100644 index 17497e415a1e..000000000000 --- a/tests/neg-custom-args/captures/extending-cap-classes.scala +++ /dev/null @@ -1,15 +0,0 @@ -import annotation.capability - -class C1 -@capability class C2 extends C1 -class C3 extends C2 - -def test = - val x1: C1 = new C1 - val x2: C1 = new C2 // error - val x3: C1 = new C3 // error - - val y1: C2 = new C2 - val y2: C2 = new C3 - - val z1: C3 = new C3 \ No newline at end of file diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index bcf2d7beccbf..0d9cbed164e3 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -8,7 +8,7 @@ class Service: var file: File^ = uninitialized // error def log = file.write("log") -def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = +def withFile[T](op: (l: caps.Capability) ?-> (f: File^{l}) => T): T = op(using caps.cap)(new File) def test = diff --git a/tests/neg-custom-args/captures/i15923.scala b/tests/neg-custom-args/captures/i15923.scala index 89688449bdac..e71f01996938 100644 --- a/tests/neg-custom-args/captures/i15923.scala +++ b/tests/neg-custom-args/captures/i15923.scala @@ -3,7 +3,7 @@ type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) def bar() = { - def withCap[X](op: (lcap: caps.Cap) ?-> Cap^{lcap} => X): X = { + def withCap[X](op: (lcap: caps.Capability) ?-> Cap^{lcap} => X): X = { val cap: Cap = new Cap { def use() = { println("cap is used"); 0 } } val result = op(using caps.cap)(cap) result diff --git a/tests/neg-custom-args/captures/i16725.scala b/tests/neg-custom-args/captures/i16725.scala index ff06b3be78a7..853b3e96ca5c 100644 --- 
a/tests/neg-custom-args/captures/i16725.scala +++ b/tests/neg-custom-args/captures/i16725.scala @@ -1,6 +1,5 @@ import language.experimental.captureChecking -@annotation.capability -class IO: +class IO extends caps.Capability: def brewCoffee(): Unit = ??? def usingIO[T](op: IO => T): T = ??? @@ -8,8 +7,8 @@ type Wrapper[T] = [R] -> (f: T => R) -> R def mk[T](x: T): Wrapper[T] = [R] => f => f(x) def useWrappedIO(wrapper: Wrapper[IO]): () -> Unit = () => - wrapper: io => // error + wrapper: io => io.brewCoffee() def main(): Unit = - val escaped = usingIO(io => useWrappedIO(mk(io))) + val escaped = usingIO(io => useWrappedIO(mk(io))) // error // error escaped() // boom diff --git a/tests/neg-custom-args/captures/inner-classes.scala b/tests/neg-custom-args/captures/inner-classes.scala index 181b830e4996..fd500e607970 100644 --- a/tests/neg-custom-args/captures/inner-classes.scala +++ b/tests/neg-custom-args/captures/inner-classes.scala @@ -1,6 +1,6 @@ object test: - @annotation.capability class FileSystem + class FileSystem extends caps.Capability def foo(fs: FileSystem) = diff --git a/tests/neg-custom-args/captures/stack-alloc.scala b/tests/neg-custom-args/captures/stack-alloc.scala index 1b93d2e5129d..80e7e4169720 100644 --- a/tests/neg-custom-args/captures/stack-alloc.scala +++ b/tests/neg-custom-args/captures/stack-alloc.scala @@ -5,7 +5,7 @@ class Pooled val stack = mutable.ArrayBuffer[Pooled]() var nextFree = 0 -def withFreshPooled[T](op: (lcap: caps.Cap) ?-> Pooled^{lcap} => T): T = +def withFreshPooled[T](op: (lcap: caps.Capability) ?-> Pooled^{lcap} => T): T = if nextFree >= stack.size then stack.append(new Pooled) val pooled = stack(nextFree) nextFree = nextFree + 1 diff --git a/tests/neg-custom-args/captures/try3.scala b/tests/neg-custom-args/captures/try3.scala index 004cda6a399c..880d20ef16a0 100644 --- a/tests/neg-custom-args/captures/try3.scala +++ b/tests/neg-custom-args/captures/try3.scala @@ -4,7 +4,7 @@ class CT[E] type CanThrow[E] = CT[E]^ type Top = Any^ -def handle[E <: Exception, T <: Top](op: (lcap: caps.Cap) ?-> CT[E]^{lcap} ?=> T)(handler: E => T): T = +def handle[E <: Exception, T <: Top](op: (lcap: caps.Capability) ?-> CT[E]^{lcap} ?=> T)(handler: E => T): T = val x: CT[E] = ??? 
try op(using caps.cap)(using x) catch case ex: E => handler(ex) diff --git a/tests/neg-custom-args/captures/usingLogFile.scala b/tests/neg-custom-args/captures/usingLogFile.scala index 25b853913af9..b25e4e75a784 100644 --- a/tests/neg-custom-args/captures/usingLogFile.scala +++ b/tests/neg-custom-args/captures/usingLogFile.scala @@ -3,7 +3,7 @@ import annotation.capability object Test1: - def usingLogFile[T](op: (local: caps.Cap) ?-> FileOutputStream => T): T = + def usingLogFile[T](op: (local: caps.Capability) ?-> FileOutputStream => T): T = val logFile = FileOutputStream("log") val result = op(using caps.cap)(logFile) logFile.close() diff --git a/tests/neg/unsound-reach.check b/tests/neg/unsound-reach.check index fd5c401416d1..8cabbe1571a0 100644 --- a/tests/neg/unsound-reach.check +++ b/tests/neg/unsound-reach.check @@ -1,5 +1,5 @@ --- Error: tests/neg/unsound-reach.scala:18:9 --------------------------------------------------------------------------- -18 | boom.use(f): (f1: File^{backdoor*}) => // error - | ^^^^^^^^ - | Reach capability backdoor* and universal capability cap cannot both - | appear in the type (x: File^)(op: box File^{backdoor*} => Unit): Unit of this expression +-- Error: tests/neg/unsound-reach.scala:18:13 -------------------------------------------------------------------------- +18 | boom.use(f): (f1: File^{backdoor*}) => // error + | ^^^^^^^^ + | Reach capability backdoor* and universal capability cap cannot both + | appear in the type (x: File^)(op: box File^{backdoor*} => Unit): Unit of this expression diff --git a/tests/neg/unsound-reach.scala b/tests/neg/unsound-reach.scala index 468730168019..48a74f86d311 100644 --- a/tests/neg/unsound-reach.scala +++ b/tests/neg/unsound-reach.scala @@ -5,16 +5,16 @@ trait File: def withFile[R](path: String)(op: File^ => R): R = ??? trait Foo[+X]: - def use(x: File^)(op: X => Unit): Unit + def use(x: File^)(op: X => Unit): Unit class Bar extends Foo[File^]: - def use(x: File^)(op: File^ => Unit): Unit = op(x) + def use(x: File^)(op: File^ => Unit): Unit = op(x) def bad(): Unit = - val backdoor: Foo[File^] = new Bar - val boom: Foo[File^{backdoor*}] = backdoor + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor - var escaped: File^{backdoor*} = null - withFile("hello.txt"): f => - boom.use(f): (f1: File^{backdoor*}) => // error - escaped = f1 + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + boom.use(f): (f1: File^{backdoor*}) => // error + escaped = f1 diff --git a/tests/pos-custom-args/captures/boxed1.scala b/tests/pos-custom-args/captures/boxed1.scala index 8c6b63ef0134..e2ff69c305d2 100644 --- a/tests/pos-custom-args/captures/boxed1.scala +++ b/tests/pos-custom-args/captures/boxed1.scala @@ -1,6 +1,6 @@ class Box[T](val x: T) -@annotation.capability class Cap +class Cap extends caps.Capability def foo(x: => Int): Unit = () diff --git a/tests/pos-custom-args/captures/capt-capability.scala b/tests/pos-custom-args/captures/capt-capability.scala index 830d341c7bca..64892218ee41 100644 --- a/tests/pos-custom-args/captures/capt-capability.scala +++ b/tests/pos-custom-args/captures/capt-capability.scala @@ -1,7 +1,7 @@ import annotation.capability +import caps.Capability -@capability class Cap -def f1(c: Cap): () ->{c} c.type = () => c // ok +def f1(c: Capability): () ->{c} c.type = () => c // ok def f2: Int = val g: Boolean => Int = ??? @@ -15,15 +15,15 @@ def f3: Int = x def foo() = - val x: Cap = ??? - val y: Cap = x - val x2: () ->{x} Cap = ??? 
- val y2: () ->{x} Cap = x2 + val x: Capability = ??? + val y: Capability = x + val x2: () ->{x} Capability = ??? + val y2: () ->{x} Capability = x2 - val z1: () => Cap = f1(x) + val z1: () => Capability = f1(x) def h[X](a: X)(b: X) = a val z2 = - if x == null then () => x else () => Cap() + if x == null then () => x else () => new Capability() {} val _ = x diff --git a/tests/pos-custom-args/captures/caseclass.scala b/tests/pos-custom-args/captures/caseclass.scala index ffbf878dca49..0aa656eaf9cb 100644 --- a/tests/pos-custom-args/captures/caseclass.scala +++ b/tests/pos-custom-args/captures/caseclass.scala @@ -1,4 +1,4 @@ -@annotation.capability class C +class C extends caps.Capability object test1: case class Ref(x: String^) diff --git a/tests/pos-custom-args/captures/cc-this.scala b/tests/pos-custom-args/captures/cc-this.scala index 12c62e99d186..d9705df76c55 100644 --- a/tests/pos-custom-args/captures/cc-this.scala +++ b/tests/pos-custom-args/captures/cc-this.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def eff(using Cap): Unit = () diff --git a/tests/pos-custom-args/captures/eta-expansions.scala b/tests/pos-custom-args/captures/eta-expansions.scala index 1aac7ded1b50..b4e38cdf0856 100644 --- a/tests/pos-custom-args/captures/eta-expansions.scala +++ b/tests/pos-custom-args/captures/eta-expansions.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def test(d: Cap) = def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) diff --git a/tests/pos-custom-args/captures/filevar-tracked.scala b/tests/pos-custom-args/captures/filevar-tracked.scala new file mode 100644 index 000000000000..6fb7000ad4c2 --- /dev/null +++ b/tests/pos-custom-args/captures/filevar-tracked.scala @@ -0,0 +1,38 @@ +import language.experimental.captureChecking +import language.experimental.modularity +import annotation.capability +import compiletime.uninitialized + +object test1: + class File: + def write(x: String): Unit = ??? + + class Service(f: File^): + def log = f.write("log") + + def withFile[T](op: (f: File^) => T): T = + op(new File) + + def test = + withFile: f => + val o = Service(f) + o.log + +object test2: + class IO extends caps.Capability + + class File: + def write(x: String): Unit = ??? + + class Service(tracked val io: IO): + var file: File^{io} = uninitialized + def log = file.write("log") + + def withFile[T](io2: IO)(op: (f: File^{io2}) => T): T = + op(new File) + + def test(io3: IO) = + withFile(io3): f => + val o = Service(io3) + o.file = f + o.log diff --git a/tests/pos-custom-args/captures/filevar.scala b/tests/pos-custom-args/captures/filevar.scala index a6cc7ca9ff47..c5571ca88849 100644 --- a/tests/pos-custom-args/captures/filevar.scala +++ b/tests/pos-custom-args/captures/filevar.scala @@ -1,4 +1,5 @@ import language.experimental.captureChecking +import language.experimental.modularity import annotation.capability import compiletime.uninitialized @@ -18,20 +19,20 @@ object test1: o.log object test2: - @capability class IO + class IO class File: def write(x: String): Unit = ??? 
- class Service(io: IO): + class Service(io: IO^): var file: File^{io} = uninitialized def log = file.write("log") - def withFile[T](io: IO)(op: (f: File^{io}) => T): T = + def withFile[T](io2: IO^)(op: (f: File^{io2}) => T): T = op(new File) - def test(io: IO) = - withFile(io): f => - val o = Service(io) + def test(io3: IO^) = + withFile(io3): f => + val o = Service(io3) o.file = f o.log diff --git a/tests/pos-custom-args/captures/i16116.scala b/tests/pos-custom-args/captures/i16116.scala index 0311e744f146..979bfdbe4328 100644 --- a/tests/pos-custom-args/captures/i16116.scala +++ b/tests/pos-custom-args/captures/i16116.scala @@ -15,8 +15,7 @@ object CpsMonad { @experimental object Test { - @capability - class CpsTransform[F[_]] { + class CpsTransform[F[_]] extends caps.Capability { def await[T](ft: F[T]): T^{ this } = ??? } diff --git a/tests/pos-custom-args/captures/i16226.scala b/tests/pos-custom-args/captures/i16226.scala index 4cd7f0ceea81..071eefbd3420 100644 --- a/tests/pos-custom-args/captures/i16226.scala +++ b/tests/pos-custom-args/captures/i16226.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability class LazyRef[T](val elem: () => T): val get: () ->{elem} T = elem diff --git a/tests/pos-custom-args/captures/i19751.scala b/tests/pos-custom-args/captures/i19751.scala index b6023cc0ff87..30bd8677f024 100644 --- a/tests/pos-custom-args/captures/i19751.scala +++ b/tests/pos-custom-args/captures/i19751.scala @@ -3,7 +3,7 @@ import annotation.capability import caps.cap trait Ptr[A] -@capability trait Scope: +trait Scope extends caps.Capability: def allocate(size: Int): Ptr[Unit]^{this} diff --git a/tests/pos-custom-args/captures/lazyref.scala b/tests/pos-custom-args/captures/lazyref.scala index 3dae51b491b4..2e3a0030bcdc 100644 --- a/tests/pos-custom-args/captures/lazyref.scala +++ b/tests/pos-custom-args/captures/lazyref.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability class LazyRef[T](val elem: () => T): val get: () ->{elem} T = elem diff --git a/tests/pos-custom-args/captures/lists.scala b/tests/pos-custom-args/captures/lists.scala index 99505f0bb7a2..5f4991c6be54 100644 --- a/tests/pos-custom-args/captures/lists.scala +++ b/tests/pos-custom-args/captures/lists.scala @@ -18,7 +18,7 @@ object NIL extends LIST[Nothing]: def map[A, B](f: A => B)(xs: LIST[A]): LIST[B] = xs.map(f) -@annotation.capability class Cap +class Cap extends caps.Capability def test(c: Cap, d: Cap, e: Cap) = def f(x: Cap): Unit = if c == x then () @@ -30,7 +30,7 @@ def test(c: Cap, d: Cap, e: Cap) = CONS(z, ys) val zsc: LIST[Cap ->{d, y} Unit] = zs val z1 = zs.head - val z1c: Cap^ ->{y, d} Unit = z1 + val z1c: Cap ->{y, d} Unit = z1 val ys1 = zs.tail val y1 = ys1.head diff --git a/tests/pos-custom-args/captures/logger-tracked.scala b/tests/pos-custom-args/captures/logger-tracked.scala new file mode 100644 index 000000000000..1949e25b00d9 --- /dev/null +++ b/tests/pos-custom-args/captures/logger-tracked.scala @@ -0,0 +1,68 @@ +import annotation.capability +import language.experimental.saferExceptions +import language.experimental.modularity + +class FileSystem extends caps.Capability + +class Logger(using tracked val fs: FileSystem): + def log(s: String): Unit = ??? 
+ +def test(using fs: FileSystem) = + val l: Logger^{fs} = Logger(using fs) + l.log("hello world!") + val xs: LazyList[Int]^{l} = + LazyList.from(1) + .map { i => + l.log(s"computing elem # $i") + i * i + } + +trait LazyList[+A]: + def isEmpty: Boolean + def head: A + def tail: LazyList[A]^{this} + +object LazyNil extends LazyList[Nothing]: + def isEmpty: Boolean = true + def head = ??? + def tail = ??? + +final class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: + def isEmpty = false + def head = x + def tail: LazyList[T]^{this} = xs() +end LazyCons + +extension [A](x: A) + def #::(xs1: => LazyList[A]^): LazyList[A]^{xs1} = + LazyCons(x, () => xs1) + +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{xs, f} = + if xs.isEmpty then LazyNil + else f(xs.head) #:: xs.tail.map(f) + +object LazyList: + def from(start: Int): LazyList[Int] = + start #:: from(start + 1) + +class Pair[+A, +B](x: A, y: B): + def fst: A = x + def snd: B = y + +def test2(ct: CanThrow[Exception], fs: FileSystem) = + def x: Int ->{ct} String = ??? + def y: Logger^{fs} = ??? + def p = Pair[Int ->{ct} String, Logger^{fs}](x, y) + def p3 = Pair(x, y) + def f = () => p.fst + + +/* + val l1: Int => String = ??? + val l2: Object^{c} = ??? + val pd = () => Pair(l1, l2) + val p2: Pair[Int => String, Object]^{c} = pd() + val hd = () => p2.fst + +*/ \ No newline at end of file diff --git a/tests/pos-custom-args/captures/logger.scala b/tests/pos-custom-args/captures/logger.scala index d95eeaae74cf..75ea3ac3fbc0 100644 --- a/tests/pos-custom-args/captures/logger.scala +++ b/tests/pos-custom-args/captures/logger.scala @@ -1,12 +1,13 @@ import annotation.capability import language.experimental.saferExceptions +import language.experimental.modularity -@capability class FileSystem +class FileSystem // does not work with extends caps.Capability -class Logger(using fs: FileSystem): +class Logger(using fs: FileSystem^): def log(s: String): Unit = ??? -def test(using fs: FileSystem) = +def test(using fs: FileSystem^) = val l: Logger^{fs} = Logger(using fs) l.log("hello world!") val xs: LazyList[Int]^{l} = @@ -49,7 +50,7 @@ class Pair[+A, +B](x: A, y: B): def fst: A = x def snd: B = y -def test2(ct: CanThrow[Exception], fs: FileSystem) = +def test2(ct: CanThrow[Exception], fs: FileSystem^) = def x: Int ->{ct} String = ??? def y: Logger^{fs} = ??? def p = Pair[Int ->{ct} String, Logger^{fs}](x, y) diff --git a/tests/pos-custom-args/captures/nested-classes-tracked.scala b/tests/pos-custom-args/captures/nested-classes-tracked.scala new file mode 100644 index 000000000000..1c81441f321b --- /dev/null +++ b/tests/pos-custom-args/captures/nested-classes-tracked.scala @@ -0,0 +1,22 @@ +import language.experimental.captureChecking +import language.experimental.modularity +import annotation.{capability, constructorOnly} + +class IO extends caps.Capability +class Blah +class Pkg(using tracked val io: IO): + class Foo: + def m(foo: Blah^{io}) = ??? +class Pkg2(using tracked val io: IO): + class Foo: + def m(foo: Blah^{io}): Any = io; ??? + +def main(using io: IO) = + val pkg = Pkg() + val f = pkg.Foo() + f.m(???) + val pkg2 = Pkg2() + val f2 = pkg2.Foo() + f2.m(???) 
+ + diff --git a/tests/pos-custom-args/captures/nested-classes.scala b/tests/pos-custom-args/captures/nested-classes.scala index b16fc4365183..4a0da34faf5c 100644 --- a/tests/pos-custom-args/captures/nested-classes.scala +++ b/tests/pos-custom-args/captures/nested-classes.scala @@ -1,16 +1,17 @@ import language.experimental.captureChecking +import language.experimental.modularity import annotation.{capability, constructorOnly} -@capability class IO +class IO // does not work with extends caps.Capability class Blah -class Pkg(using @constructorOnly io: IO): +class Pkg(using io: IO^): class Foo: def m(foo: Blah^{io}) = ??? -class Pkg2(using io: IO): +class Pkg2(using io: IO^): class Foo: def m(foo: Blah^{io}): Any = io; ??? -def main(using io: IO) = +def main(using io: IO^) = val pkg = Pkg() val f = pkg.Foo() f.m(???) diff --git a/tests/pos-custom-args/captures/null-logger.scala b/tests/pos-custom-args/captures/null-logger.scala index 0b32d045778c..958002ad0358 100644 --- a/tests/pos-custom-args/captures/null-logger.scala +++ b/tests/pos-custom-args/captures/null-logger.scala @@ -1,7 +1,7 @@ import annotation.capability import annotation.constructorOnly -@capability class FileSystem +class FileSystem extends caps.Capability class NullLogger(using @constructorOnly fs: FileSystem) diff --git a/tests/pos-custom-args/captures/pairs.scala b/tests/pos-custom-args/captures/pairs.scala index e15a76970c29..da7f30185ad3 100644 --- a/tests/pos-custom-args/captures/pairs.scala +++ b/tests/pos-custom-args/captures/pairs.scala @@ -1,6 +1,6 @@ //class CC //type Cap = CC^ -@annotation.capability class Cap +class Cap extends caps.Capability object Generic: @@ -13,6 +13,6 @@ object Generic: def g(x: Cap): Unit = if d == x then () val p = Pair(f, g) val x1 = p.fst - val x1c: Cap^ ->{c} Unit = x1 + val x1c: Cap ->{c} Unit = x1 val y1 = p.snd - val y1c: Cap^ ->{d} Unit = y1 + val y1c: Cap ->{d} Unit = y1 diff --git a/tests/pos-custom-args/captures/reaches.scala b/tests/pos-custom-args/captures/reaches.scala index f17c25712c39..f82c792c8445 100644 --- a/tests/pos-custom-args/captures/reaches.scala +++ b/tests/pos-custom-args/captures/reaches.scala @@ -48,7 +48,7 @@ def compose2[A, B, C](f: A => B, g: B => C): A => C = def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = ps.map((x, y) => compose1(x, y)) // Does not work if map takes an impure function, see reaches in neg -@annotation.capability class IO +class IO extends caps.Capability def test(io: IO) = val a: () ->{io} Unit = () => () diff --git a/tests/pos-custom-args/captures/try3.scala b/tests/pos-custom-args/captures/try3.scala index b44ea57ccae4..305069d3ae9f 100644 --- a/tests/pos-custom-args/captures/try3.scala +++ b/tests/pos-custom-args/captures/try3.scala @@ -2,7 +2,7 @@ import language.experimental.erasedDefinitions import annotation.capability import java.io.IOException -@annotation.capability class CanThrow[-E] +class CanThrow[-E] extends caps.Capability def handle[E <: Exception, T](op: CanThrow[E] ?=> T)(handler: E => T): T = val x: CanThrow[E] = ??? 
diff --git a/tests/pos-custom-args/captures/vars.scala b/tests/pos-custom-args/captures/vars.scala index a335be96fed1..5c9598fab508 100644 --- a/tests/pos-custom-args/captures/vars.scala +++ b/tests/pos-custom-args/captures/vars.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def test(cap1: Cap, cap2: Cap) = def f(x: String): String = if cap1 == cap1 then "" else "a" diff --git a/tests/pos/dotty-experimental.scala b/tests/pos/dotty-experimental.scala index ee9a84a1b497..813c9b5920c1 100644 --- a/tests/pos/dotty-experimental.scala +++ b/tests/pos/dotty-experimental.scala @@ -3,6 +3,6 @@ import language.experimental.captureChecking object test { - val x: caps.Cap = caps.cap + val x: caps.Capability = caps.cap } diff --git a/tests/pos/i20237.scala b/tests/pos/i20237.scala index da3e902b78b4..0a5eb6d9a332 100644 --- a/tests/pos/i20237.scala +++ b/tests/pos/i20237.scala @@ -1,7 +1,7 @@ import language.experimental.captureChecking import scala.annotation.capability -@capability class Cap: +class Cap extends caps.Capability: def use[T](body: Cap ?=> T) = body(using this) class Box[T](body: Cap ?=> T): diff --git a/tests/pos/into-bigint.scala b/tests/pos/into-bigint.scala index d7ecee40b3ba..409b5e79da2c 100644 --- a/tests/pos/into-bigint.scala +++ b/tests/pos/into-bigint.scala @@ -14,8 +14,8 @@ object BigInt: @main def Test = val x = BigInt(2) val y = 3 - val a1 = x + y - val a2 = y * x + val a1 = x + y // uses conversion on `y` + val a2 = y * x // uses conversion on `y` val a3 = x * x val a4 = y + y From 6baff2dfac206fac0f7c92c56adb6c12fd41ca08 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 14 May 2024 18:29:48 +0200 Subject: [PATCH 114/827] Drop convention on classes inheriting from universal capturing types Drop convention that classes inheriting from universal capturing types are capability classes. Capture sets of parents are instead ignored. The convention led to algebraic anomalies. For instance if class C extends A => B, Serializable then C <: (A => B) & Serializable, which has an empty capture set. Yet we treat every occurrence of C as implicitly carrying `cap`. --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 29 +------------------ .../dotty/tools/dotc/transform/Recheck.scala | 1 - .../captures/extending-impure-function.scala | 0 3 files changed, 1 insertion(+), 29 deletions(-) rename tests/{neg-custom-args => pos-custom-args}/captures/extending-impure-function.scala (100%) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 1a8c65c89a43..bdbc00674563 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -23,7 +23,6 @@ trait SetupAPI: def setupUnit(tree: Tree, recheckDef: DefRecheck)(using Context): Unit def isPreCC(sym: Symbol)(using Context): Boolean def postCheck()(using Context): Unit - def isCapabilityClassRef(tp: Type)(using Context): Boolean object Setup: @@ -68,29 +67,6 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: && !sym.owner.is(CaptureChecked) && !defn.isFunctionSymbol(sym.owner) - private val capabilityClassMap = new util.HashMap[Symbol, Boolean] - - /** Check if the class is capability, which means: - * 1. the class has a capability annotation, - * 2. or at least one of its parent type has universal capability. 
- */ - def isCapabilityClassRef(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match - case _: TypeRef | _: AppliedType => - val sym = tp.classSymbol - def checkSym: Boolean = sym.info.parents.exists(hasUniversalCapability) - sym.isClass && capabilityClassMap.getOrElseUpdate(sym, checkSym) - case _ => false - - private def hasUniversalCapability(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match - case CapturingType(parent, refs) => - refs.isUniversal || hasUniversalCapability(parent) - case AnnotatedType(parent, ann) => - if ann.symbol.isRetains then - try ann.tree.toCaptureSet.isUniversal || hasUniversalCapability(parent) - catch case ex: IllegalCaptureRef => false - else hasUniversalCapability(parent) - case tp => isCapabilityClassRef(tp) - private def fluidify(using Context) = new TypeMap with IdempotentCaptRefMap: def apply(t: Type): Type = t match case t: MethodType => @@ -317,10 +293,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case t: TypeVar => this(t.underlying) case t => - // Map references to capability classes C to C^ - if isCapabilityClassRef(t) - then CapturingType(t, defn.expandedUniversalSet, boxed = false) - else recur(t) + recur(t) end expandAliases val tp1 = expandAliases(tp) // TODO: Do we still need to follow aliases? diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index f809fbd176ce..e40c8346ed82 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -33,7 +33,6 @@ object Recheck: * Scala2ModuleVar cannot be also ParamAccessors. */ val ResetPrivate = Scala2ModuleVar - val ResetPrivateParamAccessor = ResetPrivate | ParamAccessor /** Attachment key for rechecked types of TypeTrees */ val RecheckedType = Property.Key[Type] diff --git a/tests/neg-custom-args/captures/extending-impure-function.scala b/tests/pos-custom-args/captures/extending-impure-function.scala similarity index 100% rename from tests/neg-custom-args/captures/extending-impure-function.scala rename to tests/pos-custom-args/captures/extending-impure-function.scala From 08557a16fecf5cc41686c99d6fcd745147073055 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 14 May 2024 20:56:42 +0200 Subject: [PATCH 115/827] Add fewer parameter refinements. Only enrich classes with capture refinements for a parameter if the deep capture set of the parameter's type is nonempty. 
--- compiler/src/dotty/tools/dotc/cc/Setup.scala | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index bdbc00674563..67977b027137 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -57,7 +57,20 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: private val toBeUpdated = new mutable.HashSet[Symbol] private def newFlagsFor(symd: SymDenotation)(using Context): FlagSet = - if symd.isAllOf(PrivateParamAccessor) && symd.owner.is(CaptureChecked) && !symd.hasAnnotation(defn.ConstructorOnlyAnnot) + + object containsCovarRetains extends TypeAccumulator[Boolean]: + def apply(x: Boolean, tp: Type): Boolean = + if x then true + else if tp.derivesFromCapability && variance >= 0 then true + else tp match + case AnnotatedType(_, ann) if ann.symbol.isRetains && variance >= 0 => true + case _ => foldOver(x, tp) + def apply(tp: Type): Boolean = apply(false, tp) + + if symd.isAllOf(PrivateParamAccessor) + && symd.owner.is(CaptureChecked) + && !symd.hasAnnotation(defn.ConstructorOnlyAnnot) + //&& containsCovarRetains(symd.symbol.originDenotation.info) then symd.flags &~ Private | Recheck.ResetPrivate else symd.flags From f02b5fbca3e1f830a51f3e02a3db10a6ac7c40ee Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 15 May 2024 15:42:19 +0200 Subject: [PATCH 116/827] Drop ResetPrivate flag --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 4 ++-- .../src/dotty/tools/dotc/transform/OverridingPairs.scala | 4 +++- compiler/src/dotty/tools/dotc/transform/Recheck.scala | 7 ------- 3 files changed, 5 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 67977b027137..0175d40c186c 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -70,8 +70,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if symd.isAllOf(PrivateParamAccessor) && symd.owner.is(CaptureChecked) && !symd.hasAnnotation(defn.ConstructorOnlyAnnot) - //&& containsCovarRetains(symd.symbol.originDenotation.info) - then symd.flags &~ Private | Recheck.ResetPrivate + && containsCovarRetains(symd.symbol.originDenotation.info) + then symd.flags &~ Private else symd.flags def isPreCC(sym: Symbol)(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index 4020291dded0..6529eed77fa0 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -34,7 +34,9 @@ object OverridingPairs: */ protected def exclude(sym: Symbol): Boolean = !sym.memberCanMatchInheritedSymbols - || isCaptureChecking && sym.is(Recheck.ResetPrivate) + || isCaptureChecking && atPhase(ctx.phase.prev)(sym.is(Private)) + // for capture checking we drop the private flag of certain parameter accessors + // but these still need no overriding checks /** The parents of base that are checked when deciding whether an overriding * pair has already been treated in a parent class. 
diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index e40c8346ed82..79dfe3393578 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -27,13 +27,6 @@ import annotation.tailrec object Recheck: import tpd.* - /** A flag used to indicate that a ParamAccessor has been temporarily made not-private - * Only used at the start of the Recheck phase, reset at its end. - * The flag repurposes the Scala2ModuleVar flag. No confusion is possible since - * Scala2ModuleVar cannot be also ParamAccessors. - */ - val ResetPrivate = Scala2ModuleVar - /** Attachment key for rechecked types of TypeTrees */ val RecheckedType = Property.Key[Type] From 01775760b007fad97eff82905018ecf3a769ef60 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 17 May 2024 13:31:21 +0200 Subject: [PATCH 117/827] Turn nested environment capture sets into constants at the end of box adaptation This change lets more ref trees with underlying function types keep their singleton types. --- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 2 +- .../dotty/tools/dotc/cc/CheckCaptures.scala | 4 +-- .../dotty/tools/dotc/core/TypeComparer.scala | 28 ++++++++++++++----- tests/neg-custom-args/captures/byname.check | 6 ++-- tests/neg-custom-args/captures/eta.check | 2 +- tests/neg-custom-args/captures/i15772.check | 2 +- .../neg-custom-args/captures/outer-var.check | 4 +-- 7 files changed, 31 insertions(+), 17 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 06ba36bd5e24..1a81e2bc1014 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -1080,7 +1080,7 @@ object CaptureSet: case _ => empty recur(tp) - .showing(i"capture set of $tp = $result", captDebug) + //.showing(i"capture set of $tp = $result", captDebug) private def deepCaptureSet(tp: Type)(using Context): CaptureSet = val collect = new TypeAccumulator[CaptureSet]: diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index cd0f21129ac9..30d05a744e31 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1006,7 +1006,7 @@ class CheckCaptures extends Recheck, SymTransformer: if (ares1 eq ares) && (aargs1 eq aargs) then actual else reconstruct(aargs1, ares1) - (resTp, curEnv.captured) + (resTp, CaptureSet(curEnv.captured.elems)) end adaptFun /** Adapt type function type `actual` to the expected type. 
@@ -1028,7 +1028,7 @@ class CheckCaptures extends Recheck, SymTransformer: if ares1 eq ares then actual else reconstruct(ares1) - (resTp, curEnv.captured) + (resTp, CaptureSet(curEnv.captured.elems)) end adaptTypeFun def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index c2c502a984c4..a9bb0406a14d 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -842,13 +842,27 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val refs1 = tp1.captureSet try if refs1.isAlwaysEmpty then recur(tp1, parent2) - else subCaptures(refs1, refs2, frozenConstraint).isOK - && sameBoxed(tp1, tp2, refs1) - && (recur(tp1.widen.stripCapturing, parent2) - || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) - // this alternative is needed in case the right hand side is a - // capturing type that contains the lhs as an alternative of a union type. - ) + else + // The singletonOK branch is because we sometimes have a larger capture set in a singleton + // than in its underlying type. An example is `f: () -> () ->{x} T`, which might be + // the type of a closure. In that case the capture set of `f.type` is `{x}` but the + // capture set of the underlying type is `{}`. So without the `singletonOK` test, a singleton + // might not be a subtype of its underlying type. Examples where this arises is + // capt-capibility.scala and function-combinators.scala + val singletonOK = tp1 match + case tp1: SingletonType + if subCaptures(tp1.underlying.captureSet, refs2, frozen = true).isOK => + recur(tp1.widen, tp2) + case _ => + false + singletonOK + || subCaptures(refs1, refs2, frozenConstraint).isOK + && sameBoxed(tp1, tp2, refs1) + && (recur(tp1.widen.stripCapturing, parent2) + || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) + // this alternative is needed in case the right hand side is a + // capturing type that contains the lhs as an alternative of a union type. + ) catch case ex: AssertionError => println(i"assertion failed while compare captured $tp1 <:< $tp2") throw ex diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index 2b48428e97bc..b9e5c81b721d 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -6,13 +6,13 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:4:2 ----------------------------------------- 4 | def f() = if cap1 == cap1 then g else g // error | ^ - | Found: (x$0: Int) ->{cap2} Int - | Required: (x$0: Int) -> Int + | Found: ((x$0: Int) ->{cap2} Int)^{} + | Required: Int -> Int | | Note that the expected type Int ->{} Int | is the previously inferred result type of method test | which is also the type seen in separately compiled sources. - | The new inferred type (x$0: Int) ->{cap2} Int + | The new inferred type ((x$0: Int) ->{cap2} Int)^{} | must conform to this type. 
5 | def g(x: Int) = if cap2 == cap2 then 1 else x 6 | def g2(x: Int) = if cap1 == cap1 then 1 else x diff --git a/tests/neg-custom-args/captures/eta.check b/tests/neg-custom-args/captures/eta.check index 91dfdf06d3cd..9850e54a7fdf 100644 --- a/tests/neg-custom-args/captures/eta.check +++ b/tests/neg-custom-args/captures/eta.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/eta.scala:4:9 -------------------------------------------- 4 | g // error | ^ - | Found: () ->? A + | Found: (g : () -> A) | Required: () -> Proc^{f} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i15772.check b/tests/neg-custom-args/captures/i15772.check index cce58da1b93b..0f8f0bf6eac5 100644 --- a/tests/neg-custom-args/captures/i15772.check +++ b/tests/neg-custom-args/captures/i15772.check @@ -35,7 +35,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:44:2 ---------------------------------------- 44 | x: (() -> Unit) // error | ^ - | Found: () ->{x} Unit + | Found: (x : () ->{filesList, sayHello} Unit) | Required: () -> Unit | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/outer-var.check b/tests/neg-custom-args/captures/outer-var.check index c250280961d9..b9f1f57be769 100644 --- a/tests/neg-custom-args/captures/outer-var.check +++ b/tests/neg-custom-args/captures/outer-var.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:11:8 ------------------------------------- 11 | x = q // error | ^ - | Found: () ->{q} Unit + | Found: (q : Proc) | Required: () ->{p, q²} Unit | | where: q is a parameter in method inner @@ -28,7 +28,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:14:8 ------------------------------------- 14 | y = q // error | ^ - | Found: () ->{q} Unit + | Found: (q : Proc) | Required: () ->{p} Unit | | Note that reference (q : Proc), defined in method inner From 4487eebad7be7ffcbfafcfa2a4b1d6851a6c0071 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 19 May 2024 10:36:57 +0200 Subject: [PATCH 118/827] Two fixes to make tests pass as before - Avoid creating capture sets of untrackable references - Refine disallowRootCapability to consider only explicit captures --- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 4 ++- .../dotty/tools/dotc/cc/CheckCaptures.scala | 6 ++-- tests/neg-custom-args/captures/capt1.check | 2 +- tests/neg-custom-args/captures/i16725.scala | 2 +- .../captures/filevar-expanded.scala | 36 +++++++++++++++++++ tests/pos-custom-args/captures/filevar.scala | 10 +++--- tests/pos-custom-args/captures/logger.scala | 8 ++--- .../captures/nested-classes.scala | 12 +++---- 8 files changed, 59 insertions(+), 21 deletions(-) create mode 100644 tests/pos-custom-args/captures/filevar-expanded.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 1a81e2bc1014..5422706f7c40 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -1037,7 +1037,9 @@ object CaptureSet: /** The capture set of the type underlying CaptureRef */ def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match - case ref: (TermRef | TermParamRef) if ref.isMaxCapability => ref.singletonCaptureSet + case ref: (TermRef | TermParamRef) if ref.isMaxCapability => + if ref.isTrackableRef then ref.singletonCaptureSet + else 
CaptureSet.universal case ReachCapability(ref1) => deepCaptureSet(ref1.widen) .showing(i"Deep capture set of $ref: ${ref1.widen} = $result", capt) case _ => ofType(ref.underlying, followResult = true) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 30d05a744e31..bde97a6d0387 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -157,15 +157,17 @@ object CheckCaptures: case _ => case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => () - case t => + case CapturingType(parent, refs) => if variance >= 0 then - t.captureSet.disallowRootCapability: () => + refs.disallowRootCapability: () => def part = if t eq tp then "" else i"the part $t of " report.error( em"""$what cannot $have $tp since |${part}that type captures the root capability `cap`. |$addendum""", pos) + traverse(parent) + case t => traverseChildren(t) check.traverse(tp) end disallowRootCapabilitiesIn diff --git a/tests/neg-custom-args/captures/capt1.check b/tests/neg-custom-args/captures/capt1.check index 74b9db728983..0e99d1876d3c 100644 --- a/tests/neg-custom-args/captures/capt1.check +++ b/tests/neg-custom-args/captures/capt1.check @@ -49,6 +49,6 @@ 34 | val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^ | Sealed type variable X cannot be instantiated to box () ->{x} Cap since - | the part C^ of that type captures the root capability `cap`. + | the part Cap of that type captures the root capability `cap`. | This is often caused by a local capability in an argument of method h | leaking as part of its result. diff --git a/tests/neg-custom-args/captures/i16725.scala b/tests/neg-custom-args/captures/i16725.scala index 853b3e96ca5c..733c2c562bbc 100644 --- a/tests/neg-custom-args/captures/i16725.scala +++ b/tests/neg-custom-args/captures/i16725.scala @@ -10,5 +10,5 @@ def useWrappedIO(wrapper: Wrapper[IO]): () -> Unit = wrapper: io => io.brewCoffee() def main(): Unit = - val escaped = usingIO(io => useWrappedIO(mk(io))) // error // error + val escaped = usingIO(io => useWrappedIO(mk(io))) // error escaped() // boom diff --git a/tests/pos-custom-args/captures/filevar-expanded.scala b/tests/pos-custom-args/captures/filevar-expanded.scala new file mode 100644 index 000000000000..13051994f346 --- /dev/null +++ b/tests/pos-custom-args/captures/filevar-expanded.scala @@ -0,0 +1,36 @@ +import language.experimental.captureChecking +import compiletime.uninitialized + +object test1: + class File: + def write(x: String): Unit = ??? + + class Service(f: File^): + def log = f.write("log") + + def withFile[T](op: (f: File^) => T): T = + op(new File) + + def test = + withFile: f => + val o = Service(f) + o.log + +object test2: + class IO + + class File: + def write(x: String): Unit = ??? 
+ + class Service(io: IO^): + var file: File^{io} = uninitialized + def log = file.write("log") + + def withFile[T](io2: IO^)(op: (f: File^{io2}) => T): T = + op(new File) + + def test(io3: IO^) = + withFile(io3): f => + val o = Service(io3) + o.file = f + o.log diff --git a/tests/pos-custom-args/captures/filevar.scala b/tests/pos-custom-args/captures/filevar.scala index c5571ca88849..9ab34fe617b5 100644 --- a/tests/pos-custom-args/captures/filevar.scala +++ b/tests/pos-custom-args/captures/filevar.scala @@ -1,6 +1,4 @@ import language.experimental.captureChecking -import language.experimental.modularity -import annotation.capability import compiletime.uninitialized object test1: @@ -19,19 +17,19 @@ object test1: o.log object test2: - class IO + class IO extends caps.Capability class File: def write(x: String): Unit = ??? - class Service(io: IO^): + class Service(io: IO): var file: File^{io} = uninitialized def log = file.write("log") - def withFile[T](io2: IO^)(op: (f: File^{io2}) => T): T = + def withFile[T](io2: IO)(op: (f: File^{io2}) => T): T = op(new File) - def test(io3: IO^) = + def test(io3: IO) = withFile(io3): f => val o = Service(io3) o.file = f diff --git a/tests/pos-custom-args/captures/logger.scala b/tests/pos-custom-args/captures/logger.scala index 75ea3ac3fbc0..04aee89a227e 100644 --- a/tests/pos-custom-args/captures/logger.scala +++ b/tests/pos-custom-args/captures/logger.scala @@ -2,12 +2,12 @@ import annotation.capability import language.experimental.saferExceptions import language.experimental.modularity -class FileSystem // does not work with extends caps.Capability +class FileSystem extends caps.Capability -class Logger(using fs: FileSystem^): +class Logger(using fs: FileSystem): def log(s: String): Unit = ??? -def test(using fs: FileSystem^) = +def test(using fs: FileSystem) = val l: Logger^{fs} = Logger(using fs) l.log("hello world!") val xs: LazyList[Int]^{l} = @@ -50,7 +50,7 @@ class Pair[+A, +B](x: A, y: B): def fst: A = x def snd: B = y -def test2(ct: CanThrow[Exception], fs: FileSystem^) = +def test2(ct: CanThrow[Exception], fs: FileSystem) = def x: Int ->{ct} String = ??? def y: Logger^{fs} = ??? def p = Pair[Int ->{ct} String, Logger^{fs}](x, y) diff --git a/tests/pos-custom-args/captures/nested-classes.scala b/tests/pos-custom-args/captures/nested-classes.scala index 4a0da34faf5c..4a76a88c03ff 100644 --- a/tests/pos-custom-args/captures/nested-classes.scala +++ b/tests/pos-custom-args/captures/nested-classes.scala @@ -2,21 +2,21 @@ import language.experimental.captureChecking import language.experimental.modularity import annotation.{capability, constructorOnly} -class IO // does not work with extends caps.Capability +class IO extends caps.Capability class Blah -class Pkg(using io: IO^): +class Pkg(using io: IO): class Foo: def m(foo: Blah^{io}) = ??? -class Pkg2(using io: IO^): +class Pkg2(using io: IO): class Foo: def m(foo: Blah^{io}): Any = io; ??? -def main(using io: IO^) = +def main(using io: IO) = val pkg = Pkg() val f = pkg.Foo() - f.m(???) + val x1 = f.m(???) val pkg2 = Pkg2() val f2 = pkg2.Foo() - f2.m(???) + val x2 = f2.m(???) 
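
(Editorial aside, not part of the preceding patch.) The pattern the updated
pos tests above rely on, condensed from the filevar test: a class that
extends caps.Capability is itself a tracked capability, so a plain parameter
of that type can occur in capture sets such as File^{io}.

    import language.experimental.captureChecking
    import compiletime.uninitialized

    class IO extends caps.Capability

    class File:
      def write(x: String): Unit = ???

    class Service(io: IO):
      var file: File^{io} = uninitialized
      def log = file.write("log")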
From 999be5ea470dda53f57ade1e19303b26ba6f4671 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 19 May 2024 10:37:22 +0200 Subject: [PATCH 119/827] Add assertions that all references in capture sets are trackable --- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 5422706f7c40..697b6f707309 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -388,7 +388,7 @@ object CaptureSet: def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = if elems.isEmpty then empty - else Const(SimpleIdentitySet(elems.map(_.normalizedRef)*)) + else Const(SimpleIdentitySet(elems.map(_.normalizedRef.ensuring(_.isTrackableRef))*)) def apply(elems: Refs)(using Context): CaptureSet.Const = if elems.isEmpty then empty else Const(elems) @@ -496,6 +496,7 @@ object CaptureSet: CompareResult.LevelError(this, elem) else //if id == 34 then assert(!elem.isUniversalRootCapability) + assert(elem.isTrackableRef, elem) elems += elem if elem.isRootCapability then rootAddedHandler() From 81cf8d8b5090da0609f1b4bf3cd44db881b1f926 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 26 May 2024 16:21:48 +0200 Subject: [PATCH 120/827] Make capture sets of expressions deriving Capability explicit When an expression has a type that derives from caps.Capability, add an explicit capture set. Also: Address other review comments --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 4 +-- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 6 +++- .../dotty/tools/dotc/cc/CheckCaptures.scala | 29 +++++++++++++++---- library/src/scala/caps.scala | 2 +- .../captures/effect-swaps-explicit.check | 22 +++++++------- .../captures/effect-swaps-explicit.scala | 2 -- .../captures/effect-swaps.check | 18 ++++++------ .../captures/effect-swaps.scala | 2 -- .../captures/extending-cap-classes.check | 21 ++++++++++++++ .../captures/extending-cap-classes.scala | 14 +++++++++ .../captures/usingLogFile.check | 12 ++++---- .../captures/usingLogFile.scala | 1 - .../captures/capt-capability.scala | 1 - .../captures/filevar-tracked.scala | 1 - tests/pos-custom-args/captures/i19751.scala | 1 - .../captures/logger-tracked.scala | 1 - tests/pos-custom-args/captures/logger.scala | 1 - .../captures/null-logger.scala | 1 - tests/pos-custom-args/captures/try3.scala | 1 - .../dotc/core/Definitions.scala | 1 - tests/pos/i20237.scala | 1 - 21 files changed, 92 insertions(+), 50 deletions(-) create mode 100644 tests/neg-custom-args/captures/extending-cap-classes.check create mode 100644 tests/neg-custom-args/captures/extending-cap-classes.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 9b899030bc02..8276a0987003 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -203,8 +203,8 @@ extension (tp: Type) case _ => false - /** Does type derive from caps.Capability?, which means it references of this - * type are maximal capabilities? + /** Tests whether the type derives from `caps.Capability`, which means + * references of this type are maximal capabilities. 
*/ def derivesFromCapability(using Context): Boolean = tp.dealias match case tp: (TypeRef | AppliedType) => diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 697b6f707309..f78ed1a91bd6 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -154,7 +154,11 @@ sealed abstract class CaptureSet extends Showable: (x eq y) || x.isRootCapability || y.match - case y: TermRef => y.prefix eq x + case y: TermRef => + (y.prefix eq x) + || y.info.match + case y1: CaptureRef => x.subsumes(y1) + case _ => false case MaybeCapability(y1) => x.stripMaybe.subsumes(y1) case _ => false || x.match diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index bde97a6d0387..5daa56b9cc07 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1037,7 +1037,7 @@ class CheckCaptures extends Recheck, SymTransformer: val arrow = if covariant then "~~>" else "<~~" i"adapting $actual $arrow $expected" - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { + def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true): if expected.isInstanceOf[WildcardType] then actual else // Decompose the actual type into the inner shape type, the capture set and the box status @@ -1117,7 +1117,22 @@ class CheckCaptures extends Recheck, SymTransformer: adaptedType(!boxed) else adaptedType(boxed) - } + end adapt + + /** If result derives from caps.Capability, yet is not a capturing type itself, + * make its capture set explicit. + */ + def makeCaptureSetExplicit(result: Type) = result match + case CapturingType(_, _) => result + case _ => + if result.derivesFromCapability then + val cap: CaptureRef = actual match + case ref: CaptureRef if ref.isTracked => + ref + case _ => + defn.captureRoot.termRef // TODO: skolemize? + CapturingType(result, cap.singletonCaptureSet) + else result if expected == LhsProto || expected.isSingleton && actual.isSingleton then actual @@ -1133,10 +1148,12 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => case _ => val adapted = adapt(actualw.withReachCaptures(actual), expected, covariant = true) - if adapted ne actualw then - capt.println(i"adapt boxed $actual vs $expected ===> $adapted") - adapted - else actual + makeCaptureSetExplicit: + if adapted ne actualw then + capt.println(i"adapt boxed $actual vs $expected ===> $adapted") + adapted + else + actual end adaptBoxed /** Check overrides again, taking capture sets into account. 
diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 215ad2cb5697..808bdba34e3f 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -4,7 +4,7 @@ import annotation.experimental @experimental object caps: - trait Capability // should be @erased + trait Capability extends Any /** The universal capture reference */ val cap: Capability = new Capability() {} diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.check b/tests/neg-custom-args/captures/effect-swaps-explicit.check index 47559ab97568..8c4d1f315fd8 100644 --- a/tests/neg-custom-args/captures/effect-swaps-explicit.check +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.check @@ -1,29 +1,29 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:64:8 ------------------------- -63 | Result: -64 | Future: // error, type mismatch +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:62:8 ------------------------- +61 | Result: +62 | Future: // error, type mismatch | ^ | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] | Required: Result[Future[T], Nothing] -65 | fr.await.ok +63 | fr.await.ok |-------------------------------------------------------------------------------------------------------------------- |Inline stack trace |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from effect-swaps-explicit.scala:41 -41 | boundary(Ok(body)) + |This location contains code that was inlined from effect-swaps-explicit.scala:39 +39 | boundary(Ok(body)) | ^^^^^^^^ -------------------------------------------------------------------------------------------------------------------- | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:74:10 ------------------------ -74 | Future: fut ?=> // error: type mismatch +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:72:10 ------------------------ +72 | Future: fut ?=> // error: type mismatch | ^ | Found: Future[box T^?]^{fr, lbl} | Required: Future[box T^?]^? 
-75 | fr.await.ok +73 | fr.await.ok | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:68:15 --------------------------------------------- -68 | Result.make: //lbl ?=> // error, escaping label from Result +-- Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:66:15 --------------------------------------------- +66 | Result.make: //lbl ?=> // error, escaping label from Result | ^^^^^^^^^^^ |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.scala b/tests/neg-custom-args/captures/effect-swaps-explicit.scala index 814199706721..052beaab01b2 100644 --- a/tests/neg-custom-args/captures/effect-swaps-explicit.scala +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.scala @@ -1,5 +1,3 @@ -import annotation.capability - object boundary: final class Label[-T] // extends caps.Capability diff --git a/tests/neg-custom-args/captures/effect-swaps.check b/tests/neg-custom-args/captures/effect-swaps.check index f16019c15513..ef5a95d333bf 100644 --- a/tests/neg-custom-args/captures/effect-swaps.check +++ b/tests/neg-custom-args/captures/effect-swaps.check @@ -1,24 +1,24 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:64:8 ---------------------------------- -63 | Result: -64 | Future: // error, type mismatch +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:62:8 ---------------------------------- +61 | Result: +62 | Future: // error, type mismatch | ^ | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] | Required: Result[Future[T], Nothing] -65 | fr.await.ok +63 | fr.await.ok |-------------------------------------------------------------------------------------------------------------------- |Inline stack trace |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from effect-swaps.scala:41 -41 | boundary(Ok(body)) + |This location contains code that was inlined from effect-swaps.scala:39 +39 | boundary(Ok(body)) | ^^^^^^^^ -------------------------------------------------------------------------------------------------------------------- | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:74:10 --------------------------------- -74 | Future: fut ?=> // error: type mismatch +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:72:10 --------------------------------- +72 | Future: fut ?=> // error: type mismatch | ^ | Found: Future[box T^?]^{fr, lbl} | Required: Future[box T^?]^? 
-75 | fr.await.ok +73 | fr.await.ok | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/effect-swaps.scala b/tests/neg-custom-args/captures/effect-swaps.scala index af44501371a9..d4eed2bae2f2 100644 --- a/tests/neg-custom-args/captures/effect-swaps.scala +++ b/tests/neg-custom-args/captures/effect-swaps.scala @@ -1,5 +1,3 @@ -import annotation.capability - object boundary: final class Label[-T] extends caps.Capability diff --git a/tests/neg-custom-args/captures/extending-cap-classes.check b/tests/neg-custom-args/captures/extending-cap-classes.check new file mode 100644 index 000000000000..3bdddfd9dd3c --- /dev/null +++ b/tests/neg-custom-args/captures/extending-cap-classes.check @@ -0,0 +1,21 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/extending-cap-classes.scala:7:15 ------------------------- +7 | val x2: C1 = new C2 // error + | ^^^^^^ + | Found: C2^ + | Required: C1 + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/extending-cap-classes.scala:8:15 ------------------------- +8 | val x3: C1 = new C3 // error + | ^^^^^^ + | Found: C3^ + | Required: C1 + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/extending-cap-classes.scala:13:15 ------------------------ +13 | val z2: C1 = y2 // error + | ^^ + | Found: (y2 : C2)^{y2} + | Required: C1 + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/extending-cap-classes.scala b/tests/neg-custom-args/captures/extending-cap-classes.scala new file mode 100644 index 000000000000..6f5a8f48c30a --- /dev/null +++ b/tests/neg-custom-args/captures/extending-cap-classes.scala @@ -0,0 +1,14 @@ +class C1 +class C2 extends C1, caps.Capability +class C3 extends C2 + +def test = + val x1: C1 = new C1 + val x2: C1 = new C2 // error + val x3: C1 = new C3 // error + + val y2: C2 = new C2 + val y3: C3 = new C3 + + val z2: C1 = y2 // error + diff --git a/tests/neg-custom-args/captures/usingLogFile.check b/tests/neg-custom-args/captures/usingLogFile.check index bf5c1dc4f83a..068d8be78c70 100644 --- a/tests/neg-custom-args/captures/usingLogFile.check +++ b/tests/neg-custom-args/captures/usingLogFile.check @@ -1,12 +1,12 @@ --- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:14 ------------------------------------------------------ -23 | val later = usingLogFile { f => () => f.write(0) } // error +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:22:14 ------------------------------------------------------ +22 | val later = usingLogFile { f => () => f.write(0) } // error | ^^^^^^^^^^^^ | local reference f leaks into outer capture set of type parameter T of method usingLogFile in object Test2 --- Error: tests/neg-custom-args/captures/usingLogFile.scala:28:23 ------------------------------------------------------ -28 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:27:23 ------------------------------------------------------ +27 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error | ^^^^^^^^^^^^ | local reference f leaks into outer capture set of type parameter T of method usingLogFile in object Test2 --- Error: tests/neg-custom-args/captures/usingLogFile.scala:44:16 ------------------------------------------------------ -44 | val later = 
usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:43:16 ------------------------------------------------------ +43 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error | ^^^^^^^^^ | local reference f leaks into outer capture set of type parameter T of method usingFile in object Test3 diff --git a/tests/neg-custom-args/captures/usingLogFile.scala b/tests/neg-custom-args/captures/usingLogFile.scala index b25e4e75a784..2b46a5401f46 100644 --- a/tests/neg-custom-args/captures/usingLogFile.scala +++ b/tests/neg-custom-args/captures/usingLogFile.scala @@ -1,5 +1,4 @@ import java.io.* -import annotation.capability object Test1: diff --git a/tests/pos-custom-args/captures/capt-capability.scala b/tests/pos-custom-args/captures/capt-capability.scala index 64892218ee41..03b5cb1bbabf 100644 --- a/tests/pos-custom-args/captures/capt-capability.scala +++ b/tests/pos-custom-args/captures/capt-capability.scala @@ -1,4 +1,3 @@ -import annotation.capability import caps.Capability def f1(c: Capability): () ->{c} c.type = () => c // ok diff --git a/tests/pos-custom-args/captures/filevar-tracked.scala b/tests/pos-custom-args/captures/filevar-tracked.scala index 6fb7000ad4c2..dc8d0b18908b 100644 --- a/tests/pos-custom-args/captures/filevar-tracked.scala +++ b/tests/pos-custom-args/captures/filevar-tracked.scala @@ -1,6 +1,5 @@ import language.experimental.captureChecking import language.experimental.modularity -import annotation.capability import compiletime.uninitialized object test1: diff --git a/tests/pos-custom-args/captures/i19751.scala b/tests/pos-custom-args/captures/i19751.scala index 30bd8677f024..b41017f4f3e7 100644 --- a/tests/pos-custom-args/captures/i19751.scala +++ b/tests/pos-custom-args/captures/i19751.scala @@ -1,5 +1,4 @@ import language.experimental.captureChecking -import annotation.capability import caps.cap trait Ptr[A] diff --git a/tests/pos-custom-args/captures/logger-tracked.scala b/tests/pos-custom-args/captures/logger-tracked.scala index 1949e25b00d9..053731de444d 100644 --- a/tests/pos-custom-args/captures/logger-tracked.scala +++ b/tests/pos-custom-args/captures/logger-tracked.scala @@ -1,4 +1,3 @@ -import annotation.capability import language.experimental.saferExceptions import language.experimental.modularity diff --git a/tests/pos-custom-args/captures/logger.scala b/tests/pos-custom-args/captures/logger.scala index 04aee89a227e..81eeb521fee5 100644 --- a/tests/pos-custom-args/captures/logger.scala +++ b/tests/pos-custom-args/captures/logger.scala @@ -1,4 +1,3 @@ -import annotation.capability import language.experimental.saferExceptions import language.experimental.modularity diff --git a/tests/pos-custom-args/captures/null-logger.scala b/tests/pos-custom-args/captures/null-logger.scala index 958002ad0358..d532b5f74b38 100644 --- a/tests/pos-custom-args/captures/null-logger.scala +++ b/tests/pos-custom-args/captures/null-logger.scala @@ -1,4 +1,3 @@ -import annotation.capability import annotation.constructorOnly class FileSystem extends caps.Capability diff --git a/tests/pos-custom-args/captures/try3.scala b/tests/pos-custom-args/captures/try3.scala index 305069d3ae9f..a1a1bab8724a 100644 --- a/tests/pos-custom-args/captures/try3.scala +++ b/tests/pos-custom-args/captures/try3.scala @@ -1,5 +1,4 @@ import language.experimental.erasedDefinitions -import annotation.capability import java.io.IOException class CanThrow[-E] extends caps.Capability diff --git 
a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala
index 603088dd8f26..8faf208e36d0 100644
--- a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala
+++ b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala
@@ -985,7 +985,6 @@ class Definitions {
   @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty")
   @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty")
   @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body")
-  @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability")
   @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child")
   @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount")
   @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass")
diff --git a/tests/pos/i20237.scala b/tests/pos/i20237.scala
index 0a5eb6d9a332..973f5d2025e3 100644
--- a/tests/pos/i20237.scala
+++ b/tests/pos/i20237.scala
@@ -1,5 +1,4 @@
 import language.experimental.captureChecking
-import scala.annotation.capability
 
 class Cap extends caps.Capability:
   def use[T](body: Cap ?=> T) = body(using this)

From 2f3f02f50f30e1bd182cb9c606e3edf37cd4f6b4 Mon Sep 17 00:00:00 2001
From: odersky
Date: Sat, 25 May 2024 15:34:59 +0200
Subject: [PATCH 121/827] Avoid useless warnings about priority change in
 implicit search

Warn about priority change in implicit search only if one of the
participating candidates appears in the final result. It could be that
we have a priority change between two ranked candidates that both are
superseded by the result of the implicit search. In this case, no
warning needs to be reported.
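
As an editorial sketch of the new bookkeeping (the identifiers below are
illustrative and are not the compiler's actual API): a search result knows
which references it found, and a pending priority-change warning is only
reported when one of the candidate references it mentions is among them.

    object PrioritySketch:

      // A search result knows which references it found: one for a success,
      // two for an ambiguity, none otherwise.
      sealed trait Result[Ref]:
        def found: List[Ref]

      case class Success[Ref](ref: Ref) extends Result[Ref]:
        def found = List(ref)

      case class Ambiguous[Ref](alt1: Ref, alt2: Ref) extends Result[Ref]:
        def found = List(alt1, alt2)

      case class Failure[Ref]() extends Result[Ref]:
        def found = Nil

      // Report only the pending warnings whose candidate reference made it
      // into the final result; warnings about candidates that were all
      // superseded by a better result are dropped.
      def relevantWarnings[Ref, Msg](pending: List[(Ref, Msg)], result: Result[Ref]): List[Msg] =
        pending.collect { case (ref, msg) if result.found.contains(ref) => msg }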
--- .../dotty/tools/dotc/typer/Implicits.scala | 32 +++++++++++++++---- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 54821444aed6..a15541fa9c76 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -419,6 +419,12 @@ object Implicits: sealed abstract class SearchResult extends Showable { def tree: Tree def toText(printer: Printer): Text = printer.toText(this) + + /** The references that were found, there can be two of them in the case + * of an AmbiguousImplicits failure + */ + def found: List[TermRef] + def recoverWith(other: SearchFailure => SearchResult): SearchResult = this match { case _: SearchSuccess => this case fail: SearchFailure => other(fail) @@ -434,13 +440,17 @@ object Implicits: * @param tstate The typer state to be committed if this alternative is chosen */ case class SearchSuccess(tree: Tree, ref: TermRef, level: Int, isExtension: Boolean = false)(val tstate: TyperState, val gstate: GadtConstraint) - extends SearchResult with RefAndLevel with Showable + extends SearchResult with RefAndLevel with Showable: + final def found = ref :: Nil /** A failed search */ case class SearchFailure(tree: Tree) extends SearchResult { require(tree.tpe.isInstanceOf[SearchFailureType], s"unexpected type for ${tree}") final def isAmbiguous: Boolean = tree.tpe.isInstanceOf[AmbiguousImplicits | TooUnspecific] final def reason: SearchFailureType = tree.tpe.asInstanceOf[SearchFailureType] + final def found = tree.tpe match + case tpe: AmbiguousImplicits => tpe.alt1.ref :: tpe.alt2.ref :: Nil + case _ => Nil } object SearchFailure { @@ -1290,6 +1300,11 @@ trait Implicits: /** Search a list of eligible implicit references */ private def searchImplicit(eligible: List[Candidate], contextual: Boolean): SearchResult = + // A map that associates a priority change warning (between -source 3.4 and 3.6) + // with a candidate ref mentioned in the warning. We report the associated + // message if the candidate ref is part of the result of the implicit search + var priorityChangeWarnings = mutable.ListBuffer[(TermRef, Message)]() + /** Compare `alt1` with `alt2` to determine which one should be chosen. 
* * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1306,6 +1321,8 @@ trait Implicits: */ def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) + def warn(msg: Message) = + priorityChangeWarnings += (alt1.ref -> msg) += (alt2.ref -> msg) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else @@ -1319,16 +1336,16 @@ trait Implicits: case 1 => "the first alternative" case _ => "none - it's ambiguous" if sv.stable == SourceVersion.`3.5` then - report.warning( + warn( em"""Given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} will change |Current choice : ${choice(prev)} - |New choice from Scala 3.6: ${choice(cmp)}""", srcPos) + |New choice from Scala 3.6: ${choice(cmp)}""") prev else - report.warning( + warn( em"""Change in given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} |Previous choice : ${choice(prev)} - |New choice from Scala 3.6: ${choice(cmp)}""", srcPos) + |New choice from Scala 3.6: ${choice(cmp)}""") cmp else cmp else cmp @@ -1578,7 +1595,10 @@ trait Implicits: validateOrdering(ord) throw ex - rank(sort(eligible), NoMatchingImplicitsFailure, Nil) + val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) + for (ref, msg) <- priorityChangeWarnings do + if result.found.contains(ref) then report.warning(msg, srcPos) + result end searchImplicit def isUnderSpecifiedArgument(tp: Type): Boolean = From 23a6027bb5a0710dce2b26ac99103e08a5d7caff Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 27 May 2024 17:57:03 +0200 Subject: [PATCH 122/827] Re-enable semanticdb test --- tests/semanticdb/expect/InventedNames.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/semanticdb/expect/InventedNames.scala b/tests/semanticdb/expect/InventedNames.scala index 61baae46c832..42c14c90e370 100644 --- a/tests/semanticdb/expect/InventedNames.scala +++ b/tests/semanticdb/expect/InventedNames.scala @@ -32,7 +32,7 @@ given [T]: Z[T] with val a = intValue val b = given_String -//val c = given_Double +val c = given_Double val d = given_List_T[Int] val e = given_Char val f = given_Float From a47035ed7be4d59eb3f2ce0ee108bc35798c7ca0 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 27 May 2024 23:38:03 +0200 Subject: [PATCH 123/827] Update semanticDB expect files --- tests/semanticdb/expect/InventedNames.expect.scala | 2 +- tests/semanticdb/metac.expect | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/tests/semanticdb/expect/InventedNames.expect.scala b/tests/semanticdb/expect/InventedNames.expect.scala index b92e9aa940a7..7c5b008209c2 100644 --- a/tests/semanticdb/expect/InventedNames.expect.scala +++ b/tests/semanticdb/expect/InventedNames.expect.scala @@ -32,7 +32,7 @@ given [T/*<-givens::InventedNames$package.given_Z_T#[T]*/]: Z/*->givens::Z#*/[T/ val a/*<-givens::InventedNames$package.a.*/ = intValue/*->givens::InventedNames$package.intValue.*/ val b/*<-givens::InventedNames$package.b.*/ = given_String/*->givens::InventedNames$package.given_String.*/ -//val c = given_Double +val c/*<-givens::InventedNames$package.c.*/ = given_Double/*->givens::InventedNames$package.given_Double().*/ val d/*<-givens::InventedNames$package.d.*/ = given_List_T/*->givens::InventedNames$package.given_List_T().*/[Int/*->scala::Int#*/] val e/*<-givens::InventedNames$package.e.*/ = given_Char/*->givens::InventedNames$package.given_Char.*/ val 
f/*<-givens::InventedNames$package.f.*/ = given_Float/*->givens::InventedNames$package.given_Float.*/ diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 98657f122255..84c3e7c6a110 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2093,15 +2093,16 @@ Schema => SemanticDB v4 Uri => InventedNames.scala Text => empty Language => Scala -Symbols => 44 entries -Occurrences => 64 entries -Synthetics => 2 entries +Symbols => 45 entries +Occurrences => 66 entries +Synthetics => 3 entries Symbols: -givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +23 decls } +givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +24 decls } givens/InventedNames$package.`* *`. => final implicit lazy val given method * * Long givens/InventedNames$package.a. => val method a Int givens/InventedNames$package.b. => val method b String +givens/InventedNames$package.c. => val method c Double givens/InventedNames$package.d. => val method d List[Int] givens/InventedNames$package.e. => val method e Char givens/InventedNames$package.f. => val method f Float @@ -2192,6 +2193,8 @@ Occurrences: [32:8..32:16): intValue -> givens/InventedNames$package.intValue. [33:4..33:5): b <- givens/InventedNames$package.b. [33:8..33:20): given_String -> givens/InventedNames$package.given_String. +[34:4..34:5): c <- givens/InventedNames$package.c. +[34:8..34:20): given_Double -> givens/InventedNames$package.given_Double(). [35:4..35:5): d <- givens/InventedNames$package.d. [35:8..35:20): given_List_T -> givens/InventedNames$package.given_List_T(). [35:21..35:24): Int -> scala/Int# @@ -2211,6 +2214,7 @@ Occurrences: Synthetics: [24:0..24:0): => *(x$1) +[34:8..34:20):given_Double => *(intValue) [40:8..40:15):given_Y => *(given_X) expect/Issue1749.scala From d3df8cacc78be93ab5b8b9eaf7fe073fad27010e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20Fornal?= <24961583+Florian3k@users.noreply.github.com> Date: Tue, 28 May 2024 11:21:19 +0200 Subject: [PATCH 124/827] Fix handling of right associative extension methods across scaladoc and printers (#20467) Fixes #19925 The same issue with handling right associative extension methods was also present in RefinedPrinter and ShortenedTypePrinter, so I fixed those as well --- .../dotty/tools/dotc/printing/RefinedPrinter.scala | 4 +++- .../dotty/tools/pc/printer/ShortenedTypePrinter.scala | 4 +++- .../dotty/tools/pc/tests/hover/HoverTermSuite.scala | 11 +++++++++++ .../src/tests/rightAssocExtension.scala | 7 +++++++ .../dotty/tools/scaladoc/tasty/ClassLikeSupport.scala | 4 +++- .../signatures/TranslatableSignaturesTestCases.scala | 2 ++ 6 files changed, 29 insertions(+), 3 deletions(-) create mode 100644 scaladoc-testcases/src/tests/rightAssocExtension.scala diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 1ff4c8cae339..0c6e36c8f18f 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -988,7 +988,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { // - trailingUsing = List(`(using D)`) // - rest = List(`(g: G)`, `(using H)`) // we need to swap (rightTyParams ++ rightParam) with (leftParam ++ trailingUsing) - val (leftTyParams, rest1) = tree.paramss.span(isTypeParamClause) + val (leftTyParams, rest1) = tree.paramss match + case fst :: tail if 
isTypeParamClause(fst) => (List(fst), tail) + case other => (List(), other) val (leadingUsing, rest2) = rest1.span(isUsingClause) val (rightTyParams, rest3) = rest2.span(isTypeParamClause) val (rightParam, rest4) = rest3.splitAt(1) diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala index 559e199f3449..19d603fcbb3b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala @@ -419,7 +419,9 @@ class ShortenedTypePrinter( if gsym.is(Flags.ExtensionMethod) then val filteredParams = if gsym.name.isRightAssocOperatorName then - val (leadingTyParamss, rest1) = paramss.span(isTypeParamClause) + val (leadingTyParamss, rest1) = paramss match + case fst :: tail if isTypeParamClause(fst) => (List(fst), tail) + case other => (List(), other) val (leadingUsing, rest2) = rest1.span(isUsingClause) val (rightTyParamss, rest3) = rest2.span(isTypeParamClause) val (rightParamss, rest4) = rest3.splitAt(1) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala index b51974b00fb0..9ae37048caf7 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala @@ -683,3 +683,14 @@ class HoverTermSuite extends BaseHoverSuite: |""".stripMargin, """yy: A{type T = Int}""".stripMargin.hover ) + + @Test def `right-assoc-extension`: Unit = + check( + """ + |case class Wrap[+T](x: T) + | + |extension [T](a: T) + | def <<*@@:>>[U <: Tuple](b: Wrap[U]): Wrap[T *: U] = Wrap(a *: b.x) + |""".stripMargin, + "extension [T](a: T) def *:[U <: Tuple](b: Wrap[U]): Wrap[T *: U]".hover + ) diff --git a/scaladoc-testcases/src/tests/rightAssocExtension.scala b/scaladoc-testcases/src/tests/rightAssocExtension.scala new file mode 100644 index 000000000000..a065ee765caf --- /dev/null +++ b/scaladoc-testcases/src/tests/rightAssocExtension.scala @@ -0,0 +1,7 @@ +package tests.rightAssocExtension + +case class Wrap[+T](x: T) + +extension [T](a: T) + def *:[U <: Tuple](b: Wrap[U]): Wrap[T *: U] + = Wrap(a *: b.x) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 88d57cdb9853..8823f6cb4e5e 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -359,7 +359,9 @@ trait ClassLikeSupport: if methodSymbol.isExtensionMethod && methodSymbol.isRightAssoc then // Taken from RefinedPrinter.scala // If you change the names of the clauses below, also change them in right-associative-extension-methods.md - val (leftTyParams, rest1) = memberInfo.paramLists.span(_.isType) + val (leftTyParams, rest1) = memberInfo.paramLists match + case fst :: tail if fst.isType => (List(fst), tail) + case other => (List(), other) val (leadingUsing, rest2) = rest1.span(_.isUsing) val (rightTyParams, rest3) = rest2.span(_.isType) val (rightParam, rest4) = rest3.splitAt(1) diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala index d60a4d82ff44..bfa2a372827a 100644 --- 
a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala @@ -122,3 +122,5 @@ class InfixTypes extends SignatureTest("infixTypes", SignatureTest.all) class ExtendsCall extends SignatureTest("extendsCall", SignatureTest.all) class RefinedFunctionTypes extends SignatureTest("refinedFunctionTypes", SignatureTest.all) + +class RightAssocExtension extends SignatureTest("rightAssocExtension", SignatureTest.all) From d04005c048b4633b5897c4a79972ca20352f3e36 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 16:37:58 +0200 Subject: [PATCH 125/827] Fix isEffectivelySingleton As usual, OrTypes need to be excluded. a.type | b.type is not effectively a singleton. It seems to be an easy trap to fall into. Follow-up to #20474 --- compiler/src/dotty/tools/dotc/core/Types.scala | 3 +-- tests/neg/i20474.scala | 13 +++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 tests/neg/i20474.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index cad06e973741..ca4834558d9a 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -329,13 +329,12 @@ object Types extends TypeUtils { def isSingleton(using Context): Boolean = dealias.isInstanceOf[SingletonType] /** Is this type a (possibly aliased) singleton type or a type proxy - * or Or/And type known to be a singleton type? + * or an AndType where one operand is effectively a singleton? */ def isEffectivelySingleton(using Context): Boolean = dealias match case tp: SingletonType => true case tp: TypeProxy => tp.superType.isEffectivelySingleton case AndType(tpL, tpR) => tpL.isEffectivelySingleton || tpR.isEffectivelySingleton - case OrType(tpL, tpR) => tpL.isEffectivelySingleton && tpR.isEffectivelySingleton case _ => false /** Is this upper-bounded by a (possibly aliased) singleton type? 
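The trap is easy to fall into because a union of two singleton types looks very singleton-like. A minimal sketch of the distinction (names invented for illustration; the committed `tests/neg/i20474.scala` below is the authoritative test):

```scala
class A
def pick(a: A, b: A, cond: Boolean) =
  val x: a.type | b.type = if cond then a else b
  val y: a.type & A = a
  val d1: y.type = a // ok: the AndType still pins y down to the single value `a`
  val d2: x.type = a // error: x may be `b` at run time, so x.type is not a.type
```

An `AndType` may keep the singleton treatment because a singleton operand already fixes the value; an `OrType` of two singletons still leaves two possible values, so it must be excluded.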
diff --git a/tests/neg/i20474.scala b/tests/neg/i20474.scala new file mode 100644 index 000000000000..4623ec11dbf3 --- /dev/null +++ b/tests/neg/i20474.scala @@ -0,0 +1,13 @@ +class A +class B extends A + +def f(a: A, c: A) = + val b1: a.type = a + val b2: a.type & B = a.asInstanceOf[a.type & B] + val b3: c.type & A = c + val b4: a.type | c.type = c + + val d1: b1.type = a + val d2: b2.type = a // ok + val d3: b3.type = a // error + val d4: b4.type = a // error \ No newline at end of file From 3fdb2923f83acd2ef8910c9f71a394c46be4e268 Mon Sep 17 00:00:00 2001 From: Kasper Kondzielski Date: Tue, 28 May 2024 16:54:15 +0200 Subject: [PATCH 126/827] Add error code to diagnostics about unused code (#19780) Co-authored-by: ghostbuster91 --- .../tools/dotc/reporting/ErrorMessageID.scala | 1 + .../tools/dotc/reporting/MessageKind.scala | 2 ++ .../dotty/tools/dotc/reporting/messages.scala | 16 ++++++++++++++++ .../tools/dotc/transform/CheckUnused.scala | 17 +++++++++-------- compiler/test-resources/repl/i18383 | 2 +- 5 files changed, 29 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 04380a7b8e4a..0e42629773cc 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -211,6 +211,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case ConstructorProxyNotValueID // errorNumber: 195 case ContextBoundCompanionNotValueID // errorNumber: 196 case InlinedAnonClassWarningID // errorNumber: 197 + case UnusedSymbolID // errorNumber: 198 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala b/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala index f039ed900a76..10ad4f83d93d 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala @@ -21,6 +21,7 @@ enum MessageKind: case MatchCaseUnreachable case Compatibility case PotentialIssue + case UnusedSymbol /** Human readable message that will end up being shown to the user. * NOTE: This is only used in the situation where you have multiple words @@ -37,5 +38,6 @@ enum MessageKind: case PatternMatchExhaustivity => "Pattern Match Exhaustivity" case MatchCaseUnreachable => "Match case Unreachable" case PotentialIssue => "Potential Issue" + case UnusedSymbol => "Unused Symbol" case kind => kind.toString end MessageKind diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index ceb8ecbc8e03..65f3a478fcd4 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3239,3 +3239,19 @@ extends TypeMsg(ConstructorProxyNotValueID): |companion value with the (term-)name `A`. 
However, these context bound companions |are not values themselves, they can only be referred to in selections.""" +class UnusedSymbol(errorText: String)(using Context) +extends Message(UnusedSymbolID) { + def kind = MessageKind.UnusedSymbol + + override def msg(using Context) = errorText + override def explain(using Context) = "" +} + +object UnusedSymbol { + def imports(using Context): UnusedSymbol = new UnusedSymbol(i"unused import") + def localDefs(using Context): UnusedSymbol = new UnusedSymbol(i"unused local definition") + def explicitParams(using Context): UnusedSymbol = new UnusedSymbol(i"unused explicit parameter") + def implicitParams(using Context): UnusedSymbol = new UnusedSymbol(i"unused implicit parameter") + def privateMembers(using Context): UnusedSymbol = new UnusedSymbol(i"unused private member") + def patVars(using Context): UnusedSymbol = new UnusedSymbol(i"unused pattern variable") +} diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index d420fe78107e..d8389ff964a4 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -17,6 +17,7 @@ import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.StdNames import dotty.tools.dotc.report import dotty.tools.dotc.reporting.Message +import dotty.tools.dotc.reporting.UnusedSymbol as UnusedSymbolMessage import dotty.tools.dotc.typer.ImportInfo import dotty.tools.dotc.util.{Property, SrcPos} import dotty.tools.dotc.core.Mode @@ -295,21 +296,21 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke res.warnings.toList.sortBy(_.pos.span.point)(using Ordering[Int]).foreach { s => s match case UnusedSymbol(t, _, WarnTypes.Imports) => - report.warning(s"unused import", t) + report.warning(UnusedSymbolMessage.imports, t) case UnusedSymbol(t, _, WarnTypes.LocalDefs) => - report.warning(s"unused local definition", t) + report.warning(UnusedSymbolMessage.localDefs, t) case UnusedSymbol(t, _, WarnTypes.ExplicitParams) => - report.warning(s"unused explicit parameter", t) + report.warning(UnusedSymbolMessage.explicitParams, t) case UnusedSymbol(t, _, WarnTypes.ImplicitParams) => - report.warning(s"unused implicit parameter", t) + report.warning(UnusedSymbolMessage.implicitParams, t) case UnusedSymbol(t, _, WarnTypes.PrivateMembers) => - report.warning(s"unused private member", t) + report.warning(UnusedSymbolMessage.privateMembers, t) case UnusedSymbol(t, _, WarnTypes.PatVars) => - report.warning(s"unused pattern variable", t) + report.warning(UnusedSymbolMessage.patVars, t) case UnusedSymbol(t, _, WarnTypes.UnsetLocals) => - report.warning(s"unset local variable, consider using an immutable val instead", t) + report.warning("unset local variable, consider using an immutable val instead", t) case UnusedSymbol(t, _, WarnTypes.UnsetPrivates) => - report.warning(s"unset private variable, consider using an immutable val instead", t) + report.warning("unset private variable, consider using an immutable val instead", t) } end CheckUnused diff --git a/compiler/test-resources/repl/i18383 b/compiler/test-resources/repl/i18383 index 81d3c9d5a7fd..563495e2e999 100644 --- a/compiler/test-resources/repl/i18383 +++ b/compiler/test-resources/repl/i18383 @@ -4,7 +4,7 @@ scala> import scala.collection.* scala> class Foo { import scala.util.*; println("foo") } 1 warning found --- Warning: -------------------------------------------------------------------- 
+-- [E198] Unused Symbol Warning: ----------------------------------------------- 1 | class Foo { import scala.util.*; println("foo") } | ^ | unused import From 03ced0d378ccbcd67a17428947fd1b1afb9cf3ae Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 18:00:27 +0200 Subject: [PATCH 127/827] Drop priority change warnings that don't qualify Drop priority change warnings if one of the mentioned references does not succeed via tryImplicit. --- .../dotty/tools/dotc/typer/Implicits.scala | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index a15541fa9c76..72a9b7545d05 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1301,9 +1301,10 @@ trait Implicits: private def searchImplicit(eligible: List[Candidate], contextual: Boolean): SearchResult = // A map that associates a priority change warning (between -source 3.4 and 3.6) - // with a candidate ref mentioned in the warning. We report the associated - // message if the candidate ref is part of the result of the implicit search - var priorityChangeWarnings = mutable.ListBuffer[(TermRef, Message)]() + // with the candidate refs mentioned in the warning. We report the associated + // message if both candidates qualify in tryImplicit and at least one of the candidates + // is part of the result of the implicit search. + val priorityChangeWarnings = mutable.ListBuffer[(TermRef, TermRef, Message)]() /** Compare `alt1` with `alt2` to determine which one should be chosen. * * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1322,7 +1323,7 @@ trait Implicits: def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) def warn(msg: Message) = - priorityChangeWarnings += (alt1.ref -> msg) += (alt2.ref -> msg) + priorityChangeWarnings += ((alt1.ref, alt2.ref, msg)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else @@ -1440,7 +1441,11 @@ trait Implicits: // need a candidate better than `cand` healAmbiguous(fail, newCand => compareAlternatives(newCand, cand) > 0) - else rank(remaining, found, fail :: rfailures) + else + // keep only warnings that don't involve the failed candidate reference + priorityChangeWarnings.filterInPlace: (ref1, ref2, _) => + ref1 != cand.ref && ref2 != cand.ref + rank(remaining, found, fail :: rfailures) case best: SearchSuccess => if (ctx.mode.is(Mode.ImplicitExploration) || isCoherent) best @@ -1596,8 +1601,9 @@ trait Implicits: throw ex val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) - for (ref, msg) <- priorityChangeWarnings do - if result.found.contains(ref) then report.warning(msg, srcPos) + for (ref1, ref2, msg) <- priorityChangeWarnings do + if result.found.exists(ref => ref == ref1 || ref == ref2) then + report.warning(msg, srcPos) result end searchImplicit def isUnderSpecifiedArgument(tp: Type): Boolean = From 21261c07bb4bdf8aeb493a7fc1893e75c1085344 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 27 May 2024 13:09:29 +0200 Subject: [PATCH 128/827] Explain unresolvable references better We run into problems when referring to a member of a self type of a class that is not also a member of the class from outside via an asSeenFrom.
One example is in 11226.scala where we see: ```scala trait ManagedActorClassification { this: ActorEventBus => def unsubscribe(subscriber: Subscriber): Unit } class Unsubscriber(bus: ManagedActorClassification) { def test(a: ActorRef): Unit = bus.unsubscribe(a) // error } ``` The problem is that `unsubscribe` refers to the type `Subscriber` which is not resolvable as a member of `bus`. One idea could be to rule out type signatures like `unsubscribe`, similar to how we rule out public signatures referring to private members. But this could rule out existing valid programs. For instance, the `unsubscribe` signature is unproblematic if it gets only called with prefixes that inherit `ActorEventBus`. You could say that the problem was instead that the type of `bus` was not specific enough. In the long term, maybe restructuring the signature is the right move. But for now, we just try to give better error messages in the case of existing failures. Fixes #11226 --- .../dotty/tools/dotc/core/TypeErrors.scala | 29 ++++++++++++++----- .../dotty/tools/dotc/reporting/messages.scala | 12 +++++++- tests/neg/i11226.check | 6 ++++ tests/neg/i11226.scala | 14 +++++++++ tests/neg/i11226a.check | 12 ++++++++ tests/neg/i11226a.scala | 13 +++++++++ tests/neg/i16407.check | 12 ++++---- tests/pos/i11226b.scala | 11 +++++++ 8 files changed, 94 insertions(+), 15 deletions(-) create mode 100644 tests/neg/i11226.check create mode 100644 tests/neg/i11226.scala create mode 100644 tests/neg/i11226a.check create mode 100644 tests/neg/i11226a.scala create mode 100644 tests/pos/i11226b.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 5b19fe0e7bdd..79d1cecbd6be 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -57,17 +57,30 @@ end TypeError class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])(using Context) extends TypeError: def toMessage(using Context) = em"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" -class MissingType(pre: Type, name: Name)(using Context) extends TypeError: - private def otherReason(pre: Type)(using Context): String = pre match { - case pre: ThisType if pre.cls.givenSelfType.exists => - i"\nor the self type of $pre might not contain all transitive dependencies" - case _ => "" - } +class MissingType(val pre: Type, val name: Name)(using Context) extends TypeError: + + def reason(using Context): String = + def missingClassFile = + "The classfile defining the type might be missing from the classpath" + val cls = pre.classSymbol + val givenSelf = cls match + case cls: ClassSymbol => cls.givenSelfType + case _ => NoType + pre match + case pre: ThisType if pre.cls.givenSelfType.exists => + i"""$missingClassFile + |or the self type of $pre might not contain all transitive dependencies""" + case _ if givenSelf.exists && givenSelf.member(name).exists => + i"""$name exists as a member of the self type $givenSelf of $cls + |but it cannot be referenced from a scope that does not extend that ${ctx.printer.kindString(cls)}""" + case _ => + missingClassFile + override def toMessage(using Context): Message = if ctx.debug then printStackTrace() - em"""cannot resolve reference to type $pre.$name - |the classfile defining the type might be missing from the classpath${otherReason(pre)}""" + em"""Cannot resolve reference to type
$pre.$name. + |$reason.""" end MissingType class RecursionOverflow(val op: String, details: => String, val previous: Throwable, val weight: Int)(using Context) diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index ceb8ecbc8e03..9558981be800 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -301,6 +301,7 @@ class TypeMismatch(val found: Type, expected: Type, val inTree: Option[untpd.Tre // these are usually easier to analyze. We exclude F-bounds since these would // lead to a recursive infinite expansion. object reported extends TypeMap, IdentityCaptRefMap: + var notes: String = "" def setVariance(v: Int) = variance = v val constraint = mapCtx.typerState.constraint var fbounded = false @@ -318,6 +319,15 @@ class TypeMismatch(val found: Type, expected: Type, val inTree: Option[untpd.Tre case tp: LazyRef => fbounded = true tp + case tp @ TypeRef(pre, _) => + if pre != NoPrefix && !pre.member(tp.name).exists then + notes ++= + i""" + | + |Note that I could not resolve reference $tp. + |${MissingType(pre, tp.name).reason} + """ + mapOver(tp) case _ => mapOver(tp) @@ -329,7 +339,7 @@ class TypeMismatch(val found: Type, expected: Type, val inTree: Option[untpd.Tre else (found1, expected1) val (foundStr, expectedStr) = Formatting.typeDiff(found2, expected2) i"""|Found: $foundStr - |Required: $expectedStr""" + |Required: $expectedStr${reported.notes}""" end msg override def msgPostscript(using Context) = diff --git a/tests/neg/i11226.check b/tests/neg/i11226.check new file mode 100644 index 000000000000..90ed2d6a8ebe --- /dev/null +++ b/tests/neg/i11226.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/i11226.scala:13:36 --------------------------------------------------------------------------------- +13 | def test(a: ActorRef): Unit = bus.unsubscribe(a) // error + | ^ + | Cannot resolve reference to type (Unsubscriber.this.bus : ManagedActorClassification).Subscriber. + | Subscriber exists as a member of the self type ActorEventBus of trait ManagedActorClassification + | but it cannot be referenced from a scope that does not extend that trait. diff --git a/tests/neg/i11226.scala b/tests/neg/i11226.scala new file mode 100644 index 000000000000..34c6eb78fd2d --- /dev/null +++ b/tests/neg/i11226.scala @@ -0,0 +1,14 @@ +trait ActorRef + +trait ActorEventBus { + type Subscriber = ActorRef +} + +trait ManagedActorClassification { this: ActorEventBus => + def unsubscribe(subscriber: Subscriber, from: Any): Unit + def unsubscribe(subscriber: Subscriber): Unit +} + +class Unsubscriber(bus: ManagedActorClassification) { + def test(a: ActorRef): Unit = bus.unsubscribe(a) // error +} \ No newline at end of file diff --git a/tests/neg/i11226a.check b/tests/neg/i11226a.check new file mode 100644 index 000000000000..871973264677 --- /dev/null +++ b/tests/neg/i11226a.check @@ -0,0 +1,12 @@ +-- [E007] Type Mismatch Error: tests/neg/i11226a.scala:12:48 ----------------------------------------------------------- +12 | def test(a: ActorRef): Unit = bus.unsubscribe(a) // error + | ^ + | Found: (a : ActorRef) + | Required: Unsubscriber.this.bus.Subscriber + | + | Note that I could not resolve reference Unsubscriber.this.bus.Subscriber. 
+ | Subscriber exists as a member of the self type ActorEventBus of trait ManagedActorClassification + | but it cannot be referenced from a scope that does not extend that trait + | + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i11226a.scala b/tests/neg/i11226a.scala new file mode 100644 index 000000000000..f30530c5a58e --- /dev/null +++ b/tests/neg/i11226a.scala @@ -0,0 +1,13 @@ +trait ActorRef + +trait ActorEventBus { + type Subscriber = ActorRef +} + +trait ManagedActorClassification { this: ActorEventBus => + def unsubscribe(subscriber: Subscriber): Unit +} + +class Unsubscriber(bus: ManagedActorClassification) { + def test(a: ActorRef): Unit = bus.unsubscribe(a) // error +} \ No newline at end of file diff --git a/tests/neg/i16407.check b/tests/neg/i16407.check index 5c6bd19ca8c1..481d70e83ce3 100644 --- a/tests/neg/i16407.check +++ b/tests/neg/i16407.check @@ -1,12 +1,12 @@ -- Error: tests/neg/i16407.scala:2:2 ----------------------------------------------------------------------------------- 2 | f(g()) // error // error | ^ - | cannot resolve reference to type (X.this : Y & X).A - | the classfile defining the type might be missing from the classpath - | or the self type of (X.this : Y & X) might not contain all transitive dependencies + | Cannot resolve reference to type (X.this : Y & X).A. + | The classfile defining the type might be missing from the classpath + | or the self type of (X.this : Y & X) might not contain all transitive dependencies. -- Error: tests/neg/i16407.scala:2:4 ----------------------------------------------------------------------------------- 2 | f(g()) // error // error | ^ - | cannot resolve reference to type (X.this : Y & X).A - | the classfile defining the type might be missing from the classpath - | or the self type of (X.this : Y & X) might not contain all transitive dependencies + | Cannot resolve reference to type (X.this : Y & X).A. + | The classfile defining the type might be missing from the classpath + | or the self type of (X.this : Y & X) might not contain all transitive dependencies. 
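The new wording also points at the usual fix on the caller's side: the call compiles once the static type of the prefix covers the self type as well. A hedged sketch against the example from the commit message (the class `Unsubscriber2` is hypothetical and not part of the patch):

```scala
class Unsubscriber2(bus: ManagedActorClassification & ActorEventBus) {
  // `bus.Subscriber` is now resolvable (it is `ActorRef` via ActorEventBus),
  // so this call should compile under the same definitions as above.
  def test(a: ActorRef): Unit = bus.unsubscribe(a)
}
```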
diff --git a/tests/pos/i11226b.scala b/tests/pos/i11226b.scala new file mode 100644 index 000000000000..4074cbfc4b2e --- /dev/null +++ b/tests/pos/i11226b.scala @@ -0,0 +1,11 @@ +trait A { + class T() +} +trait B { + this: A => + def f(a: Int = 0): Any +} +trait C extends B { + this: A => + def f(t: T): Any +} \ No newline at end of file From 0b9ee335fa488458966fcbe8e2ce1e7601f49546 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 27 May 2024 15:53:37 +0200 Subject: [PATCH 129/827] Add test to bestEffortCompilation blacklist --- compiler/test/dotc/neg-best-effort-pickling.blacklist | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/test/dotc/neg-best-effort-pickling.blacklist b/compiler/test/dotc/neg-best-effort-pickling.blacklist index 1c8421b44539..2daf32509ed1 100644 --- a/compiler/test/dotc/neg-best-effort-pickling.blacklist +++ b/compiler/test/dotc/neg-best-effort-pickling.blacklist @@ -14,6 +14,7 @@ i17121.scala illegal-match-types.scala i13780-1.scala i20317a.scala +i11226.scala # semantic db generation fails in the first compilation i1642.scala From 66bba46f143efa3bb44202c956292ba82ce4b4a1 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 16:25:31 +0200 Subject: [PATCH 130/827] Update compiler/src/dotty/tools/dotc/core/TypeErrors.scala Co-authored-by: Guillaume Martres --- compiler/src/dotty/tools/dotc/core/TypeErrors.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 79d1cecbd6be..11e313c47932 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -72,7 +72,7 @@ class MissingType(val pre: Type, val name: Name)(using Context) extends TypeErro |or the self type of $pre might not contain all transitive dependencies""" case _ if givenSelf.exists && givenSelf.member(name).exists => i"""$name exists as a member of the self type $givenSelf of $cls - |but it cannot be referenced from a scope that does not extend that ${ctx.printer.kindString(cls)}""" + |but it cannot be called on a receiver whose type does not extend $cls""" case _ => missingClassFile From e1ce6b99781ee6daeb363ee4b14b91dc5ba1ea69 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 18:29:49 +0200 Subject: [PATCH 131/827] Update check files --- tests/neg/i11226.check | 6 +++--- tests/neg/i11226a.check | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/neg/i11226.check b/tests/neg/i11226.check index 90ed2d6a8ebe..571f54326808 100644 --- a/tests/neg/i11226.check +++ b/tests/neg/i11226.check @@ -1,6 +1,6 @@ -- Error: tests/neg/i11226.scala:13:36 --------------------------------------------------------------------------------- 13 | def test(a: ActorRef): Unit = bus.unsubscribe(a) // error | ^ - | Cannot resolve reference to type (Unsubscriber.this.bus : ManagedActorClassification).Subscriber. - | Subscriber exists as a member of the self type ActorEventBus of trait ManagedActorClassification - | but it cannot be referenced from a scope that does not extend that trait. + | Cannot resolve reference to type (Unsubscriber.this.bus : ManagedActorClassification).Subscriber. + | Subscriber exists as a member of the self type ActorEventBus of trait ManagedActorClassification + | but it cannot be called on a receiver whose type does not extend trait ManagedActorClassification. 
diff --git a/tests/neg/i11226a.check b/tests/neg/i11226a.check index 871973264677..ecb0760dd01c 100644 --- a/tests/neg/i11226a.check +++ b/tests/neg/i11226a.check @@ -1,12 +1,12 @@ -- [E007] Type Mismatch Error: tests/neg/i11226a.scala:12:48 ----------------------------------------------------------- 12 | def test(a: ActorRef): Unit = bus.unsubscribe(a) // error | ^ - | Found: (a : ActorRef) - | Required: Unsubscriber.this.bus.Subscriber + | Found: (a : ActorRef) + | Required: Unsubscriber.this.bus.Subscriber | - | Note that I could not resolve reference Unsubscriber.this.bus.Subscriber. - | Subscriber exists as a member of the self type ActorEventBus of trait ManagedActorClassification - | but it cannot be referenced from a scope that does not extend that trait - | + | Note that I could not resolve reference Unsubscriber.this.bus.Subscriber. + | Subscriber exists as a member of the self type ActorEventBus of trait ManagedActorClassification + | but it cannot be called on a receiver whose type does not extend trait ManagedActorClassification + | | | longer explanation available when compiling with `-explain` From 3112bb72f56ca5d4463308e2178c0ffcce6f70da Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 18:40:55 +0200 Subject: [PATCH 132/827] Add test for #20484 --- tests/pos/i20484.scala | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 tests/pos/i20484.scala diff --git a/tests/pos/i20484.scala b/tests/pos/i20484.scala new file mode 100644 index 000000000000..2f02e6206101 --- /dev/null +++ b/tests/pos/i20484.scala @@ -0,0 +1,3 @@ +given Int = ??? +given Char = ??? +val a = summon[Int] \ No newline at end of file From 2e4a0701d57df0f5af067f04da35a95b6003578a Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 28 May 2024 20:38:39 +0200 Subject: [PATCH 133/827] Avoid stacked thisCall contexts AddImplicitArgs can recursively add several implicit parameter lists. We need to make sure we don't perform a thisCallContext search in another thisCall context in this case. Fixes #20483 The original code would back out further and further in the context chain for every implicit parameter section on the secondary constructor. Eventually (in this case after 3 times) bad things happen. --- .../src/dotty/tools/dotc/core/Contexts.scala | 2 +- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 27 ++++++++++++------- tests/pos/i20483.scala | 13 +++++++++ 4 files changed, 32 insertions(+), 12 deletions(-) create mode 100644 tests/pos/i20483.scala diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index a5b0e2dba254..79a0b279aefe 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -477,7 +477,7 @@ object Contexts { /** Is the flexible types option set? */ def flexibleTypes: Boolean = base.settings.YexplicitNulls.value && !base.settings.YnoFlexibleTypes.value - + /** Is the best-effort option set? 
*/ def isBestEffort: Boolean = base.settings.YbestEffort.value diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 3fe8d6fae8a3..cd35825dfae7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1067,7 +1067,7 @@ trait Implicits: trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { record("inferImplicit") assert(ctx.phase.allowsImplicitSearch, - if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase.phaseName}" + if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase}" else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}") val usableForInference = pt.exists && !pt.unusableForInference diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index e44da20814dd..9ab5a8ac69df 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4056,7 +4056,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def dummyArg(tp: Type) = untpd.Ident(nme.???).withTypeUnchecked(tp) - def addImplicitArgs(using Context) = { + val origCtx = ctx + + def addImplicitArgs(using Context) = def hasDefaultParams = methPart(tree).symbol.hasDefaultParams def implicitArgs(formals: List[Type], argIndex: Int, pt: Type): List[Tree] = formals match case Nil => Nil @@ -4179,15 +4181,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => retyped else issueErrors(tree, args) } - else tree match { - case tree: Block => - readaptSimplified(tpd.Block(tree.stats, tpd.Apply(tree.expr, args))) - case tree: NamedArg => - readaptSimplified(tpd.NamedArg(tree.name, tpd.Apply(tree.arg, args))) - case _ => - readaptSimplified(tpd.Apply(tree, args)) - } - } + else + inContext(origCtx): + // Reset context in case it was set to a supercall context before. + // otherwise the invariant for taking another this or super call context is not met. + // Test case is i20483.scala + tree match + case tree: Block => + readaptSimplified(tpd.Block(tree.stats, tpd.Apply(tree.expr, args))) + case tree: NamedArg => + readaptSimplified(tpd.NamedArg(tree.name, tpd.Apply(tree.arg, args))) + case _ => + readaptSimplified(tpd.Apply(tree, args)) + end addImplicitArgs + pt.revealIgnored match { case pt: FunProto if pt.applyKind == ApplyKind.Using => // We can end up here if extension methods are called with explicit given arguments. 
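Concretely, the `this(...)` call in such a secondary constructor has three implicit argument lists added one after the other, and before this change each addition took yet another this-call context, backing further and further out of the context chain until the invariant broke. A minimal sketch of the shape involved (illustrative only; `tests/pos/i20483.scala` below is the actual regression test):

```scala
class C(x: Option[String])(using Boolean)(using Int)(using Double):
  def this(x: String)(using Boolean)(using Int)(using Double) =
    this(Some(x)) // adapted three times, once per using clause of the primary constructor
```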
diff --git a/tests/pos/i20483.scala b/tests/pos/i20483.scala new file mode 100644 index 000000000000..a01a77327181 --- /dev/null +++ b/tests/pos/i20483.scala @@ -0,0 +1,13 @@ + +class Foo + (x: Option[String]) + (using Boolean) + (using Int) + (using Double): + + def this + (x: String) + (using Boolean) + (using Int) + (using Double) = + this(Some(x)) \ No newline at end of file From 2c4889e295bb6faa2278ac16f2fd9ab77e6d1706 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 29 May 2024 14:31:52 +0200 Subject: [PATCH 134/827] Allow `apply` adaptation on Selectable with Fields Just like when using a regular Selectable with refinements, this change allows: fromFlds.xs(0) to be expanded to: fromFlds.xs.apply(0) --- .../src/dotty/tools/dotc/typer/Typer.scala | 4 +-- tests/neg/named-tuple-selectable.scala | 29 +++++++++++++++++++ tests/pos/named-tuple-selectable.scala | 29 +++++++++++++++++++ 3 files changed, 60 insertions(+), 2 deletions(-) create mode 100644 tests/neg/named-tuple-selectable.scala create mode 100644 tests/pos/named-tuple-selectable.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index e44da20814dd..24d28aa8b4e6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -821,10 +821,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Otherwise, if the qualifier derives from class Selectable, // and the selector name matches one of the element of the `Fields` type member, - // and the selector is neither applied nor assigned to, + // and the selector is not assigned to, // expand to a typed dynamic dispatch using selectDynamic wrapped in a cast if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) - && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto + && pt != LhsProto then val pre = if !TypeOps.isLegalPrefix(qual.tpe) then SkolemType(qual.tpe) else qual.tpe val fieldsType = pre.select(tpnme.Fields).dealias.simplified diff --git a/tests/neg/named-tuple-selectable.scala b/tests/neg/named-tuple-selectable.scala new file mode 100644 index 000000000000..5cf7e68654ef --- /dev/null +++ b/tests/neg/named-tuple-selectable.scala @@ -0,0 +1,29 @@ +import scala.language.experimental.namedTuples + +class FromFields extends Selectable: + type Fields = (i: Int) + def selectDynamic(key: String) = + List(1, 2, 3) + +trait FromRefs extends Selectable: + def selectDynamic(key: String) = + List(1, 2, 3) + +def test( + fromFlds: FromFields, + fromRefs: FromRefs { val i: Int } +): Unit = + fromFlds.i(0) // error + fromRefs.i(0) // error + + fromFlds.i.apply(0) // error + fromRefs.i.apply(0) // error + + fromFlds.i[Int](List(1)) // error + fromRefs.i[Int](List(1)) // error + + fromFlds.i(List(1)) // error + fromRefs.i(List(1)) // error + + fromFlds.i.apply(List(1)) // error + fromRefs.i.apply(List(1)) // error diff --git a/tests/pos/named-tuple-selectable.scala b/tests/pos/named-tuple-selectable.scala new file mode 100644 index 000000000000..be5f0400e58c --- /dev/null +++ b/tests/pos/named-tuple-selectable.scala @@ -0,0 +1,29 @@ +import scala.language.experimental.namedTuples + +class FromFields extends Selectable: + type Fields = (xs: List[Int], poly: [T] => (x: List[T]) => Option[T]) + def selectDynamic(key: String) = + List(1, 2, 3) + +trait FromRefs extends Selectable: + def selectDynamic(key: String) = + List(1, 2, 3) + +def test( + fromFlds: FromFields, + fromRefs: FromRefs { val xs: List[Int]; val poly: [T] => (x: 
List[T]) => Option[T] } +): Unit = + fromFlds.xs(0) + fromRefs.xs(0) + + fromFlds.xs.apply(0) + fromRefs.xs.apply(0) + + fromFlds.poly[Int](List(1)): Option[Int] + fromRefs.poly[Int](List(1)): Option[Int] + + fromFlds.poly(List(1)) + fromRefs.poly(List(1)) + + fromFlds.poly.apply(List(1)) + fromRefs.poly.apply(List(1)) From bf0cd3c317c06a9e2cec335d06a112808e6e7db1 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 29 May 2024 23:53:38 +0200 Subject: [PATCH 135/827] implement NamedTuple.build for better inference with target types --- .../src/dotty/tools/dotc/ast/Desugar.scala | 10 +- library/src/scala/NamedTuple.scala | 7 +- tests/pos/named-tuples-ops-mirror.scala | 121 ++++++++++++++++++ tests/run/named-tuples.check | 1 + tests/run/named-tuples.scala | 15 ++- 5 files changed, 145 insertions(+), 9 deletions(-) create mode 100644 tests/pos/named-tuples-ops-mirror.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index b1b771bc7512..977eac5df4fc 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1596,9 +1596,13 @@ object desugar { if ctx.mode.is(Mode.Type) then AppliedTypeTree(ref(defn.NamedTupleTypeRef), namesTuple :: tup :: Nil) else - TypeApply( - Apply(Select(ref(defn.NamedTupleModule), nme.withNames), tup), - namesTuple :: Nil) + Apply( + Apply( + TypeApply( + Select(ref(defn.NamedTupleModule), nme.build), // NamedTuple.build + namesTuple :: Nil), // ++ [(names...)] + Nil), // ++ () + tup :: Nil) // .++ ((values...)) /** When desugaring a list pattern arguments `elems` adapt them and the * expected type `pt` to each other. This means: diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index dc6e6c3144f6..4c31728d6626 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -1,4 +1,5 @@ package scala +import scala.language.experimental.clauseInterleaving import annotation.experimental import compiletime.ops.boolean.* @@ -19,6 +20,11 @@ object NamedTuple: def unapply[N <: Tuple, V <: Tuple](x: NamedTuple[N, V]): Some[V] = Some(x) + /** A named tuple expression will desugar to a call to `build`. For instance, + * `(name = "Lyra", age = 23)` will desugar to `build[("name", "age")]()(("Lyra", 23))`. + */ + inline def build[N <: Tuple]()[V <: Tuple](x: V): NamedTuple[N, V] = x + extension [V <: Tuple](x: V) inline def withNames[N <: Tuple]: NamedTuple[N, V] = x @@ -214,4 +220,3 @@ object NamedTupleDecomposition: /** The value types of a named tuple represented as a regular tuple. 
*/ type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match case NamedTuple[_, x] => x - diff --git a/tests/pos/named-tuples-ops-mirror.scala b/tests/pos/named-tuples-ops-mirror.scala new file mode 100644 index 000000000000..f66eb89534fb --- /dev/null +++ b/tests/pos/named-tuples-ops-mirror.scala @@ -0,0 +1,121 @@ +import language.experimental.namedTuples +import NamedTuple.* + +@FailsWith[HttpError] +trait GreetService derives HttpService: + @HttpInfo("GET", "/greet/{name}") + def greet(@HttpPath name: String): String + @HttpInfo("POST", "/greet/{name}") + def setGreeting(@HttpPath name: String, @HttpBody greeting: String): Unit + +@main def Test = + + val e = HttpService.endpoints[GreetService] + + println(e.greet.describe) + println(e.setGreeting.describe) + + // Type-safe server logic, driven by the ops-mirror, + // requires named tuple with same labels in the same order, + // and function that matches the required signature. + val logic = e.serverLogic: + ( + greet = (name) => Right("Hello, " + name), + setGreeting = (name, greeting) => Right(()) + ) + + val server = ServerBuilder() + .handleAll(logic) + .create(port = 8080) + + sys.addShutdownHook(server.close()) + +end Test + +// IMPLEMENTATION DETAILS FOLLOW + +/** Assume existence of macro to generate this */ +given (OpsMirror.Of[GreetService] { + type MirroredType = GreetService + type OperationLabels = ("greet", "setGreeting") + type Operations = ( + OpsMirror.Operation { type InputTypes = (String *: EmptyTuple); type OutputType = String; type ErrorType = HttpError }, + OpsMirror.Operation { type InputTypes = (String *: String *: EmptyTuple); type OutputType = Unit; type ErrorType = HttpError } + ) +}) = new OpsMirror: + type MirroredType = GreetService + type OperationLabels = ("greet", "setGreeting") + type Operations = ( + OpsMirror.Operation { type InputTypes = (String *: EmptyTuple); type OutputType = String; type ErrorType = HttpError }, + OpsMirror.Operation { type InputTypes = (String *: String *: EmptyTuple); type OutputType = Unit; type ErrorType = HttpError } + ) + +object OpsMirror: + type Of[T] = OpsMirror { type MirroredType = T } + + type Operation_I[I <: Tuple] = Operation { type InputTypes = I } + type Operation_O[O] = Operation { type OutputType = O } + type Operation_E[E] = Operation { type ErrorType = E } + + trait Operation: + type InputTypes <: Tuple + type OutputType + type ErrorType + +trait OpsMirror: + type MirroredType + type OperationLabels <: Tuple + type Operations <: Tuple + +trait HttpService[T]: + def route(str: String): Route +trait Route + +type Func[I <: Tuple, O, E] = I match + case EmptyTuple => Either[E, O] + case t *: EmptyTuple => t => Either[E, O] + case t *: u *: EmptyTuple => (t, u) => Either[E, O] + +type ToFunc[T] = T match + case HttpService.Endpoint[i, o, e] => Func[i, o, e] + +final class FailsWith[E] extends scala.annotation.Annotation +final class HttpInfo(method: String, route: String) extends scala.annotation.Annotation +final class HttpBody() extends scala.annotation.Annotation +final class HttpPath() extends scala.annotation.Annotation + +sealed trait HttpError + +object HttpService: + opaque type Endpoint[I <: Tuple, O, E] = Route + + extension [I <: Tuple, O, E](e: Endpoint[I, O, E]) + def describe: String = ??? 
// some thing that looks inside the Route to debug it + + type ToEndpoints[Ops <: Tuple] <: Tuple = Ops match + case EmptyTuple => EmptyTuple + case op *: ops => (op, op, op) match + case (OpsMirror.Operation_I[i]) *: (OpsMirror.Operation_O[o]) *: (OpsMirror.Operation_E[e]) *: _ => + Endpoint[i, o, e] *: ToEndpoints[ops] + + trait Handler + + class Endpoints[T](val model: HttpService[T]) extends Selectable: + type Fields <: AnyNamedTuple + def selectDynamic(name: String): Route = model.route(name) + + def serverLogic(funcs: NamedTuple[Names[Fields], Tuple.Map[DropNames[Fields], ToFunc]]): List[Handler] = ??? + + def derived[T](using OpsMirror.Of[T]): HttpService[T] = ??? // inline method to create routes + + def endpoints[T](using model: HttpService[T], m: OpsMirror.Of[T]): Endpoints[T] { + type Fields = NamedTuple[m.OperationLabels, ToEndpoints[m.Operations]] + } = + new Endpoints(model) { type Fields = NamedTuple[m.OperationLabels, ToEndpoints[m.Operations]] } + +class ServerBuilder(): + def handleAll(hs: List[HttpService.Handler]): this.type = this + def create(port: Int): Server = Server() + +class Server(): + def close(): Unit = () diff --git a/tests/run/named-tuples.check b/tests/run/named-tuples.check index 6485aefafa9a..ab1817255336 100644 --- a/tests/run/named-tuples.check +++ b/tests/run/named-tuples.check @@ -8,3 +8,4 @@ Bob is younger than Bill Bob is younger than Lucy Bill is younger than Lucy (((Lausanne,Pully),Preverenges),((1003,1009),1028)) +118 diff --git a/tests/run/named-tuples.scala b/tests/run/named-tuples.scala index 676c21a0e434..32c634188d52 100644 --- a/tests/run/named-tuples.scala +++ b/tests/run/named-tuples.scala @@ -100,6 +100,16 @@ val _: CombinedInfo = bob ++ addr val addr4 = addr3.zip("Preverenges", 1028) println(addr4) + val reducer: (map: Person => Int, reduce: (Int, Int) => Int) = + (map = _.age, reduce = _ + _) + + extension [T](xs: List[T]) + def mapReduce[U](reducer: (map: T => U, reduce: (U, U) => U)): U = + xs.map(reducer.map).reduce(reducer.reduce) + + val totalAge = persons.mapReduce(reducer) + println(totalAge) + // testing conversions object Conv: @@ -107,8 +117,3 @@ object Conv: def f22(x: (String, Int)) = x._1 def f22(x: String) = x f22(bob) - - - - - From 64af4e28ad96866730e4610e375b0dab8ecf58d7 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 31 May 2024 13:48:31 +0200 Subject: [PATCH 136/827] make NamedTuple.Empty an alias to NamedTuple --- library/src/scala/NamedTuple.scala | 4 ++-- tests/run/named-tuples.check | 2 ++ tests/run/named-tuples.scala | 8 ++++++++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index 4c31728d6626..f380fb5528ba 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -201,10 +201,10 @@ object NamedTuple: type From[T] <: AnyNamedTuple /** The type of the empty named tuple */ - type Empty = EmptyTuple.type + type Empty = NamedTuple[EmptyTuple, EmptyTuple] /** The empty named tuple */ - val Empty: Empty = EmptyTuple.asInstanceOf[Empty] + val Empty: Empty = EmptyTuple end NamedTuple diff --git a/tests/run/named-tuples.check b/tests/run/named-tuples.check index ab1817255336..e36037573090 100644 --- a/tests/run/named-tuples.check +++ b/tests/run/named-tuples.check @@ -9,3 +9,5 @@ Bob is younger than Lucy Bill is younger than Lucy (((Lausanne,Pully),Preverenges),((1003,1009),1028)) 118 +() +(name,age) diff --git a/tests/run/named-tuples.scala b/tests/run/named-tuples.scala index 
32c634188d52..b3fa213703af 100644 --- a/tests/run/named-tuples.scala +++ b/tests/run/named-tuples.scala @@ -110,6 +110,14 @@ val _: CombinedInfo = bob ++ addr val totalAge = persons.mapReduce(reducer) println(totalAge) + inline def namesOf[T <: AnyNamedTuple](t: T): Names[T] = compiletime.constValueTuple[Names[T]] + val namesEmpty = namesOf(NamedTuple.Empty) + val namesBob = namesOf(bob) + val namesEmpty2: EmptyTuple = namesEmpty + val namesBob2: ("name", "age") = namesBob + println(namesEmpty) + println(namesBob) + // testing conversions object Conv: From de96d27c01d4c75ab93c92e9f5e60c9d3e434cab Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 31 May 2024 14:38:14 +0200 Subject: [PATCH 137/827] move NamedTuple methods to separate scope. re-export --- library/src/scala/NamedTuple.scala | 163 ++++++++++++------------ tests/pos/named-tuple-combinators.scala | 154 ++++++++++++++++++++++ 2 files changed, 238 insertions(+), 79 deletions(-) create mode 100644 tests/pos/named-tuple-combinators.scala diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index 4c31728d6626..f2c435717df6 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -28,100 +28,26 @@ object NamedTuple: extension [V <: Tuple](x: V) inline def withNames[N <: Tuple]: NamedTuple[N, V] = x - export NamedTupleDecomposition.{Names, DropNames} + export NamedTupleDecomposition.{ + Names, DropNames, + apply, size, init, last, tail, take, drop, splitAt, ++, map, reverse, zip, toList, toArray, toIArray + } extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) /** The underlying tuple without the names */ inline def toTuple: V = x - /** The number of elements in this tuple */ - inline def size: Tuple.Size[V] = toTuple.size - // This intentionally works for empty named tuples as well. I think NonEmptyTuple is a dead end // and should be reverted, just like NonEmptyList is also appealing at first, but a bad idea // in the end. - /** The value (without the name) at index `n` of this tuple */ - inline def apply(n: Int): Tuple.Elem[V, n.type] = - inline toTuple match - case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] - case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] - /** The first element value of this tuple */ - inline def head: Tuple.Elem[V, 0] = apply(0) - - /** The tuple consisting of all elements of this tuple except the first one */ - inline def tail: NamedTuple[Tuple.Tail[N], Tuple.Tail[V]] = - toTuple.drop(1).asInstanceOf[NamedTuple[Tuple.Tail[N], Tuple.Tail[V]]] - - /** The last element value of this tuple */ - inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] - - /** The tuple consisting of all elements of this tuple except the last one */ - inline def init: NamedTuple[Tuple.Init[N], Tuple.Init[V]] = - toTuple.take(size - 1).asInstanceOf[NamedTuple[Tuple.Init[N], Tuple.Init[V]]] - - /** The tuple consisting of the first `n` elements of this tuple, or all - * elements if `n` exceeds `size`. - */ - inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = - toTuple.take(n) - - /** The tuple consisting of all elements of this tuple except the first `n` ones, - * or no elements if `n` exceeds `size`. 
- */ - inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = - toTuple.drop(n) - - /** The tuple `(x.take(n), x.drop(n))` */ - inline def splitAt(n: Int): - (NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]], - NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]]) = - // would be nice if this could have type `Split[NamedTuple[N, V]]` instead, but - // we get a type error then. Similar for other methods here. - toTuple.splitAt(n) - - /** The tuple consisting of all elements of this tuple followed by all elements - * of tuple `that`. The names of the two tuples must be disjoint. - */ - inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) - : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] - = toTuple ++ that.toTuple + inline def head: Tuple.Elem[V, 0] = x.apply(0) // inline def :* [L] (x: L): NamedTuple[Append[N, ???], Append[V, L] = ??? // inline def *: [H] (x: H): NamedTuple[??? *: N], H *: V] = ??? - /** The named tuple consisting of all element values of this tuple mapped by - * the polymorphic mapping function `f`. The names of elements are preserved. - * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. - */ - inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = - toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] - - /** The named tuple consisting of all elements of this tuple in reverse */ - inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = - toTuple.reverse - - /** The named tuple consisting of all elements values of this tuple zipped - * with corresponding element values in named tuple `that`. - * If the two tuples have different sizes, - * the extra elements of the larger tuple will be disregarded. - * The names of `x` and `that` at the same index must be the same. - * The result tuple keeps the same names as the operand tuples. 
- */ - inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = - toTuple.zip(that.toTuple) - - /** A list consisting of all element values */ - inline def toList: List[Tuple.Union[V]] = toTuple.toList.asInstanceOf[List[Tuple.Union[V]]] - - /** An array consisting of all element values */ - inline def toArray: Array[Object] = toTuple.toArray - - /** An immutable array consisting of all element values */ - inline def toIArray: IArray[Object] = toTuple.toIArray - end extension /** The size of a named tuple, represented as a literal constant subtype of Int */ @@ -212,6 +138,85 @@ end NamedTuple @experimental object NamedTupleDecomposition: import NamedTuple.* + extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) + /** The value (without the name) at index `n` of this tuple */ + inline def apply(n: Int): Tuple.Elem[V, n.type] = + inline x.toTuple match + case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] + case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] + + /** The number of elements in this tuple */ + inline def size: Tuple.Size[V] = x.toTuple.size + + /** The last element value of this tuple */ + inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] + + /** The tuple consisting of all elements of this tuple except the last one */ + inline def init: NamedTuple[Tuple.Init[N], Tuple.Init[V]] = + x.toTuple.take(size - 1).asInstanceOf[NamedTuple[Tuple.Init[N], Tuple.Init[V]]] + + /** The tuple consisting of all elements of this tuple except the first one */ + inline def tail: NamedTuple[Tuple.Tail[N], Tuple.Tail[V]] = + x.toTuple.drop(1).asInstanceOf[NamedTuple[Tuple.Tail[N], Tuple.Tail[V]]] + + /** The tuple consisting of the first `n` elements of this tuple, or all + * elements if `n` exceeds `size`. + */ + inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = + x.toTuple.take(n) + + /** The tuple consisting of all elements of this tuple except the first `n` ones, + * or no elements if `n` exceeds `size`. + */ + inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = + x.toTuple.drop(n) + + /** The tuple `(x.take(n), x.drop(n))` */ + inline def splitAt(n: Int): + (NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]], + NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]]) = + // would be nice if this could have type `Split[NamedTuple[N, V]]` instead, but + // we get a type error then. Similar for other methods here. + x.toTuple.splitAt(n) + + /** The tuple consisting of all elements of this tuple followed by all elements + * of tuple `that`. The names of the two tuples must be disjoint. + */ + inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) + : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] + = x.toTuple ++ that.toTuple + + /** The named tuple consisting of all element values of this tuple mapped by + * the polymorphic mapping function `f`. The names of elements are preserved. + * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. + */ + inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = + x.toTuple.map(f) + + /** The named tuple consisting of all elements of this tuple in reverse */ + inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = + x.toTuple.reverse + + /** The named tuple consisting of all elements values of this tuple zipped + * with corresponding element values in named tuple `that`. 
+ * If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The names of `x` and `that` at the same index must be the same. + * The result tuple keeps the same names as the operand tuples. + */ + inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = + x.toTuple.zip(that.toTuple) + + /** A list consisting of all element values */ + inline def toList: List[Tuple.Union[V]] = x.toTuple.toList + + /** An array consisting of all element values */ + inline def toArray: Array[Object] = x.toTuple.toArray + + /** An immutable array consisting of all element values */ + inline def toIArray: IArray[Object] = x.toTuple.toIArray + + end extension /** The names of a named tuple, represented as a tuple of literal string values. */ type Names[X <: AnyNamedTuple] <: Tuple = X match diff --git a/tests/pos/named-tuple-combinators.scala b/tests/pos/named-tuple-combinators.scala new file mode 100644 index 000000000000..a5134b2e7d26 --- /dev/null +++ b/tests/pos/named-tuple-combinators.scala @@ -0,0 +1,154 @@ +import scala.language.experimental.namedTuples + +object Test: + // original code from issue https://github.com/scala/scala3/issues/20427 + type NT = NamedTuple.Concat[(hi: Int), (bla: String)] + def foo(x: NT) = + x.hi // error + val y: (hi: Int, bla: String) = x + y.hi // ok + + // SELECTOR (reduces to apply) + def foo1(x: NT) = + val res1 = x.hi // error + summon[res1.type <:< Int] + val y: (hi: Int, bla: String) = x + val res2 = y.hi // ok + summon[res2.type <:< Int] + + // toTuple + def foo2(x: NT) = + val res1 = x.toTuple + summon[res1.type <:< (Int, String)] + val y: (hi: Int, bla: String) = x + val res2 = y.toTuple + summon[res2.type <:< (Int, String)] + + // apply + def foo3(x: NT) = + val res1 = x.apply(1) + summon[res1.type <:< String] + val y: (hi: Int, bla: String) = x + val res2 = y.apply(1) + summon[res2.type <:< String] + + // size + def foo4(x: NT) = + class Box: + final val res1 = x.size // final val constrains to a singleton type + summon[res1.type <:< 2] + val y: (hi: Int, bla: String) = x + final val res2 = y.size // final val constrains to a singleton type + summon[res2.type <:< 2] + + // head + def foo5(x: NT) = + val res1 = x.head + summon[res1.type <:< Int] + val y: (hi: Int, bla: String) = x + val res2 = y.head + summon[res2.type <:< Int] + + // last + def foo6(x: NT) = + val res1 = x.last + summon[res1.type <:< String] + val y: (hi: Int, bla: String) = x + val res2 = y.last + summon[res2.type <:< String] + + // init + def foo7(x: NT) = + val res1 = x.init + summon[res1.type <:< (hi: Int)] + val y: (hi: Int, bla: String) = x + val res2 = y.init + summon[res2.type <:< (hi: Int)] + + // tail + def foo8(x: NT) = + val res1 = x.tail + summon[res1.type <:< (bla: String)] + val y: (hi: Int, bla: String) = x + val res2 = y.tail + summon[res2.type <:< (bla: String)] + + // take + def foo9(x: NT) = + val res1 = x.take(1) + summon[res1.type <:< (hi: Int)] + val y: (hi: Int, bla: String) = x + val res2 = y.take(1) + summon[res2.type <:< (hi: Int)] + + // drop + def foo10(x: NT) = + val res1 = x.drop(1) + summon[res1.type <:< (bla: String)] + val y: (hi: Int, bla: String) = x + val res2 = y.drop(1) + summon[res2.type <:< (bla: String)] + + // splitAt + def foo11(x: NT) = + val res1 = x.splitAt(1) + summon[res1.type <:< ((hi: Int), (bla: String))] + val y: (hi: Int, bla: String) = x + val res2 = y.splitAt(1) + summon[res2.type <:< ((hi: Int), (bla: String))] + + // ++ + def foo12(x: NT) = + val res1 = x ++ 
(baz = 23) + summon[res1.type <:< (hi: Int, bla: String, baz: Int)] + val y: (hi: Int, bla: String) = x + val res2 = y ++ (baz = 23) + summon[res2.type <:< (hi: Int, bla: String, baz: Int)] + + // map + def foo13(x: NT) = + val res1 = x.map([T] => (t: T) => Option(t)) + summon[res1.type <:< (hi: Option[Int], bla: Option[String])] + val y: (hi: Int, bla: String) = x + val res2 = y.map([T] => (t: T) => Option(t)) + summon[res2.type <:< (hi: Option[Int], bla: Option[String])] + + // reverse + def foo14(x: NT) = + val res1 = x.reverse + summon[res1.type <:< (bla: String, hi: Int)] + val y: (hi: Int, bla: String) = x + val res2 = y.reverse + summon[res2.type <:< (bla: String, hi: Int)] + + // zip + def foo15(x: NT) = + val res1 = x.zip((hi = "xyz", bla = true)) + summon[res1.type <:< (hi: (Int, String), bla: (String, Boolean))] + val y: (hi: Int, bla: String) = x + val res2 = y.zip((hi = "xyz", bla = true)) + summon[res2.type <:< (hi: (Int, String), bla: (String, Boolean))] + + // toList + def foo16(x: NT) = + val res1 = x.toList + summon[res1.type <:< List[Tuple.Union[(Int, String)]]] + val y: (hi: Int, bla: String) = x + val res2 = y.toList + summon[res2.type <:< List[Tuple.Union[(Int, String)]]] + + // toArray + def foo17(x: NT) = + val res1 = x.toArray + summon[res1.type <:< Array[Object]] + val y: (hi: Int, bla: String) = x + val res2 = y.toArray + summon[res2.type <:< Array[Object]] + + // toIArray + def foo18(x: NT) = + val res1 = x.toIArray + summon[res1.type <:< IArray[Object]] + val y: (hi: Int, bla: String) = x + val res2 = y.toIArray + summon[res2.type <:< IArray[Object]] From 32e39698efd00dd3ddc3d72f62fc1f432d82d5c0 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 31 May 2024 15:46:50 +0200 Subject: [PATCH 138/827] widenDealias before extracting namedTupleElementTypes --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- tests/pos/named-tuple-selections.scala | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 tests/pos/named-tuple-selections.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 24d28aa8b4e6..c11a0d1f15d6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -756,7 +756,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return typedSelect(tree, pt, qual) // Otherwise, try to expand a named tuple selection - val namedTupleElems = qual.tpe.widen.namedTupleElementTypes + val namedTupleElems = qual.tpe.widenDealias.namedTupleElementTypes val nameIdx = namedTupleElems.indexWhere(_._1 == selName) if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then return typed( diff --git a/tests/pos/named-tuple-selections.scala b/tests/pos/named-tuple-selections.scala new file mode 100644 index 000000000000..c3569f21b323 --- /dev/null +++ b/tests/pos/named-tuple-selections.scala @@ -0,0 +1,12 @@ +import scala.language.experimental.namedTuples + +object Test1: + // original code from issue https://github.com/scala/scala3/issues/20439 + val bar = (a = 1, b = 2) + + type ThatBar = bar.type + val thatBar: ThatBar = bar + val thatBar2: bar.type = bar + + def test2 = thatBar.a // error + def test3 = thatBar2.a // ok From 0345a3c8b7fa8ae32a6f8c3636974741b743d556 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 31 May 2024 15:51:47 +0200 Subject: [PATCH 139/827] fix issue #20267 with regression test --- tests/run/named-tuples.scala | 9 +++++++++ 1 file changed, 9 insertions(+) diff 
--git a/tests/run/named-tuples.scala b/tests/run/named-tuples.scala index 32c634188d52..ddefbb4ac455 100644 --- a/tests/run/named-tuples.scala +++ b/tests/run/named-tuples.scala @@ -117,3 +117,12 @@ object Conv: def f22(x: (String, Int)) = x._1 def f22(x: String) = x f22(bob) + +object SingletonExpectedTypes: + // original code from issue https://github.com/scala/scala3/issues/20267 + type TripleSingle = ("Lausanne", 1000, 140000) + type CitySingle = (name: "Lausanne", zip: 1000, pop: 140000) + + def test = + val tripleSingle: TripleSingle = ("Lausanne", 1000, 140000) // OK + val citySingle: CitySingle = (name = "Lausanne", zip = 1000, pop = 140000) // error From cf2745d278a43f014129a496272e74be0c95b1a5 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 31 May 2024 15:59:35 +0200 Subject: [PATCH 140/827] add back in casts --- library/src/scala/NamedTuple.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index f2c435717df6..fdaa09198649 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -191,7 +191,7 @@ object NamedTupleDecomposition: * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. */ inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = - x.toTuple.map(f) + x.toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] /** The named tuple consisting of all elements of this tuple in reverse */ inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = @@ -208,7 +208,7 @@ object NamedTupleDecomposition: x.toTuple.zip(that.toTuple) /** A list consisting of all element values */ - inline def toList: List[Tuple.Union[V]] = x.toTuple.toList + inline def toList: List[Tuple.Union[V]] = x.toTuple.toList.asInstanceOf[List[Tuple.Union[V]]] /** An array consisting of all element values */ inline def toArray: Array[Object] = x.toTuple.toArray From 6e6f3bef50e8ea755440afdb625d1aeaf068ce94 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 4 Jun 2024 19:45:29 +0200 Subject: [PATCH 141/827] Fix soundness hole of forgotten reach capabilities Fixes #20503 --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 17 ++++++++++++----- tests/neg-custom-args/captures/reaches2.check | 10 ++++++++++ tests/neg-custom-args/captures/reaches2.scala | 9 +++++++++ tests/neg/i20503.scala | 6 ++++++ tests/neg/unsound-reach-2.scala | 2 +- tests/pos-custom-args/captures/reaches.scala | 4 ++-- 6 files changed, 40 insertions(+), 8 deletions(-) create mode 100644 tests/neg-custom-args/captures/reaches2.check create mode 100644 tests/neg-custom-args/captures/reaches2.scala create mode 100644 tests/neg/i20503.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index a5bb8792af2c..46a00ccaac39 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -369,11 +369,18 @@ class CheckCaptures extends Recheck, SymTransformer: inline def isVisibleFromEnv(sym: Symbol) = !isContainedInEnv(sym) // Only captured references that are visible from the environment // should be included. 
- val included = cs.filter: - case ref: TermRef => isVisibleFromEnv(ref.symbol.owner) - case ref: ThisType => isVisibleFromEnv(ref.cls) - case _ => false - capt.println(i"Include call capture $included in ${env.owner}") + val included = cs.filter: c => + c.stripReach match + case ref: TermRef => + val isVisible = isVisibleFromEnv(ref.symbol.owner) + if !isVisible && c.isReach then + // Reach capabilities that go out of scope have to be approximated + // by their underlyiong capture set. See i20503.scala. + checkSubset(CaptureSet.ofInfo(c), env.captured, pos, provenance(env)) + isVisible + case ref: ThisType => isVisibleFromEnv(ref.cls) + case _ => false + capt.println(i"Include call or box capture $included from $cs in ${env.owner}") checkSubset(included, env.captured, pos, provenance(env)) /** Include references captured by the called method in the current environment stack */ diff --git a/tests/neg-custom-args/captures/reaches2.check b/tests/neg-custom-args/captures/reaches2.check new file mode 100644 index 000000000000..504955b220ad --- /dev/null +++ b/tests/neg-custom-args/captures/reaches2.check @@ -0,0 +1,10 @@ +-- Error: tests/neg-custom-args/captures/reaches2.scala:8:10 ----------------------------------------------------------- +8 | ps.map((x, y) => compose1(x, y)) // error // error + | ^ + |reference (ps : List[(box A => A, box A => A)]) @reachCapability is not included in the allowed capture set {} + |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? A^? +-- Error: tests/neg-custom-args/captures/reaches2.scala:8:13 ----------------------------------------------------------- +8 | ps.map((x, y) => compose1(x, y)) // error // error + | ^ + |reference (ps : List[(box A => A, box A => A)]) @reachCapability is not included in the allowed capture set {} + |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? A^? diff --git a/tests/neg-custom-args/captures/reaches2.scala b/tests/neg-custom-args/captures/reaches2.scala new file mode 100644 index 000000000000..f2447b8c8795 --- /dev/null +++ b/tests/neg-custom-args/captures/reaches2.scala @@ -0,0 +1,9 @@ +class List[+A]: + def map[B](f: A -> B): List[B] = ??? 
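+// Note: `map` takes a *pure* function `A -> B`. That is what makes `mapCompose`
+// below ill-typed: the lambda passed to `map` would have to capture the reach
+// capability `ps*`, but the expected function type allows no captures (see the
+// two expected errors in reaches2.check).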
+ +def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = + z => g(f(z)) + +def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = + ps.map((x, y) => compose1(x, y)) // error // error + diff --git a/tests/neg/i20503.scala b/tests/neg/i20503.scala new file mode 100644 index 000000000000..23212667051b --- /dev/null +++ b/tests/neg/i20503.scala @@ -0,0 +1,6 @@ +import language.experimental.captureChecking +def runOps(ops: List[() => Unit]): Unit = + ops.foreach(op => op()) + +def main(): Unit = + val f: List[() => Unit] -> Unit = runOps // error diff --git a/tests/neg/unsound-reach-2.scala b/tests/neg/unsound-reach-2.scala index 27742d72557b..083cec6ee5b2 100644 --- a/tests/neg/unsound-reach-2.scala +++ b/tests/neg/unsound-reach-2.scala @@ -19,7 +19,7 @@ def bad(): Unit = var escaped: File^{backdoor*} = null withFile("hello.txt"): f => boom.use(f): // error - new Consumer[File^{backdoor*}]: + new Consumer[File^{backdoor*}]: // error def apply(f1: File^{backdoor*}) = escaped = f1 diff --git a/tests/pos-custom-args/captures/reaches.scala b/tests/pos-custom-args/captures/reaches.scala index f17c25712c39..bc222ebe8cfd 100644 --- a/tests/pos-custom-args/captures/reaches.scala +++ b/tests/pos-custom-args/captures/reaches.scala @@ -45,8 +45,8 @@ def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = def compose2[A, B, C](f: A => B, g: B => C): A => C = z => g(f(z)) -def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = - ps.map((x, y) => compose1(x, y)) // Does not work if map takes an impure function, see reaches in neg +//def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = +// ps.map((x, y) => compose1(x, y)) // Does not work, see neg-customargs/../reaches2.scala @annotation.capability class IO From 171dcd025e3c4fb5425652424ec9dda6edaaedb0 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 1 Jun 2024 16:22:15 +0200 Subject: [PATCH 142/827] Refactor adaptBoxed --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 33 ++ .../dotty/tools/dotc/cc/CheckCaptures.scala | 291 ++++++++---------- .../src/dotty/tools/dotc/core/TypeUtils.scala | 1 - 3 files changed, 154 insertions(+), 171 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 8276a0987003..c272183b6dfb 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -491,4 +491,37 @@ object ReachCapability extends AnnotatedCapability(defn.ReachCapabilityAnnot) */ object MaybeCapability extends AnnotatedCapability(defn.MaybeCapabilityAnnot) +/** An extractor for all kinds of function types as well as method and poly types. + * @return 1st half: The argument types or empty if this is a type function + * 2nd half: The result type + */ +object FunctionOrMethod: + def unapply(tp: Type)(using Context): Option[(List[Type], Type)] = tp match + case defn.FunctionOf(args, res, isContextual) => Some((args, res)) + case mt: MethodType => Some((mt.paramInfos, mt.resType)) + case mt: PolyType => Some((Nil, mt.resType)) + case defn.RefinedFunctionOf(rinfo) => unapply(rinfo) + case _ => None + +/** If `tp` is a function or method, a type of the same kind with the given + * argument and result types. 
+ */ +extension (self: Type) + def derivedFunctionOrMethod(argTypes: List[Type], resType: Type)(using Context): Type = self match + case self @ AppliedType(tycon, args) if defn.isNonRefinedFunction(self) => + val args1 = argTypes :+ resType + if args.corresponds(args1)(_ eq _) then self + else self.derivedAppliedType(tycon, args1) + case self @ defn.RefinedFunctionOf(rinfo) => + val rinfo1 = rinfo.derivedFunctionOrMethod(argTypes, resType) + if rinfo1 eq rinfo then self + else if rinfo1.isInstanceOf[PolyType] then self.derivedRefinedType(refinedInfo = rinfo1) + else rinfo1.toFunctionType(alwaysDependent = true) + case self: MethodType => + self.derivedLambdaType(paramInfos = argTypes, resType = resType) + case self: PolyType => + assert(argTypes.isEmpty) + self.derivedLambdaType(resType = resType) + case _ => + self diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 4c38de931f5f..e41f32cab672 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -886,7 +886,7 @@ class CheckCaptures extends Recheck, SymTransformer: */ override def checkConformsExpr(actual: Type, expected: Type, tree: Tree, addenda: Addenda)(using Context): Type = var expected1 = alignDependentFunction(expected, actual.stripCapturing) - val actualBoxed = adaptBoxed(actual, expected1, tree.srcPos) + val actualBoxed = adapt(actual, expected1, tree.srcPos) //println(i"check conforms $actualBoxed <<< $expected1") if actualBoxed eq actual then @@ -985,183 +985,134 @@ class CheckCaptures extends Recheck, SymTransformer: * * @param alwaysConst always make capture set variables constant after adaptation */ - def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, alwaysConst: Boolean = false)(using Context): Type = + def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, covariant: Boolean, alwaysConst: Boolean)(using Context): Type = - inline def inNestedEnv[T](boxed: Boolean)(op: => T): T = - val saved = curEnv - curEnv = Env(curEnv.owner, EnvKind.NestedInOwner, CaptureSet.Var(curEnv.owner), if boxed then null else curEnv) - try op - finally curEnv = saved - - /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) - * to `expected` type. - * It returns the adapted type along with a capture set consisting of the references - * that were additionally captured during adaptation. 
- * @param reconstruct how to rebuild the adapted function type + /** Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation + * @param boxed if true we adapt to a boxed expected type */ - def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, - covariant: Boolean, boxed: Boolean, - reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = - inNestedEnv(boxed): - val (eargs, eres) = expected.dealias.stripCapturing match - case defn.FunctionOf(eargs, eres, _) => (eargs, eres) - case expected: MethodType => (expected.paramInfos, expected.resType) - case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionNType(expected) => (rinfo.paramInfos, rinfo.resType) - case _ => (aargs.map(_ => WildcardType), WildcardType) - val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } - val ares1 = adapt(ares, eres, covariant) - - val resTp = - if (ares1 eq ares) && (aargs1 eq aargs) then actual - else reconstruct(aargs1, ares1) - - (resTp, CaptureSet(curEnv.captured.elems)) - end adaptFun - - /** Adapt type function type `actual` to the expected type. - * @see [[adaptFun]] - */ - def adaptTypeFun( - actual: Type, ares: Type, expected: Type, - covariant: Boolean, boxed: Boolean, - reconstruct: Type => Type): (Type, CaptureSet) = - inNestedEnv(boxed): - val eres = expected.dealias.stripCapturing match - case defn.PolyFunctionOf(rinfo: PolyType) => rinfo.resType - case expected: PolyType => expected.resType - case _ => WildcardType - - val ares1 = adapt(ares, eres, covariant) - - val resTp = - if ares1 eq ares then actual - else reconstruct(ares1) - - (resTp, CaptureSet(curEnv.captured.elems)) - end adaptTypeFun - - def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = - val arrow = if covariant then "~~>" else "<~~" - i"adapting $actual $arrow $expected" - - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true): - if expected.isInstanceOf[WildcardType] then actual - else - // Decompose the actual type into the inner shape type, the capture set and the box status - val styp = if actual.isFromJavaObject then actual else actual.stripCapturing - val cs = actual.captureSet - val boxed = actual.isBoxedCapturing - - // A box/unbox should be inserted, if the actual box status mismatches with the expectation - val needsAdaptation = boxed != expected.isBoxedCapturing - // Whether to insert a box or an unbox? 
- val insertBox = needsAdaptation && covariant != boxed - - // Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation - val (styp1, leaked) = styp match { - case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => - adaptFun(actual, args.init, args.last, expected, covariant, insertBox, - (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - case actual @ defn.RefinedFunctionOf(rinfo: MethodType) => - // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) - adaptFun(actual, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, - (aargs1, ares1) => - rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) - .toFunctionType(alwaysDependent = true)) - case actual: MethodType => - adaptFun(actual, actual.paramInfos, actual.resType, expected, covariant, insertBox, - (aargs1, ares1) => - actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) - case actual @ defn.RefinedFunctionOf(rinfo: PolyType) => - adaptTypeFun(actual, rinfo.resType, expected, covariant, insertBox, - ares1 => - val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) - val actual1 = actual.derivedRefinedType(refinedInfo = rinfo1) - actual1 - ) - case _ => - (styp, CaptureSet()) - } + def adaptShape(actualShape: Type, boxed: Boolean): (Type, CaptureSet) = actualShape match + case FunctionOrMethod(aargs, ares) => + val saved = curEnv + curEnv = Env( + curEnv.owner, EnvKind.NestedInOwner, + CaptureSet.Var(curEnv.owner), + if boxed then null else curEnv) + try + val (eargs, eres) = expected.dealias.stripCapturing match + case FunctionOrMethod(eargs, eres) => (eargs, eres) + case _ => (aargs.map(_ => WildcardType), WildcardType) + val aargs1 = aargs.zipWithConserve(eargs): + adaptBoxed(_, _, pos, !covariant, alwaysConst) + val ares1 = adaptBoxed(ares, eres, pos, covariant, alwaysConst) + val resTp = + if (aargs1 eq aargs) && (ares1 eq ares) then actualShape // optimize to avoid redundant matches + else actualShape.derivedFunctionOrMethod(aargs1, ares1) + (resTp, CaptureSet(curEnv.captured.elems)) + finally curEnv = saved + case _ => + (actualShape, CaptureSet()) - // Capture set of the term after adaptation - val cs1 = - if covariant then cs ++ leaked - else - if !leaked.subCaptures(cs, frozen = false).isOK then - report.error( - em"""$expected cannot be box-converted to $actual - |since the additional capture set $leaked resulted from box conversion is not allowed in $actual""", pos) - cs - - // Compute the adapted type - def adaptedType(resultBoxed: Boolean) = - if (styp1 eq styp) && leaked.isAlwaysEmpty && boxed == resultBoxed then actual - else styp1.capturing(if alwaysConst then CaptureSet(cs1.elems) else cs1).forceBoxStatus(resultBoxed) - - if needsAdaptation then - val criticalSet = // the set which is not allowed to have `cap` - if covariant then cs1 // can't box with `cap` - else expected.captureSet // can't unbox with `cap` - if criticalSet.isUniversal && expected.isValueType && !ccConfig.allowUniversalInBoxed then - // We can't box/unbox the universal capability. Leave `actual` as it is - // so we get an error in checkConforms. This tends to give better error - // messages than disallowing the root capability in `criticalSet`. - if ctx.settings.YccDebug.value then - println(i"cannot box/unbox $actual vs $expected") - actual - else - if !ccConfig.allowUniversalInBoxed then - // Disallow future addition of `cap` to `criticalSet`. 
- criticalSet.disallowRootCapability { () => - report.error( - em"""$actual cannot be box-converted to $expected - |since one of their capture sets contains the root capability `cap`""", - pos) - } - if !insertBox then // unboxing - //debugShowEnvs() - markFree(criticalSet, pos) - adaptedType(!boxed) + def adaptStr = i"adapting $actual ${if covariant then "~~>" else "<~~"} $expected" + + if expected.isInstanceOf[WildcardType] then actual + else trace(adaptStr, recheckr, show = true): + // Decompose the actual type into the inner shape type, the capture set and the box status + val actualShape = if actual.isFromJavaObject then actual else actual.stripCapturing + val actualIsBoxed = actual.isBoxedCapturing + + // A box/unbox should be inserted, if the actual box status mismatches with the expectation + val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing + // Whether to insert a box or an unbox? + val insertBox = needsAdaptation && covariant != actualIsBoxed + + // Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation + val (adaptedShape, leaked) = adaptShape(actualShape, insertBox) + + // Capture set of the term after adaptation + val captures = + val cs = actual.captureSet + if covariant then cs ++ leaked + else + if !leaked.subCaptures(cs, frozen = false).isOK then + report.error( + em"""$expected cannot be box-converted to $actual + |since the additional capture set $leaked resulted from box conversion is not allowed in $actual""", pos) + cs + + // Compute the adapted type + def adaptedType(resultBoxed: Boolean) = + if (adaptedShape eq actualShape) && leaked.isAlwaysEmpty && actualIsBoxed == resultBoxed + then actual + else adaptedShape + .capturing(if alwaysConst then CaptureSet(captures.elems) else captures) + .forceBoxStatus(resultBoxed) + + if needsAdaptation then + val criticalSet = // the set which is not allowed to have `cap` + if covariant then captures // can't box with `cap` + else expected.captureSet // can't unbox with `cap` + if criticalSet.isUniversal && expected.isValueType && !ccConfig.allowUniversalInBoxed then + // We can't box/unbox the universal capability. Leave `actual` as it is + // so we get an error in checkConforms. This tends to give better error + // messages than disallowing the root capability in `criticalSet`. + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") + actual else - adaptedType(boxed) - end adapt + if !ccConfig.allowUniversalInBoxed then + // Disallow future addition of `cap` to `criticalSet`. + criticalSet.disallowRootCapability { () => + report.error( + em"""$actual cannot be box-converted to $expected + |since one of their capture sets contains the root capability `cap`""", + pos) + } + if !insertBox then // unboxing + //debugShowEnvs() + markFree(criticalSet, pos) + adaptedType(!actualIsBoxed) + else + adaptedType(actualIsBoxed) + end adaptBoxed - /** If result derives from caps.Capability, yet is not a capturing type itself, - * make its capture set explicit. - */ - def makeCaptureSetExplicit(result: Type) = result match - case CapturingType(_, _) => result - case _ => - if result.derivesFromCapability then - val cap: CaptureRef = actual match - case ref: CaptureRef if ref.isTracked => - ref - case _ => - defn.captureRoot.termRef // TODO: skolemize? - CapturingType(result, cap.singletonCaptureSet) - else result + /** If actual derives from caps.Capability, yet is not a capturing type itself, + * make its capture set explicit. 
+ */ + private def makeCaptureSetExplicit(actual: Type)(using Context): Type = actual match + case CapturingType(_, _) => actual + case _ if actual.derivesFromCapability => + val cap: CaptureRef = actual match + case ref: CaptureRef if ref.isTracked => ref + case _ => defn.captureRoot.termRef // TODO: skolemize? + CapturingType(actual, cap.singletonCaptureSet) + case _ => actual + + /** If actual is a tracked CaptureRef `a` and widened is a capturing type T^C, + * improve `T^C` to `T^{a}`, following the VAR rule of CC. + */ + private def improveCaptures(widened: Type, actual: Type)(using Context): Type = actual match + case ref: CaptureRef if ref.isTracked => + widened match + case CapturingType(p, refs) if ref.singletonCaptureSet.mightSubcapture(refs) => + widened.derivedCapturingType(p, ref.singletonCaptureSet) + .showing(i"improve $widened to $result", capt) + case _ => widened + case _ => widened + /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions + * + * @param alwaysConst always make capture set variables constant after adaptation + */ + def adapt(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = if expected == LhsProto || expected.isSingleton && actual.isSingleton then actual else - var actualw = actual.widenDealias - actual match - case ref: CaptureRef if ref.isTracked => - actualw match - case CapturingType(p, refs) if ref.singletonCaptureSet.mightSubcapture(refs) => - actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) - .showing(i"improve $actualw to $result", capt) - // given `a: T^C`, improve `T^C` to `T^{a}` - case _ => - case _ => - val adapted = adapt(actualw.withReachCaptures(actual), expected, covariant = true) - makeCaptureSetExplicit: - if adapted ne actualw then - capt.println(i"adapt boxed $actual vs $expected ===> $adapted") - adapted - else - actual - end adaptBoxed + val normalized = makeCaptureSetExplicit(actual) + val widened = improveCaptures(normalized.widenDealias, actual) + val adapted = adaptBoxed(widened.withReachCaptures(actual), expected, pos, covariant = true, alwaysConst = false) + if adapted eq widened then normalized + else adapted.showing(i"adapt boxed $actual vs $expected ===> $adapted", capt) + end adapt /** Check overrides again, taking capture sets into account. * TODO: Can we avoid doing overrides checks twice? 
@@ -1180,7 +1131,7 @@ class CheckCaptures extends Recheck, SymTransformer: val saved = curEnv try curEnv = Env(clazz, EnvKind.NestedInOwner, capturedVars(clazz), outer0 = curEnv) - val adapted = adaptBoxed(actual, expected1, srcPos, alwaysConst = true) + val adapted = adaptBoxed(actual, expected1, srcPos, covariant = true, alwaysConst = true) actual match case _: MethodType => // We remove the capture set resulted from box adaptation for method types, diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index afc2cc39f9cf..beacf15e4afe 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -189,6 +189,5 @@ class TypeUtils: def stripRefinement: Type = self match case self: RefinedOrRecType => self.parent.stripRefinement case seld => self - end TypeUtils From 16b33a922a1ec8c96c9b0d07bc786211dcd336a8 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 5 Jun 2024 15:10:46 +0200 Subject: [PATCH 143/827] Mark AppliedType cachedSuper valid Nowhere when using provisional args --- compiler/src/dotty/tools/dotc/core/Types.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ca4834558d9a..ea2774b81c85 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4643,7 +4643,9 @@ object Types extends TypeUtils { cachedSuper = tycon match case tycon: HKTypeLambda => defn.AnyType case tycon: TypeRef if tycon.symbol.isClass => tycon - case tycon: TypeProxy => tycon.superType.applyIfParameterized(args) + case tycon: TypeProxy => + if validSuper != Nowhere && args.exists(_.isProvisional) then validSuper = Nowhere + tycon.superType.applyIfParameterized(args) case _ => defn.AnyType cachedSuper From 59b0f3ae413556ea537c442bb25f93b2a8487245 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 5 Jun 2024 15:11:06 +0200 Subject: [PATCH 144/827] Reclassify test --- .../typeclass-encoding3b.scala} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename tests/{neg/typeclass-encoding3.scala => pos/typeclass-encoding3b.scala} (99%) diff --git a/tests/neg/typeclass-encoding3.scala b/tests/pos/typeclass-encoding3b.scala similarity index 99% rename from tests/neg/typeclass-encoding3.scala rename to tests/pos/typeclass-encoding3b.scala index ff403314cd1a..84db4c4b5045 100644 --- a/tests/neg/typeclass-encoding3.scala +++ b/tests/pos/typeclass-encoding3b.scala @@ -345,5 +345,5 @@ object functors { } MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5))) // ok, synthesizes (using ListMonad) - MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5)))(using ListMonad) // error + MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5)))(using ListMonad) } \ No newline at end of file From 9ee8c5d253caa0efd6a7a77025585e928be34075 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 6 Jun 2024 14:24:15 +0200 Subject: [PATCH 145/827] Fix error related to reaches --- tests/neg/i20503.scala | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/neg/i20503.scala b/tests/neg/i20503.scala index 23212667051b..7a1bffcff529 100644 --- a/tests/neg/i20503.scala +++ b/tests/neg/i20503.scala @@ -1,6 +1,16 @@ import language.experimental.captureChecking + +class List[+A]: + def head: A = ??? + def tail: List[A] = ??? + def map[B](f: A => B): List[B] = ??? + def foreach[U](f: A => U): Unit = ??? 
+ def nonEmpty: Boolean = ??? + def runOps(ops: List[() => Unit]): Unit = - ops.foreach(op => op()) + // See i20156, due to limitation in expressiveness of current system, + // we cannot map over the list of impure elements. + ops.foreach(op => op()) // error def main(): Unit = val f: List[() => Unit] -> Unit = runOps // error From eaa673d5ca2cea3d0c5671898db99b6059c32fe6 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 6 Jun 2024 22:20:53 +0200 Subject: [PATCH 146/827] Add explanation doc --- compiler/src/dotty/tools/dotc/core/Types.scala | 6 +++++- tests/pos/typeclass-encoding3b.scala | 9 ++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ea2774b81c85..b9379300344e 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4644,7 +4644,11 @@ object Types extends TypeUtils { case tycon: HKTypeLambda => defn.AnyType case tycon: TypeRef if tycon.symbol.isClass => tycon case tycon: TypeProxy => - if validSuper != Nowhere && args.exists(_.isProvisional) then validSuper = Nowhere + if validSuper != Nowhere && args.exists(_.isProvisional) then + // applyIfParameterized may perform eta-reduction leading to different + // variance annotations depending on the instantiation of type params + // see tests/pos/typeclass-encoding3b.scala:348 for an example + validSuper = Nowhere tycon.superType.applyIfParameterized(args) case _ => defn.AnyType cachedSuper diff --git a/tests/pos/typeclass-encoding3b.scala b/tests/pos/typeclass-encoding3b.scala index 84db4c4b5045..2d5111c4313b 100644 --- a/tests/pos/typeclass-encoding3b.scala +++ b/tests/pos/typeclass-encoding3b.scala @@ -345,5 +345,12 @@ object functors { } MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5))) // ok, synthesizes (using ListMonad) - MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5)))(using ListMonad) + MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5)))(using ListMonad) // was an error + /* + When checking `ListMonad <:< functors.Monad.Impl[T]` + we eventually get to the comparison `[X] =>> T[X] <:< [+X] =>> List[X]` + because the `This` type member of `ListMonad` has a covariance annotation. + This fails the variance conformance checks despite the fact that T has been instantiated to List, + since it has been substituted into the refinement (and cached) before its instantiation. 
+ */ } \ No newline at end of file From ee5481408b09e95ce9becaea510be8687ff27c87 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 6 Jun 2024 22:22:24 +0200 Subject: [PATCH 147/827] Keep `validUnderlyingMatch` inline with `validSuper` --- compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index b9379300344e..bd3fa6e6a3dd 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4677,8 +4677,8 @@ object Types extends TypeUtils { */ override def underlyingMatchType(using Context): Type = if ctx.period != validUnderlyingMatch then - validUnderlyingMatch = if tycon.isProvisional then Nowhere else ctx.period cachedUnderlyingMatch = superType.underlyingMatchType + validUnderlyingMatch = validSuper cachedUnderlyingMatch override def tryNormalize(using Context): Type = tycon.stripTypeVar match { From 81a118488124bbe009556be5e9a54910de544e67 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 7 Jun 2024 11:03:49 +0200 Subject: [PATCH 148/827] Amend doc --- tests/pos/typeclass-encoding3b.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/pos/typeclass-encoding3b.scala b/tests/pos/typeclass-encoding3b.scala index 2d5111c4313b..8ff416728718 100644 --- a/tests/pos/typeclass-encoding3b.scala +++ b/tests/pos/typeclass-encoding3b.scala @@ -347,10 +347,10 @@ object functors { MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5))) // ok, synthesizes (using ListMonad) MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5)))(using ListMonad) // was an error /* - When checking `ListMonad <:< functors.Monad.Impl[T]` - we eventually get to the comparison `[X] =>> T[X] <:< [+X] =>> List[X]` + Before the changes, when checking `ListMonad <:< functors.Monad.Impl[T]` + we eventually got to the comparison `[X] =>> T[X] <:< [+X] =>> List[X]` because the `This` type member of `ListMonad` has a covariance annotation. - This fails the variance conformance checks despite the fact that T has been instantiated to List, - since it has been substituted into the refinement (and cached) before its instantiation. + This failed the variance conformance checks despite the fact that T had been instantiated to List, + since it had been substituted into the refinement (and cached) before its instantiation. */ } \ No newline at end of file From e814569e30f584a749cf59ad6aa9728d5adb2788 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sat, 8 Jun 2024 19:47:34 +0100 Subject: [PATCH 149/827] Add reusable workflow for SDKMAN! release --- .github/workflows/release-sdkman.yml | 69 ++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 .github/workflows/release-sdkman.yml diff --git a/.github/workflows/release-sdkman.yml b/.github/workflows/release-sdkman.yml new file mode 100644 index 000000000000..270b4d3feb9b --- /dev/null +++ b/.github/workflows/release-sdkman.yml @@ -0,0 +1,69 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO PUBLISH SCALA TO SDKMAN! ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL PUBLISH TO SDKMAN! THE BINARIES TO EACH SUPPORTED PLATFORM AND A UNIVERSAL JAR ### +### - IT CHANGES THE DEFAULT VERSION IN SDKMAN! 
### +### ### +### NOTE: ### +### - WE SHOULD KEEP IN SYNC THE NAME OF THE ARCHIVES WITH THE ACTUAL BUILD ### +### - WE SHOULD KEEP IN SYNC THE URL OF THE RELEASE ### +################################################################################################### + + +name: Release Scala to SDKMAN! +run-name: Release Scala ${{ inputs.version }} to SDKMAN! + +on: + workflow_call: + inputs: + version: + required: true + type: string + secrets: + CONSUMER-KEY: + required: true + CONSUMER-TOKEN: + required: true + +env: + RELEASE-URL: 'https://github.com/scala/scala3/releases/download/${{ inputs.version }}' + +jobs: + publish: + runs-on: ubuntu-latest + strategy: + matrix: + include: + - platform: LINUX_64 + archive : 'scala3-${{ inputs.version }}-x86_64-pc-linux.tar.gz' + - platform: LINUX_ARM64 + archive : 'scala3-${{ inputs.version }}-aarch64-pc-linux.tar.gz' + - platform: MAC_OSX + archive : 'scala3-${{ inputs.version }}-x86_64-apple-darwin.tar.gz' + - platform: MAC_ARM64 + archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.tar.gz' + - platform: WINDOWS_64 + archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.tar.gz' + - platform: UNIVERSAL + archive : 'scala3-${{ inputs.version }}.zip' + steps: + - uses: hamzaremmal/sdkman-release-action@main # TODO: Make a release of the action and configure the version here + with: + CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} + CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} + CANDIDATE : scala + VERSION : ${{ inputs.version }} + URL : '${{ env.RELEASE-URL }}/${{ matrix.archive }}' + PLATFORM : ${{ matrix.platform }} + + default: + runs-on: ubuntu-latest + needs: publish + steps: + - uses: hamzaremmal/sdkman-default-action@main # TODO: Make a release of the action and configure the version here + with: + CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} + CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} + CANDIDATE : scala + VERSION : ${{ inputs.version }} From 857bf12e00aaca8cc337f94e83207001a884ca69 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sat, 8 Jun 2024 20:13:54 +0100 Subject: [PATCH 150/827] Amend the release workflow to use release-sdkman --- .github/workflows/releases.yml | 36 ++++----------- .github/workflows/scripts/publish-sdkman.sh | 50 --------------------- 2 files changed, 9 insertions(+), 77 deletions(-) delete mode 100755 .github/workflows/scripts/publish-sdkman.sh diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index dde8b0372d52..fd883edd5a3d 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -2,31 +2,13 @@ name: Releases on: workflow_dispatch: -permissions: - contents: read - jobs: - publish_release: - runs-on: [self-hosted, Linux] - container: - image: lampepfl/dotty:2021-03-22 - options: --cpu-shares 4096 - - env: - SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} - SDKMAN_TOKEN: ${{ secrets.SDKMAN_TOKEN }} - - steps: - - name: Reset existing repo - run: | - git config --global --add safe.directory /__w/dotty/dotty - git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - - - name: Cleanup - run: .github/workflows/cleanup.sh - - - name: Git Checkout - uses: actions/checkout@v4 - - - name: Publish to SDKMAN - run: .github/workflows/scripts/publish-sdkman.sh + publish-sdkman: + uses: ./.github/workflows/release-sdkman.yml + with: + version: '???' # TODO: Find a way to extract the version number. 
Easiest way would be to add it as an input + secrets: + CONSUMER-KEY: ${{ secrets.SDKMAN_KEY }} + CONSUMER-TOKEN: ${{ secrets.SDKMAN_TOKEN }} + + # TODO: ADD RELEASE WORKFLOW TO CHOCOLATEY AND OTHER PACKAGE MANAGERS HERE \ No newline at end of file diff --git a/.github/workflows/scripts/publish-sdkman.sh b/.github/workflows/scripts/publish-sdkman.sh deleted file mode 100755 index f959c426e9d8..000000000000 --- a/.github/workflows/scripts/publish-sdkman.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash - -# This is script for publishing scala on SDKMAN. -# Script resolves the latest stable version of scala and then send REST request to SDKMAN Vendor API. -# It's releasing and announcing the release of scala on SDKMAN. -# -# Requirement: -# - the latest stable version of scala should be available in github artifacts - -set -u - -# latest stable dotty version -DOTTY_VERSION=$(curl -s https://api.github.com/repos/scala/scala3/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') -DOTTY_URL="https://github.com/scala/scala3/releases/download/$DOTTY_VERSION/scala3-$DOTTY_VERSION.zip" - -# checking if dotty version is available -if ! curl --output /dev/null --silent --head --fail "$DOTTY_URL"; then - echo "URL doesn't exist: $DOTTY_URL" - exit 1 -fi - -# Release a new Candidate Version -curl --silent --show-error --fail \ - -X POST \ - -H "Consumer-Key: $SDKMAN_KEY" \ - -H "Consumer-Token: $SDKMAN_TOKEN" \ - -H "Content-Type: application/json" \ - -H "Accept: application/json" \ - -d '{"candidate": "scala", "version": "'"$DOTTY_VERSION"'", "url": "'"$DOTTY_URL"'"}' \ - https://vendors.sdkman.io/release - -if [[ $? -ne 0 ]]; then - echo "Fail sending POST request to releasing scala on SDKMAN." - exit 1 -fi - -# Set DOTTY_VERSION as Default for Candidate -curl --silent --show-error --fail \ - -X PUT \ - -H "Consumer-Key: $SDKMAN_KEY" \ - -H "Consumer-Token: $SDKMAN_TOKEN" \ - -H "Content-Type: application/json" \ - -H "Accept: application/json" \ - -d '{"candidate": "scala", "version": "'"$DOTTY_VERSION"'"}' \ - https://vendors.sdkman.io/default - -if [[ $? -ne 0 ]]; then - echo "Fail sending PUT request to announcing the release of scala on SDKMAN." - exit 1 -fi From 2a46134884b60b0e985f59b50240422e566dec1d Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sat, 8 Jun 2024 20:48:33 +0100 Subject: [PATCH 151/827] Prepare the official release workflow --- ...{release-sdkman.yml => publish-sdkman.yml} | 4 +-- .github/workflows/releases.yml | 25 ++++++++++++++++--- 2 files changed, 24 insertions(+), 5 deletions(-) rename .github/workflows/{release-sdkman.yml => publish-sdkman.yml} (97%) diff --git a/.github/workflows/release-sdkman.yml b/.github/workflows/publish-sdkman.yml similarity index 97% rename from .github/workflows/release-sdkman.yml rename to .github/workflows/publish-sdkman.yml index 270b4d3feb9b..5d6744dd74e9 100644 --- a/.github/workflows/release-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -11,8 +11,8 @@ ################################################################################################### -name: Release Scala to SDKMAN! -run-name: Release Scala ${{ inputs.version }} to SDKMAN! +name: Publish Scala to SDKMAN! +run-name: Publish Scala ${{ inputs.version }} to SDKMAN! 
on: workflow_call: diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index fd883edd5a3d..4b75dd1b737d 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -1,12 +1,31 @@ -name: Releases +################################################################################################### +### OFFICIAL RELEASE WORKFLOW ### +### HOW TO USE: ### +### - THIS WORKFLOW WILL NEED TO BE TRIGGERED MANUALLY ### +### ### +### NOTE: ### +### - THIS WORKFLOW SHOULD ONLY BE RUN ON STABLE RELEASES ### +### - IT ASSUMES THAT THE PRE-RELEASE WORKFLOW WAS PREVIOUSLY EXECUTED ### +### ### +################################################################################################### + +name: Official release of Scala +run-name: Official release of Scala ${{ inputs.version }} + on: workflow_dispatch: + inputs: + version: + description: 'The version to officially release' + required: true + type: string jobs: + # TODO: ADD JOB TO SWITCH THE GITHUB RELEASE FROM DRAFT TO LATEST publish-sdkman: - uses: ./.github/workflows/release-sdkman.yml + uses: ./.github/workflows/publish-sdkman.yml with: - version: '???' # TODO: Find a way to extract the version number. Easiest way would be to add it as an input + version: ${{ inputs.version }} secrets: CONSUMER-KEY: ${{ secrets.SDKMAN_KEY }} CONSUMER-TOKEN: ${{ secrets.SDKMAN_TOKEN }} From 29dc892fb2444bc52530a1d992eacd1ff97b2d67 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 10 Jun 2024 13:28:40 +0200 Subject: [PATCH 152/827] fix: insert missing members in correct place for case classes --- .../pc/completions/OverrideCompletions.scala | 2 ++ .../AutoImplementAbstractMembersSuite.scala | 29 +++++++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala index df0bb70b596c..a1edbcaa0381 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala @@ -506,6 +506,8 @@ object OverrideCompletions: defn match case td: TypeDef if text.charAt(td.rhs.span.end) == ':' => Some(td.rhs.span.end) + case TypeDef(_, temp : Template) => + temp.parentsOrDerived.lastOption.map(_.span.end).filter(text.charAt(_) == ':') case _ => None private def fallbackFromParent(parent: Tree, name: String)(using Context) = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala index 04c3f8a018e9..1742913d0923 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala @@ -1272,6 +1272,35 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: |""".stripMargin, ) + @Test def `braceless-case-class` = + checkEdit( + """|package a + | + |trait Base: + | def foo(x: Int): Int + | def bar(x: String): String + | + |case class <>() extends Base: + | def aaa = "aaa" + |end Concrete + |""".stripMargin, + """|package a + | + |trait Base: + | def foo(x: Int): Int + | def bar(x: String): String + | + |case class Concrete() extends Base: + | + | override def bar(x: String): String = ??? + | + | override def foo(x: Int): Int = ??? 
+ | + | def aaa = "aaa" + |end Concrete + |""".stripMargin + ) + def checkEdit( original: String, expected: String From d38999965453821cb2c3e91509267d40706aa61c Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 10 Jun 2024 15:02:38 +0200 Subject: [PATCH 153/827] fix: show implicit param when it is an apply --- .../dotty/tools/pc/PcInlayHintsProvider.scala | 16 +++++++++------ .../pc/tests/inlayHints/InlayHintsSuite.scala | 20 +++++++++++++++++++ 2 files changed, 30 insertions(+), 6 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala index c4fdb97c0418..b3f836801460 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala @@ -3,6 +3,8 @@ package dotty.tools.pc import java.nio.file.Paths +import scala.annotation.tailrec + import scala.meta.internal.metals.ReportContext import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.printer.ShortenedTypePrinter @@ -194,10 +196,10 @@ object ImplicitConversion: def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = if (params.implicitConversions()) { tree match - case Apply(fun: Ident, args) if isSynthetic(fun) => + case Apply(fun: Ident, args) if isSynthetic(fun) && args.exists(!_.span.isZeroExtent) => implicitConversion(fun, args) case Apply(Select(fun, name), args) - if name == nme.apply && isSynthetic(fun) => + if name == nme.apply && isSynthetic(fun) && args.exists(!_.span.isZeroExtent) => implicitConversion(fun, args) case _ => None } else None @@ -218,7 +220,7 @@ object ImplicitParameters: if (params.implicitParameters()) { tree match case Apply(fun, args) - if args.exists(isSyntheticArg) && !tree.sourcePos.span.isZeroExtent => + if args.exists(isSyntheticArg) && !tree.sourcePos.span.isZeroExtent && !args.exists(isQuotes(_)) => val (implicitArgs, providedArgs) = args.partition(isSyntheticArg) val allImplicit = providedArgs.isEmpty || providedArgs.forall { case Ident(name) => name == nme.MISSING @@ -229,10 +231,12 @@ object ImplicitParameters: case _ => None } else None - private def isSyntheticArg(tree: Tree)(using Context) = tree match + @tailrec + def isSyntheticArg(tree: Tree)(using Context): Boolean = tree match case tree: Ident => - tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) && - !isQuotes(tree) + tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) + case Apply(fun, _ ) if tree.span.isZeroExtent => isSyntheticArg(fun) + case TypeApply(fun, _ ) if tree.span.isZeroExtent => isSyntheticArg(fun) case _ => false // Decorations for Quotes are rarely useful diff --git a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala index 8ce7cdce4382..fac30bc757b7 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala @@ -920,4 +920,24 @@ class InlayHintsSuite extends BaseInlayHintsSuite { | case '[field *: fields] => ??? 
|""".stripMargin ) + + @Test def `arg-apply` = + check( + """|object Main: + | case class A() + | case class B[T]() + | given A = A() + | implicit def bar(using a: A): B[A] = B[A]() + | def foo(using b: B[A]): String = "aaa" + | val g: String = foo + |""".stripMargin, + """|object Main: + | case class A() + | case class B[T]() + | given A = A() + | implicit def bar(using a: A): B[A] = B[A]() + | def foo(using b: B[A]): String = "aaa" + | val g: String = foo/*(using bar<<(5:15)>>)*/ + |""".stripMargin + ) } From 284e7ccfb404b2133ae34c5d682ae4c61ceb7fb4 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 10 Jun 2024 15:03:18 +0200 Subject: [PATCH 154/827] fix: handle implicit params in extract method --- .../tools/pc/ExtractMethodProvider.scala | 22 +++-- .../pc/tests/edit/ExtractMethodSuite.scala | 92 +++++++++++++++++++ 2 files changed, 108 insertions(+), 6 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala index 4416d0c0d000..c72a0602f1ce 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala @@ -13,6 +13,7 @@ import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.tpd.DeepFolder import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.Types.MethodType import dotty.tools.dotc.core.Types.PolyType @@ -116,9 +117,15 @@ final class ExtractMethodProvider( typeParams.toList.sortBy(_.decodedName), ) end localRefs + val optEnclosing = + path.dropWhile(src => !src.sourcePos.encloses(range)) match + case Nil => None + case _ :: (app @ Apply(fun, args)) :: _ if args.exists(ImplicitParameters.isSyntheticArg(_)) => Some(app) + case found :: _ => Some(found) + val edits = for - enclosing <- path.find(src => src.sourcePos.encloses(range)) + enclosing <- optEnclosing extracted = extractFromBlock(enclosing) head <- extracted.headOption expr <- extracted.lastOption @@ -131,11 +138,14 @@ final class ExtractMethodProvider( val exprType = prettyPrint(expr.typeOpt.widen) val name = genName(indexedCtx.scopeSymbols.map(_.decodedName).toSet, "newMethod") - val (methodParams, typeParams) = + val (allMethodParams, typeParams) = localRefs(extracted, stat.sourcePos, extractedPos) - val methodParamsText = methodParams - .map(sym => s"${sym.decodedName}: ${prettyPrint(sym.info)}") - .mkString(", ") + val (methodParams, implicitParams) = allMethodParams.partition(!_.isOneOf(Flags.GivenOrImplicit)) + def toParamText(params: List[Symbol]) = + params.map(sym => s"${sym.decodedName}: ${prettyPrint(sym.info)}") + .mkString(", ") + val methodParamsText = toParamText(methodParams) + val implicitParamsText = if implicitParams.nonEmpty then s"(given ${toParamText(implicitParams)})" else "" val typeParamsText = typeParams .map(_.decodedName) match case Nil => "" @@ -155,7 +165,7 @@ final class ExtractMethodProvider( if noIndent && extracted.length > 1 then (" {", s"$newIndent}") else ("", "") val defText = - s"def $name$typeParamsText($methodParamsText): $exprType =$obracket\n${toExtract}\n$cbracket\n$newIndent" + s"def $name$typeParamsText($methodParamsText)$implicitParamsText: $exprType =$obracket\n${toExtract}\n$cbracket\n$newIndent" val replacedText = s"$name($exprParamsText)" List( new l.TextEdit( diff --git 
a/presentation-compiler/test/dotty/tools/pc/tests/edit/ExtractMethodSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/ExtractMethodSuite.scala index 2bb896660123..bc8b91fed5e8 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/ExtractMethodSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/ExtractMethodSuite.scala @@ -446,3 +446,95 @@ class ExtractMethodSuite extends BaseExtractMethodSuite: | } |}""".stripMargin ) + + @Test def `i6476` = + checkEdit( + """|object O { + | class C + | def foo(i: Int)(implicit o: C) = i + | + | @@val o = { + | implicit val c = new C + | <> + | ??? + | } + |} + |""".stripMargin, + """|object O { + | class C + | def foo(i: Int)(implicit o: C) = i + | + | def newMethod()(given c: C): Int = + | foo(2) + | + | val o = { + | implicit val c = new C + | newMethod() + | ??? + | } + |} + |""".stripMargin + ) + + + @Test def `i6476-2` = + checkEdit( + """|object O { + | class C + | def foo(i: Int)(implicit o: C) = i + | + | @@val o = { + | <> + | ??? + | } + |} + |""".stripMargin, + """|object O { + | class C + | def foo(i: Int)(implicit o: C) = i + | + | def newMethod(): Int = + | foo(2)(new C) + | + | val o = { + | newMethod() + | ??? + | } + |} + |""".stripMargin + ) + + @Test def `i6476-3` = + checkEdit( + """|object O { + | class C + | class D + | def foo(i: Int)(using o: C)(x: Int)(using d: D) = i + | + | @@val o = { + | given C = new C + | given D = new D + | val w = 2 + | <> + | ??? + | } + |} + |""".stripMargin, + """|object O { + | class C + | class D + | def foo(i: Int)(using o: C)(x: Int)(using d: D) = i + | + | def newMethod(w: Int)(given given_C: C, given_D: D): Int = + | foo(w)(w) + | + | val o = { + | given C = new C + | given D = new D + | val w = 2 + | newMethod(w) + | ??? 
+ | } + |} + |""".stripMargin + ) From a4c7e4a3dcf5b09c11ca9a7399d66fe036bcf68f Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 10 Jun 2024 15:17:11 +0200 Subject: [PATCH 155/827] fix: correctly auto import when there is a renamed symbol with the same name in scope --- .../main/dotty/tools/pc/IndexedContext.scala | 4 +-- .../pc/tests/completion/CompletionSuite.scala | 31 +++++++++++++++++++ .../pc/tests/edit/AutoImportsSuite.scala | 31 +++++++++++++++++++ 3 files changed, 64 insertions(+), 2 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala index 6b74e3aa2ec1..7c2c34cf5ebb 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala @@ -36,8 +36,8 @@ sealed trait IndexedContext: Result.InScope // when all the conflicting symbols came from an old version of the file case Some(symbols) if symbols.nonEmpty && symbols.forall(_.isStale) => Result.Missing - case Some(_) => Result.Conflict - case None => Result.Missing + case Some(symbols) if symbols.exists(rename(_).isEmpty) => Result.Conflict + case _ => Result.Missing end lookupSym /** diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 03c4fa2bc5bc..f660baa6af6d 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -1952,3 +1952,34 @@ class CompletionSuite extends BaseCompletionSuite: """TestEnum test |""".stripMargin, ) + + @Test def `i6477-1` = + checkEdit( + """|package a + |import a.b.SomeClass as SC + | + |package b { + | class SomeClass + |} + |package c { + | class SomeClass + |} + | + |val bar: SC = ??? + |val foo: SomeClass@@ + |""".stripMargin, + """|package a + |import a.b.SomeClass as SC + |import a.c.SomeClass + | + |package b { + | class SomeClass + |} + |package c { + | class SomeClass + |} + | + |val bar: SC = ??? + |val foo: SomeClass + |""".stripMargin, + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala index a862df975d0b..ce5ae4a1cca4 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala @@ -405,6 +405,37 @@ class AutoImportsSuite extends BaseAutoImportsSuite: |""".stripMargin, ) + @Test def `i6477` = + checkEdit( + """|package a + |import a.b.SomeClass as SC + | + |package b { + | class SomeClass + |} + |package c { + | class SomeClass + |} + | + |val bar: SC = ??? + |val foo: <> = ??? + |""".stripMargin, + """|package a + |import a.b.SomeClass as SC + |import a.c.SomeClass + | + |package b { + | class SomeClass + |} + |package c { + | class SomeClass + |} + | + |val bar: SC = ??? + |val foo: SomeClass = ??? + |""".stripMargin + ) + private def ammoniteWrapper(code: String): String = // Vaguely looks like a scala file that Ammonite generates // from a sc file. 
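[editorial note] The IndexedContext.lookupSym change in PATCH 155 above reports Result.Conflict only when at least one same-named symbol in scope is not renamed; if every such symbol is renamed (or stale), the name is treated as Missing and can safely be auto-imported. A minimal Scala sketch of the user-facing scenario, taken from the new i6477 tests in this patch (illustrative only, not part of the applied diff):

    package a

    import a.b.SomeClass as SC   // SomeClass is in scope only under the rename SC

    package b { class SomeClass }
    package c { class SomeClass }

    val bar: SC = ???
    val foo: SomeClass = ???     // no conflict with the renamed import, so the
                                 // auto-import provider can add `import a.c.SomeClass`
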
From 9aec96dd81af2b7823b8306c5400610da1e24720 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Mon, 10 Jun 2024 18:22:19 +0200 Subject: [PATCH 156/827] Fall back to direct subtype comparison at the end of dropIfSuper and dropIfSub --- .../dotty/tools/dotc/core/TypeComparer.scala | 16 +- tests/pos/i20516.scala | 205 ++++++++++++++++++ 2 files changed, 207 insertions(+), 14 deletions(-) create mode 100644 tests/pos/i20516.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 6e360faa322d..d248d2e00b0d 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2535,36 +2535,24 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** If some (&-operand of) `tp` is a supertype of `sub` replace it with `NoType`. */ private def dropIfSuper(tp: Type, sub: Type): Type = - - def isSuperOf(sub: Type): Boolean = sub match - case AndType(sub1, sub2) => isSuperOf(sub1) || isSuperOf(sub2) - case sub: TypeVar if sub.isInstantiated => isSuperOf(sub.instanceOpt) - case _ => isSubTypeWhenFrozen(sub, tp) - tp match case tp @ AndType(tp1, tp2) => recombine(dropIfSuper(tp1, sub), dropIfSuper(tp2, sub), tp) case tp: TypeVar if tp.isInstantiated => dropIfSuper(tp.instanceOpt, sub) case _ => - if isSuperOf(sub) then NoType else tp + if isSubTypeWhenFrozen(sub, tp) then NoType else tp end dropIfSuper /** If some (|-operand of) `tp` is a subtype of `sup` replace it with `NoType`. */ private def dropIfSub(tp: Type, sup: Type, canConstrain: Boolean): Type = - - def isSubOf(sup: Type): Boolean = sup match - case OrType(sup1, sup2) => isSubOf(sup1) || isSubOf(sup2) - case sup: TypeVar if sup.isInstantiated => isSubOf(sup.instanceOpt) - case _ => isSubType(tp, sup, whenFrozen = !canConstrain) - tp match case tp @ OrType(tp1, tp2) => recombine(dropIfSub(tp1, sup, canConstrain), dropIfSub(tp2, sup, canConstrain), tp) case tp: TypeVar if tp.isInstantiated => dropIfSub(tp.instanceOpt, sup, canConstrain) case _ => - if isSubOf(sup) then NoType else tp + if isSubType(tp, sup, whenFrozen = !canConstrain) then NoType else tp end dropIfSub /** There's a window of vulnerability between ElimByName and Erasure where some diff --git a/tests/pos/i20516.scala b/tests/pos/i20516.scala new file mode 100644 index 000000000000..ff755177bda8 --- /dev/null +++ b/tests/pos/i20516.scala @@ -0,0 +1,205 @@ +object Main { + trait A {} + trait B {} + trait C {} + trait D {} + trait E {} + trait F {} + trait G {} + trait H {} + trait I {} + trait J {} + trait K {} + trait L {} + trait M {} + trait N {} + trait O {} + trait P {} + trait Q {} + trait R {} + trait S {} + trait T {} + trait U {} + trait V {} + trait W {} + trait X {} + trait Y {} + trait Z {} + + type AlphabeticServices = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + + type EnvOutA = B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutB = A & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutC = A & B & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutD = A & B & C & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutE = A & B & C & D & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutF = A & 
B & C & D & E & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutG = A & B & C & D & E & F & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutH = A & B & C & D & E & F & G & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutI = A & B & C & D & E & F & G & H & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutJ = A & B & C & D & E & F & G & H & I & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutK = A & B & C & D & E & F & G & H & I & J & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutL = A & B & C & D & E & F & G & H & I & J & K & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutM = A & B & C & D & E & F & G & H & I & J & K & L & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutN = A & B & C & D & E & F & G & H & I & J & K & L & M & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutO = A & B & C & D & E & F & G & H & I & J & K & L & M & N & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutP = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutQ = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & R & S & T & U & V & W & X & Y & Z + type EnvOutR = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & S & T & U & V & W & X & Y & Z + type EnvOutS = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & T & U & V & W & X & Y & Z + type EnvOutT = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & U & V & W & X & Y & Z + type EnvOutU = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & V & W & X & Y & Z + type EnvOutV = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & W & X & Y & Z + type EnvOutW = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & X & Y & Z + type EnvOutX = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & Y & Z + type EnvOutY = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Z + type EnvOutZ = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y + + trait Reader[-E, A] { + def map[B](f: A => B): Reader[E, B] = ??? + def flatMap[E2 <: E, B](f: A => Reader[E2, B]): Reader[E2, B] = ??? + } + + def e1: Reader[EnvOutA, Unit] = ??? + def e2: Reader[EnvOutB, Unit] = ??? + def e3: Reader[EnvOutC, Unit] = ??? + def e4: Reader[EnvOutD, Unit] = ??? + def e5: Reader[EnvOutE, Unit] = ??? + def e6: Reader[EnvOutF, Unit] = ??? + def e7: Reader[EnvOutG, Unit] = ??? + def e8: Reader[EnvOutH, Unit] = ??? + def e9: Reader[EnvOutI, Unit] = ??? + def e10: Reader[EnvOutJ, Unit] = ??? + def e11: Reader[EnvOutK, Unit] = ??? + def e12: Reader[EnvOutL, Unit] = ??? + def e13: Reader[EnvOutM, Unit] = ??? + def e14: Reader[EnvOutN, Unit] = ??? + def e15: Reader[EnvOutO, Unit] = ??? + def e16: Reader[EnvOutP, Unit] = ??? + def e17: Reader[EnvOutQ, Unit] = ??? + def e18: Reader[EnvOutR, Unit] = ??? + def e19: Reader[EnvOutS, Unit] = ??? + def e20: Reader[EnvOutT, Unit] = ??? + def e21: Reader[EnvOutU, Unit] = ??? + def e22: Reader[EnvOutV, Unit] = ??? + def e23: Reader[EnvOutW, Unit] = ??? + def e24: Reader[EnvOutX, Unit] = ??? + def e25: Reader[EnvOutY, Unit] = ??? + def e26: Reader[EnvOutZ, Unit] = ??? 
+ + def program: Reader[AlphabeticServices, Unit] = for { + //1 + _ <- e1 + _ <- e2 + _ <- e3 + _ <- e4 + _ <- e5 + _ <- e6 + _ <- e7 + _ <- e8 + _ <- e8 + _ <- e9 + _ <- e10 + _ <- e11 + _ <- e12 + _ <- e13 + _ <- e14 + _ <- e15 + _ <- e16 + _ <- e17 + _ <- e18 + _ <- e19 + _ <- e20 + _ <- e21 + _ <- e22 + _ <- e23 + _ <- e24 + _ <- e25 + _ <- e26 + // 2 + _ <- e1 + _ <- e2 + _ <- e3 + _ <- e4 + _ <- e5 + _ <- e6 + _ <- e7 + _ <- e8 + _ <- e8 + _ <- e9 + _ <- e10 + _ <- e11 + _ <- e12 + _ <- e13 + _ <- e14 + _ <- e15 + _ <- e16 + _ <- e17 + _ <- e18 + _ <- e19 + _ <- e20 + _ <- e21 + _ <- e22 + _ <- e23 + _ <- e24 + _ <- e25 + _ <- e26 + // TODO: optimize the subtype checking for large intersection types further + //3 + // _ <- e1 + // _ <- e2 + // _ <- e3 + // _ <- e4 + // _ <- e5 + // _ <- e6 + // _ <- e7 + // _ <- e8 + // _ <- e8 + // _ <- e9 + // _ <- e10 + // _ <- e11 + // _ <- e12 + // _ <- e13 + // _ <- e14 + // _ <- e15 + // _ <- e16 + // _ <- e17 + // _ <- e18 + // _ <- e19 + // _ <- e20 + // _ <- e21 + // _ <- e22 + // _ <- e23 + // _ <- e24 + // _ <- e25 + // _ <- e26 + // 4 + // _ <- e1 + // _ <- e2 + // _ <- e3 + // _ <- e4 + // _ <- e5 + // _ <- e6 + // _ <- e7 + // _ <- e8 + // _ <- e8 + // _ <- e9 + // _ <- e10 + // _ <- e11 + // _ <- e12 + // _ <- e13 + // _ <- e14 + // _ <- e15 + // _ <- e16 + // _ <- e17 + // _ <- e18 + // _ <- e19 + // _ <- e20 + // _ <- e21 + // _ <- e22 + // _ <- e23 + // _ <- e24 + // _ <- e25 + // _ <- e26 + } yield () +} \ No newline at end of file From ba82f73ab30c820751ebc87e7b10b87f6fda6b18 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Mon, 10 Jun 2024 18:41:15 +0200 Subject: [PATCH 157/827] Move test to deep subtype --- compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 4 ++++ tests/{pos => pos-deep-subtype}/i20516.scala | 0 2 files changed, 4 insertions(+) rename tests/{pos => pos-deep-subtype}/i20516.scala (100%) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index d248d2e00b0d..1cd737909822 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2535,6 +2535,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** If some (&-operand of) `tp` is a supertype of `sub` replace it with `NoType`. */ private def dropIfSuper(tp: Type, sub: Type): Type = + // We need to be careful to check branches of AndTypes and OrTypes in correct order, + // see discussion in issue #20516. tp match case tp @ AndType(tp1, tp2) => recombine(dropIfSuper(tp1, sub), dropIfSuper(tp2, sub), tp) @@ -2546,6 +2548,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** If some (|-operand of) `tp` is a subtype of `sup` replace it with `NoType`. */ private def dropIfSub(tp: Type, sup: Type, canConstrain: Boolean): Type = + // We need to be careful to check branches of AndTypes and OrTypes in correct order, + // see discussion in issue #20516. 
tp match case tp @ OrType(tp1, tp2) => recombine(dropIfSub(tp1, sup, canConstrain), dropIfSub(tp2, sup, canConstrain), tp) diff --git a/tests/pos/i20516.scala b/tests/pos-deep-subtype/i20516.scala similarity index 100% rename from tests/pos/i20516.scala rename to tests/pos-deep-subtype/i20516.scala From a782184be09f9a314f4eda68abdb3b9bbd69f212 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 10 Jun 2024 17:54:20 +0200 Subject: [PATCH 158/827] adjust test --- .../pc/tests/edit/AutoImplementAbstractMembersSuite.scala | 4 ---- 1 file changed, 4 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala index 1742913d0923..9911d3f6d627 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala @@ -1243,7 +1243,6 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: | |object A { | trait Base: - | def foo(x: Int): Int | def bar(x: String): String | | class <>(x: Int, y: String) extends Base: @@ -1256,13 +1255,10 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: | |object A { | trait Base: - | def foo(x: Int): Int | def bar(x: String): String | | class Concrete(x: Int, y: String) extends Base: | - | override def foo(x: Int): Int = ??? - | | override def bar(x: String): String = ??? | | From 8ab198d95a90b4a160e0ee18d322ef968111dbaf Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 10 Jun 2024 22:26:12 +0200 Subject: [PATCH 159/827] Filter values by class before member selection --- .../tools/dotc/transform/init/Semantic.scala | 43 +++++++++++-------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index caf3435608d2..85b2764ff0f3 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -548,9 +548,23 @@ object Semantic: value.promote(msg) value + def filterClass(sym: Symbol)(using Context): Value = + if !sym.isClass then value + else + val klass = sym.asClass + value match + case Cold => Cold + case Hot => Hot + case ref: Ref => if ref.klass.isSubClass(klass) then ref else Hot + case RefSet(values) => values.map(v => v.filterClass(klass)).join + case fun: Fun => + if klass.isOneOf(Flags.AbstractOrTrait) && klass.baseClasses.exists(defn.isFunctionClass) + then fun + else Hot + def select(field: Symbol, receiver: Type, needResolve: Boolean = true): Contextual[Value] = log("select " + field.show + ", this = " + value, printer, (_: Value).show) { if promoted.isCurrentObjectPromoted then Hot - else value match + else value.filterClass(field.owner) match case Hot => Hot @@ -588,13 +602,8 @@ object Semantic: reporter.report(error) Hot else - if ref.klass.isSubClass(receiver.widenSingleton.classSymbol) then - report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", field = " + field.show + Trace.show, Trace.position) - Hot - else - // This is possible due to incorrect type cast. 
- // See tests/init/pos/Type.scala - Hot + report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", field = " + field.show + Trace.show, Trace.position) + Hot case fun: Fun => report.warning("[Internal error] unexpected tree in selecting a function, fun = " + fun.expr.show + Trace.show, fun.expr) @@ -645,11 +654,16 @@ object Semantic: } (errors, allArgsHot) + def filterValue(value: Value): Value = + // methods of polyfun does not have denotation + if !meth.exists then value + else value.filterClass(meth.owner) + // fast track if the current object is already initialized if promoted.isCurrentObjectPromoted then Hot else if isAlwaysSafe(meth) then Hot else if meth eq defn.Any_asInstanceOf then value - else value match { + else filterValue(value) match { case Hot => if isSyntheticApply(meth) && meth.hasSource then val klass = meth.owner.companionClass.asClass @@ -724,13 +738,8 @@ object Semantic: else value.select(target, receiver, needResolve = false) else - if ref.klass.isSubClass(receiver.widenSingleton.classSymbol) then - report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", meth = " + meth.show + Trace.show, Trace.position) - Hot - else - // This is possible due to incorrect type cast. - // See tests/init/pos/Type.scala - Hot + report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", meth = " + meth.show + Trace.show, Trace.position) + Hot case Fun(body, thisV, klass) => // meth == NoSymbol for poly functions @@ -822,7 +831,7 @@ object Semantic: warm if promoted.isCurrentObjectPromoted then Hot - else value match { + else value.filterClass(klass.owner) match { case Hot => var allHot = true val args2 = args.map { arg => From f75f6dc211081715cc6abb54e83c602b5c6f1cbb Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 10 Jun 2024 22:27:12 +0200 Subject: [PATCH 160/827] Add test --- tests/init/warn/type-filter.scala | 15 +++++++++++++++ tests/init/warn/type-filter2.scala | 19 +++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 tests/init/warn/type-filter.scala create mode 100644 tests/init/warn/type-filter2.scala diff --git a/tests/init/warn/type-filter.scala b/tests/init/warn/type-filter.scala new file mode 100644 index 000000000000..1d25454992fe --- /dev/null +++ b/tests/init/warn/type-filter.scala @@ -0,0 +1,15 @@ +class A(o: O): + var a = 20 + +class B(o: O): + var b = 20 + +class O: + val o: A | B = new A(this) + if o.isInstanceOf[A] then + o.asInstanceOf[A].a += 1 + else + o.asInstanceOf[B].b += 1 // o.asInstanceOf[B] is treated as bottom + + // prevent early promotion + val x = 10 diff --git a/tests/init/warn/type-filter2.scala b/tests/init/warn/type-filter2.scala new file mode 100644 index 000000000000..65f5be8f4b53 --- /dev/null +++ b/tests/init/warn/type-filter2.scala @@ -0,0 +1,19 @@ +class A(c: C): + val f: Int = 10 + def m() = f + +class B(c: C): + val f: Int = g() // warn + def g(): Int = f + +class C(x: Int): + val a: A | B = if x > 0 then new A(this) else new B(this) + + def cast[T](a: Any): T = a.asInstanceOf[T] + + val c: A = a.asInstanceOf[A] // abstraction for c is {A, B} + val d = c.f // treat as c.asInstanceOf[owner of f].f + val e = c.m() // treat as c.asInstanceOf[owner of f].m() + val c2: B = a.asInstanceOf[B] + val g = c2.f // no error here + From f7ab68322bcdeccfab45035bf78b487b32c91ba7 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 11 Jun 2024 18:42:42 +0900 Subject: [PATCH 161/827] Bundle scala cli in 
scala command (#20351) fixes #20098 Proposed changes to zip/targz archive: - in the `/bin` directory store an extra launcher for Scala CLI (either JAR, or native per platform). - `/bin/scala[.bat]` is modified to invoke Scala CLI stored in `/bin` - new `/maven2` directory, which stores all the Jars and POM files necessary (in maven repo style) for scala-cli to invoke scala compiler offline (using the `-r` launcher option). - CHOICE: either replace jar files in `/lib` by aliases to the corresponding jar in `/maven2`, OR delete `/lib` and update references from scripts. (Looks like symlinks are not portable, so probably we should encode the classpath in a file, or adjust slightly how we build the toolchain) - add platform specific suffixes to artefacts: - e.g. `scala-3.5.0-x86_64-pc-linux.tar.gz` (for the artefact that bundles the x64 linux launcher) --------- Co-authored-by: Hamza REMMAL --- .github/workflows/ci.yaml | 14 +- .github/workflows/launchers.yml | 96 +++++ bin/common | 9 +- bin/common-platform | 63 +++ bin/scala | 35 +- bin/scalac | 2 +- bin/scaladoc | 2 +- bin/test/TestScripts.scala | 2 +- build.sbt | 5 + .../src/dotty/tools/MainGenericRunner.scala | 16 + .../tools/coursier/CoursierScalaTests.scala | 2 +- .../scripting/argfileClasspath.sc | 9 - ...hReport.sc => classpathReport_scalacli.sc} | 6 +- .../scripting/cpArgumentsFile.txt | 1 - compiler/test-resources/scripting/envtest.sc | 2 + .../scripting/envtest_scalacli.sc | 3 + compiler/test-resources/scripting/hashBang.sc | 2 +- .../test-resources/scripting/hashBang.scala | 4 +- .../test-resources/scripting/scriptName.scala | 2 +- .../test-resources/scripting/scriptPath.sc | 2 +- .../scripting/scriptPath_scalacli.sc | 13 + compiler/test-resources/scripting/showArgs.sc | 2 +- .../scripting/showArgs_scalacli.sc | 7 + .../test-resources/scripting/sqlDateError.sc | 2 +- .../scripting/sqlDateError_scalacli.sc | 6 + .../test-resources/scripting/touchFile.sc | 2 +- .../scripting/unglobClasspath.sc | 8 - .../scripting/unglobClasspath_scalacli.sc | 9 + .../test/dotty/tools/io/ClasspathTest.scala | 4 +- .../tools/scripting/BashExitCodeTests.scala | 22 +- .../tools/scripting/BashScriptsTests.scala | 50 ++- .../tools/scripting/ClasspathTests.scala | 32 +- .../tools/scripting/ExpressionTest.scala | 6 +- .../dotty/tools/scripting/ScriptTestEnv.scala | 76 +++- .../tools/scripting/ScriptingTests.scala | 6 +- compiler/test/dotty/tools/utils.scala | 13 +- dist/bin-native-overrides/cli-common-platform | 16 + .../cli-common-platform.bat | 18 + dist/bin/cli-common-platform | 3 + dist/bin/cli-common-platform.bat | 5 + dist/bin/common | 132 +----- dist/bin/common-shared | 139 ++++++ dist/bin/scala | 66 ++- dist/bin/scala.bat | 85 ++-- dist/bin/scala_legacy | 72 ++++ dist/bin/scalac.bat | 3 + dist/bin/scaladoc.bat | 4 + project/Build.scala | 88 +++- project/RepublishPlugin.scala | 400 ++++++++++++++++++ project/scripts/bootstrappedOnlyCmdTests | 37 +- project/scripts/buildScalaBinary | 12 + project/scripts/cmdTestsCommon.inc.sh | 17 + project/scripts/echoArgs.sc | 6 + project/scripts/native-integration/bashTests | 84 ++++ .../reportScalaVersion.scala | 4 + .../scripts/native-integration/winTests.bat | 19 + project/scripts/winCmdTests | 6 +- project/scripts/winCmdTests.bat | 6 +- .../src/main/scala/a/zz.scala | 6 + tests/run-with-compiler/i14541.scala | 1 + 60 files changed, 1423 insertions(+), 341 deletions(-) create mode 100644 .github/workflows/launchers.yml create mode 100755 bin/common-platform delete mode 100755 
compiler/test-resources/scripting/argfileClasspath.sc rename compiler/test-resources/scripting/{classpathReport.sc => classpathReport_scalacli.sc} (59%) delete mode 100755 compiler/test-resources/scripting/cpArgumentsFile.txt create mode 100755 compiler/test-resources/scripting/envtest_scalacli.sc create mode 100755 compiler/test-resources/scripting/scriptPath_scalacli.sc create mode 100755 compiler/test-resources/scripting/showArgs_scalacli.sc create mode 100755 compiler/test-resources/scripting/sqlDateError_scalacli.sc delete mode 100755 compiler/test-resources/scripting/unglobClasspath.sc create mode 100755 compiler/test-resources/scripting/unglobClasspath_scalacli.sc create mode 100644 dist/bin-native-overrides/cli-common-platform create mode 100644 dist/bin-native-overrides/cli-common-platform.bat create mode 100644 dist/bin/cli-common-platform create mode 100644 dist/bin/cli-common-platform.bat create mode 100644 dist/bin/common-shared create mode 100755 dist/bin/scala_legacy create mode 100644 project/RepublishPlugin.scala create mode 100755 project/scripts/buildScalaBinary create mode 100644 project/scripts/echoArgs.sc create mode 100755 project/scripts/native-integration/bashTests create mode 100644 project/scripts/native-integration/reportScalaVersion.scala create mode 100755 project/scripts/native-integration/winTests.bat create mode 100644 tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 12e90eb9d653..de1f74c641db 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -141,7 +141,8 @@ jobs: - name: Cmd Tests run: | - ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" + ./project/scripts/buildScalaBinary + ./project/scripts/sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/* ;scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests @@ -230,7 +231,7 @@ jobs: shell: cmd - name: build binary - run: sbt "dist/pack" & bash -version + run: sbt "dist-win-x86_64/pack" & bash -version shell: cmd - name: cygwin tests @@ -269,8 +270,12 @@ jobs: - name: Git Checkout uses: actions/checkout@v4 + - name: build binary + run: sbt "dist-win-x86_64/pack" + shell: cmd + - name: Test - run: sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test" + run: sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test" shell: cmd - name: Scala.js Test @@ -596,7 +601,8 @@ jobs: - name: Test run: | - ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*" + ./project/scripts/buildScalaBinary + ./project/scripts/sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests diff --git a/.github/workflows/launchers.yml b/.github/workflows/launchers.yml new file mode 100644 index 000000000000..818e3b72b06b --- /dev/null +++ b/.github/workflows/launchers.yml @@ -0,0 +1,96 @@ +name: Test CLI Launchers on all the platforms +on: + pull_request: + workflow_dispatch: + +jobs: + linux-x86_64: + name: Deploy and Test on Linux x64 architecture + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + 
java-version: '17' + distribution: 'temurin' + cache: 'sbt' + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-linux-x86_64" + + linux-aarch64: + name: Deploy and Test on Linux ARM64 architecture + runs-on: macos-latest + if: ${{ false }} + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-linux-aarch64" + + mac-x86_64: + name: Deploy and Test on Mac x64 architecture + runs-on: macos-13 + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-mac-x86_64" + + mac-aarch64: + name: Deploy and Test on Mac ARM64 architecture + runs-on: macos-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-mac-aarch64" + + win-x86_64: + name: Deploy and Test on Windows x64 architecture + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + - name: Build the launcher command + run: sbt "dist-win-x86_64/pack" + - name: Run the launcher command tests + run: './project/scripts/native-integration/winTests.bat' + shell: cmd diff --git a/bin/common b/bin/common index 7d3aa7148265..37b2ebd1ff93 100755 --- a/bin/common +++ b/bin/common @@ -9,15 +9,18 @@ target="$1" shift # Mutates $@ by deleting the first element ($1) +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + # Marker file used to obtain the date of latest call to sbt-back -version="$ROOT/dist/target/pack/VERSION" +version="$ROOT/$DIST_DIR/target/pack/VERSION" # Create the target if absent or if file changed in ROOT/compiler new_files="$(find "$ROOT/compiler" \( -iname "*.scala" -o -iname "*.java" \) -newer "$version" 2> /dev/null)" if [ ! -f "$version" ] || [ ! -z "$new_files" ]; then echo "Building Dotty..." - (cd $ROOT && sbt "dist/pack") + (cd $ROOT && sbt "$DIST_PROJECT/pack") fi -"$target" "$@" +"$ROOT/$DIST_DIR/target/pack/bin/$target" "$@" diff --git a/bin/common-platform b/bin/common-platform new file mode 100755 index 000000000000..648e0195e7e6 --- /dev/null +++ b/bin/common-platform @@ -0,0 +1,63 @@ +#!/usr/bin/env bash + +unset cygwin mingw msys darwin + +# COLUMNS is used together with command line option '-pageWidth'. 
+if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + ;; +esac + +unset DIST_PROJECT DIST_DIR + +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + DIST_PROJECT="dist-win-x86_64" + DIST_DIR="dist/win-x86_64" +else + # OS and arch logic taken from https://github.com/VirtusLab/scala-cli/blob/main/scala-cli.sh + unset arch ARCH_NORM + arch=$(uname -m) + if [[ "$arch" == "aarch64" ]] || [[ "$arch" == "x86_64" ]]; then + ARCH_NORM="$arch" + elif [[ "$arch" == "amd64" ]]; then + ARCH_NORM="x86_64" + elif [[ "$arch" == "arm64" ]]; then + ARCH_NORM="aarch64" + else + ARCH_NORM="unknown" + fi + + if [ "$(expr substr $(uname -s) 1 5 2>/dev/null)" == "Linux" ]; then + if [[ "$ARCH_NORM" == "unknown" ]]; then + echo >&2 "unknown Linux CPU architecture, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + else + DIST_PROJECT="dist-linux-$ARCH_NORM" + DIST_DIR="dist/linux-$ARCH_NORM" + fi + elif [ "$(uname)" == "Darwin" ]; then + if [[ "$ARCH_NORM" == "unknown" ]]; then + echo >&2 "unknown Darwin CPU architecture, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + else + DIST_PROJECT="dist-mac-$ARCH_NORM" + DIST_DIR="dist/mac-$ARCH_NORM" + fi + else + echo >&2 "unknown OS, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + fi +fi diff --git a/bin/scala b/bin/scala index 66ec9a5774c7..e87c4391806b 100755 --- a/bin/scala +++ b/bin/scala @@ -2,4 +2,37 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." -"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scala" "$@" +scala_args() { + + declare -a CLI_ARGS + declare -a SCRIPT_ARGS + declare DISABLE_BLOOP=1 + + while (( "$#" )); do + case "$1" in + "--") + shift + SCRIPT_ARGS+=("--") + SCRIPT_ARGS+=("$@") + break + ;; + "clean" | "version" | "--version" | "-version" | "help" | "--help" | "-help") + CLI_ARGS+=("$1") + DISABLE_BLOOP=0 # clean command should not add --offline --server=false + shift + ;; + *) + CLI_ARGS+=("$1") + shift + ;; + esac + done + + if [ $DISABLE_BLOOP -eq 1 ]; then + CLI_ARGS+=("--offline" "--server=false") + fi + + echo "--power ${CLI_ARGS[@]} ${SCRIPT_ARGS[@]}" +} + +"$ROOT/bin/common" "scala" $(scala_args "$@") diff --git a/bin/scalac b/bin/scalac index faeb48d92d87..d141b9a6c6bb 100755 --- a/bin/scalac +++ b/bin/scalac @@ -2,4 +2,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." -"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scalac" "$@" +"$ROOT/bin/common" "scalac" "$@" diff --git a/bin/scaladoc b/bin/scaladoc index 11a754c6579f..02decabb9ae3 100755 --- a/bin/scaladoc +++ b/bin/scaladoc @@ -2,4 +2,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." -"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scaladoc" "$@" +"$ROOT/bin/common" "scaladoc" "$@" diff --git a/bin/test/TestScripts.scala b/bin/test/TestScripts.scala index bada140580fc..4a2fd9a05c83 100644 --- a/bin/test/TestScripts.scala +++ b/bin/test/TestScripts.scala @@ -57,7 +57,7 @@ class TestScripts { s"bin/scalac script did not run properly. 
Output:$lineSep$dotcOutput" ) - val (retDotr, dotrOutput) = executeScript("./bin/scala HelloWorld") + val (retDotr, dotrOutput) = executeScript("./bin/scala -M HelloWorld") assert( retDotr == 0 && dotrOutput == "hello world\n", s"Running hello world exited with status: $retDotr and output: $dotrOutput" diff --git a/build.sbt b/build.sbt index 1bc74e5e23fb..f357044c91ca 100644 --- a/build.sbt +++ b/build.sbt @@ -28,6 +28,11 @@ val `scaladoc-js-main` = Build.`scaladoc-js-main` val `scaladoc-js-contributors` = Build.`scaladoc-js-contributors` val `scala3-bench-run` = Build.`scala3-bench-run` val dist = Build.dist +val `dist-mac-x86_64` = Build.`dist-mac-x86_64` +val `dist-mac-aarch64` = Build.`dist-mac-aarch64` +val `dist-win-x86_64` = Build.`dist-win-x86_64` +val `dist-linux-x86_64` = Build.`dist-linux-x86_64` +val `dist-linux-aarch64` = Build.`dist-linux-aarch64` val `community-build` = Build.`community-build` val `sbt-community-build` = Build.`sbt-community-build` val `scala3-presentation-compiler` = Build.`scala3-presentation-compiler` diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala index 1540cc86d7a6..bf477f019cba 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -266,6 +266,22 @@ object MainGenericRunner { run(settings.withExecuteMode(ExecuteMode.Run)) else run(settings.withExecuteMode(ExecuteMode.Repl)) + end run + + val ranByCoursierBootstrap = + sys.props.isDefinedAt("coursier.mainJar") + || sys.props.get("bootstrap.mainClass").contains("dotty.tools.MainGenericRunner") + + val silenced = sys.props.get("scala.use_legacy_launcher") == Some("true") + + if !silenced then + Console.err.println(s"[warning] MainGenericRunner class is deprecated since Scala 3.5.0, and Scala CLI features will not work.") + Console.err.println(s"[warning] Please be sure to update to the Scala CLI launcher to use the new features.") + if ranByCoursierBootstrap then + Console.err.println(s"[warning] It appears that your Coursier-based Scala installation is misconfigured.") + Console.err.println(s"[warning] To update to the new Scala CLI runner, please update (coursier, cs) commands first before re-installing scala.") + Console.err.println(s"[warning] Check the Scala 3.5.0 release notes to troubleshoot your installation.") + run(settings) match case Some(ex: (StringDriverException | ScriptingException)) => errorFn(ex.getMessage) diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index b8dfa833c437..115803d79dc1 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -166,7 +166,7 @@ object CoursierScalaTests: case Nil => args case _ => "--" +: args val newJOpts = jOpts.map(s => s"--java-opt ${s.stripPrefix("-J")}").mkString(" ") - execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true"""" +: newOptions)*)._2 + execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true" --property "scala.use_legacy_launcher=true"""" +: newOptions)*)._2 /** Get coursier script */ @BeforeClass def setup(): Unit = diff --git a/compiler/test-resources/scripting/argfileClasspath.sc 
b/compiler/test-resources/scripting/argfileClasspath.sc deleted file mode 100755 index c31371ba8934..000000000000 --- a/compiler/test-resources/scripting/argfileClasspath.sc +++ /dev/null @@ -1,9 +0,0 @@ -#!dist/target/pack/bin/scala @compiler/test-resources/scripting/cpArgumentsFile.txt - -import java.nio.file.Paths - -def main(args: Array[String]): Unit = - val cwd = Paths.get(".").toAbsolutePath.toString.replace('\\', '/').replaceAll("/$", "") - printf("cwd: %s\n", cwd) - printf("classpath: %s\n", sys.props("java.class.path")) - diff --git a/compiler/test-resources/scripting/classpathReport.sc b/compiler/test-resources/scripting/classpathReport_scalacli.sc similarity index 59% rename from compiler/test-resources/scripting/classpathReport.sc rename to compiler/test-resources/scripting/classpathReport_scalacli.sc index a9eacbbba1f7..0b2552b3ac84 100755 --- a/compiler/test-resources/scripting/classpathReport.sc +++ b/compiler/test-resources/scripting/classpathReport_scalacli.sc @@ -1,8 +1,8 @@ -#!bin/scala -classpath 'dist/target/pack/lib/*' - +#!/usr/bin/env bin/scala +// This file is a Scala CLI script. import java.nio.file.Paths -def main(args: Array[String]): Unit = +// def main(args: Array[String]): Unit = // MIGRATION: Scala CLI expects `*.sc` files to be straight-line code val cwd = Paths.get(".").toAbsolutePath.normalize.toString.norm printf("cwd: %s\n", cwd) printf("classpath: %s\n", sys.props("java.class.path").norm) diff --git a/compiler/test-resources/scripting/cpArgumentsFile.txt b/compiler/test-resources/scripting/cpArgumentsFile.txt deleted file mode 100755 index 73037eb7d9bc..000000000000 --- a/compiler/test-resources/scripting/cpArgumentsFile.txt +++ /dev/null @@ -1 +0,0 @@ --classpath dist/target/pack/lib/* diff --git a/compiler/test-resources/scripting/envtest.sc b/compiler/test-resources/scripting/envtest.sc index b2fde1b32339..724580449229 100755 --- a/compiler/test-resources/scripting/envtest.sc +++ b/compiler/test-resources/scripting/envtest.sc @@ -1,2 +1,4 @@ +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class + def main(args: Array[String]): Unit = println("Hello " + util.Properties.propOrNull("key")) diff --git a/compiler/test-resources/scripting/envtest_scalacli.sc b/compiler/test-resources/scripting/envtest_scalacli.sc new file mode 100755 index 000000000000..993ea1691640 --- /dev/null +++ b/compiler/test-resources/scripting/envtest_scalacli.sc @@ -0,0 +1,3 @@ +// This file is a Scala CLI script. 
+ +println("Hello " + util.Properties.propOrNull("key")) diff --git a/compiler/test-resources/scripting/hashBang.sc b/compiler/test-resources/scripting/hashBang.sc index d767bd1a1592..98884bc050c0 100755 --- a/compiler/test-resources/scripting/hashBang.sc +++ b/compiler/test-resources/scripting/hashBang.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +#!/usr/bin/env fake-program-to-test-hashbang-removal # comment STUFF=nada !# diff --git a/compiler/test-resources/scripting/hashBang.scala b/compiler/test-resources/scripting/hashBang.scala index 1aab26269f86..b7bf6b541854 100755 --- a/compiler/test-resources/scripting/hashBang.scala +++ b/compiler/test-resources/scripting/hashBang.scala @@ -1,8 +1,8 @@ -#!/usr/bin/env scala +#!/usr/bin/env fake-program-to-test-hashbang-removal # comment STUFF=nada !# - +// everything above this point should be ignored by the compiler def main(args: Array[String]): Unit = System.err.printf("mainClassFromStack: %s\n",mainFromStack) assert(mainFromStack.contains("hashBang"),s"fromStack[$mainFromStack]") diff --git a/compiler/test-resources/scripting/scriptName.scala b/compiler/test-resources/scripting/scriptName.scala index 21aec32fe0bb..7e479197d567 100755 --- a/compiler/test-resources/scripting/scriptName.scala +++ b/compiler/test-resources/scripting/scriptName.scala @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = val name = Option(sys.props("script.name")) match { diff --git a/compiler/test-resources/scripting/scriptPath.sc b/compiler/test-resources/scripting/scriptPath.sc index 46cd5e8a7385..e29e659d09d4 100755 --- a/compiler/test-resources/scripting/scriptPath.sc +++ b/compiler/test-resources/scripting/scriptPath.sc @@ -1,4 +1,4 @@ -#!dist/target/pack/bin/scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) } diff --git a/compiler/test-resources/scripting/scriptPath_scalacli.sc b/compiler/test-resources/scripting/scriptPath_scalacli.sc new file mode 100755 index 000000000000..c13888d0e4b1 --- /dev/null +++ b/compiler/test-resources/scripting/scriptPath_scalacli.sc @@ -0,0 +1,13 @@ +#!/usr/bin/env bin/scala + +// THIS FILE IS RAN WITH SCALA CLI, which wraps scripts exposing scriptPath and args variables + +args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) } + +if !scriptPath.endsWith("scriptPath_scalacli.sc") then + printf( s"incorrect script.path defined as [$scriptPath]") +else + printf("scriptPath: %s\n", scriptPath) // report the value + +extension(s: String) + def norm: String = s.replace('\\', '/') diff --git a/compiler/test-resources/scripting/showArgs.sc b/compiler/test-resources/scripting/showArgs.sc index 28f16a9022b3..69d552b9cf5f 100755 --- a/compiler/test-resources/scripting/showArgs.sc +++ b/compiler/test-resources/scripting/showArgs.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class // precise output format expected by BashScriptsTests.scala def main(args: Array[String]): Unit = diff --git a/compiler/test-resources/scripting/showArgs_scalacli.sc b/compiler/test-resources/scripting/showArgs_scalacli.sc new file mode 100755 index 000000000000..4591ac159345 --- /dev/null +++ b/compiler/test-resources/scripting/showArgs_scalacli.sc @@ -0,0 +1,7 
@@ +#!/usr/bin/env bin/scala + +// This file is a Scala CLI script. + +// precise output format expected by BashScriptsTests.scala +for (a,i) <- args.zipWithIndex do + printf(s"arg %2d:[%s]\n",i,a) diff --git a/compiler/test-resources/scripting/sqlDateError.sc b/compiler/test-resources/scripting/sqlDateError.sc index ceff98f40cad..e7c3a623c6c1 100755 --- a/compiler/test-resources/scripting/sqlDateError.sc +++ b/compiler/test-resources/scripting/sqlDateError.sc @@ -1,4 +1,4 @@ -#!bin/scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = { println(new java.sql.Date(100L)) diff --git a/compiler/test-resources/scripting/sqlDateError_scalacli.sc b/compiler/test-resources/scripting/sqlDateError_scalacli.sc new file mode 100755 index 000000000000..10b58821a6e4 --- /dev/null +++ b/compiler/test-resources/scripting/sqlDateError_scalacli.sc @@ -0,0 +1,6 @@ +#!/usr/bin/env bin/scala + +// This file is a Scala CLI script. + +println(new java.sql.Date(100L)) +System.err.println("SCALA_OPTS="+Option(System.getenv("SCALA_OPTS")).getOrElse("")) diff --git a/compiler/test-resources/scripting/touchFile.sc b/compiler/test-resources/scripting/touchFile.sc index 974f8a64d192..b46b3c99d786 100755 --- a/compiler/test-resources/scripting/touchFile.sc +++ b/compiler/test-resources/scripting/touchFile.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class import java.io.File diff --git a/compiler/test-resources/scripting/unglobClasspath.sc b/compiler/test-resources/scripting/unglobClasspath.sc deleted file mode 100755 index 796697cdedf2..000000000000 --- a/compiler/test-resources/scripting/unglobClasspath.sc +++ /dev/null @@ -1,8 +0,0 @@ -#!bin/scala -classpath 'dist/target/pack/lib/*' - -// won't compile unless the hashbang line sets classpath -import org.jline.terminal.Terminal - -def main(args: Array[String]) = - val cp = sys.props("java.class.path") - printf("unglobbed classpath: %s\n", cp) diff --git a/compiler/test-resources/scripting/unglobClasspath_scalacli.sc b/compiler/test-resources/scripting/unglobClasspath_scalacli.sc new file mode 100755 index 000000000000..ccc4cf667085 --- /dev/null +++ b/compiler/test-resources/scripting/unglobClasspath_scalacli.sc @@ -0,0 +1,9 @@ +// This file is a Scala CLI script. 
+ +import dotty.tools.tasty.TastyFormat +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// not visible on default classpath, "compiler/test/dotty/tools/scripting/ClasspathTests.scala" +// adds it to classpath via a compiler argument `-classpath 'org/scala-lang/tasty-core_3/$VERSION/*'` + +val cp = sys.props("java.class.path") +printf("unglobbed classpath: %s\n", cp) diff --git a/compiler/test/dotty/tools/io/ClasspathTest.scala b/compiler/test/dotty/tools/io/ClasspathTest.scala index a0fef65afdec..333f2b8062b0 100755 --- a/compiler/test/dotty/tools/io/ClasspathTest.scala +++ b/compiler/test/dotty/tools/io/ClasspathTest.scala @@ -15,6 +15,8 @@ class ClasspathTest { def pathsep = sys.props("path.separator") + def isWindows: Boolean = scala.util.Properties.isWin + // // Cope with wildcard classpath entries, exercised with -classpath // @@ -23,7 +25,7 @@ class ClasspathTest { @Test def testWildcards(): Unit = val outDir = Files.createTempDirectory("classpath-test") try - val compilerLib = "dist/target/pack/lib" + val compilerLib = s"${if isWindows then "dist-win-x86_64" else "dist"}/target/pack/lib" val libdir = Paths.get(compilerLib).toFile if libdir.exists then val libjarFiles = libdir.listFiles.toList.take(5) diff --git a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala index 9b65522fc549..857f5ef378e7 100644 --- a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala +++ b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala @@ -16,7 +16,11 @@ import ScriptTestEnv.* class BashExitCodeTests: private var myTmpDir: String | Null = null private lazy val tmpDir = { myTmpDir = Files.createTempDirectory("exit-code-tests").toFile.absPath; myTmpDir } - @After def cleanup(): Unit = if myTmpDir != null then io.Directory(myTmpDir).deleteRecursively() + @After def cleanup(): Unit = { + if myTmpDir != null then io.Directory(myTmpDir).deleteRecursively() + + cleanupScalaCLIDirs() + } /** Verify the exit code of running `cmd args*`. 
*/ def verifyExit(cmd: String, args: String*)(expectedExitCode: Int): Unit = @@ -29,7 +33,7 @@ class BashExitCodeTests: }, expectedExitCode, exitCode) // Helpers for running scala, scalac, and scalac without the output directory ("raw") - def scala(args: String*) = verifyExit(scalaPath, args*) + def scala(args: String*) = verifyExit(scalaPath, ("--power" +: args :+ "--offline" :+ "--server=false")*) def scalacRaw(args: String*) = verifyExit(scalacPath, args*) def scalac(args: String*) = scalacRaw(("-d" +: tmpDir +: args)*) @@ -38,12 +42,16 @@ class BashExitCodeTests: Files.write(Files.createTempFile(tmpDir.toPath, getClass.getSimpleName, suffix), body.getBytes(UTF_8)).absPath @Test def neg = scalac(f("@main def Test = prin"))(1) - @Test def run = scalac(f("@main def Test = ???"))(0) & scala("-classpath", tmpDir, "Test")(1) - @Test def pos = scalac(f("@main def Test = ()"))(0) & scala("-classpath", tmpDir, "Test")(0) + @Test def run = scalac(f("@main def Test = ???"))(0) & scala("-classpath", tmpDir, "-M", "Test")(1) + @Test def pos = scalac(f("@main def Test = ()"))(0) & scala("-classpath", tmpDir, "-M", "Test")(0) + + @Test def runNeg_script = scala(f("prin", ".sc"))(1) + @Test def runRun_script = scala(f("???", ".sc"))(1) + @Test def runPos_script = scala(f("()", ".sc"))(0) - @Test def runNeg = scala(f("@main def Test = prin", ".sc"))(1) - @Test def runRun = scala(f("@main def Test = ???", ".sc"))(1) - @Test def runPos = scala(f("@main def Test = ()", ".sc"))(0) + @Test def runNeg = scala(f("@main def Test = prin", ".scala"))(1) + @Test def runRun = scala(f("@main def Test = ???", ".scala"))(1) + @Test def runPos = scala(f("@main def Test = ()", ".scala"))(0) @Test def scNeg = scalac("-script", f("@main def Test = prin", ".sc"))(1) @Test def scRun = scalac("-script", f("@main def Test = ???", ".sc"))(1) diff --git a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala index f3f364754e20..6af863f0fccd 100644 --- a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala +++ b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala @@ -5,7 +5,7 @@ package scripting import scala.language.unsafeNulls import java.nio.file.Paths -import org.junit.{Test, AfterClass} +import org.junit.{Test, Ignore, AfterClass} import org.junit.Assert.assertEquals import org.junit.Assume.assumeFalse import org.junit.experimental.categories.Category @@ -25,11 +25,13 @@ object BashScriptsTests: def testFiles = scripts("/scripting") @AfterClass def cleanup: Unit = { + cleanupScalaCLIDirs() + val af = argsfile.toFile - if (af.exists) { + if af.exists then af.delete() - } } + printf("osname[%s]\n", osname) printf("uname[%s]\n", ostypeFull) printf("using JAVA_HOME=%s\n", envJavaHome) @@ -50,7 +52,9 @@ object BashScriptsTests: val testScriptArgs = Seq( "a", "b", "c", "-repl", "-run", "-script", "-debug" ) - val showArgsScript = testFiles.find(_.getName == "showArgs.sc").get.absPath + val Seq(showArgsScript, showArgsScalaCli) = Seq("showArgs.sc", "showArgs_scalacli.sc").map { name => + testFiles.find(_.getName == name).get.absPath + } def testFile(name: String): String = val file = testFiles.find(_.getName == name) match { @@ -64,13 +68,13 @@ object BashScriptsTests: } file - val Seq(envtestSc, envtestScala) = Seq("envtest.sc", "envtest.scala").map { testFile(_) } + val Seq(envtestNuSc, envtestScala) = Seq("envtest_scalacli.sc", "envtest.scala").map { testFile(_) } // create command line with given options, execute specified script, return stdout 
def callScript(tag: String, script: String, keyPre: String): String = val keyArg = s"$keyPre=$tag" printf("pass tag [%s] via [%s] to script [%s]\n", tag, keyArg, script) - val cmd: String = Seq("SCALA_OPTS= ", scalaPath, keyArg, script).mkString(" ") + val cmd: String = Seq("SCALA_OPTS= ", scalaPath, "run", keyArg, "--power", "--offline", "--server=false", script).mkString(" ") printf("cmd: [%s]\n", cmd) val (validTest, exitCode, stdout, stderr) = bashCommand(cmd) stderr.filter { !_.contains("Inappropriate ioctl") }.foreach { System.err.printf("stderr [%s]\n", _) } @@ -84,13 +88,15 @@ class BashScriptsTests: ////////////////////////// begin tests ////////////////////// /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.sc */ + @Ignore // SCALA CLI does not support `-J` to pass java properties, only things like -Xmx5g @Test def verifyScJProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World1" - val stdout = callScript(tag, envtestSc, s"-J-Dkey") + val stdout = callScript(tag, envtestNuSc, s"-J-Dkey") assertEquals( s"Hello $tag", stdout) /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.scala */ + @Ignore // SCALA CLI does not support `-J` to pass java properties, only things like -Xmx5g @Test def verifyScalaJProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World2" @@ -101,7 +107,7 @@ class BashScriptsTests: @Test def verifyScDProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World3" - val stdout = callScript(tag, envtestSc, s"-Dkey") + val stdout = callScript(tag, envtestNuSc, s"-Dkey") assertEquals(s"Hello $tag", stdout) /* verify that `dist/bin/scala` can set system properties via -D for envtest.scala */ @@ -114,7 +120,9 @@ class BashScriptsTests: /* verify that `dist/bin/scala` can set system properties via -D when executing compiled script via -jar envtest.jar */ @Test def saveAndRunWithDProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val commandline = Seq("SCALA_OPTS= ", scalaPath.relpath, "-save", envtestScala.relpath).mkString(" ") + val libOut = envtestScala.relpath.stripSuffix(".scala") + ".jar" + val commandline = Seq( + "SCALA_OPTS= ", scalaPath.relpath, "--power", "package", envtestScala.relpath, "-o", libOut, "--library", "--offline", "--server=false").mkString(" ") val (_, _, _, _) = bashCommand(commandline) // compile jar, discard output val testJar = testFile("envtest.jar") // jar is created by the previous bashCommand() if (testJar.isFile){ @@ -124,7 +132,8 @@ class BashScriptsTests: } val tag = "World5" - val commandline2 = Seq("SCALA_OPTS= ", scalaPath.relpath, s"-Dkey=$tag", testJar.relpath) + val commandline2 = Seq( + "SCALA_OPTS= ", scalaPath.relpath, "run", s"-Dkey=$tag", "-classpath", testJar.relpath, "--power", "--offline", "--server=false") printf("cmd[%s]\n", commandline2.mkString(" ")) val (validTest, exitCode, stdout, stderr) = bashCommand(commandline2.mkString(" ")) assertEquals(s"Hello $tag", stdout.mkString("/n")) @@ -148,7 +157,11 @@ class BashScriptsTests: /* verify `dist/bin/scala` non-interference with command line args following script name */ @Test def verifyScalaArgs = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val commandline = 
(Seq("SCALA_OPTS= ", scalaPath, showArgsScript) ++ testScriptArgs).mkString(" ") + val commandline = ( + Seq("SCALA_OPTS= ", scalaPath, showArgsScalaCli) + ++ Seq("--power", "--offline", "--server=false") + ++ ("--" +: testScriptArgs) + ).mkString(" ") val (validTest, exitCode, stdout, stderr) = bashCommand(commandline) if verifyValid(validTest) then var fail = false @@ -162,13 +175,13 @@ class BashScriptsTests: assert(stdout == expectedOutput) /* - * verify that scriptPath.sc sees a valid script.path property, - * and that it's value is the path to "scriptPath.sc". + * verify that scriptPath_scalacli.sc sees a valid script.path property, + * and that it's value is the path to "scriptPath_scalacli.sc". */ @Category(Array(classOf[BootstrappedOnlyTests])) @Test def verifyScriptPathProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptFile = testFiles.find(_.getName == "scriptPath.sc").get + val scriptFile = testFiles.find(_.getName == "scriptPath_scalacli.sc").get val expected = s"${scriptFile.getName}" printf("===> verify valid system property script.path is reported by script [%s]\n", scriptFile.getName) printf("calling scriptFile: %s\n", scriptFile) @@ -177,15 +190,15 @@ class BashScriptsTests: stdout.foreach { printf("stdout: [%s]\n", _) } stderr.foreach { printf("stderr: [%s]\n", _) } val valid = stdout.exists { _.endsWith(expected) } - if valid then printf("# valid script.path reported by [%s]\n", scriptFile.getName) - assert(valid, s"script ${scriptFile.absPath} did not report valid script.path value") + if valid then printf("# valid scriptPath reported by [%s]\n", scriptFile.getName) + assert(valid, s"script ${scriptFile.absPath} did not report valid scriptPath value") /* * verify SCALA_OPTS can specify an @argsfile when launching a scala script in `dist/bin/scala`. 
*/ @Test def verifyScalaOpts = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptFile = testFiles.find(_.getName == "classpathReport.sc").get + val scriptFile = testFiles.find(_.getName == "classpathReport_scalacli.sc").get printf("===> verify SCALA_OPTS='@argsfile' is properly handled by `dist/bin/scala`\n") val envPairs = List(("SCALA_OPTS", s"@$argsfile")) val (validTest, exitCode, stdout, stderr) = bashCommand(scriptFile.absPath, envPairs) @@ -208,7 +221,7 @@ class BashScriptsTests: */ @Test def sqlDateTest = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptBase = "sqlDateError" + val scriptBase = "sqlDateError_scalacli" val scriptFile = testFiles.find(_.getName == s"$scriptBase.sc").get val testJar = testFile(s"$scriptBase.jar") // jar should not be created when scriptFile runs val tj = Paths.get(testJar).toFile @@ -236,7 +249,6 @@ class BashScriptsTests: printf("===> verify -e is properly handled by `dist/bin/scala`\n") val expected = "9" val expression = s"println(3*3)" - val cmd = s"bin/scala -e $expression" val (validTest, exitCode, stdout, stderr) = bashCommand(s"""bin/scala -e '$expression'""") val result = stdout.filter(_.nonEmpty).mkString("") printf("stdout: %s\n", result) diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index 4fd1211698f6..d5f13065ccb3 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -11,8 +11,12 @@ import org.junit.{Test, Ignore, AfterClass} import vulpix.TestConfiguration import ScriptTestEnv.* -/** Test java command line generated by bin/scala and bin/scalac */ +object ClasspathTests: + @AfterClass def cleanup: Unit = { + cleanupScalaCLIDirs() + } +/** Test java command line generated by bin/scala and bin/scalac */ class ClasspathTests: /* * Test disabled (temporarily). @@ -24,7 +28,7 @@ class ClasspathTests: @Ignore @Test def hashbangClasspathVerifyTest = { // only interested in classpath test scripts - val testScriptName = "classpathReport.sc" + val testScriptName = "classpathReport_scalacli.sc" val testScript = scripts("/scripting").find { _.getName.matches(testScriptName) } match case None => sys.error(s"test script not found: ${testScriptName}") case Some(file) => file @@ -39,7 +43,7 @@ class ClasspathTests: cmd.foreach { printf("[%s]\n", _) } - // classpathReport.sc is expected to produce two lines: + // classpathReport_scalacli.sc is expected to produce two lines: // cwd: // classpath: @@ -51,10 +55,10 @@ class ClasspathTests: // convert scriptCp to a list of files val hashbangJars: List[File] = scriptCp.split(psep).map { _.toFile }.toList val hashbangClasspathJars = hashbangJars.map { _.name }.sorted.distinct // get jar basenames, remove duplicates - val packlibDir = s"$scriptCwd/$packLibDir" // classpathReport.sc specifies a wildcard classpath in this directory + val packlibDir: String = ??? /* ??? 
was s"$scriptCwd/$packLibDir" */ // classpathReport_scalacli.sc specifies a wildcard classpath in this directory val packlibJars: List[File] = listJars(packlibDir) // classpath entries expected to have been reported by the script - printf("%d jar files in dist/target/pack/lib\n", packlibJars.size) + printf(s"%d jar files in $packDir/lib\n", packlibJars.size) printf("%d test script jars in classpath\n", hashbangClasspathJars.size) val (diff: Set[File], msg: String) = if (packlibJars.size > hashbangClasspathJars.size) { @@ -63,7 +67,7 @@ class ClasspathTests: (hashbangJars.toSet -- packlibJars.toSet , "only in hashbang classpath") } // verify that the script hasbang classpath setting was effective at supplementing the classpath - // (a minimal subset of jars below dist/target/pack/lib are always be in the classpath) + // (a minimal subset of jars below dist*/target/pack/lib are always be in the classpath) val missingClasspathEntries = if hashbangClasspathJars.size != packlibJars.size then printf("packlib dir [%s]\n", packlibDir) printf("hashbangClasspathJars: %s\n", hashbangJars.map { _.relpath.norm }.mkString("\n ", "\n ", "")) @@ -78,17 +82,29 @@ class ClasspathTests: * verify classpath is unglobbed by MainGenericRunner. */ @Test def unglobClasspathVerifyTest = { - val testScriptName = "unglobClasspath.sc" + val testScriptName = "unglobClasspath_scalacli.sc" val testScript = scripts("/scripting").find { _.name.matches(testScriptName) } match case None => sys.error(s"test script not found: ${testScriptName}") case Some(file) => file val relpath = testScript.toPath.relpath.norm + val scalaCommand = scalaPath.relpath.norm printf("===> unglobClasspathVerifyTest for script [%s]\n", relpath) printf("bash is [%s]\n", bashExe) if packBinScalaExists then - val bashCmdline = s"set +x ; SCALA_OPTS= $relpath" + val sv = packScalaVersion + val tastyDirGlob = s"$packMavenDir/org/scala-lang/tasty-core_3/$sv/*" + // ^^^^^^^^^^^^^ + // the classpath is a glob pattern that should be unglobbed by scala command, + // otherwise the script could not compile because it references a class + // from tasty-core + + val bashCmdline = Seq( + "set +x ;", + "SCALA_OPTS=", + scalaCommand, "run", "--classpath", s"'$tastyDirGlob'", "--power", "--offline", "--server=false", relpath + ).mkString(" ") val cmd = Array(bashExe, "-c", bashCmdline) cmd.foreach { printf("[%s]\n", _) } diff --git a/compiler/test/dotty/tools/scripting/ExpressionTest.scala b/compiler/test/dotty/tools/scripting/ExpressionTest.scala index 6b5248e67f08..bc42860253b0 100755 --- a/compiler/test/dotty/tools/scripting/ExpressionTest.scala +++ b/compiler/test/dotty/tools/scripting/ExpressionTest.scala @@ -44,7 +44,7 @@ class ExpressionTest: assert(success) def getResult(expression: String): String = - val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression'") + val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression' --power --offline --server=false") printf("stdout: %s\n", stdout.mkString("|")) printf("stderr: %s\n", stderr.mkString("\n", "\n", "")) stdout.filter(_.nonEmpty).mkString("") @@ -55,6 +55,10 @@ class ExpressionTest: object ExpressionTest: + @AfterClass def cleanup(): Unit = { + cleanupScalaCLIDirs() + } + def main(args: Array[String]): Unit = val tests = new ExpressionTest println("\n=== verifyCommandLineExpression ===") diff --git a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala index 1db92d5415b4..dd1cc04bb58a 100644 --- 
a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala +++ b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala @@ -5,6 +5,7 @@ package scripting import scala.language.unsafeNulls import java.io.File +import java.util.Locale import java.nio.file.{Path, Paths, Files} import dotty.tools.dotc.config.Properties.* @@ -15,7 +16,7 @@ import scala.jdk.CollectionConverters.* /** * Common Code for supporting scripting tests. * To override the path to the bash executable, set TEST_BASH= - * To specify where `dist/target/pack/bin` resides, set TEST_CWD= + * To specify where `dist[*]/target/pack/bin` resides, set TEST_CWD= * Test scripts run in a bash env, so paths are converted to forward slash via .norm. */ object ScriptTestEnv { @@ -28,6 +29,44 @@ object ScriptTestEnv { def whichJava: String = whichExe("java") def whichBash: String = whichExe("bash") + def cleanupScalaCLIDirs(): Unit = { + val scriptingDir = io.Directory(scriptsDir("/scripting").getPath) + val dottyDir = io.Directory(workingDirectory) + + val residueDirs = Seq( + (scriptingDir / ".bsp"), + (scriptingDir / ".scala-build"), + (dottyDir / ".scala-build") + ) + + for f <- residueDirs do + f.deleteRecursively() + + val bspDir = dottyDir / ".bsp" + (bspDir / "scala.json").delete() + if bspDir.isEmpty then bspDir.delete() + } + + lazy val nativePackDir: Option[String] = { + def nativeDir(os: String, arch: String) = Some(s"dist/$os-$arch/target/pack") + def nativeOs(os: String) = archNorm match + case arch @ ("aarch64" | "x86_64") => nativeDir(os, arch) + case _ => None + + if winshell then nativeDir("win", "x86_64") // assume x86_64 for now + else if linux then nativeOs("linux") + else if mac then nativeOs("mac") + else None + } + + def jvmPackDir() = + println("warning: unknown OS architecture combination, defaulting to JVM launcher.") + "dist/target/pack" + + def packDir: String = nativePackDir.getOrElse(jvmPackDir()) + + def packBinDir: String = s"$packDir/bin" + lazy val workingDirectory: String = { val dirstr = if testCwd.nonEmpty then if verbose then printf("TEST_CWD set to [%s]\n", testCwd) @@ -36,7 +75,7 @@ object ScriptTestEnv { userDir // userDir, if TEST_CWD not set // issue warning if things don't look right - val test = Paths.get(s"$dirstr/dist/target/pack/bin").normalize + val test = Paths.get(s"$dirstr/$packBinDir").normalize if !test.isDirectory then printf("warning: not found below working directory: %s\n", test.norm) @@ -46,7 +85,7 @@ object ScriptTestEnv { def envPath: String = envOrElse("PATH", "") // remove duplicate entries in path - def supplementedPath: String = s"dist/target/pack/bin$psep$envJavaHome/bin$psep$envScalaHome/bin$psep$envPath".norm + def supplementedPath: String = s"$packBinDir$psep$envJavaHome/bin$psep$envScalaHome/bin$psep$envPath".norm def adjustedPathEntries: List[String] = supplementedPath.norm.split(psep).toList.distinct def adjustedPath: String = adjustedPathEntries.mkString(psep) def envPathEntries: List[String] = envPath.split(psep).toList.distinct @@ -55,11 +94,18 @@ object ScriptTestEnv { def unameExe = which("uname") def ostypeFull = if unameExe.nonEmpty then exec(unameExe).mkString else "" - def ostype = ostypeFull.toLowerCase.takeWhile{ cc => cc >= 'a' && cc <='z' || cc >= 'A' && cc <= 'Z' } + def ostype = ostypeFull.toLowerCase(Locale.ROOT).takeWhile{ cc => cc >= 'a' && cc <='z' || cc >= 'A' && cc <= 'Z' } + def archFull = if unameExe.nonEmpty then exec(unameExe, "-m").mkString else "" + def archNorm = archFull match + case "arm64" => "aarch64" + case "amd64" => "x86_64" + case 
id => id def cygwin = ostype == "cygwin" def mingw = ostype == "mingw" def msys = ostype == "msys" + def linux = ostype == "linux" + def mac = ostype == "darwin" def winshell: Boolean = cygwin || mingw || msys def which(str: String) = @@ -124,10 +170,22 @@ object ScriptTestEnv { } yield line - def packBinDir = "dist/target/pack/bin" - def packLibDir = "dist/target/pack/lib" + // def packLibDir = s"$packDir/lib" // replaced by packMavenDir + def packMavenDir = s"$packDir/maven2" + def packVersionFile = s"$packDir/VERSION" def packBinScalaExists: Boolean = Files.exists(Paths.get(s"$packBinDir/scala")) + def packScalaVersion: String = { + val versionFile = Paths.get(packVersionFile) + if Files.exists(versionFile) then + val lines = Files.readAllLines(versionFile).asScala + lines.find { _.startsWith("version:=") } match + case Some(line) => line.drop(9) + case None => sys.error(s"no version:= found in $packVersionFile") + else + sys.error(s"no $packVersionFile found") + } + def listJars(dir: String): List[File] = val packlibDir = Paths.get(dir).toFile if packlibDir.isDirectory then @@ -235,8 +293,8 @@ object ScriptTestEnv { lazy val cwd: Path = Paths.get(".").toAbsolutePath.normalize lazy val (scalacPath: String, scalaPath: String) = { - val scalac = s"$workingDirectory/dist/target/pack/bin/scalac".toPath.normalize - val scala = s"$workingDirectory/dist/target/pack/bin/scala".toPath.normalize + val scalac = s"$workingDirectory/$packBinDir/scalac".toPath.normalize + val scala = s"$workingDirectory/$packBinDir/scala".toPath.normalize (scalac.norm, scala.norm) } @@ -244,7 +302,7 @@ object ScriptTestEnv { // use optional TEST_BASH if defined, otherwise, bash must be in PATH // envScalaHome is: - // dist/target/pack, if present + // dist[*]/target/pack, if present // else, SCALA_HOME if defined // else, not defined lazy val envScalaHome = diff --git a/compiler/test/dotty/tools/scripting/ScriptingTests.scala b/compiler/test/dotty/tools/scripting/ScriptingTests.scala index 5ec417090504..8d07cb137917 100644 --- a/compiler/test/dotty/tools/scripting/ScriptingTests.scala +++ b/compiler/test/dotty/tools/scripting/ScriptingTests.scala @@ -17,7 +17,11 @@ import org.junit.Assume.assumeFalse /** Runs all tests contained in `compiler/test-resources/scripting/` */ class ScriptingTests: // classpath tests managed by scripting.ClasspathTests.scala - def testFiles = scripts("/scripting").filter { ! 
_.getName.toLowerCase.contains("classpath") }
+  def testFiles = scripts("/scripting").filter { sc =>
+    val name = sc.getName.toLowerCase
+    !name.contains("classpath")
+    && !name.contains("_scalacli")
+  }
 
   /*
    * Call .scala scripts without -save option, verify no jar created
diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala
index a8c480088e08..d17edbaa855e 100644
--- a/compiler/test/dotty/tools/utils.scala
+++ b/compiler/test/dotty/tools/utils.scala
@@ -20,14 +20,19 @@ import dotc.config.CommandLineParser
 object Dummy
 
 def scripts(path: String): Array[File] = {
-  val dir = new File(Dummy.getClass.getResource(path).getPath)
-  assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir")
+  val dir = scriptsDir(path)
   dir.listFiles.filter { f =>
     val path = if f.isDirectory then f.getPath + "/" else f.getPath
     Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains)
   }
 }
 
+def scriptsDir(path: String): File = {
+  val dir = new File(Dummy.getClass.getResource(path).getPath)
+  assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir")
+  dir
+}
+
 extension (f: File) def absPath =
   f.getAbsolutePath.replace('\\', '/')
 
@@ -101,10 +106,10 @@ def toolArgsParse(lines: List[String], filename: Option[String]): List[(String,S
     case toolArg(name, args) => List((name, args))
     case _ => Nil
   } ++
-  lines.flatMap {
+  lines.flatMap {
     case directiveOptionsArg(args) => List(("scalac", args))
     case directiveJavacOptions(args) => List(("javac", args))
-    case _ => Nil
+    case _ => Nil
   }
 
 import org.junit.Test
diff --git a/dist/bin-native-overrides/cli-common-platform b/dist/bin-native-overrides/cli-common-platform
new file mode 100644
index 000000000000..1a11c770f91a
--- /dev/null
+++ b/dist/bin-native-overrides/cli-common-platform
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then
+  SCALA_CLI_VERSION=""
+  # iterate through lines in EXTRA_PROPERTIES
+  while IFS= read -r line; do
+    # if line starts with "cli_version:=" then extract the Scala CLI version
+    if [[ "$line" == cli_version:=* ]]; then
+      SCALA_CLI_VERSION="${line#cli_version:=}"
+      break
+    fi
+  done < "$PROG_HOME/EXTRA_PROPERTIES"
+  SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"" "--cli-version \"$SCALA_CLI_VERSION\"")
+else
+  SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"")
+fi
diff --git a/dist/bin-native-overrides/cli-common-platform.bat b/dist/bin-native-overrides/cli-common-platform.bat
new file mode 100644
index 000000000000..e0cfa40692b5
--- /dev/null
+++ b/dist/bin-native-overrides/cli-common-platform.bat
@@ -0,0 +1,18 @@
+@echo off
+
+setlocal enabledelayedexpansion
+
+set "_SCALA_CLI_VERSION="
+@rem read for cli_version:=_SCALA_CLI_VERSION in EXTRA_PROPERTIES file
+FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\EXTRA_PROPERTIES") DO (
+  SET "line=%%G"
+  IF "!line:~0,13!"=="cli_version:=" (
+    SET "_SCALA_CLI_VERSION=!line:~13!" 
+ GOTO :foundCliVersion + ) +) + +:foundCliVersion +endlocal & set "SCALA_CLI_VERSION=%_SCALA_CLI_VERSION%" + +set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" "--cli-version" "%SCALA_CLI_VERSION%" \ No newline at end of file diff --git a/dist/bin/cli-common-platform b/dist/bin/cli-common-platform new file mode 100644 index 000000000000..a5906e882bb4 --- /dev/null +++ b/dist/bin/cli-common-platform @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +SCALA_CLI_CMD_BASH=("\"$JAVACMD\"" "-jar \"$PROG_HOME/bin/scala-cli.jar\"") diff --git a/dist/bin/cli-common-platform.bat b/dist/bin/cli-common-platform.bat new file mode 100644 index 000000000000..99103266c1d9 --- /dev/null +++ b/dist/bin/cli-common-platform.bat @@ -0,0 +1,5 @@ +@echo off + +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" +set SCALA_CLI_CMD_WIN="%_JAVACMD%" "-jar" "%_PROG_HOME%\bin\scala-cli.jar" \ No newline at end of file diff --git a/dist/bin/common b/dist/bin/common index e3e4253938fb..4a0152fbc4cb 100755 --- a/dist/bin/common +++ b/dist/bin/common @@ -1,132 +1,6 @@ #!/usr/bin/env bash -#/*-------------------------------------------------------------------------- -# * Credits: This script is based on the script generated by sbt-pack. -# *--------------------------------------------------------------------------*/ - -# save terminal settings -saved_stty=$(stty -g 2>/dev/null) -# clear on error so we don't later try to restore them -if [[ ! $? ]]; then - saved_stty="" -fi - -# restore stty settings (echo in particular) -function restoreSttySettings() { - stty $saved_stty - saved_stty="" -} - -scala_exit_status=127 -function onExit() { - [[ "$saved_stty" != "" ]] && restoreSttySettings - exit $scala_exit_status -} - -# to reenable echo if we are interrupted before completing. -trap onExit INT TERM EXIT - -unset cygwin mingw msys darwin conemu - -# COLUMNS is used together with command line option '-pageWidth'. -if command -v tput >/dev/null 2>&1; then - export COLUMNS="$(tput -Tdumb cols)" -fi - -case "`uname`" in - CYGWIN*) cygwin=true - ;; - MINGW*) mingw=true - ;; - MSYS*) msys=true - ;; - Darwin*) darwin=true - if [ -z "$JAVA_VERSION" ] ; then - JAVA_VERSION="CurrentJDK" - else - echo "Using Java version: $JAVA_VERSION" 1>&2 - fi - if [ -z "$JAVA_HOME" ] ; then - JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home - fi - JAVACMD="`which java`" - ;; -esac - -unset CYGPATHCMD -if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then - # ConEmu terminal is incompatible with jna-5.*.jar - [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true - # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. - CYGPATHCMD=`which cygpath 2>/dev/null` - case "$TERM" in - rxvt* | xterm* | cygwin*) - stty -icanon min 1 -echo - JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" - ;; - esac -fi - -# Resolve JAVA_HOME from javac command path -if [ -z "$JAVA_HOME" ]; then - javaExecutable="`which javac`" - if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then - # readlink(1) is not available as standard on Solaris 10. - readLink=`which readlink` - if [ ! 
`expr "$readLink" : '\([^ ]*\)'` = "no" ]; then - javaExecutable="`readlink -f \"$javaExecutable\"`" - javaHome="`dirname \"$javaExecutable\"`" - javaHome=`expr "$javaHome" : '\(.*\)/bin'` - JAVA_HOME="$javaHome" - export JAVA_HOME - fi - fi -fi - -if [ -z "${JAVACMD-}" ] ; then - if [ -n "${JAVA_HOME-}" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - else - JAVACMD="`which java`" - fi -fi - -if [ ! -x "$JAVACMD" ] ; then - echo "Error: JAVA_HOME is not defined correctly." - echo " We cannot execute $JAVACMD" - exit 1 -fi - -if [ -z "$JAVA_HOME" ] ; then - echo "Warning: JAVA_HOME environment variable is not set." -fi - -CLASSPATH_SUFFIX="" -# Path separator used in EXTRA_CLASSPATH -PSEP=":" - -# translate paths to Windows-mixed format before running java -if [ -n "${CYGPATHCMD-}" ]; then - [ -n "${PROG_HOME-}" ] && - PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` - [ -n "$JAVA_HOME" ] && - JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` - CLASSPATH_SUFFIX=";" - PSEP=";" -elif [[ ${mingw-} || ${msys-} ]]; then - # For Mingw / Msys, convert paths from UNIX format before anything is touched - [ -n "$PROG_HOME" ] && - PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" - [ -n "$JAVA_HOME" ] && - JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" - CLASSPATH_SUFFIX=";" - PSEP=";" -fi +source "$PROG_HOME/bin/common-shared" #/*-------------------------------------------------- # * The code below is for Dotty @@ -205,16 +79,12 @@ ReplMain=dotty.tools.repl.Main ScriptingMain=dotty.tools.scripting.Main declare -a java_args -declare -a scala_args declare -a residual_args declare -a script_args addJava () { java_args+=("'$1'") } -addScala () { - scala_args+=("'$1'") -} addResidual () { residual_args+=("'$1'") } diff --git a/dist/bin/common-shared b/dist/bin/common-shared new file mode 100644 index 000000000000..8c85993a5283 --- /dev/null +++ b/dist/bin/common-shared @@ -0,0 +1,139 @@ +#!/usr/bin/env bash + +# Common options for both scala-cli and java based launchers + +#/*-------------------------------------------------------------------------- +# * Credits: This script is based on the script generated by sbt-pack. +# *--------------------------------------------------------------------------*/ + +# save terminal settings +saved_stty=$(stty -g 2>/dev/null) +# clear on error so we don't later try to restore them +if [[ ! $? ]]; then + saved_stty="" +fi + +# restore stty settings (echo in particular) +function restoreSttySettings() { + stty $saved_stty + saved_stty="" +} + +scala_exit_status=127 +function onExit() { + [[ "$saved_stty" != "" ]] && restoreSttySettings + exit $scala_exit_status +} + +# to reenable echo if we are interrupted before completing. +trap onExit INT TERM EXIT + +unset cygwin mingw msys darwin conemu + +# COLUMNS is used together with command line option '-pageWidth'. 
+if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + if [ -z "$JAVA_VERSION" ] ; then + JAVA_VERSION="CurrentJDK" + else + echo "Using Java version: $JAVA_VERSION" 1>&2 + fi + if [ -z "$JAVA_HOME" ] ; then + JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home + fi + JAVACMD="`which java`" + ;; +esac + +unset CYGPATHCMD +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + # ConEmu terminal is incompatible with jna-5.*.jar + [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true + # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. + CYGPATHCMD=`which cygpath 2>/dev/null` + case "$TERM" in + rxvt* | xterm* | cygwin*) + stty -icanon min 1 -echo + JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" + ;; + esac +fi + +# Resolve JAVA_HOME from javac command path +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + javaExecutable="`readlink -f \"$javaExecutable\"`" + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "${JAVACMD-}" ] ; then + if [ -n "${JAVA_HOME-}" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." + echo " We cannot execute $JAVACMD" + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." 
+fi + +CLASSPATH_SUFFIX="" +# Path separator used in EXTRA_CLASSPATH +PSEP=":" +PROG_HOME_URI="file://$PROG_HOME" + +# translate paths to Windows-mixed format before running java +if [ -n "${CYGPATHCMD-}" ]; then + [ -n "${PROG_HOME-}" ] && + PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` + PROG_HOME_URI="file:///$PROG_HOME" # Add extra root dir prefix + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` + CLASSPATH_SUFFIX=";" + PSEP=";" +elif [[ ${mingw-} || ${msys-} ]]; then + # For Mingw / Msys, convert paths from UNIX format before anything is touched + [ -n "$PROG_HOME" ] && + PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" + PROG_HOME_URI="file:///$PROG_HOME" # Add extra root dir prefix + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" + CLASSPATH_SUFFIX=";" + PSEP=";" +fi + +declare -a scala_args +addScala () { + scala_args+=("'$1'") +} diff --git a/dist/bin/scala b/dist/bin/scala index bd69d40c2b97..c6c6f8807a64 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -26,47 +26,43 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" +source "$PROG_HOME/bin/common-shared" +source "$PROG_HOME/bin/cli-common-platform" +SCALA_VERSION="" +# iterate through lines in VERSION_SRC +while IFS= read -r line; do + # if line starts with "version:=" then extract the version + if [[ "$line" == version:=* ]]; then + SCALA_VERSION="${line#version:=}" + break + fi +done < "$PROG_HOME/VERSION" + +# assert that SCALA_VERSION is not empty +if [ -z "$SCALA_VERSION" ]; then + echo "Failed to extract Scala version from $PROG_HOME/VERSION" + exit 1 +fi + +MVN_REPOSITORY="$PROG_HOME_URI/maven2" + +# escape all script arguments while [[ $# -gt 0 ]]; do - case "$1" in - -D*) - # pass to scala as well: otherwise we lose it sometimes when we - # need it, e.g. communicating with a server compiler. - # respect user-supplied -Dscala.usejavacp - addJava "$1" - addScala "$1" - shift - ;; - -J*) - # as with -D, pass to scala even though it will almost - # never be used. - addJava "${1:2}" - addScala "$1" - shift - ;; - -classpath*) - if [ "$1" != "${1##* }" ]; then - # -classpath and its value have been supplied in a single string e.g. "-classpath 'lib/*'" - A=$1 ; shift # consume $1 before adding its substrings back - set -- $A "$@" # split $1 on whitespace and put it back - else - addScala "$1" - shift - fi - ;; - *) - addScala "$1" - shift - ;; - esac + addScala "$1" + shift done # exec here would prevent onExit from being called, leaving terminal in unusable state -compilerJavaClasspathArgs [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 -eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" -scala_exit_status=$? +# SCALA_CLI_CMD_BASH is an array, set by cli-common-platform +eval "${SCALA_CLI_CMD_BASH[@]}" \ + "--prog-name scala" \ + "--cli-default-scala-version \"$SCALA_VERSION\"" \ + "-r \"$MVN_REPOSITORY\"" \ + "${scala_args[@]}" + +scala_exit_status=$? 
onExit diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index ca908fd340be..d473facbbb1c 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -14,14 +14,16 @@ for %%f in ("%~dp0.") do ( call "%_PROG_HOME%\bin\common.bat" if not %_EXITCODE%==0 goto end -call :args %* - @rem ######################################################################### @rem ## Main -call :compilerJavaClasspathArgs +call :setScalaOpts + +call "%_PROG_HOME%\bin\cli-common-platform.bat" + +@rem SCALA_CLI_CMD_WIN is an array, set in cli-common-platform.bat +call %SCALA_CLI_CMD_WIN% "--prog-name" "scala" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* -call "%_JAVACMD%" %_JAVA_ARGS% "-Dscala.home=%_PROG_HOME%" -classpath "%_JVM_CP_ARGS%" dotty.tools.MainGenericRunner -classpath "%_JVM_CP_ARGS%" %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) goto end @@ -29,62 +31,31 @@ goto end @rem ######################################################################### @rem ## Subroutines -:args -set _JAVA_ARGS= -set _SCALA_ARGS= -set _SCALA_CPATH= - -:args_loop -if "%~1"=="" goto args_done -set "__ARG=%~1" -if "%__ARG:~0,2%"=="-D" ( - @rem pass to scala as well: otherwise we lose it sometimes when we - @rem need it, e.g. communicating with a server compiler. - set _JAVA_ARGS=!_JAVA_ARGS! "%__ARG%" - set _SCALA_ARGS=!_SCALA_ARGS! "%__ARG%" -) else if "%__ARG:~0,2%"=="-J" ( - @rem as with -D, pass to scala even though it will almost - @rem never be used. - set _JAVA_ARGS=!_JAVA_ARGS! %__ARG:~2% - set _SCALA_ARGS=!_SCALA_ARGS! "%__ARG%" -) else if "%__ARG%"=="-classpath" ( - set "_SCALA_CPATH=%~2" - shift -) else if "%__ARG%"=="-cp" ( - set "_SCALA_CPATH=%~2" - shift -) else ( - set _SCALA_ARGS=!_SCALA_ARGS! "%__ARG%" +:setScalaOpts + +@REM sfind the index of the first colon in _PROG_HOME +set "index=0" +set "char=!_PROG_HOME:~%index%,1!" +:findColon +if not "%char%"==":" ( + set /a "index+=1" + set "char=!_PROG_HOME:~%index%,1!" + goto :findColon ) -shift -goto args_loop -:args_done -goto :eof -@rem output parameter: _JVM_CP_ARGS -:compilerJavaClasspathArgs -set __TOOLCHAIN= -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_ASM%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SBT_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_COMP%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_TASTY_CORE%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_STAGING%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_TASTY_INSPECTOR%%_PSEP%" - -@rem # jline -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_READER%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL_JNA%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JNA%%_PSEP%" - -if defined _SCALA_CPATH ( - set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" -) else ( - set "_JVM_CP_ARGS=%__TOOLCHAIN%" +set "_SCALA_VERSION=" +set "MVN_REPOSITORY=file:///%_PROG_HOME:\=/%/maven2" + +@rem read for version:=_SCALA_VERSION in VERSION_FILE +FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\VERSION") DO ( + SET "line=%%G" + IF "!line:~0,9!"=="version:=" ( + SET "_SCALA_VERSION=!line:~9!" 
+ GOTO :foundVersion + ) ) + +:foundVersion goto :eof @rem ######################################################################### diff --git a/dist/bin/scala_legacy b/dist/bin/scala_legacy new file mode 100755 index 000000000000..bd69d40c2b97 --- /dev/null +++ b/dist/bin/scala_legacy @@ -0,0 +1,72 @@ +#!/usr/bin/env bash + +# Try to autodetect real location of the script +if [ -z "${PROG_HOME-}" ] ; then + ## resolve links - $0 may be a link to PROG_HOME + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + PROG_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + PROG_HOME=`cd "$PROG_HOME" && pwd` + + cd "$saveddir" +fi + +source "$PROG_HOME/bin/common" + +while [[ $# -gt 0 ]]; do + case "$1" in + -D*) + # pass to scala as well: otherwise we lose it sometimes when we + # need it, e.g. communicating with a server compiler. + # respect user-supplied -Dscala.usejavacp + addJava "$1" + addScala "$1" + shift + ;; + -J*) + # as with -D, pass to scala even though it will almost + # never be used. + addJava "${1:2}" + addScala "$1" + shift + ;; + -classpath*) + if [ "$1" != "${1##* }" ]; then + # -classpath and its value have been supplied in a single string e.g. "-classpath 'lib/*'" + A=$1 ; shift # consume $1 before adding its substrings back + set -- $A "$@" # split $1 on whitespace and put it back + else + addScala "$1" + shift + fi + ;; + *) + addScala "$1" + shift + ;; + esac +done + +# exec here would prevent onExit from being called, leaving terminal in unusable state +compilerJavaClasspathArgs +[ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 +eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" +scala_exit_status=$? + + +onExit diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index cb1a76471f70..c8cd0babe60b 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -21,6 +21,9 @@ call :args %* call :compilerJavaClasspathArgs +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" + call "%_JAVACMD%" %_JAVA_ARGS% -classpath "%_JVM_CP_ARGS%" "-Dscala.usejavacp=true" "-Dscala.home=%_PROG_HOME%" dotty.tools.MainGenericCompiler %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1 diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index bcc0d71788a3..c30a4689244c 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -26,6 +26,10 @@ call :classpathArgs if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS% ) else ( set _JAVA_OPTS=%_DEFAULT_JAVA_OPTS% ) + +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
+ call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% ^ -classpath "%_CLASS_PATH%" ^ -Dscala.usejavacp=true ^ diff --git a/project/Build.scala b/project/Build.scala index 921fbcd80b90..c1a8800421a6 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -12,6 +12,8 @@ import pl.project13.scala.sbt.JmhPlugin import pl.project13.scala.sbt.JmhPlugin.JmhKeys.Jmh import sbt.Package.ManifestAttributes import sbt.PublishBinPlugin.autoImport._ +import dotty.tools.sbtplugin.RepublishPlugin +import dotty.tools.sbtplugin.RepublishPlugin.autoImport._ import sbt.plugins.SbtPlugin import sbt.ScriptedPlugin.autoImport._ import xerial.sbt.pack.PackPlugin @@ -26,6 +28,7 @@ import sbttastymima.TastyMiMaPlugin import sbttastymima.TastyMiMaPlugin.autoImport._ import scala.util.Properties.isJavaAtLeast + import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ import org.scalajs.linker.interface.{ModuleInitializer, StandardConfig} @@ -114,6 +117,13 @@ object Build { */ val mimaPreviousLTSDottyVersion = "3.3.0" + /** Version of Scala CLI to download */ + val scalaCliLauncherVersion = "1.3.2" + /** Version of Scala CLI to download (on Windows - last known validated version) */ + val scalaCliLauncherVersionWindows = "1.3.2" + /** Version of Coursier to download for initializing the local maven repo of Scala command */ + val coursierJarVersion = "2.1.10" + object CompatMode { final val BinaryCompatible = 0 final val SourceAndBinaryCompatible = 1 @@ -2114,13 +2124,72 @@ object Build { packMain := Map(), publishArtifact := false, packGenerateMakefile := false, - packExpandedClasspath := true, - packArchiveName := "scala3-" + dottyVersion + republishRepo := target.value / "republish", + packResourceDir += (republishRepo.value / "bin" -> "bin"), + packResourceDir += (republishRepo.value / "maven2" -> "maven2"), + Compile / pack := (Compile / pack).dependsOn(republish).value, ) lazy val dist = project.asDist(Bootstrapped) .settings( - packResourceDir += (baseDirectory.value / "bin" -> "bin"), + packArchiveName := "scala3-" + dottyVersion, + republishBinDir := baseDirectory.value / "bin", + republishCoursier += + ("coursier.jar" -> s"https://github.com/coursier/coursier/releases/download/v$coursierJarVersion/coursier.jar"), + republishLaunchers += + ("scala-cli.jar" -> s"https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli.jar"), + ) + + lazy val `dist-mac-x86_64` = project.in(file("dist/mac-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-apple-darwin", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-apple-darwin.gz") + ) + + lazy val `dist-mac-aarch64` = project.in(file("dist/mac-aarch64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-aarch64-apple-darwin", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-apple-darwin.gz") + ) + + lazy val `dist-win-x86_64` = 
project.in(file("dist/win-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-win32", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishExtraProps += ("cli_version" -> scalaCliLauncherVersion), + mappings += (republishRepo.value / "etc" / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), + republishLaunchers += + ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersionWindows/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") + ) + + lazy val `dist-linux-x86_64` = project.in(file("dist/linux-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-linux", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-pc-linux.gz") + ) + + lazy val `dist-linux-aarch64` = project.in(file("dist/linux-aarch64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-aarch64-pc-linux", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-pc-linux.gz") ) private def customMimaReportBinaryIssues(issueFilterLocation: String) = mimaReportBinaryIssues := { @@ -2254,10 +2323,19 @@ object Build { settings(scala3PresentationCompilerBuildInfo) def asDist(implicit mode: Mode): Project = project. - enablePlugins(PackPlugin). + enablePlugins(PackPlugin, RepublishPlugin). withCommonSettings. - dependsOn(`scala3-interfaces`, dottyCompiler, dottyLibrary, tastyCore, `scala3-staging`, `scala3-tasty-inspector`, scaladoc). settings(commonDistSettings). + dependsOn( + `scala3-interfaces`, + dottyCompiler, + dottyLibrary, + tastyCore, + `scala3-staging`, + `scala3-tasty-inspector`, + scaladoc, + `scala3-sbt-bridge`, // for scala-cli + ). 
bootstrappedSettings( target := baseDirectory.value / "target" // override setting in commonBootstrappedSettings ) diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala new file mode 100644 index 000000000000..537c82d62cce --- /dev/null +++ b/project/RepublishPlugin.scala @@ -0,0 +1,400 @@ +package dotty.tools.sbtplugin + +import sbt._ +import xerial.sbt.pack.PackPlugin +import sbt.Keys._ +import sbt.AutoPlugin +import sbt.PublishBinPlugin +import sbt.PublishBinPlugin.autoImport._ +import sbt.io.Using +import sbt.util.CacheImplicits._ + +import scala.collection.mutable +import java.nio.file.Files + +import java.nio.file.attribute.PosixFilePermission +import java.nio.file.{Files, Path} + +import scala.jdk.CollectionConverters._ + +/** This local plugin provides ways of publishing a project classpath and library dependencies to + * .a local repository */ +object RepublishPlugin extends AutoPlugin { + + /** copied from github.com/coursier/coursier */ + private object FileUtil { + + def tryMakeExecutable(path: Path): Boolean = + try { + val perms = Files.getPosixFilePermissions(path).asScala.toSet + + var newPerms = perms + if (perms(PosixFilePermission.OWNER_READ)) + newPerms += PosixFilePermission.OWNER_EXECUTE + if (perms(PosixFilePermission.GROUP_READ)) + newPerms += PosixFilePermission.GROUP_EXECUTE + if (perms(PosixFilePermission.OTHERS_READ)) + newPerms += PosixFilePermission.OTHERS_EXECUTE + + if (newPerms != perms) + Files.setPosixFilePermissions( + path, + newPerms.asJava + ) + + true + } + catch { + case _: UnsupportedOperationException => + false + } + + } + + override def trigger = allRequirements + override def requires = super.requires && PublishBinPlugin && PackPlugin + + object autoImport { + val republishProjectRefs = taskKey[Seq[ProjectRef]]("fetch the classpath deps from the project.") + val republishLocalResolved = taskKey[Seq[ResolvedArtifacts]]("resolve local artifacts for distribution.") + val republishAllResolved = taskKey[Seq[ResolvedArtifacts]]("Resolve the dependencies for the distribution") + val republishClasspath = taskKey[Set[File]]("cache the dependencies for the distribution") + val republishFetchLaunchers = taskKey[Set[File]]("cache the launcher deps for the distribution") + val republishFetchCoursier = taskKey[File]("cache the coursier.jar for resolving the local maven repo.") + val republishPrepareBin = taskKey[File]("prepare the bin directory, including launchers and scripts.") + val republishWriteExtraProps = taskKey[Option[File]]("write extra properties for the launchers.") + val republishBinDir = settingKey[File]("where to find static files for the bin dir.") + val republishCoursierDir = settingKey[File]("where to download the coursier launcher jar.") + val republishBinOverrides = settingKey[Seq[File]]("files to override those in bin-dir.") + val republish = taskKey[File]("cache the dependencies and download launchers for the distribution") + val republishRepo = settingKey[File]("the location to store the republished artifacts.") + val republishLaunchers = settingKey[Seq[(String, String)]]("launchers to download. Sequence of (name, URL).") + val republishCoursier = settingKey[Seq[(String, String)]]("coursier launcher to download. 
Sequence of (name, URL).") + val republishExtraProps = settingKey[Seq[(String, String)]]("extra properties for launchers.") + } + + import autoImport._ + + case class SimpleModuleId(org: String, name: String, revision: String) { + override def toString = s"$org:$name:$revision" + } + case class ResolvedArtifacts(id: SimpleModuleId, jar: Option[File], pom: Option[File]) + + private def republishResolvedArtifacts(resolved: Seq[ResolvedArtifacts], mavenRepo: File, logOpt: Option[Logger]): Set[File] = { + IO.createDirectory(mavenRepo) + resolved.map { ra => + for (log <- logOpt) + log.info(s"[republish] publishing ${ra.id} to $mavenRepo...") + val jarOpt = ra.jar + val pomOpt = ra.pom + + assert(jarOpt.nonEmpty || pomOpt.nonEmpty, s"Neither jar nor pom found for ${ra.id}") + + val pathElems = ra.id.org.split('.').toVector :+ ra.id.name :+ ra.id.revision + val artifactDir = pathElems.foldLeft(mavenRepo)(_ / _) + IO.createDirectory(artifactDir) + for (pom <- pomOpt) IO.copyFile(pom, artifactDir / pom.getName) + for (jar <- jarOpt) IO.copyFile(jar, artifactDir / jar.getName) + artifactDir + }.toSet + } + + private def coursierCmd(jar: File, cache: File, args: Seq[String]): Unit = { + val jar0 = jar.getAbsolutePath.toString + val javaHome = sys.props.get("java.home").getOrElse { + throw new MessageOnlyException("java.home property not set") + } + val javaCmd = { + val cmd = if (scala.util.Properties.isWin) "java.exe" else "java" + (file(javaHome) / "bin" / cmd).getAbsolutePath + } + val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString) + val cmdLine = Seq(javaCmd, "-jar", jar0) ++ args + // invoke cmdLine with env + val p = new ProcessBuilder(cmdLine: _*).inheritIO() + p.environment().putAll(env.asJava) + val proc = p.start() + proc.waitFor() + if (proc.exitValue() != 0) + throw new MessageOnlyException(s"Error running coursier.jar with args ${args.mkString(" ")}") + } + + private def coursierFetch(coursierJar: File, log: Logger, cacheDir: File, localRepo: File, libs: Seq[String]): Unit = { + val localRepoArg = { + val path = localRepo.getAbsolutePath + if (scala.util.Properties.isWin) { + val path0 = path.replace('\\', '/') + s"file:///$path0" // extra root slash for Windows paths + } + else + s"file://$path" + } + + IO.createDirectory(cacheDir) + for (lib <- libs) { + log.info(s"[republish] Fetching $lib with coursier.jar...") + coursierCmd(coursierJar, cacheDir, + Seq( + "fetch", + "--repository", localRepoArg, + lib + ) + ) + } + } + + /**Resolve the transitive library dependencies of `libs` to `csrCacheDir`. 
+ */ + private def resolveLibraryDeps( + coursierJar: File, + log: Logger, + csrCacheDir: File, + localRepo: File, + resolvedLocal: Seq[ResolvedArtifacts]): Seq[ResolvedArtifacts] = { + + // publish the local artifacts to the local repo, so coursier can resolve them + republishResolvedArtifacts(resolvedLocal, localRepo, logOpt = None) + + coursierFetch(coursierJar, log, csrCacheDir, localRepo, resolvedLocal.map(_.id.toString)) + + val maven2Root = java.nio.file.Files.walk(csrCacheDir.toPath) + .filter(_.getFileName.toString == "maven2") + .findFirst() + .orElseThrow(() => new MessageOnlyException(s"Could not find maven2 directory in $csrCacheDir")) + + def pathToArtifact(p: Path): ResolvedArtifacts = { + // relative path from maven2Root + val lastAsString = p.getFileName.toString + val relP = maven2Root.relativize(p) + val parts = relP.iterator().asScala.map(_.toString).toVector + val (orgParts :+ name :+ rev :+ _) = parts + val id = SimpleModuleId(orgParts.mkString("."), name, rev) + if (lastAsString.endsWith(".jar")) { + ResolvedArtifacts(id, Some(p.toFile), None) + } else { + ResolvedArtifacts(id, None, Some(p.toFile)) + } + } + + java.nio.file.Files.walk(maven2Root) + .filter(p => { + val lastAsString = p.getFileName.toString + lastAsString.endsWith(".pom") || lastAsString.endsWith(".jar") + }) + .map[ResolvedArtifacts](pathToArtifact(_)) + .iterator() + .asScala + .toSeq + } + + private def fetchFilesTask( + libexecT: Def.Initialize[Task[File]], + srcs: SettingKey[Seq[(String, String)]], + strict: Boolean) = Def.task[Set[File]] { + val s = streams.value + val log = s.log + val repoDir = republishRepo.value + val launcherVersions = srcs.value + val libexec = libexecT.value + + val dlCache = s.cacheDirectory / "republish-launchers" + + val store = s.cacheStoreFactory / "versions" + + def work(name: String, dest: File, launcher: String): File = { + val (launcherURL, workFile, prefix, subPart) = { + if (launcher.startsWith("gz+")) { + IO.createDirectory(dlCache) + val launcherURL = url(launcher.stripPrefix("gz+")) + (launcherURL, dlCache / s"$name.gz", "gz", "") + } else if (launcher.startsWith("zip+")) { + IO.createDirectory(dlCache) + val (urlPart, subPath) = launcher.split("!/") match { + case Array(urlPart, subPath) => (urlPart, subPath) + case _ => + throw new MessageOnlyException(s"[republish] Invalid zip+ URL, expected ! 
to mark subpath: $launcher") + } + val launcherURL = url(urlPart.stripPrefix("zip+")) + (launcherURL, dlCache / s"$name.zip", "zip", subPath) + } else { + IO.createDirectory(libexec) + (url(launcher), dest, "", "") + } + } + IO.delete(workFile) + Using.urlInputStream(launcherURL) { in => + log.info(s"[republish] Downloading $launcherURL to $workFile...") + IO.transfer(in, workFile) + log.info(s"[republish] Downloaded $launcherURL to $workFile...") + } + if (prefix == "gz") { + IO.delete(dest) + Using.fileInputStream(workFile) { in => + Using.gzipInputStream(in) { gzIn => + IO.transfer(gzIn, dest) + } + } + log.info(s"[republish] uncompressed gz file $workFile to $dest...") + IO.delete(workFile) + } else if (prefix == "zip") { + IO.delete(dest) + val files = IO.unzip(workFile, dlCache, new ExactFilter(subPart)) + val extracted = files.headOption.getOrElse(throw new MessageOnlyException(s"[republish] No files extracted from $workFile matching $subPart")) + log.info(s"[republish] unzipped $workFile to $extracted...") + IO.move(extracted, dest) + log.info(s"[republish] moved $extracted to $dest...") + IO.delete(workFile) + } + FileUtil.tryMakeExecutable(dest.toPath) + dest + } + + val allLaunchers = { + if (strict && launcherVersions.isEmpty) + throw new MessageOnlyException(s"[republish] No launchers to fetch, check the build configuration for ${srcs.key.label}.") + + for ((name, launcher) <- launcherVersions) yield { + val dest = libexec / name + + val id = name.replaceAll("[^a-zA-Z0-9]", "_") + + val fetchAction = Tracked.inputChanged[String, File](store.make(id)) { (inChanged, launcher) => + if (inChanged || !Files.exists(dest.toPath)) { + work(name, dest, launcher) + } else { + log.info(s"[republish] Using cached $name launcher ($launcher).") + dest + } + } + + fetchAction(launcher) + } + } + allLaunchers.toSet + } + + override val projectSettings: Seq[Def.Setting[_]] = Def.settings( + republishCoursierDir := republishRepo.value / "coursier", + republishLaunchers := Seq.empty, + republishCoursier := Seq.empty, + republishBinOverrides := Seq.empty, + republishExtraProps := Seq.empty, + republishLocalResolved / republishProjectRefs := { + val proj = thisProjectRef.value + val deps = buildDependencies.value + + deps.classpathRefs(proj) + }, + republishLocalResolved := Def.taskDyn { + val deps = (republishLocalResolved / republishProjectRefs).value + val publishAllLocalBin = deps.map({ d => ((d / publishLocalBin / packagedArtifacts)) }).join + val resolveId = deps.map({ d => ((d / projectID)) }).join + Def.task { + val published = publishAllLocalBin.value + val ids = resolveId.value + + ids.zip(published).map({ case (id, as) => + val simpleId = { + val name0 = id.crossVersion match { + case cv: CrossVersion.Binary => + // projectID does not add binary suffix + (s"${id.name}_${cv.prefix}${cv.suffix}3") + .ensuring(!id.name.endsWith("_3") && id.revision.startsWith("3.")) + case _ => id.name + } + SimpleModuleId(id.organization, name0, id.revision) + } + var jarOrNull: File = null + var pomOrNull: File = null + as.foreach({ case (a, f) => + if (a.`type` == "jar") { + jarOrNull = f + } else if (a.`type` == "pom") { + pomOrNull = f + } + }) + assert(jarOrNull != null, s"Could not find jar for ${id}") + assert(pomOrNull != null, s"Could not find pom for ${id}") + ResolvedArtifacts(simpleId, Some(jarOrNull), Some(pomOrNull)) + }) + } + }.value, + republishAllResolved := { + val resolvedLocal = republishLocalResolved.value + val coursierJar = republishFetchCoursier.value + val report = 
(thisProjectRef / updateFull).value + val s = streams.value + val lm = (republishAllResolved / dependencyResolution).value + val cacheDir = republishRepo.value + + val log = s.log + val csrCacheDir = s.cacheDirectory / "csr-cache" + val localRepo = s.cacheDirectory / "localRepo" / "maven2" + + // resolve the transitive dependencies of the local artifacts + val resolvedLibs = resolveLibraryDeps(coursierJar, log, csrCacheDir, localRepo, resolvedLocal) + + // the combination of local artifacts and resolved transitive dependencies + val merged = + (resolvedLocal ++ resolvedLibs).groupBy(_.id).values.map(_.reduce { (ra1, ra2) => + val jar = ra1.jar.orElse(ra2.jar) + val pom = ra1.pom.orElse(ra2.pom) + ResolvedArtifacts(ra1.id, jar, pom) + }) + + merged.toSeq + }, + republishClasspath := { + val s = streams.value + val resolved = republishAllResolved.value + val cacheDir = republishRepo.value + republishResolvedArtifacts(resolved, cacheDir / "maven2", logOpt = Some(s.log)) + }, + republishFetchLaunchers := { + fetchFilesTask(republishPrepareBin, republishLaunchers, strict = true).value + }, + republishFetchCoursier := { + fetchFilesTask(republishCoursierDir.toTask, republishCoursier, strict = true).value.head + }, + republishPrepareBin := { + val baseDir = baseDirectory.value + val srcBin = republishBinDir.value + val overrides = republishBinOverrides.value + val repoDir = republishRepo.value + + val targetBin = repoDir / "bin" + IO.copyDirectory(srcBin, targetBin) + overrides.foreach { dir => + IO.copyDirectory(dir, targetBin, overwrite = true) + } + targetBin + }, + republishWriteExtraProps := { + val s = streams.value + val log = s.log + val extraProps = republishExtraProps.value + if (extraProps.isEmpty) { + log.info("[republish] No extra properties to write.") + None + } + else { + val repoDir = republishRepo.value + val propsFile = repoDir / "etc" / "EXTRA_PROPERTIES" + log.info(s"[republish] Writing extra properties to $propsFile...") + Using.fileWriter()(propsFile) { writer => + extraProps.foreach { case (k, v) => + writer.write(s"$k:=$v\n") + } + } + Some(propsFile) + } + }, + republish := { + val cacheDir = republishRepo.value + val artifacts = republishClasspath.value + val launchers = republishFetchLaunchers.value + val extraProps = republishWriteExtraProps.value + cacheDir + } + ) +} diff --git a/project/scripts/bootstrappedOnlyCmdTests b/project/scripts/bootstrappedOnlyCmdTests index 4e18e3a1d4a4..11c35a7028cc 100755 --- a/project/scripts/bootstrappedOnlyCmdTests +++ b/project/scripts/bootstrappedOnlyCmdTests @@ -14,32 +14,38 @@ echo "testing scala.quoted.Expr.run from sbt scala" "$SBT" ";scala3-compiler-bootstrapped/scalac -with-compiler tests/run-staging/quote-run.scala; scala3-compiler-bootstrapped/scala -with-compiler Test" > "$tmp" grep -qe "val a: scala.Int = 3" "$tmp" - # setup for `scalac`/`scala` script tests -"$SBT" dist/pack +"$SBT" "$DIST_PROJECT/pack" + +echo "capturing scala version from $DIST_DIR/target/pack/VERSION" +IFS=':=' read -ra versionProps < "$ROOT/$DIST_DIR/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps +[ ${#versionProps[@]} -eq 3 ] && \ + [ ${versionProps[0]} = "version" ] && \ + [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/$DIST_DIR/target/pack/VERSION" +scala_version=${versionProps[2]} # check that `scalac` compiles and `scala` runs it echo "testing ./bin/scalac and ./bin/scala" clear_out "$OUT" ./bin/scalac "$SOURCE" -d "$OUT" -./bin/scala -classpath "$OUT" "$MAIN" > "$tmp" +./bin/scala 
-classpath "$OUT" -M "$MAIN" > "$tmp" test "$EXPECTED_OUTPUT" = "$(cat "$tmp")" # Test scaladoc based on compiled classes ./bin/scaladoc -project Staging -d "$OUT1" "$OUT" clear_out "$OUT1" -# check that `scalac` and `scala` works for staging +# check that `scalac` and `scala` works for staging. clear_out "$OUT" ./bin/scalac tests/run-staging/i4044f.scala -d "$OUT" -./bin/scala -with-compiler -classpath "$OUT" Test > "$tmp" +./bin/scala -with-compiler -classpath "$OUT" -M Test > "$tmp" # check that `scalac -from-tasty` compiles and `scala` runs it echo "testing ./bin/scalac -from-tasty and scala -classpath" clear_out "$OUT1" ./bin/scalac "$SOURCE" -d "$OUT" ./bin/scalac -from-tasty -d "$OUT1" "$OUT/$TASTY" -./bin/scala -classpath "$OUT1" "$MAIN" > "$tmp" +./bin/scala -classpath "$OUT1" -M "$MAIN" > "$tmp" test "$EXPECTED_OUTPUT" = "$(cat "$tmp")" # check that `sbt scalac -decompile` runs @@ -71,7 +77,7 @@ echo "testing sbt scalac with suspension" clear_out "$OUT" "$SBT" "scala3-compiler-bootstrapped/scalac -d $OUT tests/pos-macros/macros-in-same-project-1/Bar.scala tests/pos-macros/macros-in-same-project-1/Foo.scala" > "$tmp" -# echo ":quit" | ./dist/target/pack/bin/scala # not supported by CI +# echo ":quit" | ./$DIST_DIR/target/pack/bin/scala # not supported by CI echo "testing ./bin/scaladoc" clear_out "$OUT1" @@ -91,10 +97,17 @@ clear_out "$OUT" grep -qe "Usage: scalac " "$tmp" ./bin/scala -help > "$tmp" 2>&1 -grep -qe "Usage: scala " "$tmp" +grep -qe "See 'scala --help' to read about a specific subcommand." "$tmp" ./bin/scala -d hello.jar tests/run/hello.scala ls hello.jar +clear_cli_dotfiles tests/run + +# check that `scala` runs scripts with args +echo "testing ./bin/scala with arguments" +./bin/scala run project/scripts/echoArgs.sc -- abc true 123 > "$tmp" +test "$EXPECTED_OUTPUT_ARGS" = "$(cat "$tmp")" +clear_cli_dotfiles project/scripts echo "testing i12973" clear_out "$OUT" @@ -102,14 +115,6 @@ clear_out "$OUT" echo "Bug12973().check" | TERM=dumb ./bin/scala -cp "$OUT/out.jar" > "$tmp" 2>&1 grep -qe "Bug12973 is fixed" "$tmp" -echo "capturing scala version from dist/target/pack/VERSION" -cwd=$(pwd) -IFS=':=' read -ra versionProps < "$cwd/dist/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps -[ ${#versionProps[@]} -eq 3 ] && \ - [ ${versionProps[0]} = "version" ] && \ - [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $cwd/dist/target/pack/VERSION" -scala_version=${versionProps[2]} - echo "testing -sourcepath with incremental compile: inlining changed inline def into a def" # Here we will test that a changed inline method symbol loaded from the sourcepath (-sourcepath compiler option) # will have its `defTree` correctly set when its method body is required for inlining. diff --git a/project/scripts/buildScalaBinary b/project/scripts/buildScalaBinary new file mode 100755 index 000000000000..7fc5275e5d8d --- /dev/null +++ b/project/scripts/buildScalaBinary @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/../.." +SBT="$ROOT/project/scripts/sbt" # if run on CI + +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + +# build the scala/scalac/scaladoc binary, where scala is native for the current platform. 
+"$SBT" "$DIST_PROJECT/pack" diff --git a/project/scripts/cmdTestsCommon.inc.sh b/project/scripts/cmdTestsCommon.inc.sh index a37ab757c057..bccb4aa56ac1 100644 --- a/project/scripts/cmdTestsCommon.inc.sh +++ b/project/scripts/cmdTestsCommon.inc.sh @@ -9,11 +9,15 @@ SOURCE="tests/pos/HelloWorld.scala" MAIN="HelloWorld" TASTY="HelloWorld.tasty" EXPECTED_OUTPUT="hello world" +EXPECTED_OUTPUT_ARGS="[0:abc],[1:true],[2:123]" OUT=$(mktemp -d) OUT1=$(mktemp -d) tmp=$(mktemp) +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + die () { echo >&2 "$@" exit 1 @@ -24,3 +28,16 @@ clear_out() local out="$1" rm -rf "$out"/* } + +clear_cli_dotfiles() +{ + local out="$1" + rm -rf "$out"/.bsp + rm -rf "$out"/.scala-build + + rm -f "$ROOT"/.bsp/scala.json + if [ -z "$(ls -A "$ROOT"/.bsp)" ]; then + rm -rf "$ROOT"/.bsp + fi + rm -rf "$ROOT"/.scala-build +} diff --git a/project/scripts/echoArgs.sc b/project/scripts/echoArgs.sc new file mode 100644 index 000000000000..cb9acbb6ad2e --- /dev/null +++ b/project/scripts/echoArgs.sc @@ -0,0 +1,6 @@ +// This is a Scala CLI script + +val formatted = + (for (arg, i) <- args.zipWithIndex yield + s"[$i:$arg]").mkString(",") +println(formatted) diff --git a/project/scripts/native-integration/bashTests b/project/scripts/native-integration/bashTests new file mode 100755 index 000000000000..5fb77355238c --- /dev/null +++ b/project/scripts/native-integration/bashTests @@ -0,0 +1,84 @@ +#!/usr/bin/env bash + +set -eux + +#/*---------------*\ +# * SETUP VARS *# +# *---------------*/ + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/../../.." + +SBT="$ROOT/project/scripts/sbt" # if run on CI +# SBT="sbt" # if run locally + +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + +die () { + echo >&2 "$@" + exit 1 +} + +PROG_HOME="$DIST_DIR/target/pack" + +SOURCE="$ROOT/tests/pos/HelloWorld.scala" +SOURCE_VERSION="$ROOT/project/scripts/native-integration/reportScalaVersion.scala" + +clear_cli_dotfiles() +{ + local out="$1" + rm -rf "$out"/.bsp + rm -rf "$out"/.scala-build + + rm -f "$ROOT"/.bsp/scala.json + if [ -z "$(ls -A "$ROOT"/.bsp)" ]; then + rm -rf "$ROOT"/.bsp + fi + rm -rf "$ROOT"/.scala-build +} + +#/*---------------*\ +# * INITIALIZE *# +# *---------------*/ + +# build the distribution +"$SBT" "$DIST_PROJECT/pack" + +SCALA_VERSION="" +# iterate through lines in VERSION_SRC +while IFS= read -r line; do + # if line starts with "version:=" then extract the version + if [[ "$line" == version:=* ]]; then + SCALA_VERSION="${line#version:=}" + break + fi +done < "$PROG_HOME/VERSION" + +if [ -z "$SCALA_VERSION" ]; then + die "Could not find scala version in $PROG_HOME/VERSION" +fi + +#/*-------------------*\ +# * TESTING BEGINS *# +# *-------------------*/ + +echo "assert native launcher matches expected version" +if [ -z "$LAUNCHER_EXPECTED_PROJECT" ]; then + die "LAUNCHER_EXPECTED_PROJECT is not set in the environment" +fi +test "$LAUNCHER_EXPECTED_PROJECT" = "$DIST_PROJECT" + +echo "testing version output (default)" +std_output=$("$PROG_HOME/bin/scala" version --scala-version) +test "$SCALA_VERSION" = "$std_output" + +echo "testing run command" +std_output=$("$PROG_HOME/bin/scala" run "$SOURCE" --power --offline --server=false) +test "hello world" = "$std_output" +clear_cli_dotfiles "$ROOT/tests/pos" + +echo "testing run command (-with-compiler)" +std_output=$("$PROG_HOME/bin/scala" run "$SOURCE_VERSION" -with-compiler --power --offline --server=false) +test "$SCALA_VERSION" = "$std_output" 
+clear_cli_dotfiles "$ROOT/project/scripts/native-integration" + diff --git a/project/scripts/native-integration/reportScalaVersion.scala b/project/scripts/native-integration/reportScalaVersion.scala new file mode 100644 index 000000000000..dc6e93708a48 --- /dev/null +++ b/project/scripts/native-integration/reportScalaVersion.scala @@ -0,0 +1,4 @@ +// To be ran by Scala CLI (requires -with-compiler command line option) + +@main def reportScalaVersion: Unit = + println(dotty.tools.dotc.config.Properties.versionNumberString) diff --git a/project/scripts/native-integration/winTests.bat b/project/scripts/native-integration/winTests.bat new file mode 100755 index 000000000000..a85b2c8c2531 --- /dev/null +++ b/project/scripts/native-integration/winTests.bat @@ -0,0 +1,19 @@ +@echo off +setlocal + +@rem paths are relative to the root project directory +set "_PREFIX=dist\win-x86_64\target\pack" +set "_SOURCE=tests\pos\HelloWorld.scala" +set "_OUT_DIR=out" + +@rem if-tests mimic the non-existing bash instruction 'set -e'. +call "%_PREFIX%\bin\scalac.bat" "@project\scripts\options" "%_SOURCE%" +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +call "%_PREFIX%\bin\scalac.bat" -d "%_OUT_DIR%" "%_SOURCE%" +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -M HelloWorld --offline --server=false +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +endlocal diff --git a/project/scripts/winCmdTests b/project/scripts/winCmdTests index d287b60992b2..fe6a43c7f68f 100644 --- a/project/scripts/winCmdTests +++ b/project/scripts/winCmdTests @@ -1,10 +1,10 @@ #!/usr/bin/env bash set -e -PREFIX="dist/target/pack" +PREFIX="dist/win-x86_64/target/pack" SOURCE="tests/pos/HelloWorld.scala" $PREFIX/bin/scalac @project/scripts/options "$SOURCE" $PREFIX/bin/scalac -d out "$SOURCE" -$PREFIX/bin/scala -classpath out HelloWorld -$PREFIX/bin/scala -classpath out -J-Xmx512m HelloWorld +$PREFIX/bin/scala --power -classpath out -M HelloWorld --offline '--server=false' +$PREFIX/bin/scala --power -classpath out -J -Xmx512m -M HelloWorld --offline '--server=false' mkdir -p _site && $PREFIX/bin/scaladoc -d _site -project Hello "$SOURCE" diff --git a/project/scripts/winCmdTests.bat b/project/scripts/winCmdTests.bat index ee9b8237c694..903f74d7ab98 100644 --- a/project/scripts/winCmdTests.bat +++ b/project/scripts/winCmdTests.bat @@ -2,7 +2,7 @@ setlocal @rem paths are relative to the root project directory -set "_PREFIX=dist\target\pack" +set "_PREFIX=dist\win-x86_64\target\pack" set "_SOURCE=tests\pos\HelloWorld.scala" set "_OUT_DIR=out" set "_SITE_DIR=_site" @@ -14,10 +14,10 @@ if not %ERRORLEVEL%==0 endlocal& exit /b 1 call "%_PREFIX%\bin\scalac.bat" -d "%_OUT_DIR%" "%_SOURCE%" if not %ERRORLEVEL%==0 endlocal& exit /b 1 -call "%_PREFIX%\bin\scala.bat" -classpath "%_OUT_DIR%" HelloWorld +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -M HelloWorld --offline --server=false if not %ERRORLEVEL%==0 endlocal& exit /b 1 -call "%_PREFIX%\bin\scala.bat" -classpath "%_OUT_DIR%" -J-Xmx512m HelloWorld +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -J -Xmx512m -M HelloWorld --offline --server=false if not %ERRORLEVEL%==0 endlocal& exit /b 1 if not exist "%_SITE_DIR%" mkdir "%_SITE_DIR%" diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala new file mode 100644 index 000000000000..17a7488ccb1a --- /dev/null +++ 
b/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala @@ -0,0 +1,6 @@ +package a + +object Foo: // note that `Foo` is defined in `zz.scala` + class Local + inline def foo(using Local): Nothing = + ??? diff --git a/tests/run-with-compiler/i14541.scala b/tests/run-with-compiler/i14541.scala index 0fdfb89674d5..2b942007c5b6 100644 --- a/tests/run-with-compiler/i14541.scala +++ b/tests/run-with-compiler/i14541.scala @@ -6,6 +6,7 @@ object Test: def main(args: Array[String]): Unit = getClass.getClassLoader.run("echo", List("hello", "raw", "world")) // caution: uses "SCALA_OPTS" + sys.props("scala.use_legacy_launcher") = "true" dotty.tools.MainGenericRunner.main(Array("--class-path", classpath, "echo", "hello", "run", "world")) @main def echo(args: String*): Unit = println { From 42cf114774d49766cdef2d6195b030689d7ee19c Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 9 May 2024 14:18:01 +0200 Subject: [PATCH 162/827] Reinstantiate restriction to transparent inline methods Reverts parts of #19922. Fixes #20342, #20297 The logic that we should ignore declared result types of inline methods really only applies to transparent inlines. --- .../dotty/tools/dotc/typer/ProtoTypes.scala | 7 +++--- tests/neg/i18123.check | 12 +++++++++ tests/neg/i18123.scala | 25 +++++++++++++++++++ tests/pos/i18123.scala | 3 ++- tests/pos/i20297.scala | 20 +++++++++++++++ 5 files changed, 63 insertions(+), 4 deletions(-) create mode 100644 tests/neg/i18123.check create mode 100644 tests/neg/i18123.scala create mode 100644 tests/pos/i20297.scala diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index ecf1da30cac1..7bfe00aea13e 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -11,7 +11,7 @@ import Constants.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet} import Decorators.* import Uniques.* -import Flags.Method +import Flags.{Method, Transparent} import inlines.Inlines import config.Printers.typr import Inferencing.* @@ -108,7 +108,7 @@ object ProtoTypes { res /** Constrain result with two special cases: - * 1. If `meth` is an inlineable method in an inlineable context, + * 1. If `meth` is a transparent inlineable method in an inlineable context, * we should always succeed and not constrain type parameters in the expected type, * because the actual return type can be a subtype of the currently known return type. * However, we should constrain parameters of the declared return type. This distinction is @@ -128,11 +128,12 @@ object ProtoTypes { case _ => false - if Inlines.isInlineable(meth) then + if Inlines.isInlineable(meth) && meth.is(Transparent) then constrainResult(mt, wildApprox(pt)) true else constFoldException(pt) || constrainResult(mt, pt) + end constrainResult end Compatibility diff --git a/tests/neg/i18123.check b/tests/neg/i18123.check new file mode 100644 index 000000000000..d784c4d12673 --- /dev/null +++ b/tests/neg/i18123.check @@ -0,0 +1,12 @@ +-- [E172] Type Error: tests/neg/i18123.scala:25:33 --------------------------------------------------------------------- +25 | (charClassIntersection.rep() | classItem.rep()) // error + | ^^^^^^^^^^^^^^^ + |No given instance of type pkg.Implicits.Repeater[pkg.RegexTree, V] was found. 
+ |I found: + | + | pkg.Implicits.Repeater.GenericRepeaterImplicit[T] + | + |But method GenericRepeaterImplicit in object Repeater does not match type pkg.Implicits.Repeater[pkg.RegexTree, V] + | + |where: V is a type variable with constraint <: Seq[pkg.CharClassIntersection] + |. diff --git a/tests/neg/i18123.scala b/tests/neg/i18123.scala new file mode 100644 index 000000000000..bb220dc78e93 --- /dev/null +++ b/tests/neg/i18123.scala @@ -0,0 +1,25 @@ +// may not compile anymore in Scala 3.4+ +package pkg + +trait P[+T] + +extension [T](inline parse0: P[T]) + inline def | [V >: T](inline other: P[V]): P[V] = ??? + +extension [T](inline parse0: => P[T]) + inline def rep[V](inline min: Int = 0)(using repeater: Implicits.Repeater[T, V]): P[V] = ??? + +object Implicits: + trait Repeater[-T, R] + object Repeater: + implicit def GenericRepeaterImplicit[T]: Repeater[T, Seq[T]] = ??? + +sealed trait RegexTree +abstract class Node extends RegexTree +class CharClassIntersection() extends Node + +def classItem: P[RegexTree] = ??? +def charClassIntersection: P[CharClassIntersection] = ??? + +def x = + (charClassIntersection.rep() | classItem.rep()) // error diff --git a/tests/pos/i18123.scala b/tests/pos/i18123.scala index 714850004d2c..2b18b3fc73c3 100644 --- a/tests/pos/i18123.scala +++ b/tests/pos/i18123.scala @@ -7,7 +7,8 @@ extension [T](inline parse0: P[T]) inline def | [V >: T](inline other: P[V]): P[V] = ??? extension [T](inline parse0: => P[T]) - inline def rep[V](inline min: Int = 0)(using repeater: Implicits.Repeater[T, V]): P[V] = ??? + // transparent needed to make this compile in 3.4+ + transparent inline def rep[V](inline min: Int = 0)(using repeater: Implicits.Repeater[T, V]): P[V] = ??? object Implicits: trait Repeater[-T, R] diff --git a/tests/pos/i20297.scala b/tests/pos/i20297.scala new file mode 100644 index 000000000000..ee7ee57045ae --- /dev/null +++ b/tests/pos/i20297.scala @@ -0,0 +1,20 @@ +sealed abstract class Kyo[+T, -S] +opaque type <[+T, -S] >: T = T | Kyo[T, S] + +extension [T, S](v: T < S) + inline def map[U, S2](inline f: T => U < S2): U < (S & S2) = ??? + +class Streams[V] +object Streams: + def emitValue[V](v: V): Unit < Streams[V] = ??? + +opaque type Stream[+T, V, -S] = T < (Streams[V] & S) +object Stream: + extension [T, V, S](s: Stream[T, V, S]) + def reemit[S2, V2](f: V => Unit < (Streams[V2] & S2)): Stream[T, V2, S & S2] = ??? 
+ def filter[S2](f: V => Boolean < S2): Stream[T, V, S & S2] = reemit { v => + f(v).map { + case false => () + case true => Streams.emitValue(v) + } + } From 25b733e9c4c2e853d00c21b94f5270bb0490799e Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 9 May 2024 15:19:50 +0200 Subject: [PATCH 163/827] Re-instantiate previous behavior also for LTS --- .../dotty/tools/dotc/typer/ProtoTypes.scala | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 7bfe00aea13e..aae795277136 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -13,6 +13,7 @@ import Decorators.* import Uniques.* import Flags.{Method, Transparent} import inlines.Inlines +import config.{Feature, SourceVersion} import config.Printers.typr import Inferencing.* import ErrorReporting.* @@ -128,11 +129,22 @@ object ProtoTypes { case _ => false - if Inlines.isInlineable(meth) && meth.is(Transparent) then - constrainResult(mt, wildApprox(pt)) - true - else - constFoldException(pt) || constrainResult(mt, pt) + constFoldException(pt) || { + if Inlines.isInlineable(meth) then + // Stricter behaviour in 3.4+: do not apply `wildApprox` to non-transparent inlines + if Feature.sourceVersion.isAtLeast(SourceVersion.`3.4`) then + if meth.is(Transparent) then + constrainResult(mt, wildApprox(pt)) + // do not constrain the result type of transparent inline methods + true + else + constrainResult(mt, pt) + else + // Best-effort to fix https://github.com/scala/scala3/issues/9685 in the 3.3.x series + // while preserving source compatibility as much as possible + constrainResult(mt, wildApprox(pt)) || meth.is(Transparent) + else constrainResult(mt, pt) + } end constrainResult end Compatibility From d8a3d3b0182206626b9d2923ef335c017dcc79cb Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 14 May 2024 14:30:16 +0200 Subject: [PATCH 164/827] Revert changes in CB to #17924 --- community-build/community-projects/munit | 2 +- community-build/community-projects/specs2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/community-build/community-projects/munit b/community-build/community-projects/munit index 5c77d7316fc6..c5d6f474fa0d 160000 --- a/community-build/community-projects/munit +++ b/community-build/community-projects/munit @@ -1 +1 @@ -Subproject commit 5c77d7316fc66adaed64e9532ee0a45a668b01ec +Subproject commit c5d6f474fa0d481e2c29f15d6a67d10ef2099e78 diff --git a/community-build/community-projects/specs2 b/community-build/community-projects/specs2 index a618330aa808..005c5847ecf9 160000 --- a/community-build/community-projects/specs2 +++ b/community-build/community-projects/specs2 @@ -1 +1 @@ -Subproject commit a618330aa80833787859dae805d02e45d4304c42 +Subproject commit 005c5847ecf9439691505f0628d318b0fed9d341 From 07e9c40533c3ca1af6238941289ab803afa19c57 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 14 May 2024 16:55:58 +0200 Subject: [PATCH 165/827] Add test for #20342 --- tests/pos/i20342.scala | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 tests/pos/i20342.scala diff --git a/tests/pos/i20342.scala b/tests/pos/i20342.scala new file mode 100644 index 000000000000..250839680174 --- /dev/null +++ b/tests/pos/i20342.scala @@ -0,0 +1,15 @@ +class Repo[EC, E](using defaults: RepoDefaults[EC, E]) +trait RepoDefaults[EC, E] +object RepoDefaults: + inline given genImmutableRepo[E: 
DbCodec]: RepoDefaults[E, E] = ??? + inline given genRepo[EC: DbCodec, E: DbCodec]: RepoDefaults[EC, E] = ??? + +trait DbCodec[E] + +case class PersonCreator(name: String) +case class Person(id: Long) +given DbCodec[Person] = ??? +given DbCodec[PersonCreator] = ??? + +@main def Test = + val personRepo = Repo[PersonCreator, Person] From 6e8bea7d15d062a142533c28c07e4d078635d675 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 11 Jun 2024 10:35:21 +0200 Subject: [PATCH 166/827] Refine constraining scheme for result types - Take MatchTypes into account - Add test cases for tests that failed originally and would otherwise fail again after this PR. i19415.scala is fixed by the MatchType extension. i19749.scala was fixed by adding a `transparent`. --- .../dotty/tools/dotc/typer/ProtoTypes.scala | 11 +++- tests/pos/i19415.scala | 24 +++++++++ tests/pos/i19479.scala | 54 +++++++++++++++++++ 3 files changed, 87 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i19415.scala create mode 100644 tests/pos/i19479.scala diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index aae795277136..5909cda8c428 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -131,9 +131,16 @@ object ProtoTypes { constFoldException(pt) || { if Inlines.isInlineable(meth) then - // Stricter behaviour in 3.4+: do not apply `wildApprox` to non-transparent inlines + // Stricter behavisour in 3.4+: do not apply `wildApprox` to non-transparent inlines + // unless their return type is a MatchType. In this case there's no reason + // not to constrain type variables in the expected type. For transparent inlines + // we do not want to constrain type variables in the expected type since the + // actual return type might be smaller after instantiation. For inlines returning + // MatchTypes we do not want to constrain because the MatchType might be more + // specific after instantiation. TODO: Should we also use Wildcards for non-inline + // methods returning MatchTypes? if Feature.sourceVersion.isAtLeast(SourceVersion.`3.4`) then - if meth.is(Transparent) then + if meth.is(Transparent) || mt.resultType.isMatchAlias then constrainResult(mt, wildApprox(pt)) // do not constrain the result type of transparent inline methods true diff --git a/tests/pos/i19415.scala b/tests/pos/i19415.scala new file mode 100644 index 000000000000..3d9c40127cb0 --- /dev/null +++ b/tests/pos/i19415.scala @@ -0,0 +1,24 @@ +def Test = { + val left: Parser[String] = ??? + val right: Parser[Int] = ??? + val both = left && right + + val works = both.map(Ior.Both.apply) + val fails = (left && right).map(Ior.Both.apply) +} + +trait Parser[T]: + final def &&[T2](other: Parser[T2])(implicit zip: Zip[T, T2]): Parser[zip.Out] = ??? + final def map[T2](f: T => T2): Parser[T2] = ??? + +infix trait Ior[+A, +B] +object Ior: + final case class Both[+A, +B](a: A, b: B) extends (A Ior B) + +trait Zip[In1, In2]: + type Out + +object Zip { + type Out[In1, In2, O] = Zip[In1, In2] { type Out = O } + implicit def zip2[_1, _2]: Zip.Out[_1, _2, (_1, _2)] = ??? 
+} \ No newline at end of file diff --git a/tests/pos/i19479.scala b/tests/pos/i19479.scala new file mode 100644 index 000000000000..a12bd378a490 --- /dev/null +++ b/tests/pos/i19479.scala @@ -0,0 +1,54 @@ +case class Person(id: Int) + +class GeodeContinuousSourceSpec { + summon[PdxEncoder[Person]] +} + +trait PdxEncoder[A] { + def encode(a: A): Boolean +} + +object PdxEncoder extends ObjectEncoder { + implicit def intEncoder: PdxEncoder[Int] = ??? +} + +trait ObjectEncoder { + given emptyTupleEncoder: PdxEncoder[EmptyTuple] = ??? + + given tupleEncoder[K <: String, H, T <: Tuple](using + m: ValueOf[K], + hEncoder: PdxEncoder[H], + tEncoder: PdxEncoder[T] + ): PdxEncoder[FieldType[K, H] *: T] = ??? + + given objectEncoder[A, Repr <: Tuple](using + gen: LabelledGeneric.Aux[A, Repr], + tupleEncoder: PdxEncoder[Repr] + ): PdxEncoder[A] = ??? +} + +import scala.deriving.Mirror + +private type FieldType[K, +V] = V & KeyTag[K, V] +private type KeyTag[K, +V] +private type ZipWith[T1 <: Tuple, T2 <: Tuple, F[_, _]] <: Tuple = (T1, T2) match { + case (h1 *: t1, h2 *: t2) => F[h1, h2] *: ZipWith[t1, t2, F] + case (EmptyTuple, ?) => EmptyTuple + case (?, EmptyTuple) => EmptyTuple + case _ => Tuple +} + +private trait LabelledGeneric[A] { + type Repr +} + +private object LabelledGeneric { + type Aux[A, R] = LabelledGeneric[A] { type Repr = R } + + transparent inline given productInst[A <: Product](using + m: Mirror.ProductOf[A] + ): LabelledGeneric.Aux[A, ZipWith[m.MirroredElemLabels, m.MirroredElemTypes, FieldType]] = + new LabelledGeneric[A] { + type Repr = Tuple & ZipWith[m.MirroredElemLabels, m.MirroredElemTypes, FieldType] + } +} \ No newline at end of file From 33b7644f472d5d305a0df191d83bca0e2d462172 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Tue, 11 Jun 2024 14:52:24 +0100 Subject: [PATCH 167/827] Disable ClasspathTests.unglobClasspathVerifyTest (#20551) cc @bishabosha @Gedochao [test_scala2_library_tasty] [test_windows_full] [test_java8] --- compiler/test/dotty/tools/scripting/ClasspathTests.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index d5f13065ccb3..a946e509aeb3 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -81,6 +81,7 @@ class ClasspathTests: /* * verify classpath is unglobbed by MainGenericRunner. */ + @Ignore @Test def unglobClasspathVerifyTest = { val testScriptName = "unglobClasspath_scalacli.sc" val testScript = scripts("/scripting").find { _.name.matches(testScriptName) } match From e2dfea3356ca2b9acac20d7cdb86976e82f98c9f Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 11 Jun 2024 14:21:05 +0200 Subject: [PATCH 168/827] Only set `AppliedType#validSuper` after `AppliedType#cachedSuper` since cycles are possible when computing `AppliedType#superType`, see tests/neg/i20546.scala for an example leading to an NPE. We could use `ctx.period == validSuper && cachedSuper == null` as condition to detect cycles, but they are already handled in `TypeApplications#appliedTo`, with a better error message. 
We can update `AppliedType#validSuper` only after the computation is done to fix #20546 --- .../src/dotty/tools/dotc/core/Types.scala | 6 ++--- tests/neg/i20546.scala | 22 +++++++++++++++++++ 2 files changed, 25 insertions(+), 3 deletions(-) create mode 100644 tests/neg/i20546.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 2a609e1deec9..cb47bd92352e 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4646,18 +4646,18 @@ object Types extends TypeUtils { override def superType(using Context): Type = if ctx.period != validSuper then - validSuper = if (tycon.isProvisional) Nowhere else ctx.period + var superIsProvisional = tycon.isProvisional cachedSuper = tycon match case tycon: HKTypeLambda => defn.AnyType case tycon: TypeRef if tycon.symbol.isClass => tycon case tycon: TypeProxy => - if validSuper != Nowhere && args.exists(_.isProvisional) then + superIsProvisional ||= args.exists(_.isProvisional) // applyIfParameterized may perform eta-reduction leading to different // variance annotations depending on the instantiation of type params // see tests/pos/typeclass-encoding3b.scala:348 for an example - validSuper = Nowhere tycon.superType.applyIfParameterized(args) case _ => defn.AnyType + validSuper = if superIsProvisional then Nowhere else ctx.period cachedSuper override def translucentSuperType(using Context): Type = tycon match { diff --git a/tests/neg/i20546.scala b/tests/neg/i20546.scala new file mode 100644 index 000000000000..63bd3706d12e --- /dev/null +++ b/tests/neg/i20546.scala @@ -0,0 +1,22 @@ +import NamedTuple.{NamedTuple, AnyNamedTuple} + +type And[X <: Boolean, Y <: Boolean] <: Boolean = (X, Y) match + case (true, true) => true + case _ => false +type AndLambda = [X <: Boolean, Y <: Boolean] =>> And[X, Y] + +trait Expr2[Result, Scalar <: Boolean]: + type StripScalar[E] = E match + case Expr2[_, s] => s + + type AllScalar[A <: AnyNamedTuple] = Tuple.Fold[Tuple.Map[NamedTuple.DropNames[A], StripScalar], true, AndLambda] // error: cyclic + + +object Minimization: + type And[X <: Boolean, Y <: Boolean] = (X, Y) match + case (true, true) => true + case _ => false + + type AndLambda = [X <: Boolean, Y <: Boolean] =>> And[X, Y] + + type All[A <: Tuple] = Tuple.Fold[A, true, AndLambda] // error: cyclic From 5c295f1626c6361cb702fbe95bcf280cbc889990 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 11 Jun 2024 21:28:25 +0100 Subject: [PATCH 169/827] Fix EnclosingMethod for lifted anonfun The anonfun "() => new TB {.." code is lifted to a static method, in the original class (A), but the GenBCode logic was still returning the TA anon class. 
--- .../dotty/tools/backend/jvm/BCodeAsmCommon.scala | 2 +- tests/run/i18701.check | 1 + tests/run/i18701.fixed.check | 1 + tests/run/i18701.fixed.scala | 15 +++++++++++++++ tests/run/i18701.scala | 14 ++++++++++++++ 5 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 tests/run/i18701.check create mode 100644 tests/run/i18701.fixed.check create mode 100644 tests/run/i18701.fixed.scala create mode 100644 tests/run/i18701.scala diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala index 4027cf9fb564..e1ff94be6362 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala @@ -60,7 +60,7 @@ final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { assert(classSym.isClass, classSym) def enclosingMethod(sym: Symbol): Option[Symbol] = { if (sym.isClass || sym == NoSymbol) None - else if (sym.is(Method)) Some(sym) + else if (sym.is(Method, butNot=Synthetic)) Some(sym) else enclosingMethod(sym.originalOwner) } enclosingMethod(classSym.originalOwner) diff --git a/tests/run/i18701.check b/tests/run/i18701.check new file mode 100644 index 000000000000..2c0264028887 --- /dev/null +++ b/tests/run/i18701.check @@ -0,0 +1 @@ +public TB A$$anon$1.tb() diff --git a/tests/run/i18701.fixed.check b/tests/run/i18701.fixed.check new file mode 100644 index 000000000000..6d18ba0cbb0e --- /dev/null +++ b/tests/run/i18701.fixed.check @@ -0,0 +1 @@ +public TB A$$anon$2.apply() diff --git a/tests/run/i18701.fixed.scala b/tests/run/i18701.fixed.scala new file mode 100644 index 000000000000..361f3f40dc2b --- /dev/null +++ b/tests/run/i18701.fixed.scala @@ -0,0 +1,15 @@ +abstract class TA { def tb(): TB } +abstract class TB { def chk(): Unit } +class A: + def a(): TA = + new TA { + def tb(): TB = + val fn: () => TB = new Function0[TB]: + def apply(): TB = new TB { + def chk() = println(getClass.getEnclosingMethod()) + } + fn() + } + +object Test: + def main(args: Array[String]): Unit = new A().a().tb().chk() diff --git a/tests/run/i18701.scala b/tests/run/i18701.scala new file mode 100644 index 000000000000..45df41af8f06 --- /dev/null +++ b/tests/run/i18701.scala @@ -0,0 +1,14 @@ +abstract class TA { def tb(): TB } +abstract class TB { def chk(): Unit } +class A: + def a(): TA = + new TA { + def tb(): TB = + val fn: () => TB = () => new TB { + def chk() = println(getClass.getEnclosingMethod()) + } + fn() + } + +object Test: + def main(args: Array[String]): Unit = new A().a().tb().chk() From 6230405edb0d356469bd3d36560431c9e3d8f72a Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 4 Mar 2024 13:06:07 +0000 Subject: [PATCH 170/827] Rename to typedSelectWithAdapt --- .../src/dotty/tools/dotc/typer/Typer.scala | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 1c779ac050fa..99fcc8cb3d81 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -724,7 +724,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer then report.error(StableIdentPattern(tree, pt), tree.srcPos) - def typedSelect(tree0: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = + def typedSelectWithAdapt(tree0: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = val selName = tree0.name val tree = cpy.Select(tree0)(qual, selName) val superAccess = 
qual.isInstanceOf[Super] @@ -753,7 +753,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // there's a simply visible type variable in the result; try again with a more defined qualifier type // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, // but that is done only after we search for extension methods or conversions. - return typedSelect(tree, pt, qual) + return typedSelectWithAdapt(tree, pt, qual) // Otherwise, try to expand a named tuple selection val namedTupleElems = qual.tpe.widenDealias.namedTupleElementTypes @@ -769,7 +769,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // to the Tuple class of the right arity and select from that one if qual.tpe.isSmallGenericTuple then val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) - return typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) + return typedSelectWithAdapt(tree, pt, qual.cast(defn.tupleType(elems))) // Otherwise try an extension or conversion if selName.isTermName then @@ -796,7 +796,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if qual1.tpe.isSmallGenericTuple then gadts.println(i"Tuple member selection healed by GADT approximation") - return typedSelect(tree, pt, qual1) + return typedSelectWithAdapt(tree, pt, qual1) val tree2 = tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) if !tree2.isEmpty then @@ -805,7 +805,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Otherwise, if there are uninstantiated type variables in the qualifier type, // instantiate them and try again if canDefineFurther(qual.tpe.widen) then - return typedSelect(tree, pt, qual) + return typedSelectWithAdapt(tree, pt, qual) def dynamicSelect(pt: Type) = val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) @@ -854,7 +854,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer inaccessibleErrorType(rawType, superAccess, tree.srcPos) case _ => notAMemberErrorType(tree, qual, pt)) - end typedSelect + end typedSelectWithAdapt /** Expand a selection A.m on a context bound companion A with type * `[ref_1 | ... 
| ref_N]` as described by @@ -906,7 +906,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case witness: TermRef => val altQual = tpd.ref(witness).withSpan(qual.span) val altCtx = ctx.fresh.setNewTyperState() - val alt = typedSelect(tree, pt, altQual)(using altCtx) + val alt = typedSelectWithAdapt(tree, pt, altQual)(using altCtx) def current = (alt, altCtx.typerState, witness) if altCtx.reporter.hasErrors then prevs else @@ -938,7 +938,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if ctx.isJava then javaSelection(qual) else - typedSelect(tree, pt, qual).withSpan(tree.span).computeNullable() + typedSelectWithAdapt(tree, pt, qual).withSpan(tree.span).computeNullable() def javaSelection(qual: Tree)(using Context) = val tree1 = assignType(cpy.Select(tree)(qual, tree.name), qual) @@ -3879,7 +3879,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if isExtension then return found else checkImplicitConversionUseOK(found, selProto) - return withoutMode(Mode.ImplicitsEnabled)(typedSelect(tree, pt, found)) + return withoutMode(Mode.ImplicitsEnabled)(typedSelectWithAdapt(tree, pt, found)) case failure: SearchFailure => if failure.isAmbiguous then return From c477ceab6b79c9021fa9f5bad4d9910db51c0cc1 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 4 Mar 2024 13:06:07 +0000 Subject: [PATCH 171/827] Reorganise typedSelectWithAdapt Prior to the next commit, I broke up the logic into internal methods, so some can be reused, consuming then in a big Tree#orElse chain. I also took the opportunity to rename the method, to more easily distinguish it from the other typedSelect. --- .../src/dotty/tools/dotc/typer/Typer.scala | 204 ++++++++++-------- 1 file changed, 114 insertions(+), 90 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 99fcc8cb3d81..5892baf5a2db 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -729,131 +729,155 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tree = cpy.Select(tree0)(qual, selName) val superAccess = qual.isInstanceOf[Super] val rawType = selectionType(tree, qual) - val checkedType = accessibleType(rawType, superAccess) - def finish(tree: untpd.Select, qual: Tree, checkedType: Type): Tree = - val select = toNotNullTermRef(assignType(tree, checkedType), pt) - if selName.isTypeName then checkStable(qual.tpe, qual.srcPos, "type prefix") - checkLegalValue(select, pt) - ConstFold(select) - - // If regular selection is typeable, we are done - if checkedType.exists then - return finish(tree, qual, checkedType) + def tryType(tree: untpd.Select, qual: Tree, rawType: Type) = + val checkedType = accessibleType(rawType, superAccess) + // If regular selection is typeable, we are done + if checkedType.exists then + val select = toNotNullTermRef(assignType(tree, checkedType), pt) + if selName.isTypeName then checkStable(qual.tpe, qual.srcPos, "type prefix") + checkLegalValue(select, pt) + ConstFold(select) + else EmptyTree // Otherwise, simplify `m.apply(...)` to `m(...)` - if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then - return qual + def trySimplifyApply() = + if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then + qual + else EmptyTree // Otherwise, if there's a simply visible type variable in the result, try again // with a more defined qualifier type. 
There's a second trial where we try to instantiate // all type variables in `qual.tpe.widen`, but that is done only after we search for // extension methods or conversions. - if couldInstantiateTypeVar(qual.tpe.widen) then - // there's a simply visible type variable in the result; try again with a more defined qualifier type - // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, - // but that is done only after we search for extension methods or conversions. - return typedSelectWithAdapt(tree, pt, qual) + def tryInstantiateTypeVar() = + if couldInstantiateTypeVar(qual.tpe.widen) then + // there's a simply visible type variable in the result; try again with a more defined qualifier type + // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, + // but that is done only after we search for extension methods or conversions. + typedSelectWithAdapt(tree, pt, qual) + else EmptyTree // Otherwise, try to expand a named tuple selection - val namedTupleElems = qual.tpe.widenDealias.namedTupleElementTypes - val nameIdx = namedTupleElems.indexWhere(_._1 == selName) - if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then - return typed( - untpd.Apply( - untpd.Select(untpd.TypedSplice(qual), nme.apply), - untpd.Literal(Constant(nameIdx))), - pt) + def tryNamedTupleSelection() = + val namedTupleElems = qual.tpe.widenDealias.namedTupleElementTypes + val nameIdx = namedTupleElems.indexWhere(_._1 == selName) + if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then + typed( + untpd.Apply( + untpd.Select(untpd.TypedSplice(qual), nme.apply), + untpd.Literal(Constant(nameIdx))), + pt) + else EmptyTree // Otherwise, map combinations of A *: B *: .... EmptyTuple with nesting levels <= 22 // to the Tuple class of the right arity and select from that one - if qual.tpe.isSmallGenericTuple then - val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) - return typedSelectWithAdapt(tree, pt, qual.cast(defn.tupleType(elems))) + def trySmallGenericTuple(qual: Tree, withCast: Boolean) = + if qual.tpe.isSmallGenericTuple then + if withCast then + val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) + typedSelectWithAdapt(tree, pt, qual.cast(defn.tupleType(elems))) + else + typedSelectWithAdapt(tree, pt, qual) + else EmptyTree // Otherwise try an extension or conversion - if selName.isTermName then - val tree1 = tryExtensionOrConversion( - tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) - if !tree1.isEmpty then - return tree1 + def tryExt(tree: untpd.Select, qual: Tree) = + if selName.isTermName then + tryExtensionOrConversion( + tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) + else EmptyTree // Otherwise, try a GADT approximation if we're trying to select a member - // Member lookup cannot take GADTs into account b/c of cache, so we - // approximate types based on GADT constraints instead. For an example, - // see MemberHealing in gadt-approximation-interaction.scala. 
- if ctx.gadt.isNarrowing then - val wtp = qual.tpe.widen - gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") - val gadtApprox = Inferencing.approximateGADT(wtp) - gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") - val qual1 = qual.cast(gadtApprox) - val tree1 = cpy.Select(tree0)(qual1, selName) - val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) - if checkedType1.exists then - gadts.println(i"Member selection healed by GADT approximation") - return finish(tree1, qual1, checkedType1) - - if qual1.tpe.isSmallGenericTuple then - gadts.println(i"Tuple member selection healed by GADT approximation") - return typedSelectWithAdapt(tree, pt, qual1) - - val tree2 = tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) - if !tree2.isEmpty then - return tree2 + def tryGadt() = + if ctx.gadt.isNarrowing then + // Member lookup cannot take GADTs into account b/c of cache, so we + // approximate types based on GADT constraints instead. For an example, + // see MemberHealing in gadt-approximation-interaction.scala. + val wtp = qual.tpe.widen + gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") + val gadtApprox = Inferencing.approximateGADT(wtp) + gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") + val qual1 = qual.cast(gadtApprox) + val tree1 = cpy.Select(tree0)(qual1, selName) + tryType(tree1, qual1, selectionType(tree1, qual1)) + .orElse(trySmallGenericTuple(qual1, withCast = false)) + .orElse(tryExt(tree1, qual1)) + else EmptyTree // Otherwise, if there are uninstantiated type variables in the qualifier type, // instantiate them and try again - if canDefineFurther(qual.tpe.widen) then - return typedSelectWithAdapt(tree, pt, qual) + def tryDefineFurther() = + if canDefineFurther(qual.tpe.widen) then + typedSelectWithAdapt(tree, pt, qual) + else EmptyTree def dynamicSelect(pt: Type) = - val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) - if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then - assignType(tree2, TryDynamicCallType) - else - typedDynamicSelect(tree2, Nil, pt) + val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) + if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then + assignType(tree2, TryDynamicCallType) + else + typedDynamicSelect(tree2, Nil, pt) // Otherwise, if the qualifier derives from class Dynamic, expand to a // dynamic dispatch using selectDynamic or applyDynamic - if qual.tpe.derivesFrom(defn.DynamicClass) && selName.isTermName && !isDynamicExpansion(tree) then - return dynamicSelect(pt) + def tryDynamic() = + if qual.tpe.derivesFrom(defn.DynamicClass) && selName.isTermName && !isDynamicExpansion(tree) then + dynamicSelect(pt) + else EmptyTree // Otherwise, if the qualifier derives from class Selectable, // and the selector name matches one of the element of the `Fields` type member, // and the selector is not assigned to, // expand to a typed dynamic dispatch using selectDynamic wrapped in a cast - if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) - && pt != LhsProto - then - val pre = if !TypeOps.isLegalPrefix(qual.tpe) then SkolemType(qual.tpe) else qual.tpe - val fieldsType = pre.select(tpnme.Fields).dealias.simplified - val fields = fieldsType.namedTupleElementTypes - typr.println(i"try dyn select $qual, $selName, $fields") - fields.find(_._1 == selName) match - case Some((_, fieldType)) => - val dynSelected = dynamicSelect(fieldType) - dynSelected match - case 
Apply(sel: Select, _) if !sel.denot.symbol.exists => - // Reject corner case where selectDynamic needs annother selectDynamic to be called. E.g. as in neg/unselectable-fields.scala. - report.error(i"Cannot use selectDynamic here since it needs another selectDynamic to be invoked", tree.srcPos) - case _ => - return dynSelected.ensureConforms(fieldType) - case _ => + def trySelectable() = + if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) + && pt != LhsProto + then + val pre = if !TypeOps.isLegalPrefix(qual.tpe) then SkolemType(qual.tpe) else qual.tpe + val fieldsType = pre.select(tpnme.Fields).dealias.simplified + val fields = fieldsType.namedTupleElementTypes + typr.println(i"try dyn select $qual, $selName, $fields") + fields.find(_._1 == selName) match + case Some((_, fieldType)) => + val dynSelected = dynamicSelect(fieldType) + dynSelected match + case Apply(sel: Select, _) if !sel.denot.symbol.exists => + // Reject corner case where selectDynamic needs annother selectDynamic to be called. E.g. as in neg/unselectable-fields.scala. + report.error(i"Cannot use selectDynamic here since it needs another selectDynamic to be invoked", tree.srcPos) + case _ => + dynSelected.ensureConforms(fieldType) + case _ => EmptyTree + else EmptyTree // Otherwise, if the qualifier is a context bound companion, handle // by selecting a witness in typedCBSelect - if qual.tpe.typeSymbol == defn.CBCompanion then - val witnessSelection = typedCBSelect(tree0, pt, qual) - if !witnessSelection.isEmpty then return witnessSelection + def tryCBCompanion() = + if qual.tpe.typeSymbol == defn.CBCompanion then + typedCBSelect(tree0, pt, qual) + else EmptyTree // Otherwise, report an error - assignType(tree, - rawType match - case rawType: NamedType => - inaccessibleErrorType(rawType, superAccess, tree.srcPos) - case _ => - notAMemberErrorType(tree, qual, pt)) + def reportAnError() = + assignType(tree, + rawType match + case rawType: NamedType => + inaccessibleErrorType(rawType, superAccess, tree.srcPos) + case _ => + notAMemberErrorType(tree, qual, pt)) + + tryType(tree, qual, rawType) + .orElse(trySimplifyApply()) + .orElse(tryInstantiateTypeVar()) + .orElse(tryNamedTupleSelection()) + .orElse(trySmallGenericTuple(qual, withCast = true)) + .orElse(tryExt(tree, qual)) + .orElse(tryGadt()) + .orElse(tryDefineFurther()) + .orElse(tryDynamic()) + .orElse(trySelectable()) + .orElse(tryCBCompanion()) + .orElse(reportAnError()) end typedSelectWithAdapt /** Expand a selection A.m on a context bound companion A with type From 4443395a1de7317e5b3b0349f96d5c0102d5941f Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 19 Feb 2024 11:01:41 +0000 Subject: [PATCH 172/827] Heal member-select on opaque reference When the prefix of an opaque isn't the .this reference of the module class, then its RHS isn't visible. TypeComparer uses ctx.owner to "heal" or "lift" this type such that it is. We reuse that logic for member selection. 
--- .../dotty/tools/dotc/core/TypeComparer.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 13 ++++++++++ tests/pos/i19609.orig.scala | 12 ++++++++++ tests/pos/i19609.scala | 24 +++++++++++++++++++ 4 files changed, 50 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i19609.orig.scala create mode 100644 tests/pos/i19609.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 6e360faa322d..fd1deebbf8c2 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1596,7 +1596,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * Note: It would be legal to do the lifting also if M does not contain opaque types, * but in this case the retries in tryLiftedToThis would be redundant. */ - private def liftToThis(tp: Type): Type = { + def liftToThis(tp: Type): Type = { def findEnclosingThis(moduleClass: Symbol, from: Symbol): Type = if ((from.owner eq moduleClass) && from.isPackageObject && from.is(Opaque)) from.thisType diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 5892baf5a2db..dbc9818abf23 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -758,6 +758,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedSelectWithAdapt(tree, pt, qual) else EmptyTree + // Otherwise, heal member selection on an opaque reference, + // reusing the logic in TypeComparer. + def tryLiftToThis() = + val wtp = qual.tpe.widen + val liftedTp = comparing(_.liftToThis(wtp)) + if liftedTp ne wtp then + val qual1 = qual.cast(liftedTp) + val tree1 = cpy.Select(tree0)(qual1, selName) + val rawType1 = selectionType(tree1, qual1) + tryType(tree1, qual1, rawType1) + else EmptyTree + // Otherwise, try to expand a named tuple selection def tryNamedTupleSelection() = val namedTupleElems = qual.tpe.widenDealias.namedTupleElementTypes @@ -869,6 +881,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tryType(tree, qual, rawType) .orElse(trySimplifyApply()) .orElse(tryInstantiateTypeVar()) + .orElse(tryLiftToThis()) .orElse(tryNamedTupleSelection()) .orElse(trySmallGenericTuple(qual, withCast = true)) .orElse(tryExt(tree, qual)) diff --git a/tests/pos/i19609.orig.scala b/tests/pos/i19609.orig.scala new file mode 100644 index 000000000000..62622075dbed --- /dev/null +++ b/tests/pos/i19609.orig.scala @@ -0,0 +1,12 @@ +object o { + opaque type T = String + + summon[o.T =:= T] // OK + summon[o.T =:= String] // OK + + def test1(t: T): Int = + t.length // OK + + def test2(t: o.T): Int = + t.length // Error: value length is not a member of Playground.o.T +} diff --git a/tests/pos/i19609.scala b/tests/pos/i19609.scala new file mode 100644 index 000000000000..0879fa16c7cf --- /dev/null +++ b/tests/pos/i19609.scala @@ -0,0 +1,24 @@ +object o { u => + opaque type T = String + + def st = summon[String =:= T] + def su = summon[String =:= u.T] + def so = summon[String =:= o.T] + + def ts = summon[T =:= String] + def tu = summon[T =:= u.T] + def to = summon[T =:= o.T] + + def us = summon[u.T =:= String] + def ut = summon[u.T =:= T] + def uo = summon[u.T =:= o.T] + + def os = summon[o.T =:= String] + def ot = summon[o.T =:= T] + def ou = summon[o.T =:= u.T] + + def ms(x: String): Int = x.length // ok + def mt(x: T): Int = x.length // ok + def mu(x: u.T): Int = x.length // ok + 
def mo(x: o.T): Int = x.length // was: error: value length is not a member of o.T +} From 0c20d86bf7aa37d4c40f57d0af15947dad9947f2 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 12 Jun 2024 13:13:54 +0100 Subject: [PATCH 173/827] Disable scalajs on tests --- tests/run/i18701.fixed.scala | 2 ++ tests/run/i18701.scala | 2 ++ 2 files changed, 4 insertions(+) diff --git a/tests/run/i18701.fixed.scala b/tests/run/i18701.fixed.scala index 361f3f40dc2b..f0610380ad17 100644 --- a/tests/run/i18701.fixed.scala +++ b/tests/run/i18701.fixed.scala @@ -1,3 +1,5 @@ +// scalajs: --skip +// Use of Java reflection (getEnclosingMethod) abstract class TA { def tb(): TB } abstract class TB { def chk(): Unit } class A: diff --git a/tests/run/i18701.scala b/tests/run/i18701.scala index 45df41af8f06..50001233af10 100644 --- a/tests/run/i18701.scala +++ b/tests/run/i18701.scala @@ -1,3 +1,5 @@ +// scalajs: --skip +// Use of Java reflection (getEnclosingMethod) abstract class TA { def tb(): TB } abstract class TB { def chk(): Unit } class A: From 6af7022e57fbef7526caee71985774859992def9 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 13 Jun 2024 09:12:46 +0900 Subject: [PATCH 174/827] Update library/src/scala/NamedTuple.scala --- library/src/scala/NamedTuple.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index fdaa09198649..1f1b6f3e2d9f 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -35,6 +35,7 @@ object NamedTuple: extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) + // ALL METHODS DEPENDING ON `toTuple` MUST BE EXPORTED FROM `NamedTupleDecomposition` /** The underlying tuple without the names */ inline def toTuple: V = x From d31aab81cc651603a41460123706a57d4aabf120 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Thu, 13 Jun 2024 09:18:28 +0200 Subject: [PATCH 175/827] improvement: sort abstract members for auto implement --- .../dotty/tools/pc/completions/OverrideCompletions.scala | 9 ++++++++- .../tests/edit/AutoImplementAbstractMembersSuite.scala | 8 ++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala index a1edbcaa0381..1e310ca0e8ec 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala @@ -279,7 +279,14 @@ object OverrideCompletions: else "" (indent, indent, lastIndent) end calcIndent - val abstractMembers = defn.typeOpt.abstractTermMembers.map(_.symbol) + val abstractMembers = + defn.tpe.abstractTermMembers.map(_.symbol).groupBy(_.owner).map { + case (owner, members) => (owner, members.sortWith{ (sym1, sym2) => + if(sym1.sourcePos.exists && sym2.sourcePos.exists) + sym1.sourcePos.start <= sym2.sourcePos.start + else !sym2.sourcePos.exists + }) + }.toSeq.sortBy(_._1.name.decoded).flatMap(_._2) val caseClassOwners = Set("Product", "Equals") val overridables = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala index 9911d3f6d627..ffe4e293ba30 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala +++ 
b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala @@ -345,10 +345,10 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: |object Main { | class Baz extends Bar { | - | override def foo: Int = ??? - | | override def bar: Int = ??? | + | override def foo: Int = ??? + | | } |} |""".stripMargin @@ -1288,10 +1288,10 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: | |case class Concrete() extends Base: | - | override def bar(x: String): String = ??? - | | override def foo(x: Int): Int = ??? | + | override def bar(x: String): String = ??? + | | def aaa = "aaa" |end Concrete |""".stripMargin From e4a5d9c30317de5cd82bca4012130672c086f8e8 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 14 Jun 2024 09:30:13 +0100 Subject: [PATCH 176/827] Change the trigger to check the CLA (#20567) --- .github/workflows/cla.yml | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index f370cb2b541c..539bbacee9a7 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -1,15 +1,9 @@ -name: Scala CLA +name: Check Scala CLA on: pull_request: + types: opened branches-ignore: - 'language-reference-stable' - push: - branches: - - 'language-reference-stable' - merge_group: -permissions: - contents: write - pull-requests: write jobs: check: @@ -17,6 +11,5 @@ jobs: steps: - uses: actions/checkout@v4 - run: ./project/scripts/check-cla.sh - if: github.event_name == 'pull_request' env: AUTHOR: ${{ github.event.pull_request.user.login }} From de2d35c3271deff1479dac3a42039b7cdb4584f2 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 14 Jun 2024 09:30:43 +0100 Subject: [PATCH 177/827] Adapt the release workflow to SIP-46 (#20565) --- .github/workflows/ci.yaml | 185 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 176 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index de1f74c641db..974866930c68 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -769,13 +769,35 @@ jobs: - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - - - name: Prepare Release - run: | + # Extract the release tag + - name: Extract the release tag + run : echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV + # BUILD THE SDKs + - name: Build and pack the SDK (universal) + run : | ./project/scripts/sbt dist/packArchive sha256sum dist/target/scala3-* > dist/target/sha256sum.txt - echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV - + - name: Build and pack the SDK (linux x86-64) + run : | + ./project/scripts/sbt dist-linux-x86_64/packArchive + sha256sum dist/linux-x86_64/target/scala3-* > dist/linux-x86_64/target/sha256sum.txt + - name: Build and pack the SDK (linux aarch64) + run : | + ./project/scripts/sbt dist-linux-aarch64/packArchive + sha256sum dist/linux-aarch64/target/scala3-* > dist/linux-aarch64/target/sha256sum.txt + - name: Build and pack the SDK (mac x86-64) + run : | + ./project/scripts/sbt dist-mac-x86_64/packArchive + sha256sum dist/mac-x86_64/target/scala3-* > dist/mac-x86_64/target/sha256sum.txt + - name: Build and pack the SDK (mac aarch64) + run : | + ./project/scripts/sbt dist-mac-aarch64/packArchive + sha256sum dist/mac-aarch64/target/scala3-* > dist/mac-aarch64/target/sha256sum.txt + - name: Build and pack the SDK (win x86-64) + run : | + ./project/scripts/sbt dist-win-x86_64/packArchive + sha256sum 
dist/win-x86_64/target/scala3-* > dist/win-x86_64/target/sha256sum.txt + # Create the GitHub release - name: Create GitHub Release id: create_gh_release uses: actions/create-release@latest @@ -788,7 +810,7 @@ jobs: draft: true prerelease: ${{ contains(env.RELEASE_TAG, '-') }} - - name: Upload zip archive to GitHub Release + - name: Upload zip archive to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -797,8 +819,7 @@ jobs: asset_path: ./dist/target/scala3-${{ env.RELEASE_TAG }}.zip asset_name: scala3-${{ env.RELEASE_TAG }}.zip asset_content_type: application/zip - - - name: Upload tar.gz archive to GitHub Release + - name: Upload tar.gz archive to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -808,7 +829,103 @@ jobs: asset_name: scala3-${{ env.RELEASE_TAG }}.tar.gz asset_content_type: application/gzip - - name: Upload SHA256 sum of the release artefacts to GitHub Release + - name: Upload zip archive to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + 
asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_content_type: application/gzip + + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -818,6 +935,56 @@ jobs: asset_name: sha256sum.txt asset_content_type: text/plain + - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/sha256sum.txt + asset_name: sha256sum-x86_64-pc-linux.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/sha256sum-aarch64-pc-linux.txt + asset_name: sha256sum.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/sha256sum.txt + asset_name: sha256sum-x86_64-apple-darwin.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/sha256sum.txt + asset_name: sha256sum-aarch64-apple-darwin.txt + asset_content_type: text/plain + + - name: Upload 
SHA256 sum of the release artefacts to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/sha256sum.txt + asset_name: sha256sum-x86_64-pc-win32.txt + asset_content_type: text/plain + - name: Publish Release run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleRelease" From 58a08979a209b689a52e3857b4963eed609cc082 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 14 Jun 2024 11:22:03 +0100 Subject: [PATCH 178/827] Revert "Change the trigger to check the CLA (#20567)" This reverts commit e4a5d9c30317de5cd82bca4012130672c086f8e8. --- .github/workflows/cla.yml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 539bbacee9a7..f370cb2b541c 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -1,9 +1,15 @@ -name: Check Scala CLA +name: Scala CLA on: pull_request: - types: opened branches-ignore: - 'language-reference-stable' + push: + branches: + - 'language-reference-stable' + merge_group: +permissions: + contents: write + pull-requests: write jobs: check: @@ -11,5 +17,6 @@ jobs: steps: - uses: actions/checkout@v4 - run: ./project/scripts/check-cla.sh + if: github.event_name == 'pull_request' env: AUTHOR: ${{ github.event.pull_request.user.login }} From 550a960305d771cdd0d4c3236284d20b73cad4ee Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 14 Jun 2024 18:09:28 +0100 Subject: [PATCH 179/827] Use the commit instead of the branch --- .github/workflows/publish-sdkman.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 5d6744dd74e9..2126a3237d83 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -48,7 +48,7 @@ jobs: - platform: UNIVERSAL archive : 'scala3-${{ inputs.version }}.zip' steps: - - uses: hamzaremmal/sdkman-release-action@main # TODO: Make a release of the action and configure the version here + - uses: hamzaremmal/sdkman-release-action@7e437233a6bd79bc4cb0fa9071b685e94bdfdba6 with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} @@ -61,7 +61,7 @@ jobs: runs-on: ubuntu-latest needs: publish steps: - - uses: hamzaremmal/sdkman-default-action@main # TODO: Make a release of the action and configure the version here + - uses: hamzaremmal/sdkman-default-action@866bc79fc5bd397eeb48f9cedda2f15221c8515d with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} From 8e4aa195e630475af71a007309040da06bcfa7b2 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Fri, 14 Jun 2024 22:16:45 +0200 Subject: [PATCH 180/827] Fix typo in comment MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Lhoták --- tests/init/warn/type-filter2.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/init/warn/type-filter2.scala b/tests/init/warn/type-filter2.scala index 65f5be8f4b53..cc9a8f8b00d0 100644 --- a/tests/init/warn/type-filter2.scala +++ b/tests/init/warn/type-filter2.scala @@ -13,7 +13,7 @@ class C(x: Int): val c: A = a.asInstanceOf[A] // abstraction for c is {A, B} val d = c.f // treat as c.asInstanceOf[owner of f].f - val e = c.m() // treat as c.asInstanceOf[owner of 
f].m() + val e = c.m() // treat as c.asInstanceOf[owner of m].m() val c2: B = a.asInstanceOf[B] val g = c2.f // no error here From 5727187a647dbce044ddb0e577c07f5710585997 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Sun, 16 Jun 2024 16:46:00 +0100 Subject: [PATCH 181/827] Add `-Wall` --- .../tools/dotc/config/ScalaSettings.scala | 68 +++++++++++-------- .../src/dotty/tools/dotc/core/Symbols.scala | 2 +- .../dotc/inlines/PrepareInlineable.scala | 2 +- .../dotty/tools/dotc/parsing/Parsers.scala | 2 +- .../tools/dotc/transform/CheckUnused.scala | 2 +- .../tools/dotc/transform/init/Checker.scala | 4 +- .../src/dotty/tools/dotc/typer/Linter.scala | 4 +- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- 8 files changed, 50 insertions(+), 36 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index c64521ec74e1..bcfc651aeb92 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -158,49 +158,53 @@ private sealed trait WarningSettings: val Whelp: Setting[Boolean] = BooleanSetting(WarningSetting, "W", "Print a synopsis of warning options.") val XfatalWarnings: Setting[Boolean] = BooleanSetting(WarningSetting, "Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) - val WvalueDiscard: Setting[Boolean] = BooleanSetting(WarningSetting, "Wvalue-discard", "Warn when non-Unit expression results are unused.") - val WNonUnitStatement = BooleanSetting(WarningSetting, "Wnonunit-statement", "Warn when block statements are non-Unit expressions.") - val WenumCommentDiscard = BooleanSetting(WarningSetting, "Wenum-comment-discard", "Warn when a comment ambiguously assigned to multiple enum cases is discarded.") - val WimplausiblePatterns = BooleanSetting(WarningSetting, "Wimplausible-patterns", "Warn if comparison with a pattern value looks like it might always fail.") - val WunstableInlineAccessors = BooleanSetting(WarningSetting, "WunstableInlineAccessors", "Warn an inline methods has references to non-stable binary APIs.") - val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( + val Wall: Setting[Boolean] = BooleanSetting(WarningSetting, "Wall", "Enable all warning settings.") + private val WvalueDiscard: Setting[Boolean] = BooleanSetting(WarningSetting, "Wvalue-discard", "Warn when non-Unit expression results are unused.") + private val WNonUnitStatement = BooleanSetting(WarningSetting, "Wnonunit-statement", "Warn when block statements are non-Unit expressions.") + private val WenumCommentDiscard = BooleanSetting(WarningSetting, "Wenum-comment-discard", "Warn when a comment ambiguously assigned to multiple enum cases is discarded.") + private val WimplausiblePatterns = BooleanSetting(WarningSetting, "Wimplausible-patterns", "Warn if comparison with a pattern value looks like it might always fail.") + private val WunstableInlineAccessors = BooleanSetting(WarningSetting, "WunstableInlineAccessors", "Warn an inline methods has references to non-stable binary APIs.") + private val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( WarningSetting, name = "Wunused", helpArg = "warning", descr = "Enable or disable specific `unused` warnings", choices = List( ChoiceWithHelp("nowarn", ""), - ChoiceWithHelp("all",""), + ChoiceWithHelp("all", ""), ChoiceWithHelp( name = "imports", description = "Warn if an import selector is not referenced.\n" + "NOTE : overrided 
by -Wunused:strict-no-implicit-warn"), - ChoiceWithHelp("privates","Warn if a private member is unused"), - ChoiceWithHelp("locals","Warn if a local definition is unused"), - ChoiceWithHelp("explicits","Warn if an explicit parameter is unused"), - ChoiceWithHelp("implicits","Warn if an implicit parameter is unused"), - ChoiceWithHelp("params","Enable -Wunused:explicits,implicits"), - ChoiceWithHelp("linted","Enable -Wunused:imports,privates,locals,implicits"), - ChoiceWithHelp( - name = "strict-no-implicit-warn", - description = "Same as -Wunused:import, only for imports of explicit named members.\n" + - "NOTE : This overrides -Wunused:imports and NOT set by -Wunused:all" - ), - // ChoiceWithHelp("patvars","Warn if a variable bound in a pattern is unused"), - ChoiceWithHelp( - name = "unsafe-warn-patvars", - description = "(UNSAFE) Warn if a variable bound in a pattern is unused.\n" + - "This warning can generate false positive, as warning cannot be\n" + - "suppressed yet." - ) + ChoiceWithHelp("privates", "Warn if a private member is unused"), + ChoiceWithHelp("locals", "Warn if a local definition is unused"), + ChoiceWithHelp("explicits", "Warn if an explicit parameter is unused"), + ChoiceWithHelp("implicits", "Warn if an implicit parameter is unused"), + ChoiceWithHelp("params", "Enable -Wunused:explicits,implicits"), + ChoiceWithHelp("linted", "Enable -Wunused:imports,privates,locals,implicits"), + ChoiceWithHelp( + name = "strict-no-implicit-warn", + description = "Same as -Wunused:import, only for imports of explicit named members.\n" + + "NOTE : This overrides -Wunused:imports and NOT set by -Wunused:all" + ), + // ChoiceWithHelp("patvars","Warn if a variable bound in a pattern is unused"), + ChoiceWithHelp( + name = "unsafe-warn-patvars", + description = "(UNSAFE) Warn if a variable bound in a pattern is unused.\n" + + "This warning can generate false positive, as warning cannot be\n" + + "suppressed yet." + ) ), default = Nil ) object WunusedHas: def isChoiceSet(s: String)(using Context) = Wunused.value.pipe(us => us.contains(s)) - def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) + def allOr(s: String)(using Context) = Wall.value || Wunused.value.pipe(us => us.contains("all") || us.contains(s)) def nowarn(using Context) = allOr("nowarn") + // Is any choice set for -Wunused? 
+ def any(using Context): Boolean = Wunused.value.nonEmpty + // overrided by strict-no-implicit-warn def imports(using Context) = (allOr("imports") || allOr("linted")) && !(strictNoImplicitWarn) @@ -296,7 +300,17 @@ private sealed trait WarningSettings: def typeParameterShadow(using Context) = allOr("type-parameter-shadow") - val WcheckInit: Setting[Boolean] = BooleanSetting(WarningSetting, "Wsafe-init", "Ensure safe initialization of objects.") + private val WcheckInit: Setting[Boolean] = BooleanSetting(WarningSetting, "Wsafe-init", "Ensure safe initialization of objects.") + + object Whas: + def allOr(s: Setting[Boolean])(using Context): Boolean = + Wall.value || s.value + def valueDiscard(using Context): Boolean = allOr(WvalueDiscard) + def nonUnitStatement(using Context): Boolean = allOr(WNonUnitStatement) + def enumCommentDiscard(using Context): Boolean = allOr(WenumCommentDiscard) + def implausiblePatterns(using Context): Boolean = allOr(WimplausiblePatterns) + def unstableInlineAccessors(using Context): Boolean = allOr(WunstableInlineAccessors) + def checkInit(using Context): Boolean = allOr(WcheckInit) /** -X "Extended" or "Advanced" settings */ private sealed trait XSettings: diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index da0ecac47b7d..b8830f2520c5 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -84,7 +84,7 @@ object Symbols extends SymUtils { ctx.settings.YretainTrees.value || denot.owner.isTerm || // no risk of leaking memory after a run for these denot.isOneOf(InlineOrProxy) || // need to keep inline info - ctx.settings.WcheckInit.value || // initialization check + ctx.settings.Whas.checkInit || // initialization check ctx.settings.YcheckInitGlobal.value /** The last denotation of this symbol */ diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala index 1acc6a1c8317..bb950fbe43cd 100644 --- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala @@ -91,7 +91,7 @@ object PrepareInlineable { postTransform(super.transform(preTransform(tree))) protected def checkUnstableAccessor(accessedTree: Tree, accessor: Symbol)(using Context): Unit = - if ctx.settings.WunstableInlineAccessors.value then + if ctx.settings.Whas.unstableInlineAccessors then val accessorTree = accessorDef(accessor, accessedTree.symbol) report.warning(reporting.UnstableInlineAccessor(accessedTree.symbol, accessorTree), accessedTree) } diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index e28ba5fd669e..3a987a9358e4 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -4083,7 +4083,7 @@ object Parsers { if (in.token == COMMA) { in.nextToken() val ids = commaSeparated(() => termIdent()) - if ctx.settings.WenumCommentDiscard.value then + if ctx.settings.Whas.enumCommentDiscard then in.getDocComment(start).foreach: comm => warning( em"""Ambiguous Scaladoc comment on multiple cases is ignored. 
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index d8389ff964a4..10a4721ad1d0 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -58,7 +58,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke override def isRunnable(using Context): Boolean = super.isRunnable && - ctx.settings.Wunused.value.nonEmpty && + ctx.settings.WunusedHas.any && !ctx.isJava // ========== SETUP ============ diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 9e78bd5474a3..4d5c467cf4fe 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -29,7 +29,7 @@ class Checker extends Phase: override val runsAfter = Set(Pickler.name) override def isEnabled(using Context): Boolean = - super.isEnabled && (ctx.settings.WcheckInit.value || ctx.settings.YcheckInitGlobal.value) + super.isEnabled && (ctx.settings.Whas.checkInit || ctx.settings.YcheckInitGlobal.value) def traverse(traverser: InitTreeTraverser)(using Context): Boolean = monitor(phaseName): val unit = ctx.compilationUnit @@ -50,7 +50,7 @@ class Checker extends Phase: cancellable { val classes = traverser.getClasses() - if ctx.settings.WcheckInit.value then + if ctx.settings.Whas.checkInit then Semantic.checkClasses(classes)(using checkCtx) if ctx.settings.YcheckInitGlobal.value then diff --git a/compiler/src/dotty/tools/dotc/typer/Linter.scala b/compiler/src/dotty/tools/dotc/typer/Linter.scala index c0ba581b3732..4c02bf80df63 100644 --- a/compiler/src/dotty/tools/dotc/typer/Linter.scala +++ b/compiler/src/dotty/tools/dotc/typer/Linter.scala @@ -55,7 +55,7 @@ object Linter: && !isJavaApplication(t) // Java methods are inherently side-effecting // && !treeInfo.hasExplicitUnit(t) // suppressed by explicit expr: Unit // TODO Should explicit `: Unit` be added as warning suppression? - if ctx.settings.WNonUnitStatement.value && !ctx.isAfterTyper && checkInterestingShapes(t) then + if ctx.settings.Whas.nonUnitStatement && !ctx.isAfterTyper && checkInterestingShapes(t) then val where = t match case Block(_, res) => res case If(_, thenpart, Literal(Constant(()))) => @@ -119,7 +119,7 @@ object Linter: // still compute `canEqual(A & B, B & A) = true`. 
canEqual(a, b.tp1) || canEqual(a, b.tp2) - if ctx.settings.WimplausiblePatterns.value && !canEqual(pat.tpe, selType) then + if ctx.settings.Whas.implausiblePatterns && !canEqual(pat.tpe, selType) then report.warning(ImplausiblePatternWarning(pat, selType), pat.srcPos) end warnOnImplausiblePattern diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 1c779ac050fa..d5e7e693111c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4478,7 +4478,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // so will take the code path that decides on inlining val tree1 = adapt(tree, WildcardType, locked) checkStatementPurity(tree1)(tree, ctx.owner, isUnitExpr = true) - if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.WvalueDiscard.value && !isThisTypeResult(tree)) { + if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.Whas.valueDiscard && !isThisTypeResult(tree)) { report.warning(ValueDiscarding(tree.tpe), tree.srcPos) } return tpd.Block(tree1 :: Nil, unitLiteral) From d9e5fd4440e91af32672064e9dcfaa822add501a Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Mon, 10 Jun 2024 18:25:07 +0100 Subject: [PATCH 182/827] Add test cases for `-Wall` --- tests/warn/i18559a.check | 12 ++++++++++++ tests/warn/i18559a.scala | 15 +++++++++++++++ tests/warn/i18559b.check | 12 ++++++++++++ tests/warn/i18559b.scala | 9 +++++++++ tests/warn/i18559c.check | 4 ++++ tests/warn/i18559c.scala | 15 +++++++++++++++ 6 files changed, 67 insertions(+) create mode 100644 tests/warn/i18559a.check create mode 100644 tests/warn/i18559a.scala create mode 100644 tests/warn/i18559b.check create mode 100644 tests/warn/i18559b.scala create mode 100644 tests/warn/i18559c.check create mode 100644 tests/warn/i18559c.scala diff --git a/tests/warn/i18559a.check b/tests/warn/i18559a.check new file mode 100644 index 000000000000..9652a4d97ac8 --- /dev/null +++ b/tests/warn/i18559a.check @@ -0,0 +1,12 @@ +-- [E198] Unused Symbol Warning: tests/warn/i18559a.scala:4:28 --------------------------------------------------------- +4 | import collection.mutable.Set // warn + | ^^^ + | unused import +-- [E198] Unused Symbol Warning: tests/warn/i18559a.scala:8:8 ---------------------------------------------------------- +8 | val x = 1 // warn + | ^ + | unused local definition +-- [E198] Unused Symbol Warning: tests/warn/i18559a.scala:11:26 -------------------------------------------------------- +11 | import SomeGivenImports.given // warn + | ^^^^^ + | unused import diff --git a/tests/warn/i18559a.scala b/tests/warn/i18559a.scala new file mode 100644 index 000000000000..24f1cae449b8 --- /dev/null +++ b/tests/warn/i18559a.scala @@ -0,0 +1,15 @@ +//> using options -Wall +// This test checks that -Wall turns on -Wunused:all if -Wunused is not set +object FooImportUnused: + import collection.mutable.Set // warn + +object FooUnusedLocal: + def test(): Unit = + val x = 1 // warn + +object FooGivenUnused: + import SomeGivenImports.given // warn + +object SomeGivenImports: + given Int = 0 + given String = "foo" diff --git a/tests/warn/i18559b.check b/tests/warn/i18559b.check new file mode 100644 index 000000000000..710df8234a9a --- /dev/null +++ b/tests/warn/i18559b.check @@ -0,0 +1,12 @@ +-- Warning: tests/warn/i18559b.scala:8:6 ------------------------------------------------------------------------------- +8 | val localFile: String = s"${url.##}.tmp" // warn + | ^ + | Access 
non-initialized value localFile. Calling trace: + | ├── class RemoteFile(url: String) extends AbstractFile: [ i18559b.scala:7 ] + | │ ^ + | ├── abstract class AbstractFile: [ i18559b.scala:3 ] + | │ ^ + | ├── val extension: String = name.substring(4) [ i18559b.scala:5 ] + | │ ^^^^ + | └── def name: String = localFile [ i18559b.scala:9 ] + | ^^^^^^^^^ diff --git a/tests/warn/i18559b.scala b/tests/warn/i18559b.scala new file mode 100644 index 000000000000..dac6e8c57c83 --- /dev/null +++ b/tests/warn/i18559b.scala @@ -0,0 +1,9 @@ +//> using options -Wall +// This test checks that -Wall turns on -Wsafe-init +abstract class AbstractFile: + def name: String + val extension: String = name.substring(4) + +class RemoteFile(url: String) extends AbstractFile: + val localFile: String = s"${url.##}.tmp" // warn + def name: String = localFile diff --git a/tests/warn/i18559c.check b/tests/warn/i18559c.check new file mode 100644 index 000000000000..7fd42a48db0c --- /dev/null +++ b/tests/warn/i18559c.check @@ -0,0 +1,4 @@ +-- [E198] Unused Symbol Warning: tests/warn/i18559c.scala:8:8 ---------------------------------------------------------- +8 | val x = 1 // warn + | ^ + | unused local definition diff --git a/tests/warn/i18559c.scala b/tests/warn/i18559c.scala new file mode 100644 index 000000000000..3ca0c8893a66 --- /dev/null +++ b/tests/warn/i18559c.scala @@ -0,0 +1,15 @@ +//> using options -Wall -Wunused:locals +// This test checks that -Wall leaves -Wunused:... untouched if it is already set +object FooImportUnused: + import collection.mutable.Set // not warn + +object FooUnusedLocal: + def test(): Unit = + val x = 1 // warn + +object FooGivenUnused: + import SomeGivenImports.given // not warn + +object SomeGivenImports: + given Int = 0 + given String = "foo" From c68e5949933164a6b863c3050deb8126db49e81d Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 29 May 2024 18:26:08 +0200 Subject: [PATCH 183/827] Fix symbol reference retrivial of `scala.caps.Caps` - it was changed from opaque type to class in #18463 --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 52535f26c692..1f0a673f90b1 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -991,7 +991,7 @@ class Definitions { @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") - @tu lazy val Caps_Capability: TypeSymbol = CapsModule.requiredType("Capability") + @tu lazy val Caps_Capability: ClassSymbol = requiredClass("scala.caps.Capability") @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") From 133c14aa923a02147f1468e167d88f662d8210ba Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Fri, 14 Jun 2024 14:15:12 +0200 Subject: [PATCH 184/827] Add regression test for i20498 --- tests/pos/i20498/impl.scala | 4 ++++ tests/pos/i20498/ops.scala | 4 ++++ tests/pos/i20498/package.scala | 2 ++ 3 files changed, 10 insertions(+) create mode 100644 tests/pos/i20498/impl.scala create mode 100644 tests/pos/i20498/ops.scala create mode 100644 tests/pos/i20498/package.scala diff --git a/tests/pos/i20498/impl.scala 
b/tests/pos/i20498/impl.scala new file mode 100644 index 000000000000..81ad4e8d4633 --- /dev/null +++ b/tests/pos/i20498/impl.scala @@ -0,0 +1,4 @@ +package demo.debug +import demo.util._ +class Impl: + "".tap() diff --git a/tests/pos/i20498/ops.scala b/tests/pos/i20498/ops.scala new file mode 100644 index 000000000000..9a1949d44421 --- /dev/null +++ b/tests/pos/i20498/ops.scala @@ -0,0 +1,4 @@ +package demo.util +trait Ops: + final implicit class Ops[A](private val self: A): + def tap(): Unit = () diff --git a/tests/pos/i20498/package.scala b/tests/pos/i20498/package.scala new file mode 100644 index 000000000000..1dac7f805c9a --- /dev/null +++ b/tests/pos/i20498/package.scala @@ -0,0 +1,2 @@ +package demo +package object util extends Ops From 85dcdc45e2280e25ece3ae7f9066f489e19c116e Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 18 Jun 2024 10:46:57 +0200 Subject: [PATCH 185/827] chore: Fix doc link in clauseInterleaving docstring --- library/src/scala/runtime/stdLibPatches/language.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 1171c62602fb..d89bd9dcf72e 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -64,7 +64,7 @@ object language: /** Adds support for clause interleaving: * Methods can now have as many type clauses as they like, this allows to have type bounds depend on terms: `def f(x: Int)[A <: x.type]: A` * - * @see [[http://dotty.epfl.ch/docs/reference/other-new-features/explicit-nulls.html]] + * @see [[https://github.com/scala/improvement-proposals/blob/main/content/clause-interleaving.md]] */ @compileTimeOnly("`clauseInterleaving` can only be used at compile time in import statements") object clauseInterleaving From 01ada7428eb1355566059da37428295792df01c4 Mon Sep 17 00:00:00 2001 From: Lucy Martin Date: Wed, 19 Jun 2024 08:45:39 +0100 Subject: [PATCH 186/827] #20105: Adding a warning to the case where nested named definitions contain non-tail recursive calls. Code will now compile where a child def calls the parent def in a non-tail position (with the warning). Code will no longer compile if all calls to a @tailrec method are in named child methods (as these do not tail recurse). 
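
To illustrate (a minimal sketch lifted from the added tests/warn/i20105.scala),
the following now compiles, but the recursive call to foo inside the
non-inlined inner def bar emits the new warning:

  import scala.annotation.tailrec
  @tailrec
  def foo(): Unit =
    def bar(): Unit =
      if (???) foo() // warn: recursive call inside the inner def bar
      else bar()
    bar()
    foo()

If the trailing foo() call were removed, so that the only recursive calls sat
inside bar, @tailrec would now be rejected outright (see tests/neg/i20105.check:
"method foo contains no recursive calls").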
--- .../tools/dotc/reporting/ErrorMessageID.scala | 1 + .../dotty/tools/dotc/reporting/messages.scala | 14 +++++++++ .../dotty/tools/dotc/transform/TailRec.scala | 31 +++++++++++++++++-- tests/neg/i20105.check | 10 ++++++ tests/neg/i20105.scala | 9 ++++++ tests/neg/i5397.scala | 6 ++-- tests/warn/i20105.check | 6 ++++ tests/warn/i20105.scala | 10 ++++++ 8 files changed, 83 insertions(+), 4 deletions(-) create mode 100644 tests/neg/i20105.check create mode 100644 tests/neg/i20105.scala create mode 100644 tests/warn/i20105.check create mode 100644 tests/warn/i20105.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 0e42629773cc..273c3720bc1c 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -212,6 +212,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case ContextBoundCompanionNotValueID // errorNumber: 196 case InlinedAnonClassWarningID // errorNumber: 197 case UnusedSymbolID // errorNumber: 198 + case TailrecNestedCallID //errorNumber: 199 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 87c80ebd78d0..b349cf1fb678 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -1918,6 +1918,20 @@ class TailrecNotApplicable(symbol: Symbol)(using Context) def explain(using Context) = "" } +class TailrecNestedCall(definition: Symbol, innerDef: Symbol)(using Context) + extends SyntaxMsg(TailrecNestedCallID) { + def msg(using Context) = { + s"The tail recursive def ${definition.name} contains a recursive call inside the non-inlined inner def ${innerDef.name}" + } + + def explain(using Context) = + """Tail recursion is only validated and optimised directly in the definition. + |Any calls to the recursive method via an inner def cannot be validated as + |tail recursive, nor optimised if they are. To enable tail recursion from + |inner calls, mark the inner def as inline. 
+ |""".stripMargin +} + class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context) extends Message(FailureToEliminateExistentialID) { def kind = MessageKind.Compatibility diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 43c740ce7d38..d054c5aa6232 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -429,10 +429,23 @@ class TailRec extends MiniPhase { assert(false, "We should never have gotten inside a pattern") tree - case tree: ValOrDefDef => + case tree: ValDef => if (isMandatory) noTailTransform(tree.rhs) tree + case tree: DefDef => + if (isMandatory) + if (tree.symbol.is(Synthetic)) + noTailTransform(tree.rhs) + else + // We can't tail recurse through nested definitions, so don't want to propagate to child nodes + // We don't want to fail if there is a call that would recurse (as this would be a non self recurse), so don't + // want to call noTailTransform + // We can however warn in this case, as its likely in this situation that someone would expect a tail + // recursion optimization and enabling this to optimise would be a simple case of inlining the inner method + new NestedTailRecAlerter(method, tree.symbol).traverse(tree) + tree + case _: Super | _: This | _: Literal | _: TypeTree | _: TypeDef | EmptyTree => tree @@ -446,7 +459,8 @@ class TailRec extends MiniPhase { case Return(expr, from) => val fromSym = from.symbol - val inTailPosition = !fromSym.is(Label) || tailPositionLabeledSyms.contains(fromSym) + val inTailPosition = tailPositionLabeledSyms.contains(fromSym) // Label returns are only tail if the label is in tail position + || (fromSym eq method) // Method returns are only tail if we are looking at the original method cpy.Return(tree)(transform(expr, inTailPosition), from) case _ => @@ -454,6 +468,19 @@ class TailRec extends MiniPhase { } } } + + class NestedTailRecAlerter(method: Symbol, inner: Symbol) extends TreeTraverser { + override def traverse(tree: tpd.Tree)(using Context): Unit = + tree match { + case a: Apply => + if (a.fun.symbol eq method) { + report.warning(new TailrecNestedCall(method, inner), a.srcPos) + } + traverseChildren(tree) + case _ => + traverseChildren(tree) + } + } } object TailRec { diff --git a/tests/neg/i20105.check b/tests/neg/i20105.check new file mode 100644 index 000000000000..5fb33283387b --- /dev/null +++ b/tests/neg/i20105.check @@ -0,0 +1,10 @@ +-- [E199] Syntax Warning: tests/neg/i20105.scala:6:9 ------------------------------------------------------------------- +6 | foo() + | ^^^^^ + | The tail recursive def foo contains a recursive call inside the non-inlined inner def bar + | + | longer explanation available when compiling with `-explain` +-- [E097] Syntax Error: tests/neg/i20105.scala:3:4 --------------------------------------------------------------------- +3 |def foo(): Unit = // error + | ^ + | TailRec optimisation not applicable, method foo contains no recursive calls diff --git a/tests/neg/i20105.scala b/tests/neg/i20105.scala new file mode 100644 index 000000000000..08d54e895ec1 --- /dev/null +++ b/tests/neg/i20105.scala @@ -0,0 +1,9 @@ +import scala.annotation.tailrec +@tailrec +def foo(): Unit = // error + def bar(): Unit = + if (???) 
+ foo() + else + bar() + bar() \ No newline at end of file diff --git a/tests/neg/i5397.scala b/tests/neg/i5397.scala index d38b0e67bff9..ebe89875b3df 100644 --- a/tests/neg/i5397.scala +++ b/tests/neg/i5397.scala @@ -16,8 +16,10 @@ object Test { rec3 // error: not in tail position }) - @tailrec def rec4: Unit = { - def local = rec4 // error: not in tail position + // This is technically not breaching tail recursion as rec4 does not call itself, local does + // This instead fails due to having no tail recursion at all + @tailrec def rec4: Unit = { // error: no recursive calls + def local = rec4 } @tailrec def rec5: Int = { diff --git a/tests/warn/i20105.check b/tests/warn/i20105.check new file mode 100644 index 000000000000..d291931748cf --- /dev/null +++ b/tests/warn/i20105.check @@ -0,0 +1,6 @@ +-- [E199] Syntax Warning: tests/warn/i20105.scala:6:9 ------------------------------------------------------------------ +6 | foo() // warn + | ^^^^^ + | The tail recursive def foo contains a recursive call inside the non-inlined inner def bar + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i20105.scala b/tests/warn/i20105.scala new file mode 100644 index 000000000000..6d691b7e6bfb --- /dev/null +++ b/tests/warn/i20105.scala @@ -0,0 +1,10 @@ +import scala.annotation.tailrec +@tailrec +def foo(): Unit = + def bar(): Unit = + if (???) + foo() // warn + else + bar() + bar() + foo() \ No newline at end of file From 2217634cdf2620a1fe97ab9929dad6ce87b57aed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Ferreira?= Date: Thu, 23 May 2024 19:24:03 +0100 Subject: [PATCH 187/827] Correct Java signature for value classes appearing in type arguments As suggested in #10846 the fix to this issue should be to port https://github.com/scala/scala/pull/8127 to scala3 --- .../dotc/transform/GenericSignatures.scala | 27 +++++++++--------- tests/pos/i10347/C_2.java | 2 +- tests/run/i10846.check | 3 ++ tests/run/i10846/i10846.scala | 28 +++++++++++++++++++ 4 files changed, 45 insertions(+), 15 deletions(-) create mode 100644 tests/run/i10846.check create mode 100644 tests/run/i10846/i10846.scala diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index 217c843c4e50..b5b75450272c 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -64,7 +64,7 @@ object GenericSignatures { ps.foreach(boxedSig) } - def boxedSig(tp: Type): Unit = jsig(tp.widenDealias, primitiveOK = false) + def boxedSig(tp: Type): Unit = jsig(tp.widenDealias, unboxedVCs = false) /** The signature of the upper-bound of a type parameter. 
* @@ -232,7 +232,7 @@ object GenericSignatures { } @noinline - def jsig(tp0: Type, toplevel: Boolean = false, primitiveOK: Boolean = true): Unit = { + def jsig(tp0: Type, toplevel: Boolean = false, unboxedVCs: Boolean = true): Unit = { val tp = tp0.dealias tp match { @@ -241,7 +241,7 @@ object GenericSignatures { val erasedUnderlying = fullErasure(ref.underlying.bounds.hi) // don't emit type param name if the param is upper-bounded by a primitive type (including via a value class) if erasedUnderlying.isPrimitiveValueType then - jsig(erasedUnderlying, toplevel, primitiveOK) + jsig(erasedUnderlying, toplevel, unboxedVCs) else typeParamSig(ref.paramName.lastPart) case defn.ArrayOf(elemtp) => @@ -269,15 +269,14 @@ object GenericSignatures { else if (sym == defn.NullClass) builder.append("Lscala/runtime/Null$;") else if (sym.isPrimitiveValueClass) - if (!primitiveOK) jsig(defn.ObjectType) + if (!unboxedVCs) jsig(defn.ObjectType) else if (sym == defn.UnitClass) jsig(defn.BoxedUnitClass.typeRef) else builder.append(defn.typeTag(sym.info)) else if (sym.isDerivedValueClass) { - val erasedUnderlying = fullErasure(tp) - if (erasedUnderlying.isPrimitiveValueType && !primitiveOK) - classSig(sym, pre, args) - else - jsig(erasedUnderlying, toplevel, primitiveOK) + if (unboxedVCs) { + val erasedUnderlying = fullErasure(tp) + jsig(erasedUnderlying, toplevel) + } else classSig(sym, pre, args) } else if (defn.isSyntheticFunctionClass(sym)) { val erasedSym = defn.functionTypeErasure(sym).typeSymbol @@ -286,7 +285,7 @@ object GenericSignatures { else if sym.isClass then classSig(sym, pre, args) else - jsig(erasure(tp), toplevel, primitiveOK) + jsig(erasure(tp), toplevel, unboxedVCs) case ExprType(restpe) if toplevel => builder.append("()") @@ -339,7 +338,7 @@ object GenericSignatures { val (reprParents, _) = splitIntersection(parents) val repr = reprParents.find(_.typeSymbol.is(TypeParam)).getOrElse(reprParents.head) - jsig(repr, primitiveOK = primitiveOK) + jsig(repr, unboxedVCs = unboxedVCs) case ci: ClassInfo => val tParams = tp.typeParams @@ -347,15 +346,15 @@ object GenericSignatures { superSig(ci.typeSymbol, ci.parents) case AnnotatedType(atp, _) => - jsig(atp, toplevel, primitiveOK) + jsig(atp, toplevel, unboxedVCs) case hktl: HKTypeLambda => - jsig(hktl.finalResultType, toplevel, primitiveOK) + jsig(hktl.finalResultType, toplevel, unboxedVCs) case _ => val etp = erasure(tp) if (etp eq tp) throw new UnknownSig - else jsig(etp, toplevel, primitiveOK) + else jsig(etp, toplevel, unboxedVCs) } } val throwsArgs = sym0.annotations flatMap ThrownException.unapply diff --git a/tests/pos/i10347/C_2.java b/tests/pos/i10347/C_2.java index 7525c5e7325d..6d3352a6e88f 100644 --- a/tests/pos/i10347/C_2.java +++ b/tests/pos/i10347/C_2.java @@ -1,5 +1,5 @@ public class C_2 { String hi = A.foo().head(); - String hy = A.bar().head(); + String hy = A.bar().head().s(); String hj = A.baz("").head(); } diff --git a/tests/run/i10846.check b/tests/run/i10846.check new file mode 100644 index 000000000000..ae6bbc8d4c3a --- /dev/null +++ b/tests/run/i10846.check @@ -0,0 +1,3 @@ +i10846.V: scala.Option +i10846.U: scala.Option +i10846.W: scala.Option>> diff --git a/tests/run/i10846/i10846.scala b/tests/run/i10846/i10846.scala new file mode 100644 index 000000000000..8ece0caca402 --- /dev/null +++ b/tests/run/i10846/i10846.scala @@ -0,0 +1,28 @@ +// scalajs: --skip + +package i10846 { + final class V(val x: Int) extends AnyVal + object V { def get: Option[V] = null } + + final class U(val y: String) extends AnyVal + object U { def 
get: Option[U] = null } + + final class W[T](val z: T) extends AnyVal + object W { def get: Option[W[Int => String]] = null } +} + + +object Test extends scala.App { + def check[T](implicit tt: reflect.ClassTag[T]): Unit = { + val companion = tt.runtimeClass.getClassLoader.loadClass(tt.runtimeClass.getName + '$') + val get = companion.getMethod("get") + assert(get.getReturnType == classOf[Option[_]]) + println(s"${tt.runtimeClass.getName}: ${get.getGenericReturnType}") + } + + import i10846._ + + check[V] + check[U] + check[W[_]] +} From 7500b0504747de41f5c3f5ff67d6577c4227d27a Mon Sep 17 00:00:00 2001 From: Arnout Engelen Date: Tue, 18 Jun 2024 17:00:49 +0200 Subject: [PATCH 188/827] Fix deterministically adding additional interfaces When a class contains calls to 'super' for traits it does not directly implement, these are added to the list of interfaces of the generated class. Previously, because these interfaces were determined using set logic, the ordering of that list was not deterministic. This change makes the order deterministic (assuming the order in which these calls are registered using `registerSuperCall` in the `CollectSuperCalls` phase is deterministic within each class) Fixes #20496 --- .../tools/backend/jvm/BTypesFromSymbols.scala | 7 +++--- .../backend/jvm/DottyBackendInterface.scala | 2 +- .../dotty/tools/backend/jvm/GenBCode.scala | 7 +++--- .../backend/jvm/DottyBytecodeTests.scala | 24 +++++++++++++++++++ 4 files changed, 33 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index b8d7ee04c870..97934935f352 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -113,11 +113,12 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce val directlyInheritedTraits = sym.directlyInheritedTraits val directlyInheritedTraitsSet = directlyInheritedTraits.toSet val allBaseClasses = directlyInheritedTraits.iterator.flatMap(_.asClass.baseClasses.drop(1)).toSet - val superCalls = superCallsMap.getOrElse(sym, Set.empty) - val additional = (superCalls -- directlyInheritedTraitsSet).filter(_.is(Trait)) + val superCalls = superCallsMap.getOrElse(sym, List.empty) + val superCallsSet = superCalls.toSet + val additional = superCalls.filter(t => !directlyInheritedTraitsSet(t) && t.is(Trait)) // if (additional.nonEmpty) // println(s"$fullName: adding supertraits $additional") - directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCalls(t)) ++ additional + directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCallsSet(t)) ++ additional } val interfaces = classSym.superInterfaces.map(classBTypeFromSymbol) diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index 8016c2bfc209..cab17b31c3f3 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -25,7 +25,7 @@ import StdNames.nme import NameKinds.{LazyBitMapName, LazyLocalName} import Names.Name -class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { +class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, List[ClassSymbol]])(using val ctx: Context) { private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] diff --git 
a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index a616241d9a3e..58daa01e4bdf 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -23,10 +23,11 @@ class GenBCode extends Phase { self => override def isRunnable(using Context) = super.isRunnable && !ctx.usedBestEffortTasty - private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] + private val superCallsMap = new MutableSymbolMap[List[ClassSymbol]] def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { - val old = superCallsMap.getOrElse(sym, Set.empty) - superCallsMap.update(sym, old + calls) + val old = superCallsMap.getOrElse(sym, List.empty) + if (!old.contains(calls)) + superCallsMap.update(sym, old :+ calls) } private val entryPoints = new mutable.HashSet[String]() diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index f80336646dfd..e92c4c26adb8 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -1963,6 +1963,30 @@ class DottyBytecodeTests extends DottyBytecodeTest { assertSameCode(instructions, expected) } } + + /** + * Test 'additional' imports are generated in deterministic order + * https://github.com/scala/scala3/issues/20496 + */ + @Test def deterministicAdditionalImports = { + val source = + """trait Actor: + | def receive() = () + |trait Timers: + | def timers() = () + |abstract class ShardCoordinator extends Actor with Timers + |class PersistentShardCoordinator extends ShardCoordinator: + | def foo = + | super.receive() + | super.timers()""".stripMargin + checkBCode(source) { dir => + val clsIn = dir.lookupName("PersistentShardCoordinator.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val expected = List("Actor", "Timers") + assertEquals(expected, clsNode.interfaces.asScala) + } + } } object invocationReceiversTestCode { From a5a3658bcb29d2bb3d768f1dbcc2567a0c325133 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 19 Jun 2024 15:21:30 +0100 Subject: [PATCH 189/827] Release .zip instead of .tar.gz for windows in sdkman --- .github/workflows/publish-sdkman.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 2126a3237d83..02e00bcbf03d 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -44,7 +44,7 @@ jobs: - platform: MAC_ARM64 archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.tar.gz' - platform: WINDOWS_64 - archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.tar.gz' + archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' - platform: UNIVERSAL archive : 'scala3-${{ inputs.version }}.zip' steps: From 6d7f782bef6f2614f227d2416110419dc36481ab Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 18 Jun 2024 13:55:36 +0200 Subject: [PATCH 190/827] Fix notify Zinc about class file in jar --- .../tools/backend/jvm/ClassfileWriters.scala | 27 +++++++++++-------- .../tools/backend/jvm/PostProcessor.scala | 10 +++---- 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala index 44498082c697..36e95c788086 100644 --- 
a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -1,6 +1,6 @@ package dotty.tools.backend.jvm -import java.io.{DataOutputStream, IOException, BufferedOutputStream, FileOutputStream} +import java.io.{DataOutputStream, File, IOException, BufferedOutputStream, FileOutputStream} import java.nio.ByteBuffer import java.nio.channels.{ClosedByInterruptException, FileChannel} import java.nio.charset.StandardCharsets.UTF_8 @@ -12,7 +12,7 @@ import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream} import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.em -import dotty.tools.io.{AbstractFile, PlainFile} +import dotty.tools.io.{AbstractFile, PlainFile, VirtualFile} import dotty.tools.io.PlainFile.toPlainFile import BTypes.InternalName import scala.util.chaining.* @@ -26,7 +26,6 @@ import scala.language.unsafeNulls * Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well. */ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { - type NullableFile = AbstractFile | Null import frontendAccess.{compilerSettings, backendReporting} sealed trait TastyWriter { @@ -46,7 +45,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { /** * Write a classfile */ - def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile + def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile /** @@ -91,7 +90,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { } private final class SingleClassWriter(underlying: FileWriter) extends ClassfileWriter { - override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile = { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile = { underlying.writeFile(classRelativePath(className), bytes) } override def writeTasty(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { @@ -103,7 +102,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { } private final class DebugClassWriter(basic: ClassfileWriter, dump: FileWriter) extends ClassfileWriter { - override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile = { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile = { val outFile = basic.writeClass(className, bytes, sourceFile) dump.writeFile(classRelativePath(className), bytes) outFile @@ -121,7 +120,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { } sealed trait FileWriter { - def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile + def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile def close(): Unit } @@ -165,7 +164,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { lazy val crc = new CRC32 - override def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile = this.synchronized { + override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = this.synchronized { val entry = new ZipEntry(relativePath) if (storeOnly) { // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ @@ -182,7 +181,13 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { jarWriter.putNextEntry(entry) try 
jarWriter.write(bytes, 0, bytes.length) finally jarWriter.flush() - null + // important detail here, even on Windows, Zinc expects the separator within the jar + // to be the system default, (even if in the actual jar file the entry always uses '/'). + // see https://github.com/sbt/zinc/blob/dcddc1f9cfe542d738582c43f4840e17c053ce81/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala#L47 + val pathInJar = + if File.separatorChar == '/' then relativePath + else relativePath.replace('/', File.separatorChar) + PlainFile.toPlainFile(Paths.get(s"${file.absolutePath}!$pathInJar")) } override def close(): Unit = this.synchronized(jarWriter.close()) @@ -230,7 +235,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) - override def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile = { + override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = { val path = base.resolve(relativePath) try { ensureDirForPath(base, path) @@ -279,7 +284,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { finally out.close() } - override def writeFile(relativePath: String, bytes: Array[Byte]):NullableFile = { + override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = { val outFile = getFile(base, relativePath) writeBytes(outFile, bytes) outFile diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala index 45c6d6ecad44..06c3c7f1cb4f 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -44,11 +44,11 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: backendReporting.error(em"Error while emitting $internalName\n${ex.getMessage}") null - if bytes != null then - if (AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern)) - AsmUtils.traceClass(bytes) - val clsFile = classfileWriter.writeClass(internalName, bytes, sourceFile) - if clsFile != null then clazz.onFileCreated(clsFile) + if bytes != null then + if AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern) then + AsmUtils.traceClass(bytes) + val clsFile = classfileWriter.writeClass(internalName, bytes, sourceFile) + clazz.onFileCreated(clsFile) } def sendToDisk(tasty: GeneratedTasty, sourceFile: AbstractFile): Unit = { From 2fb98fd36b02bc30a32545e6ef07d979afdb39f9 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 18 Jun 2024 15:06:37 +0200 Subject: [PATCH 191/827] Fix toString of JarArchive It returns the absolute path of the jar file instead of '/' --- compiler/src/dotty/tools/io/JarArchive.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index 728f89966af0..c396699f93b3 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -10,11 +10,13 @@ import scala.jdk.CollectionConverters.* * This class implements an [[AbstractFile]] backed by a jar * that be can used as the compiler's output directory. 
*/ -class JarArchive private (root: Directory) extends PlainDirectory(root) { +class JarArchive private (val jarPath: Path, root: Directory) extends PlainDirectory(root) { def close(): Unit = this.synchronized(jpath.getFileSystem().close()) override def exists: Boolean = jpath.getFileSystem().isOpen() && super.exists def allFileNames(): Iterator[String] = java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString) + + override def toString: String = jarPath.toString } object JarArchive { @@ -40,6 +42,6 @@ object JarArchive { } } val root = fs.getRootDirectories().iterator.next() - new JarArchive(Directory(root)) + new JarArchive(path, Directory(root)) } } From 8094196242acfb2c605ec88085669e48698974dd Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 19 Jun 2024 16:49:31 +0100 Subject: [PATCH 192/827] Do not release to the UNIVERSAL platform in sdkman --- .github/workflows/publish-sdkman.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 02e00bcbf03d..d4238b9371e4 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -45,8 +45,6 @@ jobs: archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.tar.gz' - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' - - platform: UNIVERSAL - archive : 'scala3-${{ inputs.version }}.zip' steps: - uses: hamzaremmal/sdkman-release-action@7e437233a6bd79bc4cb0fa9071b685e94bdfdba6 with: From 036b86bea26c1cd082b0a9dca31fcdf2b7b7dd87 Mon Sep 17 00:00:00 2001 From: Lucy Martin Date: Wed, 19 Jun 2024 16:56:34 +0100 Subject: [PATCH 193/827] #20145 - bugfix when a return tail recurse is called inside a val definition, previously this would neither optimise nor fail. --- .../src/dotty/tools/dotc/transform/TailRec.scala | 4 ++-- tests/run/i20145.check | 1 + tests/run/i20145.scala | 15 +++++++++++++++ 3 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 tests/run/i20145.check create mode 100644 tests/run/i20145.scala diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index d054c5aa6232..b8052721ff27 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -430,8 +430,8 @@ class TailRec extends MiniPhase { tree case tree: ValDef => - if (isMandatory) noTailTransform(tree.rhs) - tree + // This could contain a return statement in a code block, so we do have to go into it. 
+ cpy.ValDef(tree)(rhs = noTailTransform(tree.rhs)) case tree: DefDef => if (isMandatory) diff --git a/tests/run/i20145.check b/tests/run/i20145.check new file mode 100644 index 000000000000..f6af6debe594 --- /dev/null +++ b/tests/run/i20145.check @@ -0,0 +1 @@ +10000001 diff --git a/tests/run/i20145.scala b/tests/run/i20145.scala new file mode 100644 index 000000000000..ea26f00e2c89 --- /dev/null +++ b/tests/run/i20145.scala @@ -0,0 +1,15 @@ +import scala.annotation.tailrec +@tailrec +def foo(i: Int): Int = { + if (i > 10000000) { + i + } else { + val bar: String = { + return foo(i + 1) + "foo" + } + -1 + } +} +@main def Test = + println(foo(0)) \ No newline at end of file From 081a19a3767f5917acdf9b9bccbc74a608c44690 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 10 Jun 2024 17:38:46 +0200 Subject: [PATCH 194/827] improvement: use pc for finding references of local symbols and when semanticdb is missing --- .../src/main/dotty/tools/pc/PcCollector.scala | 431 +++--------------- .../pc/PcDocumentHighlightProvider.scala | 2 +- .../tools/pc/PcInlineValueProviderImpl.scala | 4 +- .../dotty/tools/pc/PcReferencesProvider.scala | 67 +++ .../dotty/tools/pc/PcRenameProvider.scala | 13 +- .../tools/pc/PcSemanticTokensProvider.scala | 2 +- .../main/dotty/tools/pc/PcSymbolSearch.scala | 275 +++++++++++ .../tools/pc/ScalaPresentationCompiler.scala | 15 +- .../tools/pc/SymbolInformationProvider.scala | 97 ++-- .../dotty/tools/pc/WithCompilationUnit.scala | 104 +++++ .../tools/pc/utils/DefSymbolCollector.scala | 4 +- project/Build.scala | 4 +- 12 files changed, 584 insertions(+), 434 deletions(-) create mode 100644 presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala create mode 100644 presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala create mode 100644 presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala index c447123c8725..5de80cda4ddf 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala @@ -2,6 +2,7 @@ package dotty.tools.pc import java.nio.file.Paths +import dotty.tools.pc.PcSymbolSearch.* import scala.meta.internal.metals.CompilerOffsetParams import scala.meta.pc.OffsetParams import scala.meta.pc.VirtualFileParams @@ -28,363 +29,59 @@ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans.Span import dotty.tools.pc.utils.InteractiveEnrichments.* -abstract class PcCollector[T]( - driver: InteractiveDriver, - params: VirtualFileParams -): - private val caseClassSynthetics: Set[Name] = Set(nme.apply, nme.copy) - val uri = params.uri().nn - val filePath = Paths.get(uri).nn - val sourceText = params.text().nn - val text = sourceText.toCharArray().nn - val source = - SourceFile.virtual(filePath.toString(), sourceText) - driver.run(uri, source) - given ctx: Context = driver.currentCtx - - val unit = driver.currentCtx.run.nn.units.head - val compilatonUnitContext = ctx.fresh.setCompilationUnit(unit) - val offset = params match - case op: OffsetParams => op.offset() - case _ => 0 - val offsetParams = - params match - case op: OffsetParams => op - case _ => CompilerOffsetParams(uri, sourceText, 0, params.token().nn) - val pos = driver.sourcePosition(offsetParams) - val rawPath = - Interactive - .pathTo(driver.openedTrees(uri), pos)(using driver.currentCtx) - .dropWhile(t => // NamedArg 
anyway doesn't have symbol - t.symbol == NoSymbol && !t.isInstanceOf[NamedArg] || - // same issue https://github.com/scala/scala3/issues/15937 as below - t.isInstanceOf[TypeTree] - ) - - val path = rawPath match - // For type it will sometimes go into the wrong tree since TypeTree also contains the same span - // https://github.com/scala/scala3/issues/15937 - case TypeApply(sel: Select, _) :: tail if sel.span.contains(pos.span) => - Interactive.pathTo(sel, pos.span) ::: rawPath - case _ => rawPath +trait PcCollector[T]: + self: WithCompilationUnit => def collect( parent: Option[Tree] )(tree: Tree| EndMarker, pos: SourcePosition, symbol: Option[Symbol]): T - def symbolAlternatives(sym: Symbol) = - def member(parent: Symbol) = parent.info.member(sym.name).symbol - def primaryConstructorTypeParam(owner: Symbol) = - for - typeParams <- owner.primaryConstructor.paramSymss.headOption - param <- typeParams.find(_.name == sym.name) - if (param.isType) - yield param - def additionalForEnumTypeParam(enumClass: Symbol) = - if enumClass.is(Flags.Enum) then - val enumOwner = - if enumClass.is(Flags.Case) - then - Option.when(member(enumClass).is(Flags.Synthetic))( - enumClass.maybeOwner.companionClass - ) - else Some(enumClass) - enumOwner.toSet.flatMap { enumOwner => - val symsInEnumCases = enumOwner.children.toSet.flatMap(enumCase => - if member(enumCase).is(Flags.Synthetic) - then primaryConstructorTypeParam(enumCase) - else None - ) - val symsInEnumOwner = - primaryConstructorTypeParam(enumOwner).toSet + member(enumOwner) - symsInEnumCases ++ symsInEnumOwner - } - else Set.empty - val all = - if sym.is(Flags.ModuleClass) then - Set(sym, sym.companionModule, sym.companionModule.companion) - else if sym.isClass then - Set(sym, sym.companionModule, sym.companion.moduleClass) - else if sym.is(Flags.Module) then - Set(sym, sym.companionClass, sym.moduleClass) - else if sym.isTerm && (sym.owner.isClass || sym.owner.isConstructor) - then - val info = - if sym.owner.isClass then sym.owner.info else sym.owner.owner.info - Set( - sym, - info.member(sym.asTerm.name.setterName).symbol, - info.member(sym.asTerm.name.getterName).symbol - ) ++ sym.allOverriddenSymbols.toSet - // type used in primary constructor will not match the one used in the class - else if sym.isTypeParam && sym.owner.isPrimaryConstructor then - Set(sym, member(sym.maybeOwner.maybeOwner)) - ++ additionalForEnumTypeParam(sym.maybeOwner.maybeOwner) - else if sym.isTypeParam then - primaryConstructorTypeParam(sym.maybeOwner).toSet - ++ additionalForEnumTypeParam(sym.maybeOwner) + sym - else Set(sym) - all.filter(s => s != NoSymbol && !s.isError) - end symbolAlternatives - - private def isGeneratedGiven(df: NamedDefTree)(using Context) = - val nameSpan = df.nameSpan - df.symbol.is(Flags.Given) && sourceText.substring( - nameSpan.start, - nameSpan.end - ) != df.name.toString() - - // First identify the symbol we are at, comments identify @@ as current cursor position - def soughtSymbols(path: List[Tree]): Option[(Set[Symbol], SourcePosition)] = - val sought = path match - /* reference of an extension paramter - * extension [EF](<>: List[EF]) - * def double(ys: List[EF]) = <> ++ ys - */ - case (id: Ident) :: _ - if id.symbol - .is(Flags.Param) && id.symbol.owner.is(Flags.ExtensionMethod) => - Some(findAllExtensionParamSymbols(id.sourcePos, id.name, id.symbol)) - /** - * Workaround for missing symbol in: - * class A[T](a: T) - * val x = new <>(1) - */ - case t :: (n: New) :: (sel: Select) :: _ - if t.symbol == NoSymbol && sel.symbol.isConstructor 
=> - Some(symbolAlternatives(sel.symbol.owner), namePos(t)) - /** - * Workaround for missing symbol in: - * class A[T](a: T) - * val x = <>[Int](1) - */ - case (sel @ Select(New(t), _)) :: (_: TypeApply) :: _ - if sel.symbol.isConstructor => - Some(symbolAlternatives(sel.symbol.owner), namePos(t)) - /* simple identifier: - * val a = val@@ue + value - */ - case (id: Ident) :: _ => - Some(symbolAlternatives(id.symbol), id.sourcePos) - /* simple selector: - * object.val@@ue - */ - case (sel: Select) :: _ if selectNameSpan(sel).contains(pos.span) => - Some(symbolAlternatives(sel.symbol), pos.withSpan(sel.nameSpan)) - /* named argument: - * foo(nam@@e = "123") - */ - case (arg: NamedArg) :: (appl: Apply) :: _ => - val realName = arg.name.stripModuleClassSuffix.lastPart - if pos.span.start > arg.span.start && pos.span.end < arg.span.point + realName.length - then - val length = realName.toString.backticked.length() - val pos = arg.sourcePos.withSpan( - arg.span - .withEnd(arg.span.start + length) - .withPoint(arg.span.start) - ) - appl.symbol.paramSymss.flatten.find(_.name == arg.name).map { s => - // if it's a case class we need to look for parameters also - if caseClassSynthetics(s.owner.name) && s.owner.is(Flags.Synthetic) - then - ( - Set( - s, - s.owner.owner.companion.info.member(s.name).symbol, - s.owner.owner.info.member(s.name).symbol - ) - .filter(_ != NoSymbol), - pos, - ) - else (Set(s), pos) - } - else None - end if - /* all definitions: - * def fo@@o = ??? - * class Fo@@o = ??? - * etc. - */ - case (df: NamedDefTree) :: _ - if df.nameSpan.contains(pos.span) && !isGeneratedGiven(df) => - Some(symbolAlternatives(df.symbol), pos.withSpan(df.nameSpan)) - /* enum cases with params - * enum Foo: - * case B@@ar[A](i: A) - */ - case (df: NamedDefTree) :: Template(_, _, self, _) :: _ - if (df.name == nme.apply || df.name == nme.unapply) && df.nameSpan.isZeroExtent => - Some(symbolAlternatives(self.tpt.symbol), self.sourcePos) - /** - * For traversing annotations: - * @JsonNo@@tification("") - * def params() = ??? 
- */ - case (df: MemberDef) :: _ if df.span.contains(pos.span) => - val annotTree = df.mods.annotations.find { t => - t.span.contains(pos.span) - } - collectTrees(annotTree).flatMap { t => - soughtSymbols( - Interactive.pathTo(t, pos.span) - ) - }.headOption - - /* Import selectors: - * import scala.util.Tr@@y - */ - case (imp: Import) :: _ if imp.span.contains(pos.span) => - imp - .selector(pos.span) - .map(sym => (symbolAlternatives(sym), sym.sourcePos)) - - case _ => None - - sought match - case None => seekInExtensionParameters() - case _ => sought - - end soughtSymbols - - lazy val extensionMethods = - NavigateAST - .untypedPath(pos.span)(using compilatonUnitContext) - .collectFirst { case em @ ExtMethods(_, _) => em } - - private def findAllExtensionParamSymbols( - pos: SourcePosition, - name: Name, - sym: Symbol - ) = - val symbols = - for - methods <- extensionMethods.map(_.methods) - symbols <- collectAllExtensionParamSymbols( - unit.tpdTree, - ExtensionParamOccurence(name, pos, sym, methods) - ) - yield symbols - symbols.getOrElse((symbolAlternatives(sym), pos)) - end findAllExtensionParamSymbols - - private def seekInExtensionParameters() = - def collectParams( - extMethods: ExtMethods - ): Option[ExtensionParamOccurence] = - NavigateAST - .pathTo(pos.span, extMethods.paramss.flatten)(using - compilatonUnitContext - ) - .collectFirst { - case v: untpd.ValOrTypeDef => - ExtensionParamOccurence( - v.name, - v.namePos, - v.symbol, - extMethods.methods - ) - case i: untpd.Ident => - ExtensionParamOccurence( - i.name, - i.sourcePos, - i.symbol, - extMethods.methods - ) - } - - for - extensionMethodScope <- extensionMethods - occurrence <- collectParams(extensionMethodScope) - symbols <- collectAllExtensionParamSymbols( - path.headOption.getOrElse(unit.tpdTree), - occurrence - ) - yield symbols - end seekInExtensionParameters - - private def collectAllExtensionParamSymbols( - tree: tpd.Tree, - occurrence: ExtensionParamOccurence - ): Option[(Set[Symbol], SourcePosition)] = - occurrence match - case ExtensionParamOccurence(_, namePos, symbol, _) - if symbol != NoSymbol && !symbol.isError && !symbol.owner.is( - Flags.ExtensionMethod - ) => - Some((symbolAlternatives(symbol), namePos)) - case ExtensionParamOccurence(name, namePos, _, methods) => - val symbols = - for - method <- methods.toSet - symbol <- - Interactive.pathTo(tree, method.span) match - case (d: DefDef) :: _ => - d.paramss.flatten.collect { - case param if param.name.decoded == name.decoded => - param.symbol - } - case _ => Set.empty[Symbol] - if (symbol != NoSymbol && !symbol.isError) - withAlt <- symbolAlternatives(symbol) - yield withAlt - if symbols.nonEmpty then Some((symbols, namePos)) else None - end collectAllExtensionParamSymbols - - def result(): List[T] = - params match - case _: OffsetParams => resultWithSought() - case _ => resultAllOccurences().toList - def resultAllOccurences(): Set[T] = def noTreeFilter = (_: Tree) => true def noSoughtFilter = (_: Symbol => Boolean) => true traverseSought(noTreeFilter, noSoughtFilter) - def resultWithSought(): List[T] = - soughtSymbols(path) match - case Some((sought, _)) => - lazy val owners = sought - .flatMap { s => Set(s.owner, s.owner.companionModule) } - .filter(_ != NoSymbol) - lazy val soughtNames: Set[Name] = sought.map(_.name) - - /* - * For comprehensions have two owners, one for the enumerators and one for - * yield. This is a heuristic to find that out. 
- */ - def isForComprehensionOwner(named: NameTree) = - soughtNames(named.name) && - scala.util - .Try(named.symbol.owner) - .toOption - .exists(_.isAnonymousFunction) && - owners.exists(o => - o.span.exists && o.span.point == named.symbol.owner.span.point - ) - - def soughtOrOverride(sym: Symbol) = - sought(sym) || sym.allOverriddenSymbols.exists(sought(_)) + def resultWithSought(sought: Set[Symbol]): List[T] = + lazy val owners = sought + .flatMap { s => Set(s.owner, s.owner.companionModule) } + .filter(_ != NoSymbol) + lazy val soughtNames: Set[Name] = sought.map(_.name) + + /* + * For comprehensions have two owners, one for the enumerators and one for + * yield. This is a heuristic to find that out. + */ + def isForComprehensionOwner(named: NameTree) = + soughtNames(named.name) && + scala.util + .Try(named.symbol.owner) + .toOption + .exists(_.isAnonymousFunction) && + owners.exists(o => + o.span.exists && o.span.point == named.symbol.owner.span.point + ) - def soughtTreeFilter(tree: Tree): Boolean = - tree match - case ident: Ident - if soughtOrOverride(ident.symbol) || - isForComprehensionOwner(ident) => - true - case sel: Select if soughtOrOverride(sel.symbol) => true - case df: NamedDefTree - if soughtOrOverride(df.symbol) && !df.symbol.isSetter => - true - case imp: Import if owners(imp.expr.symbol) => true - case _ => false + def soughtOrOverride(sym: Symbol) = + sought(sym) || sym.allOverriddenSymbols.exists(sought(_)) - def soughtFilter(f: Symbol => Boolean): Boolean = - sought.exists(f) + def soughtTreeFilter(tree: Tree): Boolean = + tree match + case ident: Ident + if soughtOrOverride(ident.symbol) || + isForComprehensionOwner(ident) => + true + case sel: Select if soughtOrOverride(sel.symbol) => true + case df: NamedDefTree + if soughtOrOverride(df.symbol) && !df.symbol.isSetter => + true + case imp: Import if owners(imp.expr.symbol) => true + case _ => false - traverseSought(soughtTreeFilter, soughtFilter).toList + def soughtFilter(f: Symbol => Boolean): Boolean = + sought.exists(f) - case None => Nil + traverseSought(soughtTreeFilter, soughtFilter).toList + end resultWithSought extension (span: Span) def isCorrect = @@ -453,7 +150,7 @@ abstract class PcCollector[T]( */ case df: NamedDefTree if df.span.isCorrect && df.nameSpan.isCorrect && - filter(df) && !isGeneratedGiven(df) => + filter(df) && !isGeneratedGiven(df, sourceText) => def collectEndMarker = EndMarker.getPosition(df, pos, sourceText).map: collect(EndMarker(df.symbol), _) @@ -572,35 +269,9 @@ abstract class PcCollector[T]( val traverser = new PcCollector.DeepFolderWithParent[Set[T]](collectNamesWithParent) - val all = traverser(Set.empty[T], unit.tpdTree) - all + traverser(Set.empty[T], unit.tpdTree) end traverseSought - // @note (tgodzik) Not sure currently how to get rid of the warning, but looks to correctly - // @nowarn - private def collectTrees(trees: Iterable[Positioned]): Iterable[Tree] = - trees.collect { case t: Tree => - t - } - - // NOTE: Connected to https://github.com/scala/scala3/issues/16771 - // `sel.nameSpan` is calculated incorrectly in (1 + 2).toString - // See test DocumentHighlightSuite.select-parentheses - private def selectNameSpan(sel: Select): Span = - val span = sel.span - if span.exists then - val point = span.point - if sel.name.toTermName == nme.ERROR then Span(point) - else if sel.qualifier.span.start > span.point then // right associative - val realName = sel.name.stripModuleClassSuffix.lastPart - Span(span.start, span.start + realName.length, point) - else Span(point, span.end, 
point) - else span - - private def namePos(tree: Tree): SourcePosition = - tree match - case sel: Select => sel.sourcePos.withSpan(selectNameSpan(sel)) - case _ => tree.sourcePos end PcCollector object PcCollector: @@ -656,3 +327,21 @@ object EndMarker: ) end getPosition end EndMarker + +abstract class WithSymbolSearchCollector[T]( + driver: InteractiveDriver, + params: OffsetParams, +) extends WithCompilationUnit(driver, params) + with PcSymbolSearch + with PcCollector[T]: + def result(): List[T] = + soughtSymbols.toList.flatMap { case (sought, _) => + resultWithSought(sought) + } + +abstract class SimpleCollector[T]( + driver: InteractiveDriver, + params: VirtualFileParams, +) extends WithCompilationUnit(driver, params) + with PcCollector[T]: + def result(): List[T] = resultAllOccurences().toList diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala index d9b94ebb82a3..0c1af215b7f7 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala @@ -14,7 +14,7 @@ import org.eclipse.lsp4j.DocumentHighlightKind final class PcDocumentHighlightProvider( driver: InteractiveDriver, params: OffsetParams -) extends PcCollector[DocumentHighlight](driver, params): +) extends WithSymbolSearchCollector[DocumentHighlight](driver, params): def collect( parent: Option[Tree] diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala index 38b5e8d0069b..bbba44d0d84f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala @@ -22,9 +22,9 @@ import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l final class PcInlineValueProviderImpl( - val driver: InteractiveDriver, + driver: InteractiveDriver, val params: OffsetParams -) extends PcCollector[Option[Occurence]](driver, params) +) extends WithSymbolSearchCollector[Option[Occurence]](driver, params) with InlineValueProvider: val position: l.Position = pos.toLsp.getStart().nn diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala new file mode 100644 index 000000000000..8d22ce320eee --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala @@ -0,0 +1,67 @@ +package dotty.tools.pc + +import scala.language.unsafeNulls + +import scala.jdk.CollectionConverters.* + +import scala.meta.internal.metals.CompilerOffsetParams +import scala.meta.pc.ReferencesRequest +import scala.meta.pc.ReferencesResult + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.util.SourcePosition +import org.eclipse.lsp4j +import org.eclipse.lsp4j.Location +import dotty.tools.pc.utils.InteractiveEnrichments.* +import scala.meta.internal.pc.PcReferencesResult + +class PcReferencesProvider( + driver: InteractiveDriver, + request: ReferencesRequest, +) extends WithCompilationUnit(driver, request.file()) with PcCollector[Option[(String, Option[lsp4j.Range])]]: + + private def soughtSymbols = + if(request.offsetOrSymbol().isLeft()) { + val 
offsetParams = CompilerOffsetParams( + request.file().uri(), + request.file().text(), + request.offsetOrSymbol().getLeft() + ) + val symbolSearch = new WithCompilationUnit(driver, offsetParams) with PcSymbolSearch + symbolSearch.soughtSymbols.map(_._1) + } else { + SymbolProvider.compilerSymbol(request.offsetOrSymbol().getRight()).map(symbolAlternatives(_)) + } + + def collect(parent: Option[Tree])( + tree: Tree | EndMarker, + toAdjust: SourcePosition, + symbol: Option[Symbol], + ): Option[(String, Option[lsp4j.Range])] = + val (pos, _) = toAdjust.adjust(text) + tree match + case t: DefTree if !request.includeDefinition() => + val sym = symbol.getOrElse(t.symbol) + Some(SemanticdbSymbols.symbolName(sym), None) + case t: Tree => + val sym = symbol.getOrElse(t.symbol) + Some(SemanticdbSymbols.symbolName(sym), Some(pos.toLsp)) + case _ => None + + def references(): List[ReferencesResult] = + soughtSymbols match + case Some(sought) if sought.nonEmpty => + resultWithSought(sought) + .flatten + .groupMap(_._1) { case (_, optRange) => + optRange.map(new Location(request.file().uri().toString(), _)) + } + .map { case (symbol, locs) => + PcReferencesResult(symbol, locs.flatten.asJava) + } + .toList + case _ => Nil +end PcReferencesProvider \ No newline at end of file diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala index 94482767f917..666ccf9c614f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala @@ -16,7 +16,7 @@ final class PcRenameProvider( driver: InteractiveDriver, params: OffsetParams, name: Option[String] -) extends PcCollector[l.TextEdit](driver, params): +) extends WithSymbolSearchCollector[l.TextEdit](driver, params): private val forbiddenMethods = Set("equals", "hashCode", "unapply", "unary_!", "!") def canRenameSymbol(sym: Symbol)(using Context): Boolean = @@ -25,7 +25,7 @@ final class PcRenameProvider( || sym.source.path.isWorksheet) def prepareRename(): Option[l.Range] = - soughtSymbols(path).flatMap((symbols, pos) => + soughtSymbols.flatMap((symbols, pos) => if symbols.forall(canRenameSymbol) then Some(pos.toLsp) else None ) @@ -42,13 +42,10 @@ final class PcRenameProvider( ) end collect - def rename( - ): List[l.TextEdit] = - val (symbols, _) = soughtSymbols(path).getOrElse(Set.empty, pos) + def rename(): List[l.TextEdit] = + val (symbols, _) = soughtSymbols.getOrElse(Set.empty, pos) if symbols.nonEmpty && symbols.forall(canRenameSymbol(_)) - then - val res = result() - res + then result() else Nil end rename end PcRenameProvider diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala index a5332f1e4ff6..216d9318197b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala @@ -60,7 +60,7 @@ final class PcSemanticTokensProvider( case _ => !df.rhs.isEmpty case _ => false - object Collector extends PcCollector[Option[Node]](driver, params): + object Collector extends SimpleCollector[Option[Node]](driver, params): override def collect( parent: Option[Tree] )(tree: Tree | EndMarker, pos: SourcePosition, symbol: Option[Symbol]): Option[Node] = diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala 
b/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala new file mode 100644 index 000000000000..fd3d74f16c16 --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala @@ -0,0 +1,275 @@ +package dotty.tools.pc + +import dotty.tools.pc.PcSymbolSearch.* + +import dotty.tools.dotc.ast.NavigateAST +import dotty.tools.dotc.ast.Positioned +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.ast.untpd.ExtMethods +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.NameOps.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.interactive.Interactive +import dotty.tools.dotc.util.SourcePosition +import dotty.tools.dotc.util.Spans.Span +import dotty.tools.pc.utils.InteractiveEnrichments.* + +trait PcSymbolSearch: + self: WithCompilationUnit => + + private val caseClassSynthetics: Set[Name] = Set(nme.apply, nme.copy) + + lazy val rawPath = + Interactive + .pathTo(driver.openedTrees(uri), pos)(using driver.currentCtx) + .dropWhile(t => // NamedArg anyway doesn't have symbol + t.symbol == NoSymbol && !t.isInstanceOf[NamedArg] || + // same issue https://github.com/lampepfl/dotty/issues/15937 as below + t.isInstanceOf[TypeTree] + ) + + lazy val extensionMethods = + NavigateAST + .untypedPath(pos.span)(using compilatonUnitContext) + .collectFirst { case em @ ExtMethods(_, _) => em } + + lazy val path = rawPath match + // For type it will sometimes go into the wrong tree since TypeTree also contains the same span + // https://github.com/lampepfl/dotty/issues/15937 + case TypeApply(sel: Select, _) :: tail if sel.span.contains(pos.span) => + Interactive.pathTo(sel, pos.span) ::: rawPath + case _ => rawPath + + lazy val soughtSymbols: Option[(Set[Symbol], SourcePosition)] = + soughtSymbols(path) + + def soughtSymbols(path: List[Tree]): Option[(Set[Symbol], SourcePosition)] = + val sought = path match + /* reference of an extension paramter + * extension [EF](<>: List[EF]) + * def double(ys: List[EF]) = <> ++ ys + */ + case (id: Ident) :: _ + if id.symbol + .is(Flags.Param) && id.symbol.owner.is(Flags.ExtensionMethod) => + Some(findAllExtensionParamSymbols(id.sourcePos, id.name, id.symbol)) + /** + * Workaround for missing symbol in: + * class A[T](a: T) + * val x = new <>(1) + */ + case t :: (n: New) :: (sel: Select) :: _ + if t.symbol == NoSymbol && sel.symbol.isConstructor => + Some(symbolAlternatives(sel.symbol.owner), namePos(t)) + /** + * Workaround for missing symbol in: + * class A[T](a: T) + * val x = <>[Int](1) + */ + case (sel @ Select(New(t), _)) :: (_: TypeApply) :: _ + if sel.symbol.isConstructor => + Some(symbolAlternatives(sel.symbol.owner), namePos(t)) + /* simple identifier: + * val a = val@@ue + value + */ + case (id: Ident) :: _ => + Some(symbolAlternatives(id.symbol), id.sourcePos) + /* simple selector: + * object.val@@ue + */ + case (sel: Select) :: _ if selectNameSpan(sel).contains(pos.span) => + Some(symbolAlternatives(sel.symbol), pos.withSpan(sel.nameSpan)) + /* named argument: + * foo(nam@@e = "123") + */ + case (arg: NamedArg) :: (appl: Apply) :: _ => + val realName = arg.name.stripModuleClassSuffix.lastPart + if pos.span.start > arg.span.start && pos.span.end < arg.span.point + realName.length + then + val length = realName.toString.backticked.length() + val pos = 
arg.sourcePos.withSpan( + arg.span + .withEnd(arg.span.start + length) + .withPoint(arg.span.start) + ) + appl.symbol.paramSymss.flatten.find(_.name == arg.name).map { s => + // if it's a case class we need to look for parameters also + if caseClassSynthetics(s.owner.name) && s.owner.is(Flags.Synthetic) + then + ( + Set( + s, + s.owner.owner.companion.info.member(s.name).symbol, + s.owner.owner.info.member(s.name).symbol + ) + .filter(_ != NoSymbol), + pos, + ) + else (Set(s), pos) + } + else None + end if + /* all definitions: + * def fo@@o = ??? + * class Fo@@o = ??? + * etc. + */ + case (df: NamedDefTree) :: _ + if df.nameSpan.contains(pos.span) && !isGeneratedGiven(df, sourceText) => + Some(symbolAlternatives(df.symbol), pos.withSpan(df.nameSpan)) + /* enum cases with params + * enum Foo: + * case B@@ar[A](i: A) + */ + case (df: NamedDefTree) :: Template(_, _, self, _) :: _ + if (df.name == nme.apply || df.name == nme.unapply) && df.nameSpan.isZeroExtent => + Some(symbolAlternatives(self.tpt.symbol), self.sourcePos) + /** + * For traversing annotations: + * @JsonNo@@tification("") + * def params() = ??? + */ + case (df: MemberDef) :: _ if df.span.contains(pos.span) => + val annotTree = df.mods.annotations.find { t => + t.span.contains(pos.span) + } + collectTrees(annotTree).flatMap { t => + soughtSymbols( + Interactive.pathTo(t, pos.span) + ) + }.headOption + + /* Import selectors: + * import scala.util.Tr@@y + */ + case (imp: Import) :: _ if imp.span.contains(pos.span) => + imp + .selector(pos.span) + .map(sym => (symbolAlternatives(sym), sym.sourcePos)) + + case _ => None + + sought match + case None => seekInExtensionParameters() + case _ => sought + + end soughtSymbols + + private def seekInExtensionParameters() = + def collectParams( + extMethods: ExtMethods + ): Option[ExtensionParamOccurence] = + NavigateAST + .pathTo(pos.span, extMethods.paramss.flatten)(using + compilatonUnitContext + ) + .collectFirst { + case v: untpd.ValOrTypeDef => + ExtensionParamOccurence( + v.name, + v.namePos, + v.symbol, + extMethods.methods + ) + case i: untpd.Ident => + ExtensionParamOccurence( + i.name, + i.sourcePos, + i.symbol, + extMethods.methods + ) + } + + for + extensionMethodScope <- extensionMethods + occurrence <- collectParams(extensionMethodScope) + symbols <- collectAllExtensionParamSymbols( + path.headOption.getOrElse(unit.tpdTree), + occurrence + ) + yield symbols + end seekInExtensionParameters + + private def collectAllExtensionParamSymbols( + tree: tpd.Tree, + occurrence: ExtensionParamOccurence, + ): Option[(Set[Symbol], SourcePosition)] = + occurrence match + case ExtensionParamOccurence(_, namePos, symbol, _) + if symbol != NoSymbol && !symbol.isError && !symbol.owner.is( + Flags.ExtensionMethod + ) => + Some((symbolAlternatives(symbol), namePos)) + case ExtensionParamOccurence(name, namePos, _, methods) => + val symbols = + for + method <- methods.toSet + symbol <- + Interactive.pathTo(tree, method.span) match + case (d: DefDef) :: _ => + d.paramss.flatten.collect { + case param if param.name.decoded == name.decoded => + param.symbol + } + case _ => Set.empty[Symbol] + if (symbol != NoSymbol && !symbol.isError) + withAlt <- symbolAlternatives(symbol) + yield withAlt + if symbols.nonEmpty then Some((symbols, namePos)) else None + end collectAllExtensionParamSymbols + + private def findAllExtensionParamSymbols( + pos: SourcePosition, + name: Name, + sym: Symbol, + ) = + val symbols = + for + methods <- extensionMethods.map(_.methods) + symbols <- collectAllExtensionParamSymbols( 
+ unit.tpdTree, + ExtensionParamOccurence(name, pos, sym, methods), + ) + yield symbols + symbols.getOrElse((symbolAlternatives(sym), pos)) + end findAllExtensionParamSymbols +end PcSymbolSearch + +object PcSymbolSearch: + // NOTE: Connected to https://github.com/lampepfl/dotty/issues/16771 + // `sel.nameSpan` is calculated incorrectly in (1 + 2).toString + // See test DocumentHighlightSuite.select-parentheses + def selectNameSpan(sel: Select): Span = + val span = sel.span + if span.exists then + val point = span.point + if sel.name.toTermName == nme.ERROR then Span(point) + else if sel.qualifier.span.start > span.point then // right associative + val realName = sel.name.stripModuleClassSuffix.lastPart + Span(span.start, span.start + realName.length, point) + else Span(point, span.end, point) + else span + + def collectTrees(trees: Iterable[Positioned]): Iterable[Tree] = + trees.collect { case t: Tree => t } + + def namePos(tree: Tree)(using Context): SourcePosition = + tree match + case sel: Select => sel.sourcePos.withSpan(selectNameSpan(sel)) + case _ => tree.sourcePos + + def isGeneratedGiven(df: NamedDefTree, sourceText: String)(using Context) = + val nameSpan = df.nameSpan + df.symbol.is(Flags.Given) && sourceText.substring( + nameSpan.start, + nameSpan.end, + ) != df.name.toString() + +end PcSymbolSearch + diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index 86aa895cb4fc..ad8ac02ec811 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -36,7 +36,7 @@ import dotty.tools.pc.buildinfo.BuildInfo import org.eclipse.lsp4j.DocumentHighlight import org.eclipse.lsp4j.TextEdit import org.eclipse.lsp4j as l -import scala.meta.internal.pc.SymbolInformationProvider +import dotty.tools.pc.SymbolInformationProvider case class ScalaPresentationCompiler( buildTargetIdentifier: String = "", @@ -178,6 +178,19 @@ case class ScalaPresentationCompiler( PcDocumentHighlightProvider(driver, params).highlights.asJava } + override def references( + params: ReferencesRequest + ): CompletableFuture[ju.List[ReferencesResult]] = + compilerAccess.withNonInterruptableCompiler(Some(params.file()))( + List.empty[ReferencesResult].asJava, + params.file().token, + ) { access => + val driver = access.compiler() + PcReferencesProvider(driver, params) + .references() + .asJava + } + def shutdown(): Unit = compilerAccess.shutdown() diff --git a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala index 0743361f255d..18d6a4ec8621 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala @@ -1,4 +1,4 @@ -package scala.meta.internal.pc +package dotty.tools.pc import scala.util.control.NonFatal @@ -15,58 +15,19 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.pc.utils.InteractiveEnrichments.deepDealias import dotty.tools.pc.SemanticdbSymbols import dotty.tools.pc.utils.InteractiveEnrichments.allSymbols +import dotty.tools.pc.utils.InteractiveEnrichments.stripBackticks +import scala.meta.internal.pc.PcSymbolInformation +import scala.meta.internal.pc.SymbolInfo class SymbolInformationProvider(using Context): - private def toSymbols( - pkg: String, - 
parts: List[(String, Boolean)], - ): List[Symbol] = - def loop( - owners: List[Symbol], - parts: List[(String, Boolean)], - ): List[Symbol] = - parts match - case (head, isClass) :: tl => - val foundSymbols = - owners.flatMap { owner => - val next = - if isClass then owner.info.member(typeName(head)) - else owner.info.member(termName(head)) - next.allSymbols - } - if foundSymbols.nonEmpty then loop(foundSymbols, tl) - else Nil - case Nil => owners - - val pkgSym = - if pkg == "_empty_" then requiredPackage(nme.EMPTY_PACKAGE) - else requiredPackage(pkg) - loop(List(pkgSym), parts) - end toSymbols def info(symbol: String): Option[PcSymbolInformation] = - val index = symbol.lastIndexOf("/") - val pkg = normalizePackage(symbol.take(index + 1)) - - def loop( - symbol: String, - acc: List[(String, Boolean)], - ): List[(String, Boolean)] = - if symbol.isEmpty() then acc.reverse - else - val newSymbol = symbol.takeWhile(c => c != '.' && c != '#') - val rest = symbol.drop(newSymbol.size) - loop(rest.drop(1), (newSymbol, rest.headOption.exists(_ == '#')) :: acc) - val names = - loop(symbol.drop(index + 1).takeWhile(_ != '('), List.empty) - - val foundSymbols = - try toSymbols(pkg, names) - catch case NonFatal(e) => Nil + val foundSymbols = SymbolProvider.compilerSymbols(symbol) val (searchedSymbol, alternativeSymbols) = - foundSymbols.partition: compilerSymbol => + foundSymbols.partition(compilerSymbol => SemanticdbSymbols.symbolName(compilerSymbol) == symbol + ) searchedSymbol match case Nil => None @@ -115,8 +76,50 @@ class SymbolInformationProvider(using Context): else if sym.is(Flags.TypeParam) then PcSymbolKind.TYPE_PARAMETER else if sym.isType then PcSymbolKind.TYPE else PcSymbolKind.UNKNOWN_KIND +end SymbolInformationProvider + +object SymbolProvider: + + def compilerSymbol(symbol: String)(using Context): Option[Symbol] = + compilerSymbols(symbol).find(sym => SemanticdbSymbols.symbolName(sym) == symbol) + + def compilerSymbols(symbol: String)(using Context): List[Symbol] = + try toSymbols(SymbolInfo.getPartsFromSymbol(symbol)) + catch case NonFatal(e) => Nil private def normalizePackage(pkg: String): String = pkg.replace("/", ".").nn.stripSuffix(".") -end SymbolInformationProvider + private def toSymbols(info: SymbolInfo.SymbolParts)(using Context): List[Symbol] = + def collectSymbols(denotation: Denotation): List[Symbol] = + denotation match + case MultiDenotation(denot1, denot2) => + collectSymbols(denot1) ++ collectSymbols(denot2) + case denot => List(denot.symbol) + + def loop( + owners: List[Symbol], + parts: List[(String, Boolean)], + ): List[Symbol] = + parts match + case (head, isClass) :: tl => + val foundSymbols = + owners.flatMap { owner => + val name = head.stripBackticks + val next = + if isClass then owner.info.member(typeName(name)) + else owner.info.member(termName(name)) + collectSymbols(next).filter(_.exists) + } + if foundSymbols.nonEmpty then loop(foundSymbols, tl) + else Nil + case Nil => owners + + val pkgSym = + if info.packagePart == "_empty_/" then requiredPackage(nme.EMPTY_PACKAGE) + else requiredPackage(normalizePackage(info.packagePart)) + val found = loop(List(pkgSym), info.names) + info.paramName match + case Some(name) => found.flatMap(_.paramSymss.flatten.find(_.showName == name)) + case _ => found + end toSymbols diff --git a/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala b/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala new file mode 100644 index 000000000000..b4f36c8d27ab --- /dev/null +++ 
b/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala @@ -0,0 +1,104 @@ +package dotty.tools.pc + +import scala.language.unsafeNulls + +import java.nio.file.Paths + +import scala.meta as m + +import scala.meta.internal.metals.CompilerOffsetParams +import scala.meta.pc.OffsetParams +import scala.meta.pc.VirtualFileParams + +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.NameOps.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.util.SourceFile +import dotty.tools.pc.utils.InteractiveEnrichments.* + +class WithCompilationUnit( + val driver: InteractiveDriver, + params: VirtualFileParams, +): + val uri = params.uri() + val filePath = Paths.get(uri) + val sourceText = params.text + val text = sourceText.toCharArray() + val source = + SourceFile.virtual(filePath.toString, sourceText) + driver.run(uri, source) + given ctx: Context = driver.currentCtx + + val unit = driver.currentCtx.run.units.head + val compilatonUnitContext = ctx.fresh.setCompilationUnit(unit) + val offset = params match + case op: OffsetParams => op.offset() + case _ => 0 + val offsetParams = + params match + case op: OffsetParams => op + case _ => + CompilerOffsetParams(params.uri(), params.text(), 0, params.token()) + val pos = driver.sourcePosition(offsetParams) + + // First identify the symbol we are at, comments identify @@ as current cursor position + def symbolAlternatives(sym: Symbol)(using Context) = + def member(parent: Symbol) = parent.info.member(sym.name).symbol + def primaryConstructorTypeParam(owner: Symbol) = + for + typeParams <- owner.primaryConstructor.paramSymss.headOption + param <- typeParams.find(_.name == sym.name) + if (param.isType) + yield param + def additionalForEnumTypeParam(enumClass: Symbol) = + if enumClass.is(Flags.Enum) then + val enumOwner = + if enumClass.is(Flags.Case) + then + // we check that the type parameter is the one from enum class + // and not an enum case type parameter with the same name + Option.when(member(enumClass).is(Flags.Synthetic))( + enumClass.maybeOwner.companionClass + ) + else Some(enumClass) + enumOwner.toSet.flatMap { enumOwner => + val symsInEnumCases = enumOwner.children.toSet.flatMap(enumCase => + if member(enumCase).is(Flags.Synthetic) + then primaryConstructorTypeParam(enumCase) + else None + ) + val symsInEnumOwner = + primaryConstructorTypeParam(enumOwner).toSet + member(enumOwner) + symsInEnumCases ++ symsInEnumOwner + } + else Set.empty + val all = + if sym.is(Flags.ModuleClass) then + Set(sym, sym.companionModule, sym.companionModule.companion) + else if sym.isClass then + Set(sym, sym.companionModule, sym.companion.moduleClass) + else if sym.is(Flags.Module) then + Set(sym, sym.companionClass, sym.moduleClass) + else if sym.isTerm && (sym.owner.isClass || sym.owner.isConstructor) + then + val info = + if sym.owner.isClass then sym.owner.info else sym.owner.owner.info + Set( + sym, + info.member(sym.asTerm.name.setterName).symbol, + info.member(sym.asTerm.name.getterName).symbol, + ) ++ sym.allOverriddenSymbols.toSet + // type used in primary constructor will not match the one used in the class + else if sym.isTypeParam && sym.owner.isPrimaryConstructor then + Set(sym, member(sym.maybeOwner.maybeOwner)) + ++ additionalForEnumTypeParam(sym.maybeOwner.maybeOwner) + else if sym.isTypeParam then + primaryConstructorTypeParam(sym.maybeOwner).toSet + ++ additionalForEnumTypeParam(sym.maybeOwner) + sym + else 
Set(sym) + all.filter(s => s != NoSymbol && !s.isError) + end symbolAlternatives + +end WithCompilationUnit diff --git a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala index 0171d2a0d76d..a37801b3c48c 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala @@ -7,13 +7,13 @@ import dotty.tools.dotc.ast.{Trees, tpd} import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.PcCollector +import dotty.tools.pc.SimpleCollector import dotty.tools.pc.EndMarker final class DefSymbolCollector( driver: InteractiveDriver, params: VirtualFileParams -) extends PcCollector[Option[Symbol]](driver, params): +) extends SimpleCollector[Option[Symbol]](driver, params): def collect(parent: Option[Tree])( tree: Tree | EndMarker, diff --git a/project/Build.scala b/project/Build.scala index c1a8800421a6..592cd23b8a57 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1351,12 +1351,14 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings def presentationCompilerSettings(implicit mode: Mode) = { - val mtagsVersion = "1.3.1" + val mtagsVersion = "1.3.1+63-1a8f4659-SNAPSHOT" Seq( + resolvers ++= Resolver.sonatypeOssRepos("snapshots"), libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", "io.get-coursier" % "interface" % "1.0.18", "org.scalameta" % "mtags-interfaces" % mtagsVersion, + "com.google.guava" % "guava" % "33.2.1-jre" ), libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.12" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, From 4d7b2a4ecfa21347a350dc38187847ab6a0e9887 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Thu, 20 Jun 2024 11:05:42 +0200 Subject: [PATCH 195/827] chore: update to mtags `1.3.2` --- project/Build.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 592cd23b8a57..82417df75756 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1351,9 +1351,8 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings def presentationCompilerSettings(implicit mode: Mode) = { - val mtagsVersion = "1.3.1+63-1a8f4659-SNAPSHOT" + val mtagsVersion = "1.3.2" Seq( - resolvers ++= Resolver.sonatypeOssRepos("snapshots"), libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", "io.get-coursier" % "interface" % "1.0.18", From 682589496d5da16508cdfe682be5af14605ea2fd Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Fri, 21 Jun 2024 12:07:30 +0200 Subject: [PATCH 196/827] Fix incorrect paths to sha256 check sum files in release workflow --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 974866930c68..2747830fb7d6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -951,8 +951,8 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/sha256sum-aarch64-pc-linux.txt - asset_name: sha256sum.txt + asset_path: ./dist/linux-aarch64/target/sha256sum.txt + asset_name: sha256sum-aarch64-pc-linux.txt asset_content_type: text/plain - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac x86-64) From 
bb9128867bce05f4741d993c8717b78e21042c97 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 21 Jun 2024 11:12:34 +0100 Subject: [PATCH 197/827] Upload zip files to sdkman instead of .tar.gz --- .github/workflows/publish-sdkman.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index d4238b9371e4..de12f81426b5 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -36,13 +36,13 @@ jobs: matrix: include: - platform: LINUX_64 - archive : 'scala3-${{ inputs.version }}-x86_64-pc-linux.tar.gz' + archive : 'scala3-${{ inputs.version }}-x86_64-pc-linux.zip' - platform: LINUX_ARM64 - archive : 'scala3-${{ inputs.version }}-aarch64-pc-linux.tar.gz' + archive : 'scala3-${{ inputs.version }}-aarch64-pc-linux.zip' - platform: MAC_OSX - archive : 'scala3-${{ inputs.version }}-x86_64-apple-darwin.tar.gz' + archive : 'scala3-${{ inputs.version }}-x86_64-apple-darwin.zip' - platform: MAC_ARM64 - archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.tar.gz' + archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.zip' - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: From 9d41c7b8530be0c8d355ef2eee324a7f1f9ca3c3 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 19 Jun 2024 12:21:16 +0200 Subject: [PATCH 198/827] replace pack command, do not produce lib directory, write classpath to file --- dist/bin/common | 63 ++--------- dist/bin/common.bat | 16 +-- dist/bin/scalac | 0 dist/bin/scalac.bat | 43 ++++---- dist/bin/scaladoc | 57 +--------- dist/bin/scaladoc.bat | 66 +++--------- project/Build.scala | 7 +- project/RepublishPlugin.scala | 193 ++++++++++++++++++++++++++++------ 8 files changed, 217 insertions(+), 228 deletions(-) mode change 100755 => 100644 dist/bin/common mode change 100644 => 100755 dist/bin/scalac diff --git a/dist/bin/common b/dist/bin/common old mode 100755 new mode 100644 index 4a0152fbc4cb..1ff0ca66274c --- a/dist/bin/common +++ b/dist/bin/common @@ -6,62 +6,21 @@ source "$PROG_HOME/bin/common-shared" # * The code below is for Dotty # *-------------------------------------------------*/ -find_lib () { - for lib in "$PROG_HOME"/lib/$1 ; do - if [[ -f "$lib" ]]; then - if [ -n "$CYGPATHCMD" ]; then - "$CYGPATHCMD" -am "$lib" - elif [[ $mingw || $msys ]]; then - echo "$lib" | sed 's|/|\\\\|g' - else - echo "$lib" - fi - return +load_classpath () { + command="$1" + psep_pattern="$2" + __CLASS_PATH="" + while IFS= read -r line; do + if ! 
[[ ( -n ${conemu-} || -n ${msys-}) && "$line" == "*jna-5*" ]]; then + # jna-5 only appropriate for some combinations + __CLASS_PATH+="$PROG_HOME/maven2/$line$psep_pattern" fi - done + done < "$PROG_HOME/etc/$command.classpath" + echo "$__CLASS_PATH" } -DOTTY_COMP=$(find_lib "*scala3-compiler*") -DOTTY_INTF=$(find_lib "*scala3-interfaces*") -DOTTY_LIB=$(find_lib "*scala3-library*") -DOTTY_STAGING=$(find_lib "*scala3-staging*") -DOTTY_TASTY_INSPECTOR=$(find_lib "*scala3-tasty-inspector*") -TASTY_CORE=$(find_lib "*tasty-core*") -SCALA_ASM=$(find_lib "*scala-asm*") -SCALA_LIB=$(find_lib "*scala-library*") -SBT_INTF=$(find_lib "*compiler-interface*") -JLINE_READER=$(find_lib "*jline-reader-3*") -JLINE_TERMINAL=$(find_lib "*jline-terminal-3*") -JLINE_TERMINAL_JNA=$(find_lib "*jline-terminal-jna-3*") - -# jna-5 only appropriate for some combinations -[[ ${conemu-} && ${msys-} ]] || JNA=$(find_lib "*jna-5*") - compilerJavaClasspathArgs () { - # echo "dotty-compiler: $DOTTY_COMP" - # echo "dotty-interface: $DOTTY_INTF" - # echo "dotty-library: $DOTTY_LIB" - # echo "tasty-core: $TASTY_CORE" - # echo "scala-asm: $SCALA_ASM" - # echo "scala-lib: $SCALA_LIB" - # echo "sbt-intface: $SBT_INTF" - - toolchain="" - toolchain+="$SCALA_LIB$PSEP" - toolchain+="$DOTTY_LIB$PSEP" - toolchain+="$SCALA_ASM$PSEP" - toolchain+="$SBT_INTF$PSEP" - toolchain+="$DOTTY_INTF$PSEP" - toolchain+="$DOTTY_COMP$PSEP" - toolchain+="$TASTY_CORE$PSEP" - toolchain+="$DOTTY_STAGING$PSEP" - toolchain+="$DOTTY_TASTY_INSPECTOR$PSEP" - - # jine - toolchain+="$JLINE_READER$PSEP" - toolchain+="$JLINE_TERMINAL$PSEP" - toolchain+="$JLINE_TERMINAL_JNA$PSEP" - [ -n "${JNA-}" ] && toolchain+="$JNA$PSEP" + toolchain="$(load_classpath "scala" "$PSEP")" if [ -n "${jvm_cp_args-}" ]; then jvm_cp_args="$toolchain$jvm_cp_args" diff --git a/dist/bin/common.bat b/dist/bin/common.bat index 7aef606d5509..f9c35e432b36 100644 --- a/dist/bin/common.bat +++ b/dist/bin/common.bat @@ -38,20 +38,6 @@ if not defined _PROG_HOME ( set _EXITCODE=1 goto :eof ) -set "_LIB_DIR=%_PROG_HOME%\lib" +set "_ETC_DIR=%_PROG_HOME%\etc" set _PSEP=; - -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-compiler*"') do set "_SCALA3_COMP=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-interfaces*"') do set "_SCALA3_INTF=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-library*"') do set "_SCALA3_LIB=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-staging*"') do set "_SCALA3_STAGING=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-tasty-inspector*"') do set "_SCALA3_TASTY_INSPECTOR=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*tasty-core*"') do set "_TASTY_CORE=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala-asm*"') do set "_SCALA_ASM=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala-library*"') do set "_SCALA_LIB=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*compiler-interface*"') do set "_SBT_INTF=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-reader-3*"') do set "_JLINE_READER=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-terminal-3*"') do set "_JLINE_TERMINAL=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-terminal-jna-3*"') do set "_JLINE_TERMINAL_JNA=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jna-5*"') do set "_JNA=%_LIB_DIR%\%%f" diff --git a/dist/bin/scalac 
b/dist/bin/scalac old mode 100644 new mode 100755 diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index c8cd0babe60b..fe6d7e3fad4d 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -88,29 +88,10 @@ goto :eof @rem output parameter: _JVM_CP_ARGS :compilerJavaClasspathArgs -@rem echo scala3-compiler: %_SCALA3_COMP% -@rem echo scala3-interface: %_SCALA3_INTF% -@rem echo scala3-library: %_SCALA3_LIB% -@rem echo tasty-core: %_TASTY_CORE% -@rem echo scala-asm: %_SCALA_ASM% -@rem echo scala-lib: %_SCALA_LIB% -@rem echo sbt-intface: %_SBT_INTF% - -set "__TOOLCHAIN=%_SCALA_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_ASM%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SBT_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_COMP%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_TASTY_CORE%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_STAGING%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_TASTY_INSPECTOR%%_PSEP%" - -@rem # jline -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_READER%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL_JNA%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JNA%%_PSEP%" + +call :loadClasspathFromFile + +set "__TOOLCHAIN=%_CLASS_PATH%" if defined _SCALA_CPATH ( set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" @@ -119,6 +100,22 @@ if defined _SCALA_CPATH ( ) goto :eof +@REM concatentate every line in "%_ETC_DIR%\scala.classpath" with _PSEP +:loadClasspathFromFile +set _CLASS_PATH= +if exist "%_ETC_DIR%\scala.classpath" ( + for /f "usebackq delims=" %%i in ("%_ETC_DIR%\scala.classpath") do ( + set "_LIB=%_PROG_HOME%\maven2\%%i" + set "_LIB=!_LIB:/=\!" + if not defined _CLASS_PATH ( + set "_CLASS_PATH=!_LIB!" + ) else ( + set "_CLASS_PATH=!_CLASS_PATH!%_PSEP%!_LIB!" 
+ ) + ) +) +goto :eof + @rem ######################################################################### @rem ## Cleanups diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 8b9ec41a7f8c..15bc0813f93a 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -53,62 +53,7 @@ addScrip() { } classpathArgs () { - CLASS_PATH="" - CLASS_PATH+="$(find_lib "*scaladoc*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-compiler*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-interfaces*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-library*")$PSEP" - CLASS_PATH+="$(find_lib "*tasty-core*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-tasty-inspector*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-0*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-anchorlink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-autolink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-emoji*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-strikethrough*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-tasklist*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-wikilink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-yaml-front-matter*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-tables*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-ins*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-superscript*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-ast*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-data*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-dependency*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-misc*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-format*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-sequence*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-builder*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-collection*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-visitor*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-options*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-html*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ast*")$PSEP" - CLASS_PATH+="$(find_lib "*liqp*")$PSEP" - CLASS_PATH+="$(find_lib "*jsoup*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-dataformat-yaml*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-datatype-jsr310*")$PSEP" - CLASS_PATH+="$(find_lib "*strftime4j*")$PSEP" - CLASS_PATH+="$(find_lib "*scala-asm*")$PSEP" - CLASS_PATH+="$(find_lib "*compiler-interface*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-reader*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-terminal-3*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-terminal-jna*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" - CLASS_PATH+="$(find_lib "*autolink-0.6*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-jira-converter*")$PSEP" - CLASS_PATH+="$(find_lib "*antlr4*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-annotations*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-core*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-databind*")$PSEP" - CLASS_PATH+="$(find_lib "*snakeyaml*")$PSEP" - CLASS_PATH+="$(find_lib "*scala-library*")$PSEP" - CLASS_PATH+="$(find_lib "*protobuf-java*")$PSEP" - CLASS_PATH+="$(find_lib "*util-interface*")$PSEP" - CLASS_PATH+="$(find_lib "*jna-5*")$PSEP" - CLASS_PATH+="$(find_lib "*antlr4-runtime*")$PSEP" + CLASS_PATH="$(load_classpath "scaladoc" "$PSEP")" jvm_cp_args="-classpath \"$CLASS_PATH\"" } diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index c30a4689244c..16433a83f501 100644 --- a/dist/bin/scaladoc.bat +++ 
b/dist/bin/scaladoc.bat @@ -105,60 +105,24 @@ goto :eof @rem output parameter: _CLASS_PATH :classpathArgs -set "_LIB_DIR=%_PROG_HOME%\lib" -set _CLASS_PATH= +set "_ETC_DIR=%_PROG_HOME%\etc" @rem keep list in sync with bash script `bin\scaladoc` ! -call :updateClasspath "scaladoc" -call :updateClasspath "scala3-compiler" -call :updateClasspath "scala3-interfaces" -call :updateClasspath "scala3-library" -call :updateClasspath "tasty-core" -call :updateClasspath "scala3-tasty-inspector" -call :updateClasspath "flexmark-0" -call :updateClasspath "flexmark-html-parser" -call :updateClasspath "flexmark-ext-anchorlink" -call :updateClasspath "flexmark-ext-autolink" -call :updateClasspath "flexmark-ext-emoji" -call :updateClasspath "flexmark-ext-gfm-strikethrough" -call :updateClasspath "flexmark-ext-gfm-tables" -call :updateClasspath "flexmark-ext-gfm-tasklist" -call :updateClasspath "flexmark-ext-wikilink" -call :updateClasspath "flexmark-ext-yaml-front-matter" -call :updateClasspath "liqp" -call :updateClasspath "jsoup" -call :updateClasspath "jackson-dataformat-yaml" -call :updateClasspath "jackson-datatype-jsr310" -call :updateClasspath "strftime4j" -call :updateClasspath "scala-asm" -call :updateClasspath "compiler-interface" -call :updateClasspath "jline-reader" -call :updateClasspath "jline-terminal-3" -call :updateClasspath "jline-terminal-jna" -call :updateClasspath "flexmark-util" -call :updateClasspath "flexmark-formatter" -call :updateClasspath "autolink-0.6" -call :updateClasspath "flexmark-jira-converter" -call :updateClasspath "antlr4" -call :updateClasspath "jackson-annotations" -call :updateClasspath "jackson-core" -call :updateClasspath "jackson-databind" -call :updateClasspath "snakeyaml" -call :updateClasspath "scala-library" -call :updateClasspath "protobuf-java" -call :updateClasspath "util-interface" -call :updateClasspath "jna-5" -call :updateClasspath "flexmark-ext-tables" -call :updateClasspath "flexmark-ext-ins" -call :updateClasspath "flexmark-ext-superscript" -call :updateClasspath "antlr4-runtime" +call :loadClasspathFromFile goto :eof -@rem input parameter: %1=pattern for library file -@rem output parameter: _CLASS_PATH -:updateClasspath -set "__PATTERN=%~1" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*%__PATTERN%*" 2^>NUL') do ( - set "_CLASS_PATH=!_CLASS_PATH!%_LIB_DIR%\%%f%_PSEP%" +@REM concatentate every line in "%_ETC_DIR%\scaladoc.classpath" with _PSEP +:loadClasspathFromFile +set _CLASS_PATH= +if exist "%_ETC_DIR%\scaladoc.classpath" ( + for /f "usebackq delims=" %%i in ("%_ETC_DIR%\scaladoc.classpath") do ( + set "_LIB=%_PROG_HOME%\maven2\%%i" + set "_LIB=!_LIB:/=\!" + if not defined _CLASS_PATH ( + set "_CLASS_PATH=!_LIB!" + ) else ( + set "_CLASS_PATH=!_CLASS_PATH!%_PSEP%!_LIB!" 
+ ) + ) ) goto :eof diff --git a/project/Build.scala b/project/Build.scala index c1a8800421a6..fa798e9c25ae 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2127,7 +2127,12 @@ object Build { republishRepo := target.value / "republish", packResourceDir += (republishRepo.value / "bin" -> "bin"), packResourceDir += (republishRepo.value / "maven2" -> "maven2"), - Compile / pack := (Compile / pack).dependsOn(republish).value, + packResourceDir += (republishRepo.value / "etc" -> "etc"), + republishCommandLibs += + ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-staging", "scala3-tasty-inspector")), + republishCommandLibs += + ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-tasty-inspector", "scaladoc")), + Compile / pack := republishPack.value, ) lazy val dist = project.asDist(Bootstrapped) diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 537c82d62cce..6ce83c2f0abf 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -2,6 +2,7 @@ package dotty.tools.sbtplugin import sbt._ import xerial.sbt.pack.PackPlugin +import xerial.sbt.pack.PackPlugin.autoImport.{packResourceDir, packDir} import sbt.Keys._ import sbt.AutoPlugin import sbt.PublishBinPlugin @@ -66,7 +67,9 @@ object RepublishPlugin extends AutoPlugin { val republishBinDir = settingKey[File]("where to find static files for the bin dir.") val republishCoursierDir = settingKey[File]("where to download the coursier launcher jar.") val republishBinOverrides = settingKey[Seq[File]]("files to override those in bin-dir.") + val republishCommandLibs = settingKey[Seq[(String, List[String])]]("libraries needed for each command.") val republish = taskKey[File]("cache the dependencies and download launchers for the distribution") + val republishPack = taskKey[File]("do the pack command") val republishRepo = settingKey[File]("the location to store the republished artifacts.") val republishLaunchers = settingKey[Seq[(String, String)]]("launchers to download. Sequence of (name, URL).") val republishCoursier = settingKey[Seq[(String, String)]]("coursier launcher to download. 
Sequence of (name, URL).") @@ -99,7 +102,7 @@ object RepublishPlugin extends AutoPlugin { }.toSet } - private def coursierCmd(jar: File, cache: File, args: Seq[String]): Unit = { + private def coursierCmd(jar: File, cache: File): Seq[String] => List[String] = { val jar0 = jar.getAbsolutePath.toString val javaHome = sys.props.get("java.home").getOrElse { throw new MessageOnlyException("java.home property not set") @@ -108,38 +111,88 @@ object RepublishPlugin extends AutoPlugin { val cmd = if (scala.util.Properties.isWin) "java.exe" else "java" (file(javaHome) / "bin" / cmd).getAbsolutePath } - val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString) - val cmdLine = Seq(javaCmd, "-jar", jar0) ++ args - // invoke cmdLine with env - val p = new ProcessBuilder(cmdLine: _*).inheritIO() - p.environment().putAll(env.asJava) - val proc = p.start() - proc.waitFor() - if (proc.exitValue() != 0) - throw new MessageOnlyException(s"Error running coursier.jar with args ${args.mkString(" ")}") + val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString).asJava + val cmdLine0 = Seq(javaCmd, "-jar", jar0) + args => + val cmdLine = cmdLine0 ++ args + // invoke cmdLine with env, but also capture the output + val p = new ProcessBuilder(cmdLine: _*) + .directory(cache) + .inheritIO() + .redirectOutput(ProcessBuilder.Redirect.PIPE) + p.environment().putAll(env) + + val proc = p.start() + val in = proc.getInputStream + val output = { + try { + val src = scala.io.Source.fromInputStream(in) + try src.getLines().toList + finally src.close() + } finally { + in.close() + } + } + + proc.waitFor() + + if (proc.exitValue() != 0) + throw new MessageOnlyException(s"Error running coursier.jar with args ${args.mkString(" ")}") + + output + } + + private def resolveMaven2(repo: File): Path = { + java.nio.file.Files.walk(repo.toPath) + .filter(_.getFileName.toString == "maven2") + .findFirst() + .orElseThrow(() => new MessageOnlyException(s"Could not find maven2 directory in $repo")) + .toAbsolutePath() } - private def coursierFetch(coursierJar: File, log: Logger, cacheDir: File, localRepo: File, libs: Seq[String]): Unit = { + private def coursierFetch( + coursierJar: File, log: Logger, cacheDir: File, localRepo: File, libs: Seq[String]): Map[String, List[String]] = { + val localRepoPath = localRepo.getAbsolutePath val localRepoArg = { - val path = localRepo.getAbsolutePath - if (scala.util.Properties.isWin) { - val path0 = path.replace('\\', '/') - s"file:///$path0" // extra root slash for Windows paths + val uriPart = { + if (scala.util.Properties.isWin) { + s"/${localRepoPath.replace('\\', '/')}" // extra root slash for Windows paths + } + else { + localRepoPath // no change needed for Unix paths + } } - else - s"file://$path" + s"file://$uriPart" } - IO.createDirectory(cacheDir) - for (lib <- libs) { + val cacheDirPath = cacheDir.getAbsolutePath + lazy val maven2RootLocal = resolveMaven2(localRepo) + lazy val maven2RootCache = resolveMaven2(cacheDir) // lazy because cache dir isn't populated until after fetch + val cmd = coursierCmd(coursierJar, cacheDir) + val resolved = for (lib <- libs) yield { log.info(s"[republish] Fetching $lib with coursier.jar...") - coursierCmd(coursierJar, cacheDir, + val out = cmd( Seq( "fetch", + "--no-default", + "--repository", "central", "--repository", localRepoArg, lib ) ) + lib -> out.collect { + case s if s.startsWith(localRepoPath) => + maven2RootLocal.relativize(java.nio.file.Paths.get(s)).toString().replace('\\', '/') // format as uri + case s if 
s.startsWith(cacheDirPath) => + maven2RootCache.relativize(java.nio.file.Paths.get(s)).toString().replace('\\', '/') // format as uri + } + } + resolved.toMap + } + + private def fuzzyFind[V](map: Map[String, V], key: String): V = { + map.collectFirst({ case (k, v) if k.contains(key) => v }).getOrElse { + throw new MessageOnlyException(s"Could not find key $key in map $map") } } @@ -148,28 +201,34 @@ object RepublishPlugin extends AutoPlugin { private def resolveLibraryDeps( coursierJar: File, log: Logger, + republishDir: File, csrCacheDir: File, localRepo: File, - resolvedLocal: Seq[ResolvedArtifacts]): Seq[ResolvedArtifacts] = { + resolvedLocal: Seq[ResolvedArtifacts], + commandLibs: Seq[(String, List[String])]): Seq[ResolvedArtifacts] = { // publish the local artifacts to the local repo, so coursier can resolve them republishResolvedArtifacts(resolvedLocal, localRepo, logOpt = None) - coursierFetch(coursierJar, log, csrCacheDir, localRepo, resolvedLocal.map(_.id.toString)) + val classpaths = coursierFetch(coursierJar, log, csrCacheDir, localRepo, resolvedLocal.map(_.id.toString)) - val maven2Root = java.nio.file.Files.walk(csrCacheDir.toPath) - .filter(_.getFileName.toString == "maven2") - .findFirst() - .orElseThrow(() => new MessageOnlyException(s"Could not find maven2 directory in $csrCacheDir")) + if (commandLibs.nonEmpty) { + IO.createDirectory(republishDir / "etc") + for ((command, libs) <- commandLibs) { + val entries = libs.map(fuzzyFind(classpaths, _)).reduce(_ ++ _).distinct + IO.write(republishDir / "etc" / s"$command.classpath", entries.mkString("\n")) + } + } + + val maven2Root = resolveMaven2(csrCacheDir) def pathToArtifact(p: Path): ResolvedArtifacts = { // relative path from maven2Root - val lastAsString = p.getFileName.toString val relP = maven2Root.relativize(p) val parts = relP.iterator().asScala.map(_.toString).toVector - val (orgParts :+ name :+ rev :+ _) = parts + val (orgParts :+ name :+ rev :+ artifact) = parts val id = SimpleModuleId(orgParts.mkString("."), name, rev) - if (lastAsString.endsWith(".jar")) { + if (artifact.endsWith(".jar")) { ResolvedArtifacts(id, Some(p.toFile), None) } else { ResolvedArtifacts(id, None, Some(p.toFile)) @@ -279,6 +338,7 @@ object RepublishPlugin extends AutoPlugin { republishCoursier := Seq.empty, republishBinOverrides := Seq.empty, republishExtraProps := Seq.empty, + republishCommandLibs := Seq.empty, republishLocalResolved / republishProjectRefs := { val proj = thisProjectRef.value val deps = buildDependencies.value @@ -326,13 +386,15 @@ object RepublishPlugin extends AutoPlugin { val s = streams.value val lm = (republishAllResolved / dependencyResolution).value val cacheDir = republishRepo.value + val commandLibs = republishCommandLibs.value val log = s.log val csrCacheDir = s.cacheDirectory / "csr-cache" val localRepo = s.cacheDirectory / "localRepo" / "maven2" // resolve the transitive dependencies of the local artifacts - val resolvedLibs = resolveLibraryDeps(coursierJar, log, csrCacheDir, localRepo, resolvedLocal) + val resolvedLibs = resolveLibraryDeps( + coursierJar, log, cacheDir, csrCacheDir, localRepo, resolvedLocal, commandLibs) // the combination of local artifacts and resolved transitive dependencies val merged = @@ -395,6 +457,77 @@ object RepublishPlugin extends AutoPlugin { val launchers = republishFetchLaunchers.value val extraProps = republishWriteExtraProps.value cacheDir + }, + republishPack := { + val cacheDir = republish.value + val s = streams.value + val log = s.log + val distDir = target.value / 
packDir.value + val progVersion = version.value + + IO.createDirectory(distDir) + for ((path, dir) <- packResourceDir.value) { + val target = distDir / dir + IO.copyDirectory(path, target) + } + + locally { + // everything in this block is copied from sbt-pack plugin + import scala.util.Try + import java.time.format.DateTimeFormatterBuilder + import java.time.format.SignStyle + import java.time.temporal.ChronoField.* + import java.time.ZoneId + import java.time.Instant + import java.time.ZonedDateTime + import java.time.ZonedDateTime + import java.util.Locale + import java.util.Date + val base: File = new File(".") // Using the working directory as base for readability + + def write(path: String, content: String) { + val p = distDir / path + IO.write(p, content) + } + + val humanReadableTimestampFormatter = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 2) + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2) + .appendLiteral(' ') + .appendValue(HOUR_OF_DAY, 2) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .appendOffset("+HHMM", "Z") + .toFormatter(Locale.US) + + // Retrieve build time + val systemZone = ZoneId.systemDefault().normalized() + val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(new Date().getTime), systemZone) + val buildTime = humanReadableTimestampFormatter.format(timestamp) + + // Check the current Git revision + val gitRevision: String = Try { + if ((base / ".git").exists()) { + log.info("[republish] Checking the git revision of the current project") + sys.process.Process("git rev-parse HEAD").!! + } else { + "unknown" + } + }.getOrElse("unknown").trim + + + // Output the version number and Git revision + write("VERSION", s"version:=${progVersion}\nrevision:=${gitRevision}\nbuildTime:=${buildTime}\n") + } + + + distDir } ) } From c004c74aae23f224617667717bafe50c0f5633b0 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 21 Jun 2024 14:13:57 +0200 Subject: [PATCH 199/827] add back in copy of mapped sequence --- dist/bin-native-overrides/cli-common-platform.bat | 6 +++++- project/Build.scala | 2 +- project/RepublishPlugin.scala | 14 ++++++++++++-- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/dist/bin-native-overrides/cli-common-platform.bat b/dist/bin-native-overrides/cli-common-platform.bat index e0cfa40692b5..d1c4f1c4716b 100644 --- a/dist/bin-native-overrides/cli-common-platform.bat +++ b/dist/bin-native-overrides/cli-common-platform.bat @@ -12,7 +12,11 @@ FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\EXTRA_PROPERTIES") DO ( ) ) +@REM we didn't find it, so we should fail +echo "ERROR: cli_version not found in EXTRA_PROPERTIES file" +exit /b 1 + :foundCliVersion endlocal & set "SCALA_CLI_VERSION=%_SCALA_CLI_VERSION%" -set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" "--cli-version" "%SCALA_CLI_VERSION%" \ No newline at end of file +set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" "--cli-version" "%SCALA_CLI_VERSION%" diff --git a/project/Build.scala b/project/Build.scala index fa798e9c25ae..cdabcd3471f1 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2172,7 +2172,7 @@ object Build { republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishExtraProps += ("cli_version" -> scalaCliLauncherVersion), - mappings += (republishRepo.value / 
"etc" / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), + mappings += (republishRepo.value / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), republishLaunchers += ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersionWindows/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") ) diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 6ce83c2f0abf..a0a8ce7dae74 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -114,7 +114,7 @@ object RepublishPlugin extends AutoPlugin { val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString).asJava val cmdLine0 = Seq(javaCmd, "-jar", jar0) args => - val cmdLine = cmdLine0 ++ args + val cmdLine = cmdLine0 ++ args // invoke cmdLine with env, but also capture the output val p = new ProcessBuilder(cmdLine: _*) .directory(cache) @@ -441,7 +441,7 @@ object RepublishPlugin extends AutoPlugin { } else { val repoDir = republishRepo.value - val propsFile = repoDir / "etc" / "EXTRA_PROPERTIES" + val propsFile = repoDir / "EXTRA_PROPERTIES" log.info(s"[republish] Writing extra properties to $propsFile...") Using.fileWriter()(propsFile) { writer => extraProps.foreach { case (k, v) => @@ -485,6 +485,16 @@ object RepublishPlugin extends AutoPlugin { import java.util.Date val base: File = new File(".") // Using the working directory as base for readability + // Copy explicitly added dependencies + val mapped: Seq[(File, String)] = mappings.value + log.info("[republish] Copying explicit dependencies:") + val explicitDepsJars = for ((file, path) <- mapped) yield { + log.info(file.getPath) + val dest = distDir / path + IO.copyFile(file, dest, true) + dest + } + def write(path: String, content: String) { val p = distDir / path IO.write(p, content) From bf670324891a4db0c2282ac893dfba9b8578b72b Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 21 Jun 2024 14:58:42 +0200 Subject: [PATCH 200/827] read last line, split-off with-compiler classpath --- dist/bin/common | 16 ++++++++++++---- dist/bin/scalac.bat | 27 ++++++++++++++++++--------- project/Build.scala | 4 +++- project/RepublishPlugin.scala | 8 +++++++- 4 files changed, 40 insertions(+), 15 deletions(-) diff --git a/dist/bin/common b/dist/bin/common index 1ff0ca66274c..63e598d70d7e 100644 --- a/dist/bin/common +++ b/dist/bin/common @@ -10,10 +10,13 @@ load_classpath () { command="$1" psep_pattern="$2" __CLASS_PATH="" - while IFS= read -r line; do + while IFS= read -r line || [ -n "$line" ]; do + # jna-5 only appropriate for some combinations if ! 
[[ ( -n ${conemu-} || -n ${msys-}) && "$line" == "*jna-5*" ]]; then - # jna-5 only appropriate for some combinations - __CLASS_PATH+="$PROG_HOME/maven2/$line$psep_pattern" + if [ -n "$__CLASS_PATH" ]; then + __CLASS_PATH+="$psep_pattern" + fi + __CLASS_PATH+="$PROG_HOME/maven2/$line" fi done < "$PROG_HOME/etc/$command.classpath" echo "$__CLASS_PATH" @@ -21,11 +24,16 @@ load_classpath () { compilerJavaClasspathArgs () { toolchain="$(load_classpath "scala" "$PSEP")" + toolchain_extra="$(load_classpath "with_compiler" "$PSEP")" + + if [ -n "$toolchain_extra" ]; then + toolchain+="$PSEP$toolchain_extra" + fi if [ -n "${jvm_cp_args-}" ]; then jvm_cp_args="$toolchain$jvm_cp_args" else - jvm_cp_args="$toolchain$PSEP" + jvm_cp_args="$toolchain" fi } diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index fe6d7e3fad4d..dbcbaf11b8e2 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -89,9 +89,16 @@ goto :eof @rem output parameter: _JVM_CP_ARGS :compilerJavaClasspathArgs -call :loadClasspathFromFile +set "CP_FILE=%_ETC_DIR%\scala.classpath" +call :loadClasspathFromFile %CP_FILE% +set "__TOOLCHAIN=%_CLASS_PATH_RESULT%" -set "__TOOLCHAIN=%_CLASS_PATH%" +set "CP_FILE=%_ETC_DIR%\with_compiler.classpath" +call :loadClasspathFromFile %CP_FILE% + +if defined _CLASS_PATH_RESULT ( + set "__TOOLCHAIN=%__TOOLCHAIN%%_PSEP%%_CLASS_PATH_RESULT%" +) if defined _SCALA_CPATH ( set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" @@ -100,17 +107,19 @@ if defined _SCALA_CPATH ( ) goto :eof -@REM concatentate every line in "%_ETC_DIR%\scala.classpath" with _PSEP +@REM concatentate every line in "%_ARG_FILE%" with _PSEP +@REM arg 1 - file to read :loadClasspathFromFile -set _CLASS_PATH= -if exist "%_ETC_DIR%\scala.classpath" ( - for /f "usebackq delims=" %%i in ("%_ETC_DIR%\scala.classpath") do ( +set _ARG_FILE=%1 +set _CLASS_PATH_RESULT= +if exist "%_ARG_FILE%" ( + for /f "usebackq delims=" %%i in ("%_ARG_FILE%") do ( set "_LIB=%_PROG_HOME%\maven2\%%i" set "_LIB=!_LIB:/=\!" - if not defined _CLASS_PATH ( - set "_CLASS_PATH=!_LIB!" + if not defined _CLASS_PATH_RESULT ( + set "_CLASS_PATH_RESULT=!_LIB!" ) else ( - set "_CLASS_PATH=!_CLASS_PATH!%_PSEP%!_LIB!" + set "_CLASS_PATH_RESULT=!_CLASS_PATH_RESULT!%_PSEP%!_LIB!" 
) ) ) diff --git a/project/Build.scala b/project/Build.scala index cdabcd3471f1..bd5c06a512e8 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2129,7 +2129,9 @@ object Build { packResourceDir += (republishRepo.value / "maven2" -> "maven2"), packResourceDir += (republishRepo.value / "etc" -> "etc"), republishCommandLibs += - ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-staging", "scala3-tasty-inspector")), + ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), + republishCommandLibs += + ("with_compiler" -> List("scala3-staging", "scala3-tasty-inspector", "^!scala3-interfaces", "^!scala3-compiler", "^!scala3-library", "^!tasty-core")), republishCommandLibs += ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-tasty-inspector", "scaladoc")), Compile / pack := republishPack.value, diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index a0a8ce7dae74..e4bf40545a6b 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -215,7 +215,13 @@ object RepublishPlugin extends AutoPlugin { if (commandLibs.nonEmpty) { IO.createDirectory(republishDir / "etc") for ((command, libs) <- commandLibs) { - val entries = libs.map(fuzzyFind(classpaths, _)).reduce(_ ++ _).distinct + val (negated, actual) = libs.partition(_.startsWith("^!")) + val subtractions = negated.map(_.stripPrefix("^!")) + + def compose(libs: List[String]): List[String] = + libs.map(fuzzyFind(classpaths, _)).reduceOption(_ ++ _).map(_.distinct).getOrElse(Nil) + + val entries = compose(actual).diff(compose(subtractions)) IO.write(republishDir / "etc" / s"$command.classpath", entries.mkString("\n")) } } From fdd6a4ac1aa23371341b1f7f110cb87ecae7d973 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 24 Jun 2024 11:59:45 +0200 Subject: [PATCH 201/827] quickfix for failing bootstrapped compilation --- .../src/main/dotty/tools/pc/WithCompilationUnit.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala b/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala index b4f36c8d27ab..8110db269b3b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala @@ -31,7 +31,8 @@ class WithCompilationUnit( driver.run(uri, source) given ctx: Context = driver.currentCtx - val unit = driver.currentCtx.run.units.head + private val run = driver.currentCtx.run + val unit = run.units.head val compilatonUnitContext = ctx.fresh.setCompilationUnit(unit) val offset = params match case op: OffsetParams => op.offset() From 790bee903c3ac2ffc2e7254535e03cfc08f3e023 Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Mon, 24 Jun 2024 19:51:00 +0200 Subject: [PATCH 202/827] bugfix: Exclude newer LSP4j, which is built on JDK 11 It seems the CI started failing because of out bump, but that was not picked up aside from windows tests weirdly. Metals recently updated to newest LSP4j, though I think nothing new was added and no changes were done to the presentation compiler so we should be ok to downgrade locally: https://github.com/scalameta/metals/pull/6126/files The update was mostly done so that we done fall behind in the LSP standard, but this might become a problem in the future here. Do we ever plan do drop supporting JDK 8? 
--- project/Build.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 82417df75756..cafa71e542bb 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1356,8 +1356,10 @@ object Build { libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", "io.get-coursier" % "interface" % "1.0.18", - "org.scalameta" % "mtags-interfaces" % mtagsVersion, - "com.google.guava" % "guava" % "33.2.1-jre" + ("org.scalameta" % "mtags-interfaces" % mtagsVersion) + .exclude("org.eclipse.lsp4j","org.eclipse.lsp4j") + .exclude("org.eclipse.lsp4j","org.eclipse.lsp4j.jsonrpc"), + "org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.20.1", ), libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.12" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, From dd711ee841716c0dc11c4389f3e31a6722baad0b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 24 Jun 2024 15:28:29 -0700 Subject: [PATCH 203/827] Avoid jpath in example --- docs/_docs/reference/metaprogramming/reflection.md | 2 +- docs/_spec/TODOreference/metaprogramming/reflection.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/_docs/reference/metaprogramming/reflection.md b/docs/_docs/reference/metaprogramming/reflection.md index 68cb7dafcfbb..12ca856f4940 100644 --- a/docs/_docs/reference/metaprogramming/reflection.md +++ b/docs/_docs/reference/metaprogramming/reflection.md @@ -82,7 +82,7 @@ def macroImpl()(quotes: Quotes): Expr[Unit] = import quotes.reflect.* val pos = Position.ofMacroExpansion - val path = pos.sourceFile.jpath.toString + val path = pos.sourceFile.path val start = pos.start val end = pos.end val startLine = pos.startLine diff --git a/docs/_spec/TODOreference/metaprogramming/reflection.md b/docs/_spec/TODOreference/metaprogramming/reflection.md index b2d492657a4e..6ab1eeab6ac0 100644 --- a/docs/_spec/TODOreference/metaprogramming/reflection.md +++ b/docs/_spec/TODOreference/metaprogramming/reflection.md @@ -82,7 +82,7 @@ def macroImpl()(quotes: Quotes): Expr[Unit] = import quotes.reflect.* val pos = Position.ofMacroExpansion - val path = pos.sourceFile.jpath.toString + val path = pos.sourceFile.path val start = pos.start val end = pos.end val startLine = pos.startLine From e7acd53883a517b39af0b1fa22abfcd6f0adb787 Mon Sep 17 00:00:00 2001 From: som-snytt Date: Tue, 25 Jun 2024 02:28:51 -0700 Subject: [PATCH 204/827] Update docs/_docs/reference/metaprogramming/reflection.md Co-authored-by: Jamie Thompson --- docs/_docs/reference/metaprogramming/reflection.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/_docs/reference/metaprogramming/reflection.md b/docs/_docs/reference/metaprogramming/reflection.md index 12ca856f4940..65ae2f733b7a 100644 --- a/docs/_docs/reference/metaprogramming/reflection.md +++ b/docs/_docs/reference/metaprogramming/reflection.md @@ -82,7 +82,8 @@ def macroImpl()(quotes: Quotes): Expr[Unit] = import quotes.reflect.* val pos = Position.ofMacroExpansion - val path = pos.sourceFile.path + val jpath = pos.sourceFile.getJPath.getOrElse(report.errorAndAbort("virtual file not supported", pos)) + val path = pos.sourceFile.path // fallback for a virtual file val start = pos.start val end = pos.end val startLine = pos.startLine From 2c7d497e9fe60f1390f55908d6283dbff527b549 Mon Sep 17 00:00:00 2001 From: som-snytt Date: Tue, 25 Jun 2024 02:29:03 -0700 Subject: [PATCH 205/827] Update docs/_spec/TODOreference/metaprogramming/reflection.md Co-authored-by: Jamie Thompson --- 
docs/_spec/TODOreference/metaprogramming/reflection.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/_spec/TODOreference/metaprogramming/reflection.md b/docs/_spec/TODOreference/metaprogramming/reflection.md index 6ab1eeab6ac0..2af1d04d1b32 100644 --- a/docs/_spec/TODOreference/metaprogramming/reflection.md +++ b/docs/_spec/TODOreference/metaprogramming/reflection.md @@ -82,7 +82,8 @@ def macroImpl()(quotes: Quotes): Expr[Unit] = import quotes.reflect.* val pos = Position.ofMacroExpansion - val path = pos.sourceFile.path + val jpath = pos.sourceFile.getJPath.getOrElse(report.errorAndAbort("virtual file not supported", pos)) + val path = pos.sourceFile.path // fallback for a virtual file val start = pos.start val end = pos.end val startLine = pos.startLine From 80076269693bfb7bb4639c9898220ce47b966d87 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 19 Jun 2024 16:40:02 +0200 Subject: [PATCH 206/827] Test products of compilation to jar --- .../xsbt/ExtractUsedNamesSpecification.scala | 5 +- .../test/xsbt/ProductsSpecification.scala | 34 ++++++ .../xsbt/ScalaCompilerForUnitTesting.scala | 102 +++++++++--------- 3 files changed, 89 insertions(+), 52 deletions(-) create mode 100644 sbt-bridge/test/xsbt/ProductsSpecification.scala diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala index e47371175de6..0abefe2985c3 100644 --- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala @@ -1,7 +1,6 @@ package xsbt import xsbti.UseScope -import ScalaCompilerForUnitTesting.Callbacks import org.junit.{ Test, Ignore } import org.junit.Assert._ @@ -227,9 +226,9 @@ class ExtractUsedNamesSpecification { def findPatMatUsages(in: String): Set[String] = { val compilerForTesting = new ScalaCompilerForUnitTesting - val (_, Callbacks(callback, _)) = + val output = compilerForTesting.compileSrcs(List(List(sealedClass, in))) - val clientNames = callback.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) + val clientNames = output.analysis.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) val names: Set[String] = clientNames.flatMap { case (_, usages) => diff --git a/sbt-bridge/test/xsbt/ProductsSpecification.scala b/sbt-bridge/test/xsbt/ProductsSpecification.scala new file mode 100644 index 000000000000..b13defecc4cc --- /dev/null +++ b/sbt-bridge/test/xsbt/ProductsSpecification.scala @@ -0,0 +1,34 @@ +package xsbt + +import org.junit.Assert.* +import org.junit.Ignore +import org.junit.Test + +import java.io.File +import java.nio.file.Path +import java.nio.file.Paths + +class ProductsSpecification { + + @Test + def extractProductsFromJar = { + val src = + """package example + | + |class A { + | class B + | def foo = + | class C + |}""".stripMargin + val output = compiler.compileSrcsToJar(src) + val srcFile = output.srcFiles.head + val products = output.analysis.productClassesToSources.filter(_._2 == srcFile).keys.toSet + + def toPathInJar(className: String): Path = + Paths.get(s"${output.classesOutput}!${className.replace('.', File.separatorChar)}.class") + val expected = Set("example.A", "example.A$B", "example.A$C$1").map(toPathInJar) + assertEquals(products, expected) + } + + private def compiler = new ScalaCompilerForUnitTesting +} \ No newline at end of file diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index f17be692ee50..fd125f25560b 
100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -1,22 +1,19 @@ /** Adapted from https://github.com/sbt/sbt/blob/0.13/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala */ package xsbt -import xsbti.compile.{CompileProgress, SingleOutput} -import java.io.File -import xsbti._ -import sbt.io.IO -import xsbti.api.{ ClassLike, Def, DependencyContext } -import DependencyContext._ -import xsbt.api.SameAPI -import sbt.internal.util.ConsoleLogger -import dotty.tools.io.PlainFile.toPlainFile import dotty.tools.xsbt.CompilerBridge +import sbt.io.IO +import xsbti.* +import xsbti.api.ClassLike +import xsbti.api.DependencyContext.* +import xsbti.compile.SingleOutput + +import java.io.File +import java.nio.file.Path import TestCallback.ExtractedClassDependencies -import ScalaCompilerForUnitTesting.Callbacks -object ScalaCompilerForUnitTesting: - case class Callbacks(analysis: TestCallback, progress: TestCompileProgress) +case class CompileOutput(srcFiles: Seq[VirtualFileRef], classesOutput: Path, analysis: TestCallback, progress: TestCompileProgress) /** * Provides common functionality needed for unit tests that require compiling @@ -25,29 +22,24 @@ object ScalaCompilerForUnitTesting: class ScalaCompilerForUnitTesting { def extractEnteredPhases(srcs: String*): Seq[List[String]] = { - val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(srcs*) - val run = testProgress.runs.head - tempSrcFiles.map(src => run.unitPhases(src.id)) + val output = compileSrcs(srcs*) + val run = output.progress.runs.head + output.srcFiles.map(src => run.unitPhases(src.id)) } - def extractTotal(srcs: String*)(extraSourcePath: String*): Int = { - val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(List(srcs.toList), extraSourcePath.toList) - val run = testProgress.runs.head - run.total - } + def extractTotal(srcs: String*)(extraSourcePath: String*): Int = + compileSrcs(List(srcs.toList), extraSourcePath.toList).progress.runs.head.total - def extractProgressPhases(srcs: String*): List[String] = { - val (_, Callbacks(_, testProgress)) = compileSrcs(srcs*) - testProgress.runs.head.phases - } + def extractProgressPhases(srcs: String*): List[String] = + compileSrcs(srcs*).progress.runs.head.phases /** * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. */ def extractApiFromSrc(src: String): Seq[ClassLike] = { - val (Seq(tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(src) - analysisCallback.apis(tempSrcFile) + val output = compileSrcs(src) + output.analysis.apis(output.srcFiles.head) } /** @@ -55,8 +47,8 @@ class ScalaCompilerForUnitTesting { * extracted by ExtractAPI class. 
*/ def extractApisFromSrcs(srcs: List[String]*): Seq[Seq[ClassLike]] = { - val (tempSrcFiles, Callbacks(analysisCallback, _)) = compileSrcs(srcs.toList) - tempSrcFiles.map(analysisCallback.apis) + val output = compileSrcs(srcs.toList) + output.srcFiles.map(output.analysis.apis) } /** @@ -73,15 +65,16 @@ class ScalaCompilerForUnitTesting { assertDefaultScope: Boolean = true ): Map[String, Set[String]] = { // we drop temp src file corresponding to the definition src file - val (Seq(_, tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(definitionSrc, actualSrc) + val output = compileSrcs(definitionSrc, actualSrc) + val analysis = output.analysis if (assertDefaultScope) for { - (className, used) <- analysisCallback.usedNamesAndScopes - analysisCallback.TestUsedName(name, scopes) <- used + (className, used) <- analysis.usedNamesAndScopes + analysis.TestUsedName(name, scopes) <- used } assert(scopes.size() == 1 && scopes.contains(UseScope.Default), s"$className uses $name in $scopes") - val classesInActualSrc = analysisCallback.classNames(tempSrcFile).map(_._1) - classesInActualSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + val classesInActualSrc = analysis.classNames(output.srcFiles.head).map(_._1) + classesInActualSrc.map(className => className -> analysis.usedNames(className)).toMap } /** @@ -91,11 +84,11 @@ class ScalaCompilerForUnitTesting { * Only the names used in the last src file are returned. */ def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = { - val (srcFiles, Callbacks(analysisCallback, _)) = compileSrcs(sources*) - srcFiles + val output = compileSrcs(sources*) + output.srcFiles .map { srcFile => - val classesInSrc = analysisCallback.classNames(srcFile).map(_._1) - classesInSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + val classesInSrc = output.analysis.classNames(srcFile).map(_._1) + classesInSrc.map(className => className -> output.analysis.usedNames(className)).toMap } .reduce(_ ++ _) } @@ -113,15 +106,15 @@ class ScalaCompilerForUnitTesting { * file system-independent way of testing dependencies between source code "files". */ def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = { - val (_, Callbacks(testCallback, _)) = compileSrcs(srcs) + val analysis = compileSrcs(srcs).analysis - val memberRefDeps = testCallback.classDependencies collect { + val memberRefDeps = analysis.classDependencies collect { case (target, src, DependencyByMemberRef) => (src, target) } - val inheritanceDeps = testCallback.classDependencies collect { + val inheritanceDeps = analysis.classDependencies collect { case (target, src, DependencyByInheritance) => (src, target) } - val localInheritanceDeps = testCallback.classDependencies collect { + val localInheritanceDeps = analysis.classDependencies collect { case (target, src, LocalDependencyByInheritance) => (src, target) } ExtractedClassDependencies.fromPairs(memberRefDeps, inheritanceDeps, localInheritanceDeps) @@ -142,12 +135,20 @@ class ScalaCompilerForUnitTesting { * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. 
*/ - def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil): (Seq[VirtualFile], Callbacks) = { + def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil, compileToJar: Boolean = false): CompileOutput = { val temp = IO.createTemporaryDirectory val analysisCallback = new TestCallback val testProgress = new TestCompileProgress - val classesDir = new File(temp, "classes") - classesDir.mkdir() + val classesOutput = + if (compileToJar) { + val jar = new File(temp, "classes.jar") + jar.createNewFile() + jar + } else { + val dir = new File(temp, "classes") + dir.mkdir() + dir + } val bridge = new CompilerBridge @@ -164,16 +165,16 @@ class ScalaCompilerForUnitTesting { } val virtualSrcFiles = srcFiles.toArray - val classesDirPath = classesDir.getAbsolutePath.toString + val classesOutputPath = classesOutput.getAbsolutePath() val output = new SingleOutput: - def getOutputDirectory() = classesDir + def getOutputDirectory() = classesOutput val maybeSourcePath = if extraFiles.isEmpty then Nil else List("-sourcepath", temp.getAbsolutePath.toString) bridge.run( virtualSrcFiles, new TestDependencyChanges, - Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath) ++ maybeSourcePath, + Array("-Yforce-sbt-phases", "-classpath", classesOutputPath, "-usejavacp", "-d", classesOutputPath) ++ maybeSourcePath, output, analysisCallback, new TestReporter, @@ -185,13 +186,16 @@ class ScalaCompilerForUnitTesting { srcFiles } - (files.flatten.toSeq, Callbacks(analysisCallback, testProgress)) + CompileOutput(files.flatten.toSeq, classesOutput.toPath, analysisCallback, testProgress) } - def compileSrcs(srcs: String*): (Seq[VirtualFile], Callbacks) = { + def compileSrcs(srcs: String*): CompileOutput = { compileSrcs(List(srcs.toList)) } + def compileSrcsToJar(srcs: String*): CompileOutput = + compileSrcs(List(srcs.toList), compileToJar = true) + private def prepareSrcFile(baseDir: File, fileName: String, src: String): VirtualFile = { val srcFile = new File(baseDir, fileName) IO.write(srcFile, src) From a88312b6bf2ff1a7f0884ddaadbf5736bde91eba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Wed, 26 Jun 2024 13:54:23 +0200 Subject: [PATCH 207/827] Mark genSJSIR as *disabled* (rather than non-*runnable*) when no `-scalajs`. This works around the issue seen in #20296. However, the issue resurfaces if we actually run `-Ycheck:all` in a Scala.js-enabled build. 
--- .../src/dotty/tools/backend/sjs/GenSJSIR.scala | 5 ++++- .../test/xsbt/CompileProgressSpecification.scala | 1 - tests/pos/i20296.scala | 14 ++++++++++++++ 3 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i20296.scala diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala index fbb9042affe7..c44c8f19777b 100644 --- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala +++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala @@ -11,8 +11,11 @@ class GenSJSIR extends Phase { override def description: String = GenSJSIR.description + override def isEnabled(using Context): Boolean = + ctx.settings.scalajs.value + override def isRunnable(using Context): Boolean = - super.isRunnable && ctx.settings.scalajs.value && !ctx.usedBestEffortTasty + super.isRunnable && !ctx.usedBestEffortTasty def run(using Context): Unit = new JSCodeGen().run() diff --git a/sbt-bridge/test/xsbt/CompileProgressSpecification.scala b/sbt-bridge/test/xsbt/CompileProgressSpecification.scala index bcdac0547e75..dc3956ada0db 100644 --- a/sbt-bridge/test/xsbt/CompileProgressSpecification.scala +++ b/sbt-bridge/test/xsbt/CompileProgressSpecification.scala @@ -66,7 +66,6 @@ class CompileProgressSpecification { "MegaPhase{pruneErasedDefs,...,arrayConstructors}", "erasure", "constructors", - "genSJSIR", "genBCode" ) val missingExpectedPhases = someExpectedPhases -- allPhases.toSet diff --git a/tests/pos/i20296.scala b/tests/pos/i20296.scala new file mode 100644 index 000000000000..910fd07c1298 --- /dev/null +++ b/tests/pos/i20296.scala @@ -0,0 +1,14 @@ +trait Foo + +object Foo { + inline def bar(): Foo = + class InlinedFoo extends Foo {} + new InlinedFoo + + inline def foo(): Foo = + bar() + class InlinedFoo extends Foo {} + new InlinedFoo + + def Test: Foo = Foo.foo() +} From 32752e28e898473a9f78f1ca8a8a019decd3f665 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 12 Apr 2024 00:20:47 +0200 Subject: [PATCH 208/827] Drop normalization of applied match alias arguments Delay their normalization until it is needed. Avoids overflows from infinite match types that did not need to normalize. Also improves MatchTypeTraces as a side effect. It appears to have been added to avoid some separate issue, which seems to have been fixed. It is no longer needed since the previous fix with constant folding in disjointnessBoundary. 
--- compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- tests/neg/i12049d.check | 14 ++++++++++++++ tests/neg/i12049d.scala | 14 ++++++++++++++ tests/pos/matchtype-unusedArg/A_1.scala | 8 ++++++++ tests/pos/matchtype-unusedArg/B_2.scala | 2 ++ 5 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 tests/neg/i12049d.check create mode 100644 tests/neg/i12049d.scala create mode 100644 tests/pos/matchtype-unusedArg/A_1.scala create mode 100644 tests/pos/matchtype-unusedArg/B_2.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index cb47bd92352e..987f98e67b6a 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4694,7 +4694,7 @@ object Types extends TypeUtils { case AliasingBounds(alias) if isMatchAlias => trace(i"normalize $this", typr, show = true) { MatchTypeTrace.recurseWith(this) { - alias.applyIfParameterized(args.map(_.normalized)).tryNormalize + alias.applyIfParameterized(args).tryNormalize /* `applyIfParameterized` may reduce several HKTypeLambda applications * before the underlying MatchType is reached. * Even if they do not involve any match type normalizations yet, diff --git a/tests/neg/i12049d.check b/tests/neg/i12049d.check new file mode 100644 index 000000000000..fdb13aae4e43 --- /dev/null +++ b/tests/neg/i12049d.check @@ -0,0 +1,14 @@ +-- [E007] Type Mismatch Error: tests/neg/i12049d.scala:14:52 ----------------------------------------------------------- +14 |val x: M[NotRelevant[Nothing], Relevant[Nothing]] = 2 // error + | ^ + | Found: (2 : Int) + | Required: M[NotRelevant[Nothing], Relevant[Nothing]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce M[NotRelevant[Nothing], Relevant[Nothing]] + | trying to reduce Relevant[Nothing] + | failed since selector Nothing + | is uninhabited (there are no values of that type). + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i12049d.scala b/tests/neg/i12049d.scala new file mode 100644 index 000000000000..0011ec1f00b1 --- /dev/null +++ b/tests/neg/i12049d.scala @@ -0,0 +1,14 @@ + +trait A +trait B + +type M[X, Y] = Y match + case A => Int + case B => String + +type Relevant[Z] = Z match + case A => B +type NotRelevant[Z] = Z match + case B => A + +val x: M[NotRelevant[Nothing], Relevant[Nothing]] = 2 // error diff --git a/tests/pos/matchtype-unusedArg/A_1.scala b/tests/pos/matchtype-unusedArg/A_1.scala new file mode 100644 index 000000000000..4364a812f12c --- /dev/null +++ b/tests/pos/matchtype-unusedArg/A_1.scala @@ -0,0 +1,8 @@ + +type Rec[X] = X match + case Int => Rec[X] + +type M[Unused, Y] = Y match + case String => Double + +def foo[X](d: M[Rec[X], "hi"]) = ??? 
diff --git a/tests/pos/matchtype-unusedArg/B_2.scala b/tests/pos/matchtype-unusedArg/B_2.scala new file mode 100644 index 000000000000..437e53a1691d --- /dev/null +++ b/tests/pos/matchtype-unusedArg/B_2.scala @@ -0,0 +1,2 @@ + +def Test = foo[Int](3d) // crash before changes From ac980239a55d496f9c35f74e0dd6cc9879d1bb01 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 24 Apr 2024 17:22:08 +0200 Subject: [PATCH 209/827] Use cached underlyingMatchType when normalizing applied match aliases Also fixes underlyingMatchType to not use the resType of HKTypeLambdas It should only be in `isMatch` used for `AliasingBounds`, not `isMatchAlias` --- .../src/dotty/tools/dotc/core/Types.scala | 36 ++++++------------- 1 file changed, 11 insertions(+), 25 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 987f98e67b6a..bc6ed80a8b72 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -491,12 +491,10 @@ object Types extends TypeUtils { /** Does this application expand to a match type? */ def isMatchAlias(using Context): Boolean = underlyingMatchType.exists - def underlyingMatchType(using Context): Type = stripped match { + def underlyingMatchType(using Context): Type = stripped match case tp: MatchType => tp - case tp: HKTypeLambda => tp.resType.underlyingMatchType case tp: AppliedType => tp.underlyingMatchType case _ => NoType - } /** Is this a higher-kinded type lambda with given parameter variances? * These lambdas are used as the RHS of higher-kinded abstract types or @@ -4681,6 +4679,7 @@ object Types extends TypeUtils { /** Exists if the tycon is a TypeRef of an alias with an underlying match type. * Anything else should have already been reduced in `appliedTo` by the TypeAssigner. + * May reduce several HKTypeLambda applications before the underlying MatchType is reached. */ override def underlyingMatchType(using Context): Type = if ctx.period != validUnderlyingMatch then @@ -4688,28 +4687,15 @@ object Types extends TypeUtils { validUnderlyingMatch = validSuper cachedUnderlyingMatch - override def tryNormalize(using Context): Type = tycon.stripTypeVar match { - case tycon: TypeRef => - def tryMatchAlias = tycon.info match - case AliasingBounds(alias) if isMatchAlias => - trace(i"normalize $this", typr, show = true) { - MatchTypeTrace.recurseWith(this) { - alias.applyIfParameterized(args).tryNormalize - /* `applyIfParameterized` may reduce several HKTypeLambda applications - * before the underlying MatchType is reached. - * Even if they do not involve any match type normalizations yet, - * we still want to record these reductions in the MatchTypeTrace. - * They should however only be attempted if they eventually expand - * to a match type, which is ensured by the `isMatchAlias` guard. - */ - } - } - case _ => - NoType - tryCompiletimeConstantFold.orElse(tryMatchAlias) - case _ => - NoType - } + override def tryNormalize(using Context): Type = + def tryMatchAlias = + if isMatchAlias then trace(i"normalize $this", typr, show = true): + if MatchTypeTrace.isRecording then + MatchTypeTrace.recurseWith(this)(superType.tryNormalize) + else + underlyingMatchType.tryNormalize + else NoType + tryCompiletimeConstantFold.orElse(tryMatchAlias) /** Is this an unreducible application to wildcard arguments? * This is the case if tycon is higher-kinded. 
This means From 68ca883017888203a59341f7bd0e1d9c69b45e3a Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 30 Apr 2024 13:27:58 +0200 Subject: [PATCH 210/827] Cache underlying applied compiletime.ops --- .../src/dotty/tools/dotc/core/Types.scala | 28 +++++++++++-------- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index bc6ed80a8b72..0da5fd58573c 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -489,11 +489,11 @@ object Types extends TypeUtils { case _ => false /** Does this application expand to a match type? */ - def isMatchAlias(using Context): Boolean = underlyingMatchType.exists + def isMatchAlias(using Context): Boolean = underlyingNormalizable.isMatch - def underlyingMatchType(using Context): Type = stripped match + def underlyingNormalizable(using Context): Type = stripped match case tp: MatchType => tp - case tp: AppliedType => tp.underlyingMatchType + case tp: AppliedType => tp.underlyingNormalizable case _ => NoType /** Is this a higher-kinded type lambda with given parameter variances? @@ -4612,8 +4612,8 @@ object Types extends TypeUtils { private var myEvalRunId: RunId = NoRunId private var myEvalued: Type = uninitialized - private var validUnderlyingMatch: Period = Nowhere - private var cachedUnderlyingMatch: Type = uninitialized + private var validUnderlyingNormalizable: Period = Nowhere + private var cachedUnderlyingNormalizable: Type = uninitialized def isGround(acc: TypeAccumulator[Boolean])(using Context): Boolean = if myGround == 0 then myGround = if acc.foldOver(true, this) then 1 else -1 @@ -4681,11 +4681,15 @@ object Types extends TypeUtils { * Anything else should have already been reduced in `appliedTo` by the TypeAssigner. * May reduce several HKTypeLambda applications before the underlying MatchType is reached. 
*/ - override def underlyingMatchType(using Context): Type = - if ctx.period != validUnderlyingMatch then - cachedUnderlyingMatch = superType.underlyingMatchType - validUnderlyingMatch = validSuper - cachedUnderlyingMatch + override def underlyingNormalizable(using Context): Type = + if ctx.period != validUnderlyingNormalizable then tycon match + case tycon: TypeRef if defn.isCompiletimeAppliedType(tycon.symbol) => + cachedUnderlyingNormalizable = this + validUnderlyingNormalizable = ctx.period + case _ => + cachedUnderlyingNormalizable = superType.underlyingNormalizable + validUnderlyingNormalizable = validSuper + cachedUnderlyingNormalizable override def tryNormalize(using Context): Type = def tryMatchAlias = @@ -4693,7 +4697,7 @@ object Types extends TypeUtils { if MatchTypeTrace.isRecording then MatchTypeTrace.recurseWith(this)(superType.tryNormalize) else - underlyingMatchType.tryNormalize + underlyingNormalizable.tryNormalize else NoType tryCompiletimeConstantFold.orElse(tryMatchAlias) @@ -5267,7 +5271,7 @@ object Types extends TypeUtils { def apply(bound: Type, scrutinee: Type, cases: List[Type])(using Context): MatchType = unique(new CachedMatchType(bound, scrutinee, cases)) - def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp.underlyingMatchType match + def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp.underlyingNormalizable match case mt: MatchType => mt.reducesUsingGadt case _ => false diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a5380f73a2a5..abd2544fb23b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2044,7 +2044,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => false } - val result = pt.underlyingMatchType match { + val result = pt.underlyingNormalizable match { case mt: MatchType if isMatchTypeShaped(mt) => typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case _ => From a6cadec56e92412b2866895f6fbfc6149193f32b Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 30 Apr 2024 13:44:54 +0200 Subject: [PATCH 211/827] Use `underlyingNormalizable` in `Type#tryNormalize` --- .../src/dotty/tools/dotc/core/Types.scala | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 0da5fd58573c..aaf0c9df3003 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1555,11 +1555,13 @@ object Types extends TypeUtils { if (normed.exists) normed else this } - /** If this type can be normalized at the top-level by rewriting match types - * of S[n] types, the result after applying all toplevel normalizations, - * otherwise NoType + /** If this type has an underlying match type or applied compiletime.ops, + * then the result after applying all toplevel normalizations, otherwise NoType. 
*/ - def tryNormalize(using Context): Type = NoType + def tryNormalize(using Context): Type = underlyingNormalizable match + case mt: MatchType => mt.tryNormalize + case tp: AppliedType => tp.tryCompiletimeConstantFold + case _ => NoType private def widenDealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = { val res = this.widen.dealias1(keep, keepOpaques = false) @@ -4692,14 +4694,9 @@ object Types extends TypeUtils { cachedUnderlyingNormalizable override def tryNormalize(using Context): Type = - def tryMatchAlias = - if isMatchAlias then trace(i"normalize $this", typr, show = true): - if MatchTypeTrace.isRecording then - MatchTypeTrace.recurseWith(this)(superType.tryNormalize) - else - underlyingNormalizable.tryNormalize - else NoType - tryCompiletimeConstantFold.orElse(tryMatchAlias) + if isMatchAlias && MatchTypeTrace.isRecording then + MatchTypeTrace.recurseWith(this)(superType.tryNormalize) + else super.tryNormalize /** Is this an unreducible application to wildcard arguments? * This is the case if tycon is higher-kinded. This means From 4fbba66ec6376e2d7f4f936cf79738fa86a49f23 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 30 Apr 2024 13:45:54 +0200 Subject: [PATCH 212/827] `stripLazyRef` for `underlyingNormalizable` --- compiler/src/dotty/tools/dotc/core/Types.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index aaf0c9df3003..805e62adae8b 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -491,7 +491,7 @@ object Types extends TypeUtils { /** Does this application expand to a match type? */ def isMatchAlias(using Context): Boolean = underlyingNormalizable.isMatch - def underlyingNormalizable(using Context): Type = stripped match + def underlyingNormalizable(using Context): Type = stripped.stripLazyRef match case tp: MatchType => tp case tp: AppliedType => tp.underlyingNormalizable case _ => NoType @@ -3257,8 +3257,6 @@ object Types extends TypeUtils { private var myRef: Type | Null = null private var computed = false - override def tryNormalize(using Context): Type = ref.tryNormalize - def ref(using Context): Type = if computed then if myRef == null then From 309b1cfceeb812c589f932e16dbd50b09c11a5f4 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 30 Apr 2024 13:57:30 +0200 Subject: [PATCH 213/827] Drop `handleRecursive` from `MatchType#tryNormalize` There is already a `handleRecursive` in `reduced` Having the two makes error messages undeterministic, see #20269 --- compiler/src/dotty/tools/dotc/core/Types.scala | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 805e62adae8b..bc925d71f240 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5158,11 +5158,7 @@ object Types extends TypeUtils { private var reductionContext: util.MutableMap[Type, Type] | Null = null override def tryNormalize(using Context): Type = - try - reduced.normalized - catch - case ex: Throwable => - handleRecursive("normalizing", s"${scrutinee.show} match ..." 
, ex) + reduced.normalized private def thisMatchType = this From 0b87d7fcdb40808226bf79857fe06246f2840e7a Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 30 Apr 2024 14:01:32 +0200 Subject: [PATCH 214/827] Regroup `tryNormalize` logic --- .../src/dotty/tools/dotc/core/Types.scala | 34 ++++++++----------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index bc925d71f240..9375557d2dba 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -491,11 +491,6 @@ object Types extends TypeUtils { /** Does this application expand to a match type? */ def isMatchAlias(using Context): Boolean = underlyingNormalizable.isMatch - def underlyingNormalizable(using Context): Type = stripped.stripLazyRef match - case tp: MatchType => tp - case tp: AppliedType => tp.underlyingNormalizable - case _ => NoType - /** Is this a higher-kinded type lambda with given parameter variances? * These lambdas are used as the RHS of higher-kinded abstract types or * type aliases. The variance info is strictly needed only for abstract types. @@ -1547,22 +1542,25 @@ object Types extends TypeUtils { } deskolemizer(this) - /** The result of normalization using `tryNormalize`, or the type itself if - * tryNormlize yields NoType - */ - final def normalized(using Context): Type = { - val normed = tryNormalize - if (normed.exists) normed else this - } + /** The result of normalization, or the type itself if none apply. */ + final def normalized(using Context): Type = tryNormalize.orElse(this) /** If this type has an underlying match type or applied compiletime.ops, * then the result after applying all toplevel normalizations, otherwise NoType. */ def tryNormalize(using Context): Type = underlyingNormalizable match - case mt: MatchType => mt.tryNormalize + case mt: MatchType => mt.reduced.normalized case tp: AppliedType => tp.tryCompiletimeConstantFold case _ => NoType + /** Perform successive strippings, and beta-reductions of applied types until + * a match type or applied compiletime.ops is reached, if any, otherwise NoType. + */ + def underlyingNormalizable(using Context): Type = stripped.stripLazyRef match + case tp: MatchType => tp + case tp: AppliedType => tp.underlyingNormalizable + case _ => NoType + private def widenDealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = { val res = this.widen.dealias1(keep, keepOpaques = false) if (res eq this) res else res.widenDealias1(keep) @@ -4677,9 +4675,10 @@ object Types extends TypeUtils { case nil => x foldArgs(op(x, tycon), args) - /** Exists if the tycon is a TypeRef of an alias with an underlying match type. - * Anything else should have already been reduced in `appliedTo` by the TypeAssigner. - * May reduce several HKTypeLambda applications before the underlying MatchType is reached. + /** Exists if the tycon is a TypeRef of an alias with an underlying match type, + * or a compiletime applied type. Anything else should have already been + * reduced in `appliedTo` by the TypeAssigner. This may reduce several + * HKTypeLambda applications before the underlying normalizable type is reached. 
*/ override def underlyingNormalizable(using Context): Type = if ctx.period != validUnderlyingNormalizable then tycon match @@ -5157,9 +5156,6 @@ object Types extends TypeUtils { private var myReduced: Type | Null = null private var reductionContext: util.MutableMap[Type, Type] | Null = null - override def tryNormalize(using Context): Type = - reduced.normalized - private def thisMatchType = this def reduced(using Context): Type = atPhaseNoLater(elimOpaquePhase) { From 9df3942d63513acd28bc5fcbe87c29e879c9d1fd Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 1 May 2024 10:25:36 +0200 Subject: [PATCH 215/827] Add i974.scala to neg-best-effort-pickling.blacklist --- compiler/test/dotc/neg-best-effort-pickling.blacklist | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/test/dotc/neg-best-effort-pickling.blacklist b/compiler/test/dotc/neg-best-effort-pickling.blacklist index 2daf32509ed1..a582f085dd30 100644 --- a/compiler/test/dotc/neg-best-effort-pickling.blacklist +++ b/compiler/test/dotc/neg-best-effort-pickling.blacklist @@ -15,6 +15,7 @@ illegal-match-types.scala i13780-1.scala i20317a.scala i11226.scala +i974.scala # semantic db generation fails in the first compilation i1642.scala From 9465d65e185236353811e3cada62b91df61a1076 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 3 Jun 2024 16:51:07 +0200 Subject: [PATCH 216/827] Add test for #20482 tryNormalize used to not recursively check if tycon of applied type was normalizable, this may be necessary in the case of an applied type dealiasing to a type lambda. Fixes #20482 --- tests/pos/i20482.scala | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 tests/pos/i20482.scala diff --git a/tests/pos/i20482.scala b/tests/pos/i20482.scala new file mode 100644 index 000000000000..2a7680df054d --- /dev/null +++ b/tests/pos/i20482.scala @@ -0,0 +1,16 @@ +trait WrapperType[A] + +case class Foo[A]() + +case class Bar[A]() + +type FooToBar[D[_]] = [A] =>> D[Unit] match { + case Foo[Unit] => Bar[A] +} + +case class Test() +object Test { + implicit val wrapperType: WrapperType[Bar[Test]] = new WrapperType[Bar[Test]] {} +} + +val test = summon[WrapperType[FooToBar[Foo][Test]]] From 1bfa8191f838b67768e2a44187f3128def83d146 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 12 Jun 2024 10:18:56 +0200 Subject: [PATCH 217/827] Fix `AliasingBounds#derivedAlias` an omission from ef7db7ad --- compiler/src/dotty/tools/dotc/core/Types.scala | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 9375557d2dba..fea03cacfa22 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5692,7 +5692,8 @@ object Types extends TypeUtils { /** Common supertype of `TypeAlias` and `MatchAlias` */ abstract class AliasingBounds(val alias: Type) extends TypeBounds(alias, alias) { - def derivedAlias(alias: Type)(using Context): AliasingBounds + def derivedAlias(alias: Type)(using Context): AliasingBounds = + if alias eq this.alias then this else AliasingBounds(alias) override def computeHash(bs: Binders): Int = doHash(bs, alias) override def hashIsStable: Boolean = alias.hashIsStable @@ -5714,10 +5715,7 @@ object Types extends TypeUtils { /** = T */ - class TypeAlias(alias: Type) extends AliasingBounds(alias) { - def derivedAlias(alias: Type)(using Context): AliasingBounds = - if (alias eq this.alias) this else TypeAlias(alias) - } + class 
TypeAlias(alias: Type) extends AliasingBounds(alias) /** = T where `T` is a `MatchType` * @@ -5726,10 +5724,7 @@ object Types extends TypeUtils { * If we assumed full substitutivity, we would have to reject all recursive match * aliases (or else take the jump and allow full recursive types). */ - class MatchAlias(alias: Type) extends AliasingBounds(alias) { - def derivedAlias(alias: Type)(using Context): AliasingBounds = - if (alias eq this.alias) this else MatchAlias(alias) - } + class MatchAlias(alias: Type) extends AliasingBounds(alias) object TypeBounds { def apply(lo: Type, hi: Type)(using Context): TypeBounds = From df2bf408b35c109fed5a0e3e6ba98b2e70770d7b Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Fri, 28 Jun 2024 16:33:59 +0200 Subject: [PATCH 218/827] Bump scala-cli to 1.4.0 --- project/Build.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index cc24940de67c..32a4577fe34c 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -118,9 +118,9 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.3.2" + val scalaCliLauncherVersion = "1.4.0" /** Version of Scala CLI to download (on Windows - last known validated version) */ - val scalaCliLauncherVersionWindows = "1.3.2" + val scalaCliLauncherVersionWindows = "1.4.0" /** Version of Coursier to download for initializing the local maven repo of Scala command */ val coursierJarVersion = "2.1.10" From 1efbb9295971513b1c7ebf7efe519faaac2f5ec6 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 28 Jun 2024 19:53:42 +0200 Subject: [PATCH 219/827] chore: fix doc link in namedTuples docstring (#20770) The `See also` section in the `namedTuples` [scaladoc](https://dotty.epfl.ch/api/scala/runtime/stdLibPatches/language$$experimental$$namedTuples$.html) was pointing to the wrong link. --------- Co-authored-by: Hamza Remmal --- library/src/scala/runtime/stdLibPatches/language.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index d89bd9dcf72e..e930b81923ff 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -93,7 +93,7 @@ object language: /** Experimental support for named tuples. 
* - * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] + * @see [[https://dotty.epfl.ch/docs/reference/experimental/named-tuples]] */ @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") object namedTuples From abf795c6cf23b4464c2f7715b0b4c39a2b24a602 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sat, 29 Jun 2024 15:01:06 +0200 Subject: [PATCH 220/827] Switch default source version to 3.6 --- compiler/src/dotty/tools/dotc/config/SourceVersion.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 935b95003729..38df682de771 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -28,7 +28,7 @@ enum SourceVersion: def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.5` + def defaultSourceVersion = `3.6` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) From 22931f1b3b05b016f8e29a053adabef5d6c116b7 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 1 Jul 2024 10:29:16 +0200 Subject: [PATCH 221/827] Add --skip-cli-updates by default to the scala command --- dist/bin/scala | 1 + dist/bin/scala.bat | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/dist/bin/scala b/dist/bin/scala index c6c6f8807a64..fa4f4cb25a11 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -59,6 +59,7 @@ done # SCALA_CLI_CMD_BASH is an array, set by cli-common-platform eval "${SCALA_CLI_CMD_BASH[@]}" \ "--prog-name scala" \ + "--skip-cli-updates" \ "--cli-default-scala-version \"$SCALA_VERSION\"" \ "-r \"$MVN_REPOSITORY\"" \ "${scala_args[@]}" diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index d473facbbb1c..7418909da263 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -21,8 +21,9 @@ call :setScalaOpts call "%_PROG_HOME%\bin\cli-common-platform.bat" -@rem SCALA_CLI_CMD_WIN is an array, set in cli-common-platform.bat -call %SCALA_CLI_CMD_WIN% "--prog-name" "scala" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* +@rem SCALA_CLI_CMD_WIN is an array, set in cli-common-platform.bat. +@rem WE NEED TO PASS '--skip-cli-updates' for JVM launchers but we actually don't need it for native launchers +call %SCALA_CLI_CMD_WIN% "--prog-name" "scala" "--skip-cli-updates" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) From 5becb63d609f6f1852f196ea65688a0c5b665ee6 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 28 Jun 2024 15:33:02 +0200 Subject: [PATCH 222/827] Add regression test for #20288 fixed in #20527 Close #20288 --- tests/pos/i20288.scala | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 tests/pos/i20288.scala diff --git a/tests/pos/i20288.scala b/tests/pos/i20288.scala new file mode 100644 index 000000000000..5c22dadf5b6d --- /dev/null +++ b/tests/pos/i20288.scala @@ -0,0 +1,23 @@ + +trait Decoder[A] +object Decoder { + given Decoder[DataRow] = ??? + extension [A <: Tuple](d: Decoder[A]) + def ~[B](fd: Field[B]): Decoder[Tuple.Concat[A, Tuple1[B]]] = ??? +} + +trait Field[A] +object Field: + val int: Field[Int] = ??? 
+ extension [A](self: Field[A]) + def ~[B](that: Field[B])(using Decoder[DataRow]): Decoder[(A, B)] = ??? + +trait DataRow +def simpleQuery[S, A](query: String)(using Decoder[A]): Either[Throwable, A] = ??? + +@main def Test = { + import Decoder.* + val fails = simpleQuery("")(using + Field.int ~ Field.int ~ Field.int + ) +} From 0afd2c1c07a985bbfa2e0d2183edcf007983c41a Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Tue, 11 Jun 2024 19:50:52 +0200 Subject: [PATCH 223/827] Replace "qualifier" by "modifier" --- compiler/src/dotty/tools/dotc/reporting/messages.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index b349cf1fb678..ecf542668bf5 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -1838,7 +1838,7 @@ class WrongNumberOfParameters(tree: untpd.Tree, foundCount: Int, pt: Type, expec class DuplicatePrivateProtectedQualifier()(using Context) extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) { - def msg(using Context) = "Duplicate private/protected qualifier" + def msg(using Context) = "Duplicate private/protected modifier" def explain(using Context) = i"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes" } @@ -1847,7 +1847,7 @@ class ExpectedStartOfTopLevelDefinition()(using Context) extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) { def msg(using Context) = "Expected start of definition" def explain(using Context) = - i"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers" + i"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after modifiers" } class NoReturnFromInlineable(owner: Symbol)(using Context) From cfbace4e5213d178e931e02485884b597612bd8d Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 1 Jul 2024 11:01:12 +0200 Subject: [PATCH 224/827] Add an error message for local final defs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Eugene Flesselle Co-Authored-By: anna herlihy Co-Authored-By: Oliver Bračevac --- .../dotty/tools/dotc/parsing/Parsers.scala | 6 ++++ .../tools/dotc/reporting/ErrorMessageID.scala | 1 + .../dotty/tools/dotc/reporting/messages.scala | 6 ++++ tests/neg/17579.check | 30 +++++++++++++++++++ tests/neg/17579.scala | 26 ++++++++++++++++ 5 files changed, 69 insertions(+) create mode 100644 tests/neg/17579.check create mode 100644 tests/neg/17579.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 4c13934f3473..b84313cc972a 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -4560,6 +4560,12 @@ object Parsers { for (imod <- implicitMods.mods) mods = addMod(mods, imod) if (mods.is(Final)) // A final modifier means the local definition is "class-like". // FIXME: Deal with modifiers separately + + // See test 17579. We allow `final` on `given` because these can be + // translated to class definitions, for which `final` is allowed but + // redundant--there is a seperate warning for this. 
+ if isDclIntro && in.token != GIVEN then syntaxError(FinalLocalDef()) + tmplDef(start, mods) else defOrDcl(start, mods) diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 273c3720bc1c..e3613e3f783a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -213,6 +213,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case InlinedAnonClassWarningID // errorNumber: 197 case UnusedSymbolID // errorNumber: 198 case TailrecNestedCallID //errorNumber: 199 + case FinalLocalDefID // errorNumber: 200 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index ecf542668bf5..d33b2c574318 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -1850,6 +1850,12 @@ class ExpectedStartOfTopLevelDefinition()(using Context) i"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after modifiers" } +class FinalLocalDef()(using Context) + extends SyntaxMsg(FinalLocalDefID) { + def msg(using Context) = i"The ${hl("final")} modifier is not allowed on local definitions" + def explain(using Context) = "" +} + class NoReturnFromInlineable(owner: Symbol)(using Context) extends SyntaxMsg(NoReturnFromInlineableID) { def msg(using Context) = i"No explicit ${hl("return")} allowed from inlineable $owner" diff --git a/tests/neg/17579.check b/tests/neg/17579.check new file mode 100644 index 000000000000..1149f9c0faa4 --- /dev/null +++ b/tests/neg/17579.check @@ -0,0 +1,30 @@ +-- [E200] Syntax Error: tests/neg/17579.scala:5:10 --------------------------------------------------------------------- +5 | final val v1 = 42 // error: final modifier is not allowed on local definitions + | ^^^ + | The final modifier is not allowed on local definitions +-- [E200] Syntax Error: tests/neg/17579.scala:6:15 --------------------------------------------------------------------- +6 | final lazy val v2 = 42 // error: final modifier is not allowed on local definitions + | ^^^ + | The final modifier is not allowed on local definitions +-- [E200] Syntax Error: tests/neg/17579.scala:7:10 --------------------------------------------------------------------- +7 | final def v4 = 42 // error: final modifier is not allowed on local definitions + | ^^^ + | The final modifier is not allowed on local definitions +-- [E200] Syntax Error: tests/neg/17579.scala:8:10 --------------------------------------------------------------------- +8 | final var v5 = 42 // error: final modifier is not allowed on local definitions + | ^^^ + | The final modifier is not allowed on local definitions +-- [E200] Syntax Error: tests/neg/17579.scala:9:10 --------------------------------------------------------------------- +9 | final type Foo = String // error: final modifier is not allowed on local definitions + | ^^^^ + | The final modifier is not allowed on local definitions +-- [E088] Syntax Error: tests/neg/17579.scala:14:10 -------------------------------------------------------------------- +14 | final private val v3 = 42 // error: expected start of definition + | ^^^^^^^ + | Expected start of definition + | + | longer explanation available when compiling with `-explain` +-- [E147] Syntax Warning: tests/neg/17579.scala:19:6 
------------------------------------------------------------------- +19 | final given Object with {} // warning: modifier `final` is redundant for this definition + | ^^^^^ + | Modifier final is redundant for this definition diff --git a/tests/neg/17579.scala b/tests/neg/17579.scala new file mode 100644 index 000000000000..268199e9006f --- /dev/null +++ b/tests/neg/17579.scala @@ -0,0 +1,26 @@ +class C: + final var v = 42 // ok + + def f = + final val v1 = 42 // error: final modifier is not allowed on local definitions + final lazy val v2 = 42 // error: final modifier is not allowed on local definitions + final def v4 = 42 // error: final modifier is not allowed on local definitions + final var v5 = 42 // error: final modifier is not allowed on local definitions + final type Foo = String // error: final modifier is not allowed on local definitions + + // We get a different error message here because `private` is also not a + // local modifier token. In the future, we could always parse all tokens and + // then flag those that are not legal at this point. + final private val v3 = 42 // error: expected start of definition + + { + // No error in this case, because the `given` is translated to a class + // definition, for which `final` is redundant but not illegal. + final given Object with {} // warning: modifier `final` is redundant for this definition + } + + { + // Also no error in this case, because we can't easily distinguish it from + // the previous case. + final given Object = new Object {} + } From b0183977c0f79487c7e4bd56136991e4a82d8d18 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Mon, 1 Jul 2024 11:55:12 +0200 Subject: [PATCH 225/827] Bump scala-cli to 1.4.0 (#20859) --- project/Build.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index cc24940de67c..32a4577fe34c 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -118,9 +118,9 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.3.2" + val scalaCliLauncherVersion = "1.4.0" /** Version of Scala CLI to download (on Windows - last known validated version) */ - val scalaCliLauncherVersionWindows = "1.3.2" + val scalaCliLauncherVersionWindows = "1.4.0" /** Version of Coursier to download for initializing the local maven repo of Scala command */ val coursierJarVersion = "2.1.10" From 48282440b243c9c7aa41c104f0124dd1dfe2e2a2 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 1 Jul 2024 13:22:34 +0200 Subject: [PATCH 226/827] Only consider methods with 0 parameters in valueOf (#20543) `valueOf` should only consider getters, which have 0 parameters. test with: ``` scala3-compiler / testOnly dotty.tools.repl.ScriptedTests -- dotty.tools.repl.ScriptedTests.replTests ``` Fixes #19184 --- compiler/src/dotty/tools/repl/Rendering.scala | 3 ++- compiler/test-resources/repl/19184 | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 compiler/test-resources/repl/19184 diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index d5688d1038b4..c127cc959e25 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -115,7 +115,8 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): val objectName = sym.owner.fullName.encode.toString.stripSuffix("$") val resObj: Class[?] 
= Class.forName(objectName, true, classLoader()) val symValue = resObj - .getDeclaredMethods.find(_.getName == sym.name.encode.toString) + .getDeclaredMethods + .find(method => method.getName == sym.name.encode.toString && method.getParameterCount == 0) .flatMap(result => rewrapValueClass(sym.info.classSymbol, result.invoke(null))) symValue .filter(_ => sym.is(Flags.Method) || sym.info != defn.UnitType) diff --git a/compiler/test-resources/repl/19184 b/compiler/test-resources/repl/19184 new file mode 100644 index 000000000000..cf4ce6f1d22f --- /dev/null +++ b/compiler/test-resources/repl/19184 @@ -0,0 +1,5 @@ +scala> def o(s: String) = "o"; def oo(s: String) = "oo"; val o = "o"; val oo = "oo" +def o(s: String): String +def oo(s: String): String +val o: String = o +val oo: String = oo From cc2dd1ec43f0f6cef282033a1754c506cd417ad7 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 26 Feb 2024 14:32:18 -0800 Subject: [PATCH 227/827] use Scala 2.13.13 stdlib (was .12) --- community-build/community-projects/stdLib213 | 2 +- project/Build.scala | 8 ++++---- project/Scala2LibraryBootstrappedMiMaFilters.scala | 3 --- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/community-build/community-projects/stdLib213 b/community-build/community-projects/stdLib213 index 6243e902928c..fcc67cd56c67 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit 6243e902928c344fb0e82e21120bb257f08a2af2 +Subproject commit fcc67cd56c67851bf31019ec25ccb09d08b9561b diff --git a/project/Build.scala b/project/Build.scala index cc24940de67c..9e5ca1d59881 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -144,8 +144,8 @@ object Build { * scala-library. */ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.12" - case Bootstrapped => "2.13.12" + case NonBootstrapped => "2.13.13" + case Bootstrapped => "2.13.13" } /** Version of the scala-library for which we will generate TASTy. @@ -155,7 +155,7 @@ object Build { * We can use nightly versions to tests the future compatibility in development. 
* Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang */ - val stdlibBootstrappedVersion = "2.13.12" + val stdlibBootstrappedVersion = "2.13.13" val dottyOrganization = "org.scala-lang" val dottyGithubUrl = "https://github.com/scala/scala3" @@ -1361,7 +1361,7 @@ object Build { .exclude("org.eclipse.lsp4j","org.eclipse.lsp4j.jsonrpc"), "org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.20.1", ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.12" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.13" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings diff --git a/project/Scala2LibraryBootstrappedMiMaFilters.scala b/project/Scala2LibraryBootstrappedMiMaFilters.scala index bd149d5a910b..0d2b5a7fd945 100644 --- a/project/Scala2LibraryBootstrappedMiMaFilters.scala +++ b/project/Scala2LibraryBootstrappedMiMaFilters.scala @@ -78,9 +78,6 @@ object Scala2LibraryBootstrappedMiMaFilters { "scala.collection.IterableOnceOps#Maximized.this", // New in 2.13.11: private inner class "scala.util.Properties.", "scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5", - // New in 2.13.12 -- can be removed once scala/scala#10549 lands in 2.13.13 - // and we take the upgrade here - "scala.collection.immutable.MapNodeRemoveAllSetNodeIterator.next", ).map(ProblemFilters.exclude[DirectMissingMethodProblem]) } ) From e81b4a3b7c88223af8d262c86d5c798a20f0d0eb Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 1 Jul 2024 13:03:41 +0200 Subject: [PATCH 228/827] Upgrade Scala 2 to 2.13.14 --- project/Build.scala | 8 ++++---- project/Scala2LibraryBootstrappedMiMaFilters.scala | 4 ++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 9e5ca1d59881..4a5f1609aa24 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -144,8 +144,8 @@ object Build { * scala-library. */ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.13" - case Bootstrapped => "2.13.13" + case NonBootstrapped => "2.13.14" + case Bootstrapped => "2.13.14" } /** Version of the scala-library for which we will generate TASTy. @@ -155,7 +155,7 @@ object Build { * We can use nightly versions to tests the future compatibility in development. 
* Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang */ - val stdlibBootstrappedVersion = "2.13.13" + val stdlibBootstrappedVersion = "2.13.14" val dottyOrganization = "org.scala-lang" val dottyGithubUrl = "https://github.com/scala/scala3" @@ -1361,7 +1361,7 @@ object Build { .exclude("org.eclipse.lsp4j","org.eclipse.lsp4j.jsonrpc"), "org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.20.1", ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.13" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.14" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings diff --git a/project/Scala2LibraryBootstrappedMiMaFilters.scala b/project/Scala2LibraryBootstrappedMiMaFilters.scala index 0d2b5a7fd945..102a2a50e9d4 100644 --- a/project/Scala2LibraryBootstrappedMiMaFilters.scala +++ b/project/Scala2LibraryBootstrappedMiMaFilters.scala @@ -172,6 +172,10 @@ object Scala2LibraryBootstrappedMiMaFilters { "scala.collection.mutable.LinkedHashSet.defaultLoadFactor", // private[collection] final def "scala.collection.mutable.LinkedHashSet.defaultinitialSize", // private[collection] final def "scala.collection.mutable.OpenHashMap.nextPositivePowerOfTwo", // private[mutable] def + // New in 2.13.13 + "scala.collection.mutable.ArrayBuffer.resizeUp", // private[mutable] def + // New in 2.13.14 + "scala.util.Properties.consoleIsTerminal", // private[scala] lazy val ).map(ProblemFilters.exclude[DirectMissingMethodProblem]) ++ Seq( // MissingFieldProblem: static field ... in object ... does not have a correspondent in other version "scala.Array.UnapplySeqWrapper", From 2a142cd5d49a050cee78b860d1428679cee0a2a1 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 1 Jul 2024 13:05:24 +0200 Subject: [PATCH 229/827] Phiscally remove the ignored Scala 2 library-aux files instead of filtering them out in `Compile / sources` (not reliable, for some reasone the AnyRef.scala was still compiled) --- project/Build.scala | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 4a5f1609aa24..df90a3e2bc31 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1126,19 +1126,23 @@ object Build { IO.createDirectory(trgDir) IO.unzip(scalaLibrarySourcesJar, trgDir) - ((trgDir ** "*.scala") +++ (trgDir ** "*.java")).get.toSet + val (ignoredSources, sources) = + ((trgDir ** "*.scala") +++ (trgDir ** "*.java")).get.toSet + .partition{file => + // sources from https://github.com/scala/scala/tree/2.13.x/src/library-aux + val path = file.getPath.replace('\\', '/') + path.endsWith("scala-library-src/scala/Any.scala") || + path.endsWith("scala-library-src/scala/AnyVal.scala") || + path.endsWith("scala-library-src/scala/AnyRef.scala") || + path.endsWith("scala-library-src/scala/Nothing.scala") || + path.endsWith("scala-library-src/scala/Null.scala") || + path.endsWith("scala-library-src/scala/Singleton.scala") + } + // These sources should be never compiled, filtering them out was not working correctly sometimes + ignoredSources.foreach(_.delete()) + sources } (Set(scalaLibrarySourcesJar)).toSeq }.taskValue, - (Compile / sources) ~= (_.filterNot { file => - // sources from https://github.com/scala/scala/tree/2.13.x/src/library-aux - val path = file.getPath.replace('\\', '/') - path.endsWith("scala-library-src/scala/Any.scala") || - 
path.endsWith("scala-library-src/scala/AnyVal.scala") || - path.endsWith("scala-library-src/scala/AnyRef.scala") || - path.endsWith("scala-library-src/scala/Nothing.scala") || - path.endsWith("scala-library-src/scala/Null.scala") || - path.endsWith("scala-library-src/scala/Singleton.scala") - }), (Compile / sources) := { val files = (Compile / sources).value val overwrittenSourcesDir = (Compile / scalaSource).value From 27bd97554cf486b44c15c68d52b3b33cfb5b84ca Mon Sep 17 00:00:00 2001 From: Florian3k Date: Mon, 1 Jul 2024 13:51:43 +0200 Subject: [PATCH 230/827] Add sources of synthetic classes to sources jar --- library-aux/src/scala/AnyKind.scala | 7 +++++++ library-aux/src/scala/Matchable.scala | 7 +++++++ library-aux/src/scala/andType.scala | 7 +++++++ library-aux/src/scala/orType.scala | 7 +++++++ project/Build.scala | 6 +++++- 5 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 library-aux/src/scala/AnyKind.scala create mode 100644 library-aux/src/scala/Matchable.scala create mode 100644 library-aux/src/scala/andType.scala create mode 100644 library-aux/src/scala/orType.scala diff --git a/library-aux/src/scala/AnyKind.scala b/library-aux/src/scala/AnyKind.scala new file mode 100644 index 000000000000..02c4d3747bcc --- /dev/null +++ b/library-aux/src/scala/AnyKind.scala @@ -0,0 +1,7 @@ +package scala + +/** The super-type of all types. + * + * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. + */ +final abstract class AnyKind diff --git a/library-aux/src/scala/Matchable.scala b/library-aux/src/scala/Matchable.scala new file mode 100644 index 000000000000..5cef77929417 --- /dev/null +++ b/library-aux/src/scala/Matchable.scala @@ -0,0 +1,7 @@ +package scala + +/** The base trait of types that can be safely pattern matched against. + * + * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. + */ +trait Matchable diff --git a/library-aux/src/scala/andType.scala b/library-aux/src/scala/andType.scala new file mode 100644 index 000000000000..968cc2e9a8fa --- /dev/null +++ b/library-aux/src/scala/andType.scala @@ -0,0 +1,7 @@ +package scala + +/** The intersection of two types. + * + * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. + */ +type &[A, B] = A & B diff --git a/library-aux/src/scala/orType.scala b/library-aux/src/scala/orType.scala new file mode 100644 index 000000000000..b6dc8ea2563c --- /dev/null +++ b/library-aux/src/scala/orType.scala @@ -0,0 +1,7 @@ +package scala + +/** The union of two types. + * + * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. 
+ */ +type |[A, B] = A | B diff --git a/project/Build.scala b/project/Build.scala index c1a8800421a6..fdda870703b3 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -987,7 +987,11 @@ object Build { "-sourcepath", (Compile / sourceDirectories).value.map(_.getAbsolutePath).distinct.mkString(File.pathSeparator), "-Yexplicit-nulls", ), - (Compile / doc / scalacOptions) ++= ScaladocConfigs.DefaultGenerationSettings.value.settings + (Compile / doc / scalacOptions) ++= ScaladocConfigs.DefaultGenerationSettings.value.settings, + (Compile / packageSrc / mappings) ++= { + val auxBase = (ThisBuild / baseDirectory).value / "library-aux/src" + auxBase ** "*.scala" pair io.Path.relativeTo(auxBase) + }, ) lazy val `scala3-library` = project.in(file("library")).asDottyLibrary(NonBootstrapped) From 9e498fa23d64b3fd55c8dd8057390421e3d13e83 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 1 Jul 2024 14:53:24 +0200 Subject: [PATCH 231/827] Better error diagnostics for illegal match cases --- .../tools/dotc/core/MatchTypeTrace.scala | 4 +- .../src/dotty/tools/dotc/core/Types.scala | 77 +++++++++++-------- tests/neg/i17121.check | 4 + tests/neg/illegal-match-types.check | 18 +++-- 4 files changed, 65 insertions(+), 38 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index e16a950aa32a..00143f05b4fb 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -138,8 +138,10 @@ object MatchTypeTrace: | ${casesText(cases)}""" def illegalPatternText(scrut: Type, cas: MatchTypeCaseSpec.LegacyPatMat)(using Context): String = + val explanation = + if cas.err == null then "" else s"The pattern contains ${cas.err.explanation}.\n" i"""The match type contains an illegal case: | ${caseText(cas)} - |(this error can be ignored for now with `-source:3.3`)""" + |$explanation(this error can be ignored for now with `-source:3.3`)""" end MatchTypeTrace diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index cb47bd92352e..6d84242648b2 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5307,10 +5307,25 @@ object Types extends TypeUtils { case _ => true end MatchTypeCasePattern + enum MatchTypeCaseError: + case Alias(sym: Symbol) + case RefiningBounds(name: TypeName) + case StructuralType(name: TypeName) + case UnaccountedTypeParam(name: TypeName) + + def explanation(using Context) = this match + case Alias(sym) => i"a type alias `${sym.name}`" + case RefiningBounds(name) => i"an abstract type member `$name` with bounds that need verification" + case StructuralType(name) => i"an abstract type member `$name` that does not refine a member in its parent" + case UnaccountedTypeParam(name) => i"an unaccounted type parameter `$name`" + end MatchTypeCaseError + + type MatchTypeCaseResult = MatchTypeCasePattern | MatchTypeCaseError + enum MatchTypeCaseSpec: case SubTypeTest(origMatchCase: Type, pattern: Type, body: Type) case SpeccedPatMat(origMatchCase: HKTypeLambda, captureCount: Int, pattern: MatchTypeCasePattern, body: Type) - case LegacyPatMat(origMatchCase: HKTypeLambda) + case LegacyPatMat(origMatchCase: HKTypeLambda, err: MatchTypeCaseError | Null) case MissingCaptures(origMatchCase: HKTypeLambda, missing: collection.BitSet) def origMatchCase: Type @@ -5321,18 +5336,18 @@ object Types extends TypeUtils { cas match case cas: HKTypeLambda 
if !sourceVersion.isAtLeast(SourceVersion.`3.4`) => // Always apply the legacy algorithm under -source:3.3 and below - LegacyPatMat(cas) + LegacyPatMat(cas, null) case cas: HKTypeLambda => val defn.MatchCase(pat, body) = cas.resultType: @unchecked val missing = checkCapturesPresent(cas, pat) if !missing.isEmpty then MissingCaptures(cas, missing) else - val specPattern = tryConvertToSpecPattern(cas, pat) - if specPattern != null then - SpeccedPatMat(cas, cas.paramNames.size, specPattern, body) - else - LegacyPatMat(cas) + tryConvertToSpecPattern(cas, pat) match + case specPattern: MatchTypeCasePattern => + SpeccedPatMat(cas, cas.paramNames.size, specPattern, body) + case err: MatchTypeCaseError => + LegacyPatMat(cas, err) case _ => val defn.MatchCase(pat, body) = cas: @unchecked SubTypeTest(cas, pat, body) @@ -5370,15 +5385,15 @@ object Types extends TypeUtils { * It must adhere to the specification of legal patterns defined at * https://docs.scala-lang.org/sips/match-types-spec.html#legal-patterns * - * Returns `null` if the pattern in `caseLambda` is a not a legal pattern. + * Returns a MatchTypeCaseError if the pattern in `caseLambda` is a not a legal pattern. */ - private def tryConvertToSpecPattern(caseLambda: HKTypeLambda, pat: Type)(using Context): MatchTypeCasePattern | Null = - var typeParamRefsAccountedFor: Int = 0 + private def tryConvertToSpecPattern(caseLambda: HKTypeLambda, pat: Type)(using Context): MatchTypeCaseResult = + var typeParamRefsUnaccountedFor = (0 until caseLambda.paramNames.length).to(mutable.BitSet) - def rec(pat: Type, variance: Int): MatchTypeCasePattern | Null = + def rec(pat: Type, variance: Int): MatchTypeCaseResult = pat match case pat @ TypeParamRef(binder, num) if binder eq caseLambda => - typeParamRefsAccountedFor += 1 + typeParamRefsUnaccountedFor -= num MatchTypeCasePattern.Capture(num, isWildcard = pat.paramName.is(WildcardParamName)) case pat @ AppliedType(tycon: TypeRef, args) if variance == 1 => @@ -5394,13 +5409,13 @@ object Types extends TypeUtils { MatchTypeCasePattern.BaseTypeTest(tycon, argPatterns, needsConcreteScrut) } else if defn.isCompiletime_S(tyconSym) && args.sizeIs == 1 then - val argPattern = rec(args.head, variance) - if argPattern == null then - null - else if argPattern.isTypeTest then - MatchTypeCasePattern.TypeTest(pat) - else - MatchTypeCasePattern.CompileTimeS(argPattern) + rec(args.head, variance) match + case err: MatchTypeCaseError => + err + case argPattern: MatchTypeCasePattern => + if argPattern.isTypeTest + then MatchTypeCasePattern.TypeTest(pat) + else MatchTypeCasePattern.CompileTimeS(argPattern) else tycon.info match case _: RealTypeBounds => @@ -5416,7 +5431,7 @@ object Types extends TypeUtils { */ rec(pat.superType, variance) case _ => - null + MatchTypeCaseError.Alias(tyconSym) case pat @ AppliedType(tycon: TypeParamRef, _) if variance == 1 => recAbstractTypeConstructor(pat) @@ -5437,40 +5452,40 @@ object Types extends TypeUtils { MatchTypeCasePattern.TypeMemberExtractor(refinedName, capture) else // Otherwise, a type-test + capture combo might be necessary, and we are out of spec - null + MatchTypeCaseError.RefiningBounds(refinedName) case _ => // If the member does not refine a member of the `parent`, we are out of spec - null + MatchTypeCaseError.StructuralType(refinedName) case _ => MatchTypeCasePattern.TypeTest(pat) end rec - def recAbstractTypeConstructor(pat: AppliedType): MatchTypeCasePattern | Null = + def recAbstractTypeConstructor(pat: AppliedType): MatchTypeCaseResult = recArgPatterns(pat) { 
argPatterns => MatchTypeCasePattern.AbstractTypeConstructor(pat.tycon, argPatterns) } end recAbstractTypeConstructor - def recArgPatterns(pat: AppliedType)(whenNotTypeTest: List[MatchTypeCasePattern] => MatchTypeCasePattern | Null): MatchTypeCasePattern | Null = + def recArgPatterns(pat: AppliedType)(whenNotTypeTest: List[MatchTypeCasePattern] => MatchTypeCaseResult): MatchTypeCaseResult = val AppliedType(tycon, args) = pat val tparams = tycon.typeParams val argPatterns = args.zip(tparams).map { (arg, tparam) => rec(arg, tparam.paramVarianceSign) } - if argPatterns.exists(_ == null) then - null - else - val argPatterns1 = argPatterns.asInstanceOf[List[MatchTypeCasePattern]] // they are not null + argPatterns.find(_.isInstanceOf[MatchTypeCaseError]).getOrElse: + val argPatterns1 = argPatterns.asInstanceOf[List[MatchTypeCasePattern]] // they are not errors if argPatterns1.forall(_.isTypeTest) then MatchTypeCasePattern.TypeTest(pat) else whenNotTypeTest(argPatterns1) end recArgPatterns - val result = rec(pat, variance = 1) - if typeParamRefsAccountedFor == caseLambda.paramNames.size then result - else null + rec(pat, variance = 1) match + case err: MatchTypeCaseError => err + case ok if typeParamRefsUnaccountedFor.isEmpty => ok + case _ => + MatchTypeCaseError.UnaccountedTypeParam(caseLambda.paramNames(typeParamRefsUnaccountedFor.head)) end tryConvertToSpecPattern end MatchTypeCaseSpec diff --git a/tests/neg/i17121.check b/tests/neg/i17121.check index 59895dd2474a..4a7dd332d8dc 100644 --- a/tests/neg/i17121.check +++ b/tests/neg/i17121.check @@ -3,22 +3,26 @@ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | The match type contains an illegal case: | case Consumer[List[t]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) -- [E191] Type Error: tests/neg/i17121.scala:15:17 --------------------------------------------------------------------- 15 | type G2[X] = X match { case Consumer[Consumer[t]] => t } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | The match type contains an illegal case: | case Consumer[Consumer[t]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) -- [E191] Type Error: tests/neg/i17121.scala:17:17 --------------------------------------------------------------------- 17 | type G3[X] = X match { case Consumer[Consumer[Consumer[t]]] => t } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | The match type contains an illegal case: | case Consumer[Consumer[Consumer[t]]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) -- [E191] Type Error: tests/neg/i17121.scala:19:17 --------------------------------------------------------------------- 19 | type G4[X] = X match { case Consumer[List[Consumer[t]]] => t } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | The match type contains an illegal case: | case Consumer[List[Consumer[t]]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) diff --git a/tests/neg/illegal-match-types.check b/tests/neg/illegal-match-types.check index f5f0f2d07c51..36862f3b9b92 100644 --- a/tests/neg/illegal-match-types.check +++ b/tests/neg/illegal-match-types.check @@ -3,6 +3,7 @@ | ^ | The match type contains an illegal case: | case Inv[Cov[t]] => t + | The pattern contains an unaccounted type parameter `t`. 
| (this error can be ignored for now with `-source:3.3`) 8 | case Inv[Cov[t]] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:10:26 -------------------------------------------------------- @@ -10,6 +11,7 @@ | ^ | The match type contains an illegal case: | case Contra[Cov[t]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) 11 | case Contra[Cov[t]] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:15:22 -------------------------------------------------------- @@ -17,6 +19,7 @@ | ^ | The match type contains an illegal case: | case t & Seq[Any] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) 16 | case t & Seq[Any] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:22:33 -------------------------------------------------------- @@ -24,19 +27,22 @@ | ^ | The match type contains an illegal case: | case IsSeq[t] => t + | The pattern contains a type alias `IsSeq`. | (this error can be ignored for now with `-source:3.3`) 23 | case IsSeq[t] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:29:34 -------------------------------------------------------- 29 |type TypeMemberExtractorMT[X] = X match // error | ^ - | The match type contains an illegal case: - | case TypeMemberAux[t] => t - | (this error can be ignored for now with `-source:3.3`) + | The match type contains an illegal case: + | case TypeMemberAux[t] => t + | The pattern contains an abstract type member `TypeMember` that does not refine a member in its parent. + | (this error can be ignored for now with `-source:3.3`) 30 | case TypeMemberAux[t] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:40:35 -------------------------------------------------------- 40 |type TypeMemberExtractorMT2[X] = X match // error | ^ - | The match type contains an illegal case: - | case TypeMemberAux2[t] => t - | (this error can be ignored for now with `-source:3.3`) + | The match type contains an illegal case: + | case TypeMemberAux2[t] => t + | The pattern contains an abstract type member `TypeMember` with bounds that need verification. + | (this error can be ignored for now with `-source:3.3`) 41 | case TypeMemberAux2[t] => t From 73236c625cceb93a6906f2fc0fb9c1517907552a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 13:22:36 +0000 Subject: [PATCH 232/827] Bump VirtusLab/scala-cli-setup from 1.3.2 to 1.4.0 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.3.2 to 1.4.0. - [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.3.2...v1.4.0) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 108c412df5a3..6c6f1f67bc9c 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.3.2 + - uses: VirtusLab/scala-cli-setup@v1.4.0 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From 71597908625cfc25320e3bbbd29cdc0de62096a1 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 1 Jul 2024 17:02:28 +0200 Subject: [PATCH 233/827] fix issue 20901: etaCollapse context bound type --- .../src/dotty/tools/dotc/typer/Typer.scala | 7 +- tests/pos/i20901/Foo.scala | 6 + tests/pos/i20901/Foo.tastycheck | 124 ++++++++++++++++++ 3 files changed, 134 insertions(+), 3 deletions(-) create mode 100644 tests/pos/i20901/Foo.scala create mode 100644 tests/pos/i20901/Foo.tastycheck diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a5380f73a2a5..c90de0ae19a1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2401,13 +2401,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedContextBoundTypeTree(tree: untpd.ContextBoundTypeTree)(using Context): Tree = val tycon = typedType(tree.tycon) - val tyconSplice = untpd.TypedSplice(tycon) + def spliced(tree: Tree) = untpd.TypedSplice(tree) val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) if tycon.tpe.typeParams.nonEmpty then - typed(untpd.AppliedTypeTree(tyconSplice, tparam :: Nil)) + val tycon0 = tycon.withType(tycon.tpe.etaCollapse) + typed(untpd.AppliedTypeTree(spliced(tycon0), tparam :: Nil)) else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) - typed(untpd.RefinedTypeTree(tyconSplice, List(untpd.TypeDef(tpnme.Self, tparamSplice)))) + typed(untpd.RefinedTypeTree(spliced(tycon), List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else def selfNote = if Feature.enabled(modularity) then diff --git a/tests/pos/i20901/Foo.scala b/tests/pos/i20901/Foo.scala new file mode 100644 index 000000000000..c1277781db38 --- /dev/null +++ b/tests/pos/i20901/Foo.scala @@ -0,0 +1,6 @@ +//> using options -Ytest-pickler-check + +import reflect.ClassTag + +class Foo: + def mkArray[T: ClassTag] = ??? diff --git a/tests/pos/i20901/Foo.tastycheck b/tests/pos/i20901/Foo.tastycheck new file mode 100644 index 000000000000..0201bfec2056 --- /dev/null +++ b/tests/pos/i20901/Foo.tastycheck @@ -0,0 +1,124 @@ +Header: + version: + tooling: + UUID: + +Names (276 bytes, starting from ): + 0: ASTs + 1: + 2: scala + 3: reflect + 4: scala[Qualified . reflect] + 5: ClassTag + 6: Foo + 7: + 8: java + 9: lang + 10: java[Qualified . lang] + 11: Object + 12: java[Qualified . lang][Qualified . Object] + 13: [Signed Signature(List(),java.lang.Object) @] + 14: Unit + 15: mkArray + 16: T + 17: Nothing + 18: Any + 19: evidence$ + 20: [Unique evidence$ 1] + 21: ??? + 22: Predef + 23: SourceFile + 24: annotation + 25: scala[Qualified . annotation] + 26: internal + 27: scala[Qualified . annotation][Qualified . internal] + 28: scala[Qualified . annotation][Qualified . internal][Qualified . 
SourceFile] + 29: String + 30: java[Qualified . lang][Qualified . String] + 31: [Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @] + 32: + 33: Positions + 34: Comments + 35: Attributes + +Trees (94 bytes, starting from ): + 0: PACKAGE(92) + 2: TERMREFpkg 1 [] + 4: IMPORT(4) + 6: TERMREFpkg 4 [scala[Qualified . reflect]] + 8: IMPORTED 5 [ClassTag] + 10: TYPEDEF(82) 6 [Foo] + 13: TEMPLATE(61) + 15: APPLY(10) + 17: SELECTin(8) 13 [[Signed Signature(List(),java.lang.Object) @]] + 20: NEW + 21: TYPEREF 11 [Object] + 23: TERMREFpkg 10 [java[Qualified . lang]] + 25: SHAREDtype 21 + 27: DEFDEF(7) 7 [] + 30: EMPTYCLAUSE + 31: TYPEREF 14 [Unit] + 33: TERMREFpkg 2 [scala] + 35: STABLE + 36: DEFDEF(38) 15 [mkArray] + 39: TYPEPARAM(11) 16 [T] + 42: TYPEBOUNDStpt(8) + 44: TYPEREF 17 [Nothing] + 46: SHAREDtype 33 + 48: TYPEREF 18 [Any] + 50: SHAREDtype 33 + 52: PARAM(14) 20 [[Unique evidence$ 1]] + 55: APPLIEDtpt(10) + 57: IDENTtpt 5 [ClassTag] + 59: TYPEREF 5 [ClassTag] + 61: SHAREDtype 6 + 63: IDENTtpt 16 [T] + 65: TYPEREFdirect 39 + 67: IMPLICIT + 68: SHAREDtype 44 + 70: TERMREF 21 [???] + 72: TERMREF 22 [Predef] + 74: SHAREDtype 33 + 76: ANNOTATION(16) + 78: TYPEREF 23 [SourceFile] + 80: TERMREFpkg 27 [scala[Qualified . annotation][Qualified . internal]] + 82: APPLY(10) + 84: SELECTin(6) 31 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] + 87: NEW + 88: SHAREDtype 78 + 90: SHAREDtype 78 + 92: STRINGconst 32 [] + 94: + +Positions (72 bytes, starting from ): + lines: 7 + line sizes: + 38, 0, 23, 0, 10, 32, 0 + positions: + 0: 40 .. 108 + 4: 40 .. 63 + 6: 47 .. 54 + 8: 55 .. 63 + 10: 65 .. 108 + 13: 78 .. 108 + 21: 71 .. 71 + 27: 78 .. 78 + 31: 78 .. 78 + 36: 78 .. 108 + 39: 90 .. 101 + 44: 93 .. 93 + 48: 93 .. 93 + 52: 93 .. 101 + 57: 93 .. 101 + 63: 93 .. 101 + 68: 102 .. 102 + 70: 105 .. 108 + 82: 65 .. 108 + 88: 65 .. 65 + 92: 65 .. 
65 + + source paths: + 0: 32 [] + +Attributes (2 bytes, starting from ): + SOURCEFILEattr 32 [] From 62605a65a4ec201a8f591d4eb08e1c2e240ad6a5 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 1 Jul 2024 21:39:14 +0200 Subject: [PATCH 234/827] update semanticdb test (restore references) --- tests/semanticdb/expect/Methods.expect.scala | 2 +- tests/semanticdb/metac.expect | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/semanticdb/expect/Methods.expect.scala b/tests/semanticdb/expect/Methods.expect.scala index 4ec723ad584e..e1fcfa6880e1 100644 --- a/tests/semanticdb/expect/Methods.expect.scala +++ b/tests/semanticdb/expect/Methods.expect.scala @@ -15,7 +15,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] { def m6/*<-example::Methods#m6().*/(x/*<-example::Methods#m6().(x)*/: Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+1).*/(x/*<-example::Methods#m6(+1).(x)*/: List/*->example::Methods#List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+2).*/(x/*<-example::Methods#m6(+2).(x)*/: scala.List/*->scala::package.List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ - def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ + def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->scala::math::Ordering#*//*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ def `m8()./*<-example::Methods#`m8().`().*/`() = ???/*->scala::Predef.`???`().*/ class `m9()./*<-example::Methods#`m9().`#*/` def m9/*<-example::Methods#m9().*/(x/*<-example::Methods#m9().(x)*/: `m9().`/*->example::Methods#`m9().`#*/) = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 84c3e7c6a110..221422de6505 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2588,7 +2588,7 @@ Uri => Methods.scala Text => empty Language => Scala Symbols => 82 entries -Occurrences => 156 entries +Occurrences => 157 entries Symbols: example/Methods# => class Methods [typeparam T ] extends Object { self: Methods[T] => +44 decls } @@ -2732,6 +2732,7 @@ Occurrences: [16:29..16:32): ??? -> scala/Predef.`???`(). [17:6..17:8): m7 <- example/Methods#m7(). 
[17:9..17:10): U <- example/Methods#m7().[U] +[17:12..17:20): Ordering -> scala/math/Ordering# [17:12..17:20): Ordering -> example/Methods#m7().[U] [17:12..17:12): <- example/Methods#m7().(evidence$1) [17:22..17:23): c <- example/Methods#m7().(c) From fa0f1ce0c1131ba185c68c464a5cfee24f4c63bb Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 1 Jul 2024 22:48:33 +0200 Subject: [PATCH 235/827] Fix failing CompletionScalaCliSuite tests due to circe releasing Scala Native 0.5 artifacts --- .../completion/CompletionScalaCliSuite.scala | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala index 0d86922d4e70..0a74aed35f48 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala @@ -28,7 +28,8 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |// //> using lib ??? |//> using lib io.circe::circe-core_native0.4 |package A - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) @Test def `version-sort` = @@ -51,6 +52,9 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|circe-core_native0.4_2.12 |circe-core_native0.4_2.13 |circe-core_native0.4_3 + |circe-core_native0.5_2.12 + |circe-core_native0.5_2.13 + |circe-core_native0.5_3 |""".stripMargin ) @@ -78,7 +82,9 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|//> using lib "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, - "circe-core_native0.4" + """circe-core_native0.4 + |circe-core_native0.5 + |""".stripMargin ) @Test def `script` = @@ -92,6 +98,9 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|circe-core_native0.4_2.12 |circe-core_native0.4_2.13 |circe-core_native0.4_3 + |circe-core_native0.5_2.12 + |circe-core_native0.5_2.13 + |circe-core_native0.5_3 |""".stripMargin, filename = "script.sc.scala", enablePackageWrap = false @@ -138,7 +147,8 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|//> using libs "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, - "circe-core_native0.4" + """circe-core_native0.4 + |circe-core_native0.5""".stripMargin ) private def scriptWrapper(code: String, filename: String): String = From 81f2c8e4d267f29f9d5aff72144759a2810ac288 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Mon, 1 Jul 2024 23:27:40 +0200 Subject: [PATCH 236/827] Make `WcheckInit` public This is needed to make ScalaSettingsTests.scala compile --- compiler/src/dotty/tools/dotc/config/ScalaSettings.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index bcfc651aeb92..011b31aba50a 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -300,7 +300,7 @@ private sealed trait WarningSettings: def typeParameterShadow(using Context) = allOr("type-parameter-shadow") - private val WcheckInit: Setting[Boolean] = BooleanSetting(WarningSetting, "Wsafe-init", "Ensure safe initialization of objects.") + val WcheckInit: Setting[Boolean] = BooleanSetting(WarningSetting, "Wsafe-init", "Ensure safe initialization of objects.") object Whas: def 
allOr(s: Setting[Boolean])(using Context): Boolean = From c60699e8bbbfcc81a8ec7921f962bc154d44d65b Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Tue, 2 Jul 2024 12:35:01 +0200 Subject: [PATCH 237/827] Ignore failing tests instead of expecting for completions for both 0.4 and 0.5 SN versins - the outputs seems be non deterministic in the CI --- .../completion/CompletionScalaCliSuite.scala | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala index 0a74aed35f48..79d35944c84d 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala @@ -3,6 +3,7 @@ package dotty.tools.pc.tests.completion import dotty.tools.pc.base.BaseCompletionSuite import org.junit.Test +import org.junit.Ignore class CompletionScalaCliSuite extends BaseCompletionSuite: @@ -44,6 +45,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |""".stripMargin, ) + @Ignore @Test def `single-colon` = check( """|//> using lib "io.circe:circe-core_na@@ @@ -52,9 +54,6 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|circe-core_native0.4_2.12 |circe-core_native0.4_2.13 |circe-core_native0.4_3 - |circe-core_native0.5_2.12 - |circe-core_native0.5_2.13 - |circe-core_native0.5_3 |""".stripMargin ) @@ -77,16 +76,16 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |""".stripMargin, ) + @Ignore @Test def `multiple-libs` = check( """|//> using lib "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, - """circe-core_native0.4 - |circe-core_native0.5 - |""".stripMargin + "circe-core_native0.4" ) + @Ignore @Test def `script` = check( scriptWrapper( @@ -98,9 +97,6 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|circe-core_native0.4_2.12 |circe-core_native0.4_2.13 |circe-core_native0.4_3 - |circe-core_native0.5_2.12 - |circe-core_native0.5_2.13 - |circe-core_native0.5_3 |""".stripMargin, filename = "script.sc.scala", enablePackageWrap = false @@ -142,13 +138,13 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |io.circul""".stripMargin ) + @Ignore @Test def `multiple-deps2` = check( """|//> using libs "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, - """circe-core_native0.4 - |circe-core_native0.5""".stripMargin + "circe-core_native0.4" ) private def scriptWrapper(code: String, filename: String): String = From 3d7f3a1f6f1610cf942a86a26bef6eefe760b195 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Tue, 2 Jul 2024 16:33:55 +0200 Subject: [PATCH 238/827] Set base version 3.6.0-RC1 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 28a8f98637f6..28146989e40c 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,7 @@ object Build { val referenceVersion = "3.4.2-RC1" - val baseVersion = "3.5.1-RC1" + val baseVersion = "3.6.0-RC1" // LTS or Next val versionLine = "Next" From 05f45b1475a56edbd54d0b037540b5ce6d46c64b Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Tue, 2 Jul 2024 16:56:38 +0200 Subject: [PATCH 239/827] fix: semanticdb symbol creation when `targetName` set --- .../semanticdb/SemanticSymbolBuilder.scala | 3 +- 
.../semanticdb/expect/TargetName.expect.scala | 8 ++++ tests/semanticdb/expect/TargetName.scala | 8 ++++ tests/semanticdb/metac.expect | 37 +++++++++++++++++++ 4 files changed, 55 insertions(+), 1 deletion(-) create mode 100644 tests/semanticdb/expect/TargetName.expect.scala create mode 100644 tests/semanticdb/expect/TargetName.scala diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala index 50ea6ec48510..81f5d37f443f 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala @@ -91,7 +91,8 @@ class SemanticSymbolBuilder: case _ => end find val sig = sym.signature - find(_.signature == sig) + val targetName = sym.targetName + find(sym => sym.signature == sig && sym.targetName == targetName) def addDescriptor(sym: Symbol): Unit = if sym.is(ModuleClass) then diff --git a/tests/semanticdb/expect/TargetName.expect.scala b/tests/semanticdb/expect/TargetName.expect.scala new file mode 100644 index 000000000000..d8bdf46bdc53 --- /dev/null +++ b/tests/semanticdb/expect/TargetName.expect.scala @@ -0,0 +1,8 @@ +package example + +object TargetName/*<-example::TargetName.*/: + @annotation.targetName/*->scala::annotation::targetName#*/("m1") + def m/*<-example::TargetName.m().*/(i/*<-example::TargetName.m().(i)*/: Int/*->scala::Int#*/) = 1 + @annotation.targetName/*->scala::annotation::targetName#*/("m2") + def m/*<-example::TargetName.m(+1).*/(i/*<-example::TargetName.m(+1).(i)*/: Int/*->scala::Int#*/) = 1 + def m1/*<-example::TargetName.m1().*/(i/*<-example::TargetName.m1().(i)*/: String/*->scala::Predef.String#*/) = 1 diff --git a/tests/semanticdb/expect/TargetName.scala b/tests/semanticdb/expect/TargetName.scala new file mode 100644 index 000000000000..5eef99283bb8 --- /dev/null +++ b/tests/semanticdb/expect/TargetName.scala @@ -0,0 +1,8 @@ +package example + +object TargetName: + @annotation.targetName("m1") + def m(i: Int) = 1 + @annotation.targetName("m2") + def m(i: Int) = 1 + def m1(i: String) = 1 diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 221422de6505..16f1b7c13d1f 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -3799,6 +3799,43 @@ Synthetics: .map => *[Int] [3:1..3:5):List => *.apply[Int] +expect/TargetName.scala +----------------------- + +Summary: +Schema => SemanticDB v4 +Uri => TargetName.scala +Text => empty +Language => Scala +Symbols => 7 entries +Occurrences => 15 entries + +Symbols: +example/TargetName. => final object TargetName extends Object { self: TargetName.type => +4 decls } +example/TargetName.m(). => @targetName method m (param i: Int): Int +example/TargetName.m().(i) => param i: Int +example/TargetName.m(+1). => @targetName method m (param i: Int): Int +example/TargetName.m(+1).(i) => param i: Int +example/TargetName.m1(). => method m1 (param i: String): Int +example/TargetName.m1().(i) => param i: String + +Occurrences: +[0:8..0:15): example <- example/ +[2:7..2:17): TargetName <- example/TargetName. +[3:3..3:13): annotation -> scala/annotation/ +[3:14..3:24): targetName -> scala/annotation/targetName# +[4:6..4:7): m <- example/TargetName.m(). +[4:8..4:9): i <- example/TargetName.m().(i) +[4:11..4:14): Int -> scala/Int# +[5:3..5:13): annotation -> scala/annotation/ +[5:14..5:24): targetName -> scala/annotation/targetName# +[6:6..6:7): m <- example/TargetName.m(+1). 
+[6:8..6:9): i <- example/TargetName.m(+1).(i)
+[6:11..6:14): Int -> scala/Int#
+[7:6..7:8): m1 <- example/TargetName.m1().
+[7:9..7:10): i <- example/TargetName.m1().(i)
+[7:12..7:18): String -> scala/Predef.String#
+
 expect/Traits.scala
 -------------------

From 413d7b497b38656e480c13228e709653bd735d45 Mon Sep 17 00:00:00 2001
From: Eugene Flesselle
Date: Wed, 27 Mar 2024 15:20:21 +0100
Subject: [PATCH 240/827] Map over `ImportType`s in inliner tree type map

The inliner replaces references to parameters by their corresponding
proxies, including in singleton types.
It did not, however, handle the mapping over import types, the symbols of
which may have depended on parameters.

Mapping imports correctly was necessary for i19493 since the `summonInline`
resolves post inlining to a given imported within the inline definition.

Fix #19493
---
 .../dotty/tools/dotc/inlines/Inliner.scala    |  5 ++++
 tests/pos/i19493.scala                        | 29 +++++++++++++++++++
 2 files changed, 34 insertions(+)
 create mode 100644 tests/pos/i19493.scala

diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala
index 7c79e972c126..6116c00aeff7 100644
--- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala
+++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala
@@ -565,6 +565,11 @@ class Inliner(val call: tpd.Tree)(using Context):
     def apply(t: Type) = t match {
       case t: ThisType => thisProxy.getOrElse(t.cls, t)
       case t: TypeRef => paramProxy.getOrElse(t, mapOver(t))
+      case t: TermRef if t.symbol.isImport =>
+        val ImportType(e) = t.widenTermRefExpr: @unchecked
+        paramProxy.get(e.tpe) match
+          case Some(p) => newImportSymbol(ctx.owner, singleton(p)).termRef
+          case None => mapOver(t)
       case t: SingletonType =>
         if t.termSymbol.isAllOf(InlineParam) then apply(t.widenTermRefExpr)
         else paramProxy.getOrElse(t, mapOver(t))
diff --git a/tests/pos/i19493.scala b/tests/pos/i19493.scala
new file mode 100644
index 000000000000..37af3214ce16
--- /dev/null
+++ b/tests/pos/i19493.scala
@@ -0,0 +1,29 @@
+
+import scala.compiletime.{summonAll, summonInline}
+import deriving.Mirror
+
+type Sc[X] = X
+case class Row[T[_]](name: T[String])
+
+class DialectTypeMappers:
+  given String = ???
+
+inline def metadata(dialect: DialectTypeMappers)(using m: Mirror.Of[Row[Sc]]): m.MirroredElemTypes =
+  import dialect.given
+  summonAll[m.MirroredElemTypes]
+
+def f = metadata(???)
+
+
+object Minimization:
+
+  class GivesString:
+    given aString: String = ???
+
+  inline def foo(x: GivesString): Unit =
+    import x.aString
+    summon[String]
+    summonInline[String] // was error
+
+  foo(???)
+end Minimization From 969da606bc2b080e1709bc4d0407fe549dd3aa57 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 27 Jun 2024 12:44:02 +0200 Subject: [PATCH 241/827] Also handle `ImportType`s depending on `this` references --- .../src/dotty/tools/dotc/inlines/Inliner.scala | 5 ++--- tests/pos/i19493.scala | 15 ++++++++++++++- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 6116c00aeff7..a83c5eaa3ba8 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -567,9 +567,8 @@ class Inliner(val call: tpd.Tree)(using Context): case t: TypeRef => paramProxy.getOrElse(t, mapOver(t)) case t: TermRef if t.symbol.isImport => val ImportType(e) = t.widenTermRefExpr: @unchecked - paramProxy.get(e.tpe) match - case Some(p) => newImportSymbol(ctx.owner, singleton(p)).termRef - case None => mapOver(t) + val e1 = singleton(apply(e.tpe)) + newImportSymbol(ctx.owner, e1).termRef case t: SingletonType => if t.termSymbol.isAllOf(InlineParam) then apply(t.widenTermRefExpr) else paramProxy.getOrElse(t, mapOver(t)) diff --git a/tests/pos/i19493.scala b/tests/pos/i19493.scala index 37af3214ce16..93d9023d589c 100644 --- a/tests/pos/i19493.scala +++ b/tests/pos/i19493.scala @@ -22,8 +22,21 @@ object Minimization: inline def foo(x: GivesString): Unit = import x.aString - summon[String] + summon[String] // ok summonInline[String] // was error foo(???) + + + trait A: + val x: GivesString + + inline def bar: Unit = + import this.x.aString + summon[String] // ok + summonInline[String] // was error + + val a: A = ??? + a.bar + end Minimization From ff003fdd3d898fd509f2b551d308eb723dd7c60c Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 27 Jun 2024 19:32:36 +0200 Subject: [PATCH 242/827] Also handle imports on parameters of lambdas returned from inline defs Both i19493 and i19436 require mapping the type of the expr in an `ImportType` which is itself the info of a `TermRef`. In the first issue, for the substitution of an inline def parameter proxy. In the second issue, for the parameter of a lambda returned from an inline def. Both can be handled in `TypeMap` by mapping over references to `ImportType`s. The second case also requires modifying `TreeTypeMap#mapType` such that the logic mapping over imports is done within a `TypeMap` doing the symbol substitutions. 
Fixes #19436 --- .../src/dotty/tools/dotc/ast/TreeTypeMap.scala | 7 ++++++- compiler/src/dotty/tools/dotc/core/Types.scala | 6 ++++++ .../src/dotty/tools/dotc/inlines/Inliner.scala | 4 ---- tests/pos-macros/i19436/Macro_1.scala | 18 ++++++++++++++++++ tests/pos-macros/i19436/Test_2.scala | 2 ++ tests/pos/i19493.scala | 9 ++++++++- 6 files changed, 40 insertions(+), 6 deletions(-) create mode 100644 tests/pos-macros/i19436/Macro_1.scala create mode 100644 tests/pos-macros/i19436/Test_2.scala diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 668daea5f1fd..98d9a0ca85f6 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -69,7 +69,12 @@ class TreeTypeMap( } def mapType(tp: Type): Type = - mapOwnerThis(typeMap(tp).substSym(substFrom, substTo)) + val substMap = new TypeMap(): + def apply(tp: Type): Type = tp match + case tp: TermRef if tp.symbol.isImport => mapOver(tp) + case tp => tp.substSym(substFrom, substTo) + mapOwnerThis(substMap(typeMap(tp))) + end mapType private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit = if (prevStats.isEmpty) assert(newStats.isEmpty) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 6d84242648b2..aa1813f572f7 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -6295,6 +6295,12 @@ object Types extends TypeUtils { val ctx = this.mapCtx // optimization for performance given Context = ctx tp match { + case tp: TermRef if tp.symbol.isImport => + // see tests/pos/i19493.scala for examples requiring mapping over imports + val ImportType(e) = tp.info: @unchecked + val e1 = singleton(apply(e.tpe)) + newImportSymbol(tp.symbol.owner, e1).termRef + case tp: NamedType => if stopBecauseStaticOrLocal(tp) then tp else diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index a83c5eaa3ba8..7c79e972c126 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -565,10 +565,6 @@ class Inliner(val call: tpd.Tree)(using Context): def apply(t: Type) = t match { case t: ThisType => thisProxy.getOrElse(t.cls, t) case t: TypeRef => paramProxy.getOrElse(t, mapOver(t)) - case t: TermRef if t.symbol.isImport => - val ImportType(e) = t.widenTermRefExpr: @unchecked - val e1 = singleton(apply(e.tpe)) - newImportSymbol(ctx.owner, e1).termRef case t: SingletonType => if t.termSymbol.isAllOf(InlineParam) then apply(t.widenTermRefExpr) else paramProxy.getOrElse(t, mapOver(t)) diff --git a/tests/pos-macros/i19436/Macro_1.scala b/tests/pos-macros/i19436/Macro_1.scala new file mode 100644 index 000000000000..689f64203131 --- /dev/null +++ b/tests/pos-macros/i19436/Macro_1.scala @@ -0,0 +1,18 @@ + +import scala.quoted.* +import scala.compiletime.summonInline + +trait SomeImplicits: + given int: Int + +object Macro: + + transparent inline def testSummon: SomeImplicits => Int = ${ testSummonImpl } + + private def testSummonImpl(using Quotes): Expr[SomeImplicits => Int] = + import quotes.reflect.* + '{ + (x: SomeImplicits) => + import x.given + summonInline[Int] + } \ No newline at end of file diff --git a/tests/pos-macros/i19436/Test_2.scala b/tests/pos-macros/i19436/Test_2.scala new file mode 100644 index 000000000000..aedaf1cb87fb --- /dev/null +++ 
b/tests/pos-macros/i19436/Test_2.scala
@@ -0,0 +1,2 @@
+
+def fn: Unit = Macro.testSummon
diff --git a/tests/pos/i19493.scala b/tests/pos/i19493.scala
index 93d9023d589c..082f1450fd9e 100644
--- a/tests/pos/i19493.scala
+++ b/tests/pos/i19493.scala
@@ -1,4 +1,3 @@
-
 import scala.compiletime.{summonAll, summonInline}
 import deriving.Mirror
 
@@ -39,4 +38,12 @@ object Minimization:
   val a: A = ???
   a.bar
 
+
+  inline def baz() = (x: GivesString) =>
+    import x.aString
+    summon[String] // ok
+    summonInline[String] // was error
+
+  baz()
+
 end Minimization

From 532a9daab3cfda88ee11a7fa2df6eb50551b7ad7 Mon Sep 17 00:00:00 2001
From: odersky
Date: Tue, 2 Jul 2024 14:21:28 +0200
Subject: [PATCH 243/827] Re-use isConcrete checking in match types for
 NamedTuple.From

 - Move isConcrete to a new object `MatchTypes`. We should also move other
   MatchType-related stuff from Types and TypeComparer here. Type and
   TypeComparer are already uncomfortably big, and MatchTypes are a coherent
   topic where everything should work together.

 - Streamline isConcrete a bit.

 - Re-use isConcrete for a similar test in CheckRealizable.

 - Re-use isConcrete for evaluating NamedTuple.From

Fixes #20517
---
 .../tools/dotc/core/CheckRealizable.scala     | 10 +---
 .../dotty/tools/dotc/core/MatchTypes.scala    | 59 +++++++++++++++++++
 .../dotty/tools/dotc/core/TypeComparer.scala  | 53 +----------------
 .../src/dotty/tools/dotc/core/TypeEval.scala  |  2 +-
 tests/neg/i20517.check                        |  7 +++
 tests/neg/i20517.scala                        | 17 ++++++
 6 files changed, 86 insertions(+), 62 deletions(-)
 create mode 100644 compiler/src/dotty/tools/dotc/core/MatchTypes.scala
 create mode 100644 tests/neg/i20517.check
 create mode 100644 tests/neg/i20517.scala

diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
index 060189016828..d8241f3ff304 100644
--- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
+++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
@@ -116,15 +116,7 @@ class CheckRealizable(using Context) {
     case _: SingletonType | NoPrefix =>
       Realizable
     case tp =>
-      def isConcrete(tp: Type): Boolean = tp.dealias match {
-        case tp: TypeRef => tp.symbol.isClass
-        case tp: TypeParamRef => false
-        case tp: TypeProxy => isConcrete(tp.underlying)
-        case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2)
-        case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2)
-        case _ => false
-      }
-      if (!isConcrete(tp)) NotConcrete
+      if !MatchTypes.isConcrete(tp) then NotConcrete
       else boundsRealizability(tp).andAlso(memberRealizability(tp))
   }
 
diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypes.scala b/compiler/src/dotty/tools/dotc/core/MatchTypes.scala
new file mode 100644
index 000000000000..61caceccd5d4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/MatchTypes.scala
@@ -0,0 +1,59 @@
+package dotty.tools
+package dotc
+package core
+
+import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.*
+
+object MatchTypes:
+
+  /* Concreteness checking
+   *
+   * When following a baseType and reaching a non-wildcard, in-variant-pos type capture,
+   * we have to make sure that the scrutinee is concrete enough to uniquely determine
+   * the values of the captures. This comes down to checking that we do not follow any
+   * upper bound of an abstract type.
+   *
+   * See notably neg/wildcard-match.scala for examples of this.
+   *
+   * See neg/i13780.scala, neg/i13780-1.scala and neg/i19746.scala for
+   * ClassCastException reproducers if we disable this check.
+ */ + def isConcrete(tp: Type)(using Context): Boolean = + val tp1 = tp.normalized + + tp1 match + case tp1: TypeRef => + if tp1.symbol.isClass then true + else + tp1.info match + case info: AliasingBounds => isConcrete(info.alias) + case _ => false + case tp1: AppliedType => + isConcrete(tp1.tycon) && isConcrete(tp1.superType) + case tp1: HKTypeLambda => + true + case tp1: TermRef => + !tp1.symbol.is(Param) && isConcrete(tp1.underlying) + case tp1: TermParamRef => + false + case tp1: SingletonType => + isConcrete(tp1.underlying) + case tp1: ExprType => + isConcrete(tp1.underlying) + case tp1: AnnotatedType => + isConcrete(tp1.parent) + case tp1: RefinedOrRecType => + isConcrete(tp1.underlying) + case tp1: AndOrType => + isConcrete(tp1.tp1) && isConcrete(tp1.tp2) + case tp1: TypeVar => + isConcrete(tp1.underlying) + case tp1: LazyRef => + isConcrete(tp1.ref) + case tp1: FlexibleType => + isConcrete(tp1.hi) + case _ => + false + end isConcrete + +end MatchTypes \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 140b42e0e9a9..c53c2238a095 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -24,6 +24,7 @@ import reporting.trace import annotation.constructorOnly import cc.* import NameKinds.WildcardParamName +import MatchTypes.isConcrete /** Provides methods to compare types. */ @@ -3409,58 +3410,6 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { // See https://docs.scala-lang.org/sips/match-types-spec.html#matching def matchSpeccedPatMat(spec: MatchTypeCaseSpec.SpeccedPatMat): MatchResult = - /* Concreteness checking - * - * When following a baseType and reaching a non-wildcard, in-variant-pos type capture, - * we have to make sure that the scrutinee is concrete enough to uniquely determine - * the values of the captures. This comes down to checking that we do not follow any - * upper bound of an abstract type. - * - * See notably neg/wildcard-match.scala for examples of this. - * - * See neg/i13780.scala, neg/i13780-1.scala and neg/i19746.scala for - * ClassCastException reproducers if we disable this check. 
- */ - - def isConcrete(tp: Type): Boolean = - val tp1 = tp.normalized - - tp1 match - case tp1: TypeRef => - if tp1.symbol.isClass then true - else - tp1.info match - case info: AliasingBounds => isConcrete(info.alias) - case _ => false - case tp1: AppliedType => - isConcrete(tp1.tycon) && isConcrete(tp1.superType) - case tp1: HKTypeLambda => - true - case tp1: TermRef => - !tp1.symbol.is(Param) && isConcrete(tp1.underlying) - case tp1: TermParamRef => - false - case tp1: SingletonType => - isConcrete(tp1.underlying) - case tp1: ExprType => - isConcrete(tp1.underlying) - case tp1: AnnotatedType => - isConcrete(tp1.parent) - case tp1: RefinedType => - isConcrete(tp1.underlying) - case tp1: RecType => - isConcrete(tp1.underlying) - case tp1: AndOrType => - isConcrete(tp1.tp1) && isConcrete(tp1.tp2) - case tp1: FlexibleType => - isConcrete(tp1.hi) - case _ => - val tp2 = tp1.stripped.stripLazyRef - (tp2 ne tp) && isConcrete(tp2) - end isConcrete - - // Actual matching logic - val instances = Array.fill[Type](spec.captureCount)(NoType) val noInstances = mutable.ListBuffer.empty[(TypeName, TypeBounds)] diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index af4f1e0153dd..4d5496cff880 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -101,7 +101,7 @@ object TypeEval: expectArgsNum(1) val arg = tp.args.head val cls = arg.classSymbol - if cls.is(CaseClass) then + if MatchTypes.isConcrete(arg) && cls.is(CaseClass) then val fields = cls.caseAccessors val fieldLabels = fields.map: field => ConstantType(Constant(field.name.toString)) diff --git a/tests/neg/i20517.check b/tests/neg/i20517.check new file mode 100644 index 000000000000..55aeff46572b --- /dev/null +++ b/tests/neg/i20517.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i20517.scala:10:43 ------------------------------------------------------------ +10 | def dep(foo: Foo[Any]): From[foo.type] = (elem = "") // error + | ^^^^^^^^^^^ + | Found: (elem : String) + | Required: NamedTuple.From[(foo : Foo[Any])] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20517.scala b/tests/neg/i20517.scala new file mode 100644 index 000000000000..11c4432434dd --- /dev/null +++ b/tests/neg/i20517.scala @@ -0,0 +1,17 @@ +import scala.language.experimental.namedTuples +import NamedTuple.From + +case class Foo[+T](elem: T) + +trait Base[M[_]]: + def dep(foo: Foo[Any]): M[foo.type] + +class SubAny extends Base[From]: + def dep(foo: Foo[Any]): From[foo.type] = (elem = "") // error + +object Test: + @main def run = + val f: Foo[Int] = Foo(elem = 1) + val b: Base[From] = SubAny() + val nt: (elem: Int) = b.dep(f) + val x: Int = nt.elem // was ClassCastException \ No newline at end of file From de34efebb9e9f5238147cd1434da8e406285e78c Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 2 Jul 2024 14:30:05 +0200 Subject: [PATCH 244/827] Streamline isConcrete a bit more --- .../src/dotty/tools/dotc/core/MatchTypes.scala | 16 ++-------------- compiler/src/dotty/tools/dotc/core/Types.scala | 1 + 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypes.scala b/compiler/src/dotty/tools/dotc/core/MatchTypes.scala index 61caceccd5d4..a3becea40886 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypes.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypes.scala @@ -34,24 +34,12 @@ object MatchTypes: true case tp1: TermRef => 
!tp1.symbol.is(Param) && isConcrete(tp1.underlying) - case tp1: TermParamRef => + case _: (ParamRef | MatchType) => false - case tp1: SingletonType => - isConcrete(tp1.underlying) - case tp1: ExprType => - isConcrete(tp1.underlying) - case tp1: AnnotatedType => - isConcrete(tp1.parent) - case tp1: RefinedOrRecType => + case tp1: TypeProxy => isConcrete(tp1.underlying) case tp1: AndOrType => isConcrete(tp1.tp1) && isConcrete(tp1.tp2) - case tp1: TypeVar => - isConcrete(tp1.underlying) - case tp1: LazyRef => - isConcrete(tp1.ref) - case tp1: FlexibleType => - isConcrete(tp1.hi) case _ => false end isConcrete diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index aa1813f572f7..efb353c4050c 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -76,6 +76,7 @@ object Types extends TypeUtils { * | +- HKTypeLambda * | +- MatchType * | +- FlexibleType + * | +- LazyRef * | * +- GroundType -+- AndType * +- OrType From 21dad7712fdf7359fe183135cbc58284ea71e300 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 3 Jul 2024 04:42:33 -0700 Subject: [PATCH 245/827] Use final result type to check selector bound --- compiler/src/dotty/tools/dotc/transform/CheckUnused.scala | 2 +- tests/pos/i20860.scala | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i20860.scala diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index d8389ff964a4..d396d60c096a 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -729,7 +729,7 @@ object CheckUnused: if selector.isGiven then // Further check that the symbol is a given or implicit and conforms to the bound sym.isOneOf(Given | Implicit) - && (selector.bound.isEmpty || sym.info <:< selector.boundTpe) + && (selector.bound.isEmpty || sym.info.finalResultType <:< selector.boundTpe) else // Normal wildcard, check that the symbol is not a given (but can be implicit) !sym.is(Given) diff --git a/tests/pos/i20860.scala b/tests/pos/i20860.scala new file mode 100644 index 000000000000..1e1ddea11b75 --- /dev/null +++ b/tests/pos/i20860.scala @@ -0,0 +1,3 @@ +def `i20860 use result to check selector bound`: Unit = + import Ordering.Implicits.given Ordering[?] + summon[Ordering[Seq[Int]]] From 275d891932dd97a135be02294c6d66b7edaa1148 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 7 Jun 2024 16:41:49 +0200 Subject: [PATCH 246/827] Make sure all arguments to Java annot constructors are NamedArg's. The Java model of annotations is unordered and name-based. Even though we typecheck things from source with a particular ordering, semantically we must always use `NamedArg`s to match the Java model. 
--- .../src/dotty/tools/dotc/typer/Applications.scala | 13 +++++++++---- tests/run-macros/annot-arg-value-in-java.check | 12 ++++++------ tests/run-macros/annot-java-tree/AnnoMacro.scala | 7 ++++++- 3 files changed, 21 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index c3369ac58e31..74c20812893b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -945,12 +945,17 @@ trait Applications extends Compatibility { val app1 = if (!success || typedArgs.exists(_.tpe.isError)) app0.withType(UnspecifiedErrorType) else { - if !sameSeq(args, orderedArgs) - && !isJavaAnnotConstr(methRef.symbol) - && !typedArgs.forall(isSafeArg) - then + if isJavaAnnotConstr(methRef.symbol) then + // #19951 Make sure all arguments are NamedArgs for Java annotations + if typedArgs.exists(!_.isInstanceOf[NamedArg]) then + typedArgs = typedArgs.lazyZip(methType.asInstanceOf[MethodType].paramNames).map { + case (arg: NamedArg, _) => arg + case (arg, name) => NamedArg(name, arg) + } + else if !sameSeq(args, orderedArgs) && !typedArgs.forall(isSafeArg) then // need to lift arguments to maintain evaluation order in the // presence of argument reorderings. + // (never do this for Java annotation constructors, hence the 'else if') liftFun() diff --git a/tests/run-macros/annot-arg-value-in-java.check b/tests/run-macros/annot-arg-value-in-java.check index d49aaf91ae6a..74821d24bf26 100644 --- a/tests/run-macros/annot-arg-value-in-java.check +++ b/tests/run-macros/annot-arg-value-in-java.check @@ -3,9 +3,9 @@ new java.lang.SuppressWarnings(value = "a") new java.lang.SuppressWarnings(value = "b") new java.lang.SuppressWarnings(value = _root_.scala.Array.apply[java.lang.String]("c", "d")(scala.reflect.ClassTag.apply[java.lang.String](classOf[java.lang.String]))) JOtherTypes: -new Annot(value = 1, _, _) -new Annot(value = -2, _, _) -new Annot(_, m = false, _) -new Annot(_, m = true, _) -new Annot(_, _, n = 1.1) -new Annot(_, _, n = -2.1) \ No newline at end of file +new Annot(value = 1, m = _, n = _) +new Annot(value = -2, m = _, n = _) +new Annot(value = _, m = false, n = _) +new Annot(value = _, m = true, n = _) +new Annot(value = _, m = _, n = 1.1) +new Annot(value = _, m = _, n = -2.1) diff --git a/tests/run-macros/annot-java-tree/AnnoMacro.scala b/tests/run-macros/annot-java-tree/AnnoMacro.scala index 3dae57868eab..3adb0c4ffe0e 100644 --- a/tests/run-macros/annot-java-tree/AnnoMacro.scala +++ b/tests/run-macros/annot-java-tree/AnnoMacro.scala @@ -8,12 +8,17 @@ def checkSuppressWarningsImpl[T: Type](using Quotes): Expr[Unit] = val sym = TypeRepr.of[T].typeSymbol // Imitate what wartremover does, so we can avoid unintentionally breaking it: // https://github.com/wartremover/wartremover/blob/fb18e6eafe9a47823e04960aaf4ec7a9293719ef/core/src/main/scala-3/org/wartremover/WartUniverse.scala#L63-L77 + // We're intentionally breaking it in 3.5.x, though, with the addition of `NamedArg("value", ...)` + // The previous implementation would be broken for cases where the user explicitly write `value = ...` anyway. 
val actualArgs = sym .getAnnotation(SuppressWarningsSymbol) .collect { case Apply( Select(_, ""), - Apply(Apply(_, Typed(Repeated(values, _), _) :: Nil), Apply(_, _ :: Nil) :: Nil) :: Nil + NamedArg( + "value", + Apply(Apply(_, Typed(Repeated(values, _), _) :: Nil), Apply(_, _ :: Nil) :: Nil) + ) :: Nil ) => // "-Yexplicit-nulls" // https://github.com/wartremover/wartremover/issues/660 From 6730f197db241ca69be3ac76f9fca3225ea1ba8b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 10 Jun 2024 10:19:02 +0200 Subject: [PATCH 247/827] Fix #19951: Align TASTy with the Java annotation model. Scala annotations are classes, with a real constructor, which has a real signature where order is relevant but names are irrelevant. On the contrary, Java annotations are interfaces, without any real constructors. The names of "fields" are relevant, whereas their order is irrelevant. As illustrated by #19951, trying to shoehorn Java annotations into the Scala annotation model is not sustainable, and breaks in real ways. Therefore, in this commit we align how Java annotations are stored in TASTy with the Java annotation model. During pickling: * Selection of the constructor is pickled without a signature. * Default arguments are dropped. * (Due to the parent commit, all arguments are `NamedArg`s at this point.) During unpickling: * Selection of the constructor resolves to the unique constructor (instead of complaining because a signature-less `SELECT` should not resolve to a member with a signature). * Arguments to the constructor are reordered and extended with defaults to match the target constructor; we can do this because all the arguments are `NamedArg`s. For backward compatibility, during unpickling: * If we read a `SELECTin` for a Java annotation constructor, we disregard its signature and pretend it was a `SELECT`. * We adapt arguments in best-effort way if not all of them are `NamedArg`s. 
--- .../tools/dotc/core/tasty/TreePickler.scala | 14 ++- .../tools/dotc/core/tasty/TreeUnpickler.scala | 94 ++++++++++++++++++- .../java-annotations-3.4/app/Main.scala | 21 +++++ .../java-annotations-3.4/build.sbt | 7 ++ .../java-annotations-3.4/lib/AnnotMacro.scala | 7 ++ .../java-annotations-3.4/lib/JavaAnnot.java | 10 ++ .../java-annotations-3.4/lib/ScalaUser.scala | 25 +++++ .../project/DottyInjectedPlugin.scala | 11 +++ .../scala3-compat/java-annotations-3.4/test | 1 + .../AnnotMacro_1.scala | 11 +++ .../JavaAnnot_1.java | 10 ++ .../JavaAnnot_2.java | 11 +++ .../ScalaUser_1.scala | 25 +++++ .../Test_2.scala | 4 + ...i19951-java-annotations-tasty-compat.check | 9 ++ .../AnnotMacro_2.scala | 11 +++ .../JavaAnnot_1.java | 10 ++ .../JavaAnnot_3.java | 11 +++ .../ScalaUser_2.scala | 25 +++++ .../Test_4.scala | 4 + 20 files changed, 314 insertions(+), 7 deletions(-) create mode 100644 sbt-test/scala3-compat/java-annotations-3.4/app/Main.scala create mode 100644 sbt-test/scala3-compat/java-annotations-3.4/build.sbt create mode 100644 sbt-test/scala3-compat/java-annotations-3.4/lib/AnnotMacro.scala create mode 100644 sbt-test/scala3-compat/java-annotations-3.4/lib/JavaAnnot.java create mode 100644 sbt-test/scala3-compat/java-annotations-3.4/lib/ScalaUser.scala create mode 100644 sbt-test/scala3-compat/java-annotations-3.4/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/scala3-compat/java-annotations-3.4/test create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat-2/AnnotMacro_1.scala create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat-2/JavaAnnot_1.java create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat-2/JavaAnnot_2.java create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat-2/ScalaUser_1.scala create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat-2/Test_2.scala create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat.check create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat/AnnotMacro_2.scala create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat/JavaAnnot_1.java create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat/JavaAnnot_3.java create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat/ScalaUser_2.scala create mode 100644 tests/run-macros/i19951-java-annotations-tasty-compat/Test_4.scala diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 8d1eca8fb5f0..4e32db2ae602 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -466,7 +466,10 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { } case _ => if passesConditionForErroringBestEffortCode(tree.hasType) then - val sig = tree.tpe.signature + // #19951 The signature of a constructor of a Java annotation is irrelevant + val sig = + if name == nme.CONSTRUCTOR && tree.symbol.exists && tree.symbol.owner.is(JavaAnnotation) then Signature.NotAMethod + else tree.tpe.signature var ename = tree.symbol.targetName val selectFromQualifier = name.isTypeName @@ -507,7 +510,14 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { writeByte(APPLY) withLength { pickleTree(fun) - args.foreach(pickleTree) + // #19951 Do not pickle default arguments to Java annotation constructors + if fun.symbol.isClassConstructor && fun.symbol.owner.is(JavaAnnotation) then + 
for arg <- args do + arg match + case NamedArg(_, Ident(nme.WILDCARD)) => () + case _ => pickleTree(arg) + else + args.foreach(pickleTree) } } case TypeApply(fun, args) => diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 91a5899146cc..4750276f4553 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1281,7 +1281,14 @@ class TreeUnpickler(reader: TastyReader, if unpicklingJava && name == tpnme.Object && qual.symbol == defn.JavaLangPackageVal then defn.FromJavaObjectSymbol.denot else - accessibleDenot(qual.tpe.widenIfUnstable, name, sig, target) + val qualType = qual.tpe.widenIfUnstable + if name == nme.CONSTRUCTOR && qualType.classSymbol.is(JavaAnnotation) then + // #19951 Disregard the signature (or the absence thereof) for constructors of Java annotations + // Note that Java annotations always have a single public constructor + // They may have a PrivateLocal constructor if compiled from source in mixed compilation + qualType.findMember(name, qualType, excluded = Private) + else + accessibleDenot(qualType, name, sig, target) makeSelect(qual, name, denot) def readQualId(): (untpd.Ident, TypeRef) = @@ -1335,7 +1342,16 @@ class TreeUnpickler(reader: TastyReader, readPathTree() } - /** Adapt constructor calls where class has only using clauses from old to new scheme. + /** Adapt constructor calls for Java annot constructors and for the new scheme of `using` clauses. + * + * #19951 If the `fn` is the constructor of a Java annotation, reorder and refill + * arguments against the constructor signature. Only reorder if all the arguments + * are `NamedArg`s, which is always the case if the TASTy was produced by 3.5+. + * If some arguments are positional, only *add* missing arguments to the right + * and hope for the best; this will at least fix #19951 after the fact if the new + * annotation fields are added after all the existing ones. + * + * Otherwise, adapt calls where class has only using clauses from old to new scheme. * or class has mixed using clauses and other clauses. * Old: leading (), new: nothing, or trailing () if all clauses are using clauses. * This is neccessary so that we can read pre-3.2 Tasty correctly. There, @@ -1343,7 +1359,9 @@ class TreeUnpickler(reader: TastyReader, * use the new scheme, since they are reconstituted with normalizeIfConstructor. 
*/ def constructorApply(fn: Tree, args: List[Tree]): Tree = - if fn.tpe.widen.isContextualMethod && args.isEmpty then + if fn.symbol.owner.is(JavaAnnotation) then + tpd.Apply(fn, fixArgsToJavaAnnotConstructor(fn.tpe.widen, args)) + else if fn.tpe.widen.isContextualMethod && args.isEmpty then fn.withAttachment(SuppressedApplyToNone, ()) else val fn1 = fn match @@ -1365,6 +1383,68 @@ class TreeUnpickler(reader: TastyReader, res.withAttachment(SuppressedApplyToNone, ()) else res + def fixArgsToJavaAnnotConstructor(methType: Type, args: List[Tree]): List[Tree] = + methType match + case methType: MethodType => + val formalNames = methType.paramNames + val sizeCmp = args.sizeCompare(formalNames) + + def makeDefault(name: TermName, tpe: Type): NamedArg = + NamedArg(name, Underscore(tpe)) + + def extendOnly(args: List[NamedArg]): List[NamedArg] = + if sizeCmp < 0 then + val argsSize = args.size + val additionalArgs: List[NamedArg] = + formalNames.drop(argsSize).lazyZip(methType.paramInfos.drop(argsSize)).map(makeDefault(_, _)) + args ::: additionalArgs + else + args // fast path + + if formalNames.isEmpty then + // fast path + args + else if sizeCmp > 0 then + // Something's wrong anyway; don't touch anything + args + else if args.exists(!_.isInstanceOf[NamedArg]) then + // Pre 3.5 TASTy -- do our best, assuming that args match as a prefix of the formals + val prefixMatch = args.lazyZip(formalNames).forall { + case (NamedArg(actualName, _), formalName) => actualName == formalName + case _ => true + } + // If the prefix does not match, something's wrong; don't touch anything + if !prefixMatch then + args + else + // Turn non-named args to named and extend with defaults + extendOnly(args.lazyZip(formalNames).map { + case (arg: NamedArg, _) => arg + case (arg, formalName) => NamedArg(formalName, arg) + }) + else + // Good TASTy where all the arguments are named; reorder and extend if needed + val namedArgs = args.asInstanceOf[List[NamedArg]] + val prefixMatch = namedArgs.lazyZip(formalNames).forall((arg, formalName) => arg.name == formalName) + if prefixMatch then + // fast path, extend only + extendOnly(namedArgs) + else + // needs reordering, and possibly fill in holes for default arguments + val argsByName = mutable.AnyRefMap.from(namedArgs.map(arg => arg.name -> arg)) + val reconstructedArgs = formalNames.lazyZip(methType.paramInfos).map { (name, tpe) => + argsByName.remove(name).getOrElse(makeDefault(name, tpe)) + } + if argsByName.nonEmpty then + // something's wrong; don't touch anything + args + else + reconstructedArgs + + case _ => + args + end fixArgsToJavaAnnotConstructor + def quotedExpr(fn: Tree, args: List[Tree]): Tree = val TypeApply(_, targs) = fn: @unchecked untpd.Quote(args.head, Nil).withBodyType(targs.head.tpe) @@ -1491,8 +1571,12 @@ class TreeUnpickler(reader: TastyReader, NoDenotation val denot = - val d = ownerTpe.decl(name).atSignature(sig, target) - (if !d.exists then lookupInSuper else d).asSeenFrom(prefix) + if owner.is(JavaAnnotation) && name == nme.CONSTRUCTOR then + // #19951 Fix up to read TASTy produced before 3.5.0 -- ignore the signature + ownerTpe.nonPrivateDecl(name).asSeenFrom(prefix) + else + val d = ownerTpe.decl(name).atSignature(sig, target) + (if !d.exists then lookupInSuper else d).asSeenFrom(prefix) makeSelect(qual, name, denot) case REPEATED => diff --git a/sbt-test/scala3-compat/java-annotations-3.4/app/Main.scala b/sbt-test/scala3-compat/java-annotations-3.4/app/Main.scala new file mode 100644 index 000000000000..41ca1fadf011 --- /dev/null +++ 
b/sbt-test/scala3-compat/java-annotations-3.4/app/Main.scala @@ -0,0 +1,21 @@ +object Test: + def main(args: Array[String]): Unit = + val actual = listAnnots("ScalaUser") + val expected = List( + "new JavaAnnot(a = 5, b = _, c = _)", + "new JavaAnnot(a = 5, b = _, c = _)", + "new JavaAnnot(a = 5, b = \"foo\", c = _)", + "new JavaAnnot(a = 5, b = \"foo\", c = 3)", + "new JavaAnnot(a = 5, b = _, c = 3)", + "new JavaAnnot(a = 5, b = \"foo\", c = 3)", + "new JavaAnnot(a = 5, b = \"foo\", c = 3)", + "new JavaAnnot(a = 5, b = \"foo\", c = _)", + ) + if actual != expected then + println("Expected:") + expected.foreach(println(_)) + println("Actual:") + actual.foreach(println(_)) + throw new AssertionError("test failed") + end main +end Test diff --git a/sbt-test/scala3-compat/java-annotations-3.4/build.sbt b/sbt-test/scala3-compat/java-annotations-3.4/build.sbt new file mode 100644 index 000000000000..67b61a3e9edd --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/build.sbt @@ -0,0 +1,7 @@ +lazy val lib = project.in(file("lib")) + .settings( + scalaVersion := "3.4.2" + ) + +lazy val app = project.in(file("app")) + .dependsOn(lib) diff --git a/sbt-test/scala3-compat/java-annotations-3.4/lib/AnnotMacro.scala b/sbt-test/scala3-compat/java-annotations-3.4/lib/AnnotMacro.scala new file mode 100644 index 000000000000..4bf3a238f9c9 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/lib/AnnotMacro.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +inline def listAnnots(inline c: String): List[String] = ${ listAnnotsImpl('c) } + +def listAnnotsImpl(c: Expr[String])(using Quotes): Expr[List[String]] = + import quotes.reflect.* + Expr(Symbol.requiredClass(c.valueOrError).declaredMethods.flatMap(_.annotations.map(_.show))) diff --git a/sbt-test/scala3-compat/java-annotations-3.4/lib/JavaAnnot.java b/sbt-test/scala3-compat/java-annotations-3.4/lib/JavaAnnot.java new file mode 100644 index 000000000000..9aa3537d4266 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/lib/JavaAnnot.java @@ -0,0 +1,10 @@ + +import java.lang.annotation.*; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@interface JavaAnnot { + int a(); + String b() default "empty"; + int c() default 5; +} diff --git a/sbt-test/scala3-compat/java-annotations-3.4/lib/ScalaUser.scala b/sbt-test/scala3-compat/java-annotations-3.4/lib/ScalaUser.scala new file mode 100644 index 000000000000..a14a69eae21b --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/lib/ScalaUser.scala @@ -0,0 +1,25 @@ +class ScalaUser { + @JavaAnnot(5) + def f1(): Int = 1 + + @JavaAnnot(a = 5) + def f2(): Int = 1 + + @JavaAnnot(5, "foo") + def f3(): Int = 1 + + @JavaAnnot(5, "foo", 3) + def f4(): Int = 1 + + @JavaAnnot(5, c = 3) + def f5(): Int = 1 + + @JavaAnnot(5, c = 3, b = "foo") + def f6(): Int = 1 + + @JavaAnnot(b = "foo", c = 3, a = 5) + def f7(): Int = 1 + + @JavaAnnot(b = "foo", a = 5) + def f8(): Int = 1 +} diff --git a/sbt-test/scala3-compat/java-annotations-3.4/project/DottyInjectedPlugin.scala b/sbt-test/scala3-compat/java-annotations-3.4/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..fb946c4b8c61 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion") + ) +} 
diff --git a/sbt-test/scala3-compat/java-annotations-3.4/test b/sbt-test/scala3-compat/java-annotations-3.4/test new file mode 100644 index 000000000000..63092ffa4a03 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/test @@ -0,0 +1 @@ +> app/run diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat-2/AnnotMacro_1.scala b/tests/run-macros/i19951-java-annotations-tasty-compat-2/AnnotMacro_1.scala new file mode 100644 index 000000000000..75252699b015 --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat-2/AnnotMacro_1.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +inline def showAnnots(inline c: String): Unit = ${ showAnnotsImpl('c) } + +def showAnnotsImpl(c: Expr[String])(using Quotes): Expr[Unit] = + import quotes.reflect.* + val al = Expr(Symbol.requiredClass(c.valueOrError).declaredMethods.flatMap(_.annotations.map(_.show))) + '{ + println($c + ":") + $al.foreach(println) + } diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat-2/JavaAnnot_1.java b/tests/run-macros/i19951-java-annotations-tasty-compat-2/JavaAnnot_1.java new file mode 100644 index 000000000000..9aa3537d4266 --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat-2/JavaAnnot_1.java @@ -0,0 +1,10 @@ + +import java.lang.annotation.*; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@interface JavaAnnot { + int a(); + String b() default "empty"; + int c() default 5; +} diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat-2/JavaAnnot_2.java b/tests/run-macros/i19951-java-annotations-tasty-compat-2/JavaAnnot_2.java new file mode 100644 index 000000000000..9741cf9ee1e3 --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat-2/JavaAnnot_2.java @@ -0,0 +1,11 @@ + +import java.lang.annotation.*; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@interface JavaAnnot { + int c() default 5; + int a(); + int d() default 42; + String b() default "empty"; +} diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat-2/ScalaUser_1.scala b/tests/run-macros/i19951-java-annotations-tasty-compat-2/ScalaUser_1.scala new file mode 100644 index 000000000000..a14a69eae21b --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat-2/ScalaUser_1.scala @@ -0,0 +1,25 @@ +class ScalaUser { + @JavaAnnot(5) + def f1(): Int = 1 + + @JavaAnnot(a = 5) + def f2(): Int = 1 + + @JavaAnnot(5, "foo") + def f3(): Int = 1 + + @JavaAnnot(5, "foo", 3) + def f4(): Int = 1 + + @JavaAnnot(5, c = 3) + def f5(): Int = 1 + + @JavaAnnot(5, c = 3, b = "foo") + def f6(): Int = 1 + + @JavaAnnot(b = "foo", c = 3, a = 5) + def f7(): Int = 1 + + @JavaAnnot(b = "foo", a = 5) + def f8(): Int = 1 +} diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat-2/Test_2.scala b/tests/run-macros/i19951-java-annotations-tasty-compat-2/Test_2.scala new file mode 100644 index 000000000000..82524fa06d6e --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat-2/Test_2.scala @@ -0,0 +1,4 @@ +object Test { + def main(args: Array[String]): Unit = + showAnnots("ScalaUser") +} diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat.check b/tests/run-macros/i19951-java-annotations-tasty-compat.check new file mode 100644 index 000000000000..c41fcc64c559 --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat.check @@ -0,0 +1,9 @@ +ScalaUser: +new JavaAnnot(c = _, a = 5, d = _, b = _) +new JavaAnnot(c = _, a = 5, d = _, b = _) +new JavaAnnot(c = _, a = 5, d = _, b = "foo") 
+new JavaAnnot(c = 3, a = 5, d = _, b = "foo") +new JavaAnnot(c = 3, a = 5, d = _, b = _) +new JavaAnnot(c = 3, a = 5, d = _, b = "foo") +new JavaAnnot(c = 3, a = 5, d = _, b = "foo") +new JavaAnnot(c = _, a = 5, d = _, b = "foo") diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat/AnnotMacro_2.scala b/tests/run-macros/i19951-java-annotations-tasty-compat/AnnotMacro_2.scala new file mode 100644 index 000000000000..75252699b015 --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat/AnnotMacro_2.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +inline def showAnnots(inline c: String): Unit = ${ showAnnotsImpl('c) } + +def showAnnotsImpl(c: Expr[String])(using Quotes): Expr[Unit] = + import quotes.reflect.* + val al = Expr(Symbol.requiredClass(c.valueOrError).declaredMethods.flatMap(_.annotations.map(_.show))) + '{ + println($c + ":") + $al.foreach(println) + } diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat/JavaAnnot_1.java b/tests/run-macros/i19951-java-annotations-tasty-compat/JavaAnnot_1.java new file mode 100644 index 000000000000..9aa3537d4266 --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat/JavaAnnot_1.java @@ -0,0 +1,10 @@ + +import java.lang.annotation.*; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@interface JavaAnnot { + int a(); + String b() default "empty"; + int c() default 5; +} diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat/JavaAnnot_3.java b/tests/run-macros/i19951-java-annotations-tasty-compat/JavaAnnot_3.java new file mode 100644 index 000000000000..9741cf9ee1e3 --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat/JavaAnnot_3.java @@ -0,0 +1,11 @@ + +import java.lang.annotation.*; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@interface JavaAnnot { + int c() default 5; + int a(); + int d() default 42; + String b() default "empty"; +} diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat/ScalaUser_2.scala b/tests/run-macros/i19951-java-annotations-tasty-compat/ScalaUser_2.scala new file mode 100644 index 000000000000..a14a69eae21b --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat/ScalaUser_2.scala @@ -0,0 +1,25 @@ +class ScalaUser { + @JavaAnnot(5) + def f1(): Int = 1 + + @JavaAnnot(a = 5) + def f2(): Int = 1 + + @JavaAnnot(5, "foo") + def f3(): Int = 1 + + @JavaAnnot(5, "foo", 3) + def f4(): Int = 1 + + @JavaAnnot(5, c = 3) + def f5(): Int = 1 + + @JavaAnnot(5, c = 3, b = "foo") + def f6(): Int = 1 + + @JavaAnnot(b = "foo", c = 3, a = 5) + def f7(): Int = 1 + + @JavaAnnot(b = "foo", a = 5) + def f8(): Int = 1 +} diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat/Test_4.scala b/tests/run-macros/i19951-java-annotations-tasty-compat/Test_4.scala new file mode 100644 index 000000000000..82524fa06d6e --- /dev/null +++ b/tests/run-macros/i19951-java-annotations-tasty-compat/Test_4.scala @@ -0,0 +1,4 @@ +object Test { + def main(args: Array[String]): Unit = + showAnnots("ScalaUser") +} From aad6f7d1c091c6a59f44ae5b56bffd180f22b64a Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Wed, 3 Jul 2024 14:26:25 +0200 Subject: [PATCH 248/827] Adapt the test suite to scala 3.6 --- tests/neg/given-loop-prevention.check | 14 ++------ tests/neg/i20415.scala | 2 -- tests/neg/i6716.check | 14 -------- tests/neg/i6716.scala | 17 ++++------ tests/neg/i7294.check | 30 ++++------------- tests/neg/i7294.scala | 2 +- tests/neg/looping-givens.check | 48 --------------------------- 
tests/neg/looping-givens.scala | 11 ------ tests/pos/i20415.scala | 2 ++ tests/pos/i6716.scala | 18 +++++----- tests/pos/looping-givens.scala | 7 ++-- tests/run/i6716.scala | 2 +- 12 files changed, 32 insertions(+), 135 deletions(-) delete mode 100644 tests/neg/i20415.scala delete mode 100644 tests/neg/i6716.check delete mode 100644 tests/neg/looping-givens.check delete mode 100644 tests/neg/looping-givens.scala create mode 100644 tests/pos/i20415.scala diff --git a/tests/neg/given-loop-prevention.check b/tests/neg/given-loop-prevention.check index 460adf03be49..cbaeec2474f4 100644 --- a/tests/neg/given-loop-prevention.check +++ b/tests/neg/given-loop-prevention.check @@ -1,14 +1,4 @@ --- Error: tests/neg/given-loop-prevention.scala:10:36 ------------------------------------------------------------------ +-- [E172] Type Error: tests/neg/given-loop-prevention.scala:10:36 ------------------------------------------------------ 10 | given List[Foo] = List(summon[Foo]) // error | ^ - | Result of implicit search for Foo will change. - | Current result Baz.given_Foo will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: No Matching Implicit. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that Baz.given_Foo comes earlier, - | - use an explicit argument. + | No given instance of type Foo was found for parameter x of method summon in object Predef diff --git a/tests/neg/i20415.scala b/tests/neg/i20415.scala deleted file mode 100644 index 14582e40aa9d..000000000000 --- a/tests/neg/i20415.scala +++ /dev/null @@ -1,2 +0,0 @@ -class Foo: - given ord: Ordering[Int] = summon[Ordering[Int]] // error diff --git a/tests/neg/i6716.check b/tests/neg/i6716.check deleted file mode 100644 index 0144f539f53c..000000000000 --- a/tests/neg/i6716.check +++ /dev/null @@ -1,14 +0,0 @@ --- Error: tests/neg/i6716.scala:11:39 ---------------------------------------------------------------------------------- -11 | given Monad[Bar] = summon[Monad[Foo]] // error - | ^ - | Result of implicit search for Monad[Foo] will change. - | Current result Bar.given_Monad_Bar will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: Foo.given_Monad_Foo. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that Bar.given_Monad_Bar comes earlier, - | - use an explicit argument. 
diff --git a/tests/neg/i6716.scala b/tests/neg/i6716.scala index 8b37d4e223ac..eece8af9e560 100644 --- a/tests/neg/i6716.scala +++ b/tests/neg/i6716.scala @@ -1,17 +1,12 @@ - -trait Monad[T]: - def id: String class Foo -object Foo { - given Monad[Foo] with { def id = "Foo" } -} -opaque type Bar = Foo object Bar { - given Monad[Bar] = summon[Monad[Foo]] // error + given Foo with {} + given List[Foo] = List(summon[Foo]) // ok } -object Test extends App { - println(summon[Monad[Foo]].id) - println(summon[Monad[Bar]].id) +object Baz { + @annotation.nowarn + given List[Foo] = List(summon[Foo]) // error + given Foo with {} } diff --git a/tests/neg/i7294.check b/tests/neg/i7294.check index d6e559997f78..30c076470899 100644 --- a/tests/neg/i7294.check +++ b/tests/neg/i7294.check @@ -1,25 +1,9 @@ --- Error: tests/neg/i7294.scala:7:10 ----------------------------------------------------------------------------------- -7 | case x: T => x.g(10) // error // error - | ^ - | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. - | Current result foo.f will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: No Matching Implicit. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that foo.f comes earlier, - | - use an explicit argument. - | - | where: T is a type in given instance f with bounds <: foo.Foo --- [E007] Type Mismatch Error: tests/neg/i7294.scala:7:18 -------------------------------------------------------------- -7 | case x: T => x.g(10) // error // error - | ^^^^^^^ - | Found: Any - | Required: T - | - | where: T is a type in given instance f with bounds <: foo.Foo +-- [E007] Type Mismatch Error: tests/neg/i7294.scala:7:15 -------------------------------------------------------------- +7 | case x: T => x.g(10) // error + | ^ + | Found: (x : Nothing) + | Required: ?{ g: ? } + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than ?{ g: [applied to (10) returning T] } | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i7294.scala b/tests/neg/i7294.scala index fbb00f9b7e89..2725109e79e8 100644 --- a/tests/neg/i7294.scala +++ b/tests/neg/i7294.scala @@ -4,7 +4,7 @@ package foo trait Foo { def g(x: Any): Any } inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error // error + case x: T => x.g(10) // error } @main def Test = f diff --git a/tests/neg/looping-givens.check b/tests/neg/looping-givens.check deleted file mode 100644 index 1e7ee08d79df..000000000000 --- a/tests/neg/looping-givens.check +++ /dev/null @@ -1,48 +0,0 @@ --- Error: tests/neg/looping-givens.scala:9:22 -------------------------------------------------------------------------- -9 | given aa: A = summon // error - | ^ - | Result of implicit search for T will change. - | Current result ab will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: a. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... 
with` clause as the enclosing given, - | - rearrange definitions so that ab comes earlier, - | - use an explicit argument. - | - | where: T is a type variable with constraint <: A --- Error: tests/neg/looping-givens.scala:10:22 ------------------------------------------------------------------------- -10 | given bb: B = summon // error - | ^ - | Result of implicit search for T will change. - | Current result ab will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: b. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that ab comes earlier, - | - use an explicit argument. - | - | where: T is a type variable with constraint <: B --- Error: tests/neg/looping-givens.scala:11:28 ------------------------------------------------------------------------- -11 | given ab: (A & B) = summon // error - | ^ - | Result of implicit search for T will change. - | Current result ab will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: Search Failure: joint(ab, ab). - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that ab comes earlier, - | - use an explicit argument. - | - | where: T is a type variable with constraint <: A & B diff --git a/tests/neg/looping-givens.scala b/tests/neg/looping-givens.scala deleted file mode 100644 index 57dc95f99aab..000000000000 --- a/tests/neg/looping-givens.scala +++ /dev/null @@ -1,11 +0,0 @@ -//> options -source 3.4 - -class A -class B - -given joint(using a: A, b: B): (A & B) = ??? - -def foo(using a: A, b: B) = - given aa: A = summon // error - given bb: B = summon // error - given ab: (A & B) = summon // error diff --git a/tests/pos/i20415.scala b/tests/pos/i20415.scala new file mode 100644 index 000000000000..500dcb83ba15 --- /dev/null +++ b/tests/pos/i20415.scala @@ -0,0 +1,2 @@ +class Foo: + given ord: Ordering[Int] = summon[Ordering[Int]] diff --git a/tests/pos/i6716.scala b/tests/pos/i6716.scala index f02559af1e82..617adc3c09f0 100644 --- a/tests/pos/i6716.scala +++ b/tests/pos/i6716.scala @@ -1,14 +1,16 @@ -//> using options -Xfatal-warnings -source 3.4 - +trait Monad[T]: + def id: String class Foo +object Foo { + given Monad[Foo] with { def id = "Foo" } +} +opaque type Bar = Foo object Bar { - given Foo with {} - given List[Foo] = List(summon[Foo]) // ok + given Monad[Bar] = summon[Monad[Foo]] } -object Baz { - @annotation.nowarn - given List[Foo] = List(summon[Foo]) // gives a warning, which is suppressed - given Foo with {} +object Test extends App { + println(summon[Monad[Foo]].id) + println(summon[Monad[Bar]].id) } diff --git a/tests/pos/looping-givens.scala b/tests/pos/looping-givens.scala index 0e615c8251df..d7d086358099 100644 --- a/tests/pos/looping-givens.scala +++ b/tests/pos/looping-givens.scala @@ -1,4 +1,3 @@ -import language.future class A class B @@ -6,6 +5,6 @@ class B given joint(using a: A, b: B): (A & B) = ??? 
def foo(using a: A, b: B) = - given aa: A = summon // error - given bb: B = summon // error - given ab: (A & B) = summon // error + given aa: A = summon // resolves to a + given bb: B = summon // resolves to b + given ab: (A & B) = summon // resolves to joint(aa, bb) diff --git a/tests/run/i6716.scala b/tests/run/i6716.scala index 3bef45ac7465..e793381cce1c 100644 --- a/tests/run/i6716.scala +++ b/tests/run/i6716.scala @@ -1,4 +1,4 @@ -//> using options -Xfatal-warnings -source future +//> using options -Xfatal-warnings trait Monad[T]: def id: String From 2236ac9efbe16c7024335378ee048b8fae1e95e9 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 3 Jul 2024 17:36:20 +0200 Subject: [PATCH 249/827] use explicit result type in test --- tests/pos/i20901/Foo.scala | 2 +- tests/pos/i20901/Foo.tastycheck | 60 +++++++++++++++++---------------- 2 files changed, 32 insertions(+), 30 deletions(-) diff --git a/tests/pos/i20901/Foo.scala b/tests/pos/i20901/Foo.scala index c1277781db38..9173d10e0a1a 100644 --- a/tests/pos/i20901/Foo.scala +++ b/tests/pos/i20901/Foo.scala @@ -3,4 +3,4 @@ import reflect.ClassTag class Foo: - def mkArray[T: ClassTag] = ??? + def mkArray[T: ClassTag]: Nothing = ??? diff --git a/tests/pos/i20901/Foo.tastycheck b/tests/pos/i20901/Foo.tastycheck index 0201bfec2056..565c5c793bad 100644 --- a/tests/pos/i20901/Foo.tastycheck +++ b/tests/pos/i20901/Foo.tastycheck @@ -41,14 +41,14 @@ Names (276 bytes, starting from ): 34: Comments 35: Attributes -Trees (94 bytes, starting from ): - 0: PACKAGE(92) +Trees (98 bytes, starting from ): + 0: PACKAGE(96) 2: TERMREFpkg 1 [] 4: IMPORT(4) 6: TERMREFpkg 4 [scala[Qualified . reflect]] 8: IMPORTED 5 [ClassTag] - 10: TYPEDEF(82) 6 [Foo] - 13: TEMPLATE(61) + 10: TYPEDEF(86) 6 [Foo] + 13: TEMPLATE(65) 15: APPLY(10) 17: SELECTin(8) 13 [[Signed Signature(List(),java.lang.Object) @]] 20: NEW @@ -60,7 +60,7 @@ Trees (94 bytes, starting from ): 31: TYPEREF 14 [Unit] 33: TERMREFpkg 2 [scala] 35: STABLE - 36: DEFDEF(38) 15 [mkArray] + 36: DEFDEF(42) 15 [mkArray] 39: TYPEPARAM(11) 16 [T] 42: TYPEBOUNDStpt(8) 44: TYPEREF 17 [Nothing] @@ -75,47 +75,49 @@ Trees (94 bytes, starting from ): 63: IDENTtpt 16 [T] 65: TYPEREFdirect 39 67: IMPLICIT - 68: SHAREDtype 44 - 70: TERMREF 21 [???] - 72: TERMREF 22 [Predef] - 74: SHAREDtype 33 - 76: ANNOTATION(16) - 78: TYPEREF 23 [SourceFile] - 80: TERMREFpkg 27 [scala[Qualified . annotation][Qualified . internal]] - 82: APPLY(10) - 84: SELECTin(6) 31 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] - 87: NEW - 88: SHAREDtype 78 - 90: SHAREDtype 78 - 92: STRINGconst 32 [] - 94: + 68: IDENTtpt 17 [Nothing] + 70: TYPEREF 17 [Nothing] + 72: TERMREFpkg 2 [scala] + 74: TERMREF 21 [???] + 76: TERMREF 22 [Predef] + 78: SHAREDtype 33 + 80: ANNOTATION(16) + 82: TYPEREF 23 [SourceFile] + 84: TERMREFpkg 27 [scala[Qualified . annotation][Qualified . internal]] + 86: APPLY(10) + 88: SELECTin(6) 31 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] + 91: NEW + 92: SHAREDtype 82 + 94: SHAREDtype 82 + 96: STRINGconst 32 [] + 98: -Positions (72 bytes, starting from ): +Positions (73 bytes, starting from ): lines: 7 line sizes: - 38, 0, 23, 0, 10, 32, 0 + 38, 0, 23, 0, 10, 41, 0 positions: - 0: 40 .. 108 + 0: 40 .. 117 4: 40 .. 63 6: 47 .. 54 8: 55 .. 63 - 10: 65 .. 108 - 13: 78 .. 108 + 10: 65 .. 117 + 13: 78 .. 117 21: 71 .. 71 27: 78 .. 78 31: 78 .. 78 - 36: 78 .. 108 + 36: 78 .. 117 39: 90 .. 101 44: 93 .. 93 48: 93 .. 93 52: 93 .. 101 57: 93 .. 101 63: 93 .. 
101 - 68: 102 .. 102 - 70: 105 .. 108 - 82: 65 .. 108 - 88: 65 .. 65 + 68: 104 .. 111 + 74: 114 .. 117 + 86: 65 .. 117 92: 65 .. 65 + 96: 65 .. 65 source paths: 0: 32 [] From 36146eb6f7c5090a2be81df1164a07af1d5a5fd3 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Fri, 28 Jun 2024 18:00:23 +0200 Subject: [PATCH 250/827] Stabilise SIP-47 --- .../src/dotty/tools/dotc/config/Feature.scala | 3 +- .../dotty/tools/dotc/parsing/Parsers.scala | 7 +---- .../generalized-method-syntax.md | 10 ++----- .../runtime/stdLibPatches/language.scala | 1 + .../SignatureHelpInterleavingSuite.scala | 3 -- .../src/tests/extensionParams.scala | 2 -- .../src/tests/methodsAndConstructors.scala | 2 -- tests/neg/interleaving-ab.scala | 3 +- tests/neg/interleaving-params.scala | 1 - .../neg/interleaving-signatureCollision.scala | 1 - tests/neg/interleaving-typeApply.check | 28 +++++++++---------- tests/neg/interleaving-typeApply.scala | 3 +- tests/neg/interleaving-unmatched.scala | 1 - tests/neg/interleavingExperimental.check | 4 +++ tests/neg/interleavingExperimental.scala | 3 ++ tests/neg/namedTypeParams.check | 8 +++--- tests/neg/namedTypeParams.scala | 1 - tests/neg/overrides.scala | 6 ---- tests/pos/interleaving-ba.scala | 1 - tests/pos/interleaving-chainedParams.scala | 1 - tests/pos/interleaving-classless.scala | 1 - tests/pos/interleaving-functor.scala | 1 - tests/pos/interleaving-newline.scala | 1 - tests/pos/interleaving-overload.scala | 1 - tests/pos/interleaving-params.scala | 1 - .../pos/interleaving-signatureCollision.scala | 1 - tests/pos/interleaving-typeApply.scala | 2 -- tests/pos/interleavingExperimental.scala | 5 ++++ tests/pos/namedTypeParams.scala | 1 - tests/pos/overrides.scala | 5 ---- tests/run/interleaving.scala | 1 - 31 files changed, 38 insertions(+), 71 deletions(-) rename docs/_docs/reference/{experimental => other-new-features}/generalized-method-syntax.md (92%) create mode 100644 tests/neg/interleavingExperimental.check create mode 100644 tests/neg/interleavingExperimental.scala create mode 100644 tests/pos/interleavingExperimental.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index c04c58b419c9..fed67b380092 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -121,7 +121,8 @@ object Feature: def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) - def clauseInterleavingEnabled(using Context) = enabled(clauseInterleaving) + def clauseInterleavingEnabled(using Context) = + sourceVersion.isAtLeast(`3.6`) || enabled(clauseInterleaving) def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 4c13934f3473..07fb97191f2d 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3836,9 +3836,6 @@ object Parsers { /** DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] * | this TypelessClauses [DefImplicitClause] `=' ConstrExpr - * DefSig ::= id [DefTypeParamClause] DefTermParamClauses - * - * if clauseInterleaving is enabled: * DefSig ::= id [DefParamClauses] [DefImplicitClause] */ def defDefOrDcl(start: Offset, mods: Modifiers, numLeadParams: Int = 0): DefDef = atSpan(start, nameStart) { @@ -3878,13 +3875,11 @@ object Parsers { val ident = termIdent() var name = ident.name.asTermName val paramss = - if 
in.featureEnabled(Feature.clauseInterleaving) then - // If you are making interleaving stable manually, please refer to the PR introducing it instead, section "How to make non-experimental" + if Feature.clauseInterleavingEnabled(using in.languageImportContext) then typeOrTermParamClauses(ParamOwner.Def, numLeadParams) else val tparams = typeParamClauseOpt(ParamOwner.Def) val vparamss = termParamClauses(ParamOwner.Def, numLeadParams) - joinParams(tparams, vparamss) var tpt = fromWithinReturnType { typedOpt() } diff --git a/docs/_docs/reference/experimental/generalized-method-syntax.md b/docs/_docs/reference/other-new-features/generalized-method-syntax.md similarity index 92% rename from docs/_docs/reference/experimental/generalized-method-syntax.md rename to docs/_docs/reference/other-new-features/generalized-method-syntax.md index 072052c1ae10..2dd537cacdd8 100644 --- a/docs/_docs/reference/experimental/generalized-method-syntax.md +++ b/docs/_docs/reference/other-new-features/generalized-method-syntax.md @@ -1,15 +1,9 @@ --- layout: doc-page title: "Generalized Method Syntax" -nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/generalized-method-syntax.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/generalized-method-syntax.html --- -This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: - -```scala -import scala.language.experimental.clauseInterleaving -``` - The inclusion of using clauses is not the only way in which methods have been updated, type parameter clauses are now allowed in any number and at any position. ## Syntax Changes @@ -51,7 +45,7 @@ trait DB { } ``` -Note that simply replacing `V` by `k.Value` would not be equivalent. For example, if `k.Value` is `Some[Int]`, only the above allows: +Note that simply replacing `V` by `k.Value` would not be equivalent. For example, if `k.Value` is `Some[Int]`, only the above allows: `getOrElse(k)[Option[Int]](None)`, which returns a `Number`. 
## Details diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index d89bd9dcf72e..6272970ab0ed 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -67,6 +67,7 @@ object language: * @see [[https://github.com/scala/improvement-proposals/blob/main/content/clause-interleaving.md]] */ @compileTimeOnly("`clauseInterleaving` can only be used at compile time in import statements") + @deprecated("`clauseInterleaving` is now standard, no language import is needed", since = "3.6") object clauseInterleaving /** Experimental support for pure function type syntax diff --git a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpInterleavingSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpInterleavingSuite.scala index 15546d086033..735a2eb13fab 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpInterleavingSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpInterleavingSuite.scala @@ -8,9 +8,6 @@ import java.nio.file.Path class SignatureHelpInterleavingSuite extends BaseSignatureHelpSuite: - override protected def scalacOptions(classpath: Seq[Path]): Seq[String] = - List("-language:experimental.clauseInterleaving") - @Test def `proper-position-1` = check( """ diff --git a/scaladoc-testcases/src/tests/extensionParams.scala b/scaladoc-testcases/src/tests/extensionParams.scala index 0e2225d8aa3c..12850778c793 100644 --- a/scaladoc-testcases/src/tests/extensionParams.scala +++ b/scaladoc-testcases/src/tests/extensionParams.scala @@ -61,8 +61,6 @@ extension (using Unit)(a: Int) def f14(): Any = ??? -import scala.language.experimental.clauseInterleaving - extension (using String)(using Int)(a: Animal)(using Unit)(using Number) def f16(b: Any)[T](c: T): T = ??? diff --git a/scaladoc-testcases/src/tests/methodsAndConstructors.scala b/scaladoc-testcases/src/tests/methodsAndConstructors.scala index cddd0f56e9fe..b4c354d174c4 100644 --- a/scaladoc-testcases/src/tests/methodsAndConstructors.scala +++ b/scaladoc-testcases/src/tests/methodsAndConstructors.scala @@ -1,7 +1,5 @@ package tests.methodsAndConstructors -import scala.language.experimental.clauseInterleaving - class A class B extends A class C diff --git a/tests/neg/interleaving-ab.scala b/tests/neg/interleaving-ab.scala index e446626a2982..afdb2f0a192f 100644 --- a/tests/neg/interleaving-ab.scala +++ b/tests/neg/interleaving-ab.scala @@ -1,11 +1,10 @@ -import scala.language.experimental.clauseInterleaving object Ab: given String = "" given Double = 0 def illegal[A][B](x: A)(using B): B = summon[B] // error: Type parameter lists must be separated by a term or using parameter list - + def ab[A](x: A)[B](using B): B = summon[B] def test = ab[Int](0: Int) // error diff --git a/tests/neg/interleaving-params.scala b/tests/neg/interleaving-params.scala index dc6762cf0214..20f6bbb98d3d 100644 --- a/tests/neg/interleaving-params.scala +++ b/tests/neg/interleaving-params.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving class Params{ def bar[T](x: T)[T]: String = ??? 
// error diff --git a/tests/neg/interleaving-signatureCollision.scala b/tests/neg/interleaving-signatureCollision.scala index a6a729ed3b62..096073e7bda8 100644 --- a/tests/neg/interleaving-signatureCollision.scala +++ b/tests/neg/interleaving-signatureCollision.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object signatureCollision: def f[T](x: T)[U](y: U) = (x,y) diff --git a/tests/neg/interleaving-typeApply.check b/tests/neg/interleaving-typeApply.check index a50c1455bfbb..ca2ab6fa3f3e 100644 --- a/tests/neg/interleaving-typeApply.check +++ b/tests/neg/interleaving-typeApply.check @@ -1,29 +1,29 @@ --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:10:11 -------------------------------------------- -10 | f3[String]() // error - | ^ - | Type argument String does not conform to upper bound Int - | - | longer explanation available when compiling with `-explain` --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:11:16 -------------------------------------------- -11 | f5[Int][Unit] // error +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:9:11 --------------------------------------------- +9 | f3[String]() // error + | ^ + | Type argument String does not conform to upper bound Int + | + | longer explanation available when compiling with `-explain` +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:10:16 -------------------------------------------- +10 | f5[Int][Unit] // error | ^ | Type argument Unit does not conform to upper bound String | | longer explanation available when compiling with `-explain` --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:19 -------------------------------------------- -12 | f5[String][Unit] // error // error +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:11:19 -------------------------------------------- +11 | f5[String][Unit] // error // error | ^ | Type argument Unit does not conform to upper bound String | | longer explanation available when compiling with `-explain` --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:11 -------------------------------------------- -12 | f5[String][Unit] // error // error +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:11:11 -------------------------------------------- +11 | f5[String][Unit] // error // error | ^ | Type argument String does not conform to upper bound Int | | longer explanation available when compiling with `-explain` --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:13:11 -------------------------------------------- -13 | f7[String]()[Unit] // error +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:11 -------------------------------------------- +12 | f7[String]()[Unit] // error | ^ | Type argument String does not conform to upper bound Int | diff --git a/tests/neg/interleaving-typeApply.scala b/tests/neg/interleaving-typeApply.scala index ad21fe2f0329..5ad6e3dc148e 100644 --- a/tests/neg/interleaving-typeApply.scala +++ b/tests/neg/interleaving-typeApply.scala @@ -1,7 +1,6 @@ -import scala.language.experimental.clauseInterleaving object typeApply: - + def f3[T <: Int](using DummyImplicit)[U <: String](): T => T = ??? def f5[T <: Int](using DummyImplicit)[U <: String]: [X <: Unit] => X => X = ??? def f7[T <: Int](using DummyImplicit)[U <: String]()[X <: Unit]: X => X = ??? 
diff --git a/tests/neg/interleaving-unmatched.scala b/tests/neg/interleaving-unmatched.scala index 2ce3074d07fa..3a4371798a50 100644 --- a/tests/neg/interleaving-unmatched.scala +++ b/tests/neg/interleaving-unmatched.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object unmatched: def f1[T (x: T)] = ??? // error diff --git a/tests/neg/interleavingExperimental.check b/tests/neg/interleavingExperimental.check new file mode 100644 index 000000000000..a5e10506bdc3 --- /dev/null +++ b/tests/neg/interleavingExperimental.check @@ -0,0 +1,4 @@ +-- [E040] Syntax Error: tests/neg/interleavingExperimental.scala:3:15 -------------------------------------------------- +3 |def ba[A](x: A)[B](using B): B = summon[B] // error: clauseInterleaving was experimental until 3.6 + | ^ + | '=' expected, but '[' found diff --git a/tests/neg/interleavingExperimental.scala b/tests/neg/interleavingExperimental.scala new file mode 100644 index 000000000000..ed13707fcb68 --- /dev/null +++ b/tests/neg/interleavingExperimental.scala @@ -0,0 +1,3 @@ +//> using options --source 3.5 + +def ba[A](x: A)[B](using B): B = summon[B] // error: clauseInterleaving was experimental until 3.6 diff --git a/tests/neg/namedTypeParams.check b/tests/neg/namedTypeParams.check index 5e0672f20f25..f203f482d117 100644 --- a/tests/neg/namedTypeParams.check +++ b/tests/neg/namedTypeParams.check @@ -92,11 +92,11 @@ | illegal repeated type application | You might have meant something like: | Test.f[Y = String, Int] --- [E102] Syntax Error: tests/neg/namedTypeParams.scala:33:9 ----------------------------------------------------------- -33 | f2[Y = String][X = Int](1, "") // error: Y is undefined +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:32:9 ----------------------------------------------------------- +32 | f2[Y = String][X = Int](1, "") // error: Y is undefined | ^^^^^^ | Type parameter Y is undefined. Expected one of X. --- [E102] Syntax Error: tests/neg/namedTypeParams.scala:34:9 ----------------------------------------------------------- -34 | f2[Y = String](1, "") // error: Y is undefined +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:33:9 ----------------------------------------------------------- +33 | f2[Y = String](1, "") // error: Y is undefined | ^^^^^^ | Type parameter Y is undefined. Expected one of X. diff --git a/tests/neg/namedTypeParams.scala b/tests/neg/namedTypeParams.scala index 53ef14188e12..489ac1e8cdb6 100644 --- a/tests/neg/namedTypeParams.scala +++ b/tests/neg/namedTypeParams.scala @@ -27,7 +27,6 @@ object Test: object TestInterleaving: import language.experimental.namedTypeArguments - import language.experimental.clauseInterleaving def f2[X](using DummyImplicit)[Y](x: X, y: Y): Int = ??? f2[Y = String][X = Int](1, "") // error: Y is undefined diff --git a/tests/neg/overrides.scala b/tests/neg/overrides.scala index 8016f5646d09..c8f577103a6a 100644 --- a/tests/neg/overrides.scala +++ b/tests/neg/overrides.scala @@ -44,8 +44,6 @@ class A[T] { def next: T = ??? - import scala.language.experimental.clauseInterleaving - def b[U <: T](x: Int)[V >: T](y: String) = false } @@ -57,8 +55,6 @@ class B extends A[Int] { override def next(): Int = ??? // error: incompatible type - import scala.language.experimental.clauseInterleaving - override def b[T <: Int](x: Int)(y: String) = true // error } @@ -68,8 +64,6 @@ class C extends A[String] { override def next: Int = ??? 
// error: incompatible type - import scala.language.experimental.clauseInterleaving - override def b[T <: String](x: Int)[U >: Int](y: String) = true // error: incompatible type } diff --git a/tests/pos/interleaving-ba.scala b/tests/pos/interleaving-ba.scala index 69fe2d9537a0..4a7d721c804e 100644 --- a/tests/pos/interleaving-ba.scala +++ b/tests/pos/interleaving-ba.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object BA { given String = "" diff --git a/tests/pos/interleaving-chainedParams.scala b/tests/pos/interleaving-chainedParams.scala index a54885d28002..02dc7a5ccc9c 100644 --- a/tests/pos/interleaving-chainedParams.scala +++ b/tests/pos/interleaving-chainedParams.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object chainedParams{ diff --git a/tests/pos/interleaving-classless.scala b/tests/pos/interleaving-classless.scala index 5aec92db3409..bddfc821385d 100644 --- a/tests/pos/interleaving-classless.scala +++ b/tests/pos/interleaving-classless.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving def f1[T]()[U](x: T, y: U): (T, U) = (x, y) def f2[T](x: T)[U](y: U): (T, U) = (x, y) diff --git a/tests/pos/interleaving-functor.scala b/tests/pos/interleaving-functor.scala index 35bed59f77f0..b588e35f60a2 100644 --- a/tests/pos/interleaving-functor.scala +++ b/tests/pos/interleaving-functor.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object functorInterleaving: //taken from https://dotty.epfl.ch/docs/reference/contextual/type-classes.html diff --git a/tests/pos/interleaving-newline.scala b/tests/pos/interleaving-newline.scala index de8fb98a2f81..d71bdc910de2 100644 --- a/tests/pos/interleaving-newline.scala +++ b/tests/pos/interleaving-newline.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object newline { def multipleLines diff --git a/tests/pos/interleaving-overload.scala b/tests/pos/interleaving-overload.scala index 1902551f9036..e1c3db1abe37 100644 --- a/tests/pos/interleaving-overload.scala +++ b/tests/pos/interleaving-overload.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving class A{ diff --git a/tests/pos/interleaving-params.scala b/tests/pos/interleaving-params.scala index 36963ff2e123..b12608f4b291 100644 --- a/tests/pos/interleaving-params.scala +++ b/tests/pos/interleaving-params.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving class Params{ type U diff --git a/tests/pos/interleaving-signatureCollision.scala b/tests/pos/interleaving-signatureCollision.scala index 77190284ae6d..be016e7bdbfe 100644 --- a/tests/pos/interleaving-signatureCollision.scala +++ b/tests/pos/interleaving-signatureCollision.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving import scala.annotation.targetName object signatureCollision: diff --git a/tests/pos/interleaving-typeApply.scala b/tests/pos/interleaving-typeApply.scala index 3c669cc76bfc..d8a7fd5d2ec1 100644 --- a/tests/pos/interleaving-typeApply.scala +++ b/tests/pos/interleaving-typeApply.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object typeApply: @@ -12,7 +11,6 @@ object typeApply: def f7[T <: Int](using DummyImplicit)[U <: String]()[X <: Unit]: X => X = ??? 
@main def test = { - import scala.language.experimental.namedTypeArguments f0[Int][String] f1[Int][String] f2[Int][String]() diff --git a/tests/pos/interleavingExperimental.scala b/tests/pos/interleavingExperimental.scala new file mode 100644 index 000000000000..63227ef1ebfe --- /dev/null +++ b/tests/pos/interleavingExperimental.scala @@ -0,0 +1,5 @@ +//> using options --source 3.5 + +import scala.language.experimental.clauseInterleaving + +def ba[A](x: A)[B](using B): B = summon[B] \ No newline at end of file diff --git a/tests/pos/namedTypeParams.scala b/tests/pos/namedTypeParams.scala index 388bcfa98bef..d538bef52a69 100644 --- a/tests/pos/namedTypeParams.scala +++ b/tests/pos/namedTypeParams.scala @@ -11,7 +11,6 @@ object Test { } object TestInterleaving{ - import language.experimental.clauseInterleaving def f2[X](using DummyImplicit)[Y](x: X, y: Y): Int = ??? f2[X = Int][Y = String](1, "") diff --git a/tests/pos/overrides.scala b/tests/pos/overrides.scala index c3b6235d7c1f..e56c4c941a7f 100644 --- a/tests/pos/overrides.scala +++ b/tests/pos/overrides.scala @@ -4,8 +4,6 @@ class A[T] { def f(x: T)(y: T = x) = y - import scala.language.experimental.clauseInterleaving - def b[U <: T](x: Int)[V >: T](y: String) = false } @@ -15,9 +13,6 @@ class B extends A[Int] { f(2)() - - import scala.language.experimental.clauseInterleaving - override def b[T <: Int](x: Int)[U >: Int](y: String) = true } diff --git a/tests/run/interleaving.scala b/tests/run/interleaving.scala index 6749e59168bc..cc52528486b1 100644 --- a/tests/run/interleaving.scala +++ b/tests/run/interleaving.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object Test extends App { trait Key { type Value } From bedb0f843ffa49bb771f8db199f76057f5bd9a23 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Wed, 3 Jul 2024 20:03:30 +0200 Subject: [PATCH 251/827] Add 3.7 and 3.7-migration --- .../dotty/tools/dotc/config/SourceVersion.scala | 1 + .../scala/runtime/stdLibPatches/language.scala | 15 +++++++++++++++ tests/pos/source-import-3-7-migration.scala | 1 + tests/pos/source-import-3-7.scala | 1 + 4 files changed, 18 insertions(+) create mode 100644 tests/pos/source-import-3-7-migration.scala create mode 100644 tests/pos/source-import-3-7.scala diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 38df682de771..caf1187614b7 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -12,6 +12,7 @@ enum SourceVersion: case `3.4-migration`, `3.4` case `3.5-migration`, `3.5` case `3.6-migration`, `3.6` + case `3.7-migration`, `3.7` // !!! Keep in sync with scala.runtime.stdlibPatches.language !!! case `future-migration`, `future` diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index d89bd9dcf72e..9e08b82b4082 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -300,6 +300,21 @@ object language: @compileTimeOnly("`3.6` can only be used at compile time in import statements") object `3.6` + /** Set source version to 3.7-migration. 
+ * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.7-migration` can only be used at compile time in import statements") + object `3.7-migration` + + /** Set source version to 3.7 + * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.7` can only be used at compile time in import statements") + object `3.7` + + // !!! Keep in sync with dotty.tools.dotc.config.SourceVersion !!! // Also add tests in `tests/pos/source-import-3-x.scala` and `tests/pos/source-import-3-x-migration.scala` diff --git a/tests/pos/source-import-3-7-migration.scala b/tests/pos/source-import-3-7-migration.scala new file mode 100644 index 000000000000..2e80fcb0bab2 --- /dev/null +++ b/tests/pos/source-import-3-7-migration.scala @@ -0,0 +1 @@ +import language.`3.7-migration` \ No newline at end of file diff --git a/tests/pos/source-import-3-7.scala b/tests/pos/source-import-3-7.scala new file mode 100644 index 000000000000..7fa68fd496f6 --- /dev/null +++ b/tests/pos/source-import-3-7.scala @@ -0,0 +1 @@ +import language.`3.7` \ No newline at end of file From 91bccfb810287ddfd45e65e88ddb6f5fb0c43ce6 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Wed, 3 Jul 2024 20:05:32 +0200 Subject: [PATCH 252/827] Add missing source-import tests --- tests/pos/source-import-3-5-migration.scala | 1 + tests/pos/source-import-3-5.scala | 1 + tests/pos/source-import-3-6-migration.scala | 1 + tests/pos/source-import-3-6.scala | 1 + 4 files changed, 4 insertions(+) create mode 100644 tests/pos/source-import-3-5-migration.scala create mode 100644 tests/pos/source-import-3-5.scala create mode 100644 tests/pos/source-import-3-6-migration.scala create mode 100644 tests/pos/source-import-3-6.scala diff --git a/tests/pos/source-import-3-5-migration.scala b/tests/pos/source-import-3-5-migration.scala new file mode 100644 index 000000000000..d47e0307473e --- /dev/null +++ b/tests/pos/source-import-3-5-migration.scala @@ -0,0 +1 @@ +import language.`3.5-migration` \ No newline at end of file diff --git a/tests/pos/source-import-3-5.scala b/tests/pos/source-import-3-5.scala new file mode 100644 index 000000000000..615ae8638c24 --- /dev/null +++ b/tests/pos/source-import-3-5.scala @@ -0,0 +1 @@ +import language.`3.5` \ No newline at end of file diff --git a/tests/pos/source-import-3-6-migration.scala b/tests/pos/source-import-3-6-migration.scala new file mode 100644 index 000000000000..d566362cfe41 --- /dev/null +++ b/tests/pos/source-import-3-6-migration.scala @@ -0,0 +1 @@ +import language.`3.6-migration` \ No newline at end of file diff --git a/tests/pos/source-import-3-6.scala b/tests/pos/source-import-3-6.scala new file mode 100644 index 000000000000..1d85eea86f54 --- /dev/null +++ b/tests/pos/source-import-3-6.scala @@ -0,0 +1 @@ +import language.`3.6` \ No newline at end of file From 32b5843eee864d166ce78a1031504bc5429dd082 Mon Sep 17 00:00:00 2001 From: Florian3k Date: Wed, 3 Jul 2024 20:38:00 +0200 Subject: [PATCH 253/827] apply fixes from review --- library-aux/src/scala/AnyKind.scala | 2 +- library-aux/src/scala/Matchable.scala | 2 +- library-aux/src/scala/andType.scala | 4 ++-- library-aux/src/scala/orType.scala | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/library-aux/src/scala/AnyKind.scala b/library-aux/src/scala/AnyKind.scala index 02c4d3747bcc..56d51be114ea 100644 --- a/library-aux/src/scala/AnyKind.scala +++ b/library-aux/src/scala/AnyKind.scala @@ -2,6 +2,6 @@ package 
scala /** The super-type of all types. * - * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. + * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. */ final abstract class AnyKind diff --git a/library-aux/src/scala/Matchable.scala b/library-aux/src/scala/Matchable.scala index 5cef77929417..598ded9d3bc3 100644 --- a/library-aux/src/scala/Matchable.scala +++ b/library-aux/src/scala/Matchable.scala @@ -2,6 +2,6 @@ package scala /** The base trait of types that can be safely pattern matched against. * - * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. + * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. */ trait Matchable diff --git a/library-aux/src/scala/andType.scala b/library-aux/src/scala/andType.scala index 968cc2e9a8fa..de3c3ff36bc5 100644 --- a/library-aux/src/scala/andType.scala +++ b/library-aux/src/scala/andType.scala @@ -2,6 +2,6 @@ package scala /** The intersection of two types. * - * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. + * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. */ -type &[A, B] = A & B +type &[A, B] diff --git a/library-aux/src/scala/orType.scala b/library-aux/src/scala/orType.scala index b6dc8ea2563c..ff1947a9498e 100644 --- a/library-aux/src/scala/orType.scala +++ b/library-aux/src/scala/orType.scala @@ -2,6 +2,6 @@ package scala /** The union of two types. * - * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. + * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. */ -type |[A, B] = A | B +type |[A, B] From 97b8e377050fb236c22c154e6e53501b028b2150 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Thu, 4 Jul 2024 02:00:23 +0200 Subject: [PATCH 254/827] Add workflow to release to winget --- .github/workflows/publish-winget.yml | 36 ++++++++++++++++++++++++++++ .github/workflows/releases.yml | 9 ++++++- 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/publish-winget.yml diff --git a/.github/workflows/publish-winget.yml b/.github/workflows/publish-winget.yml new file mode 100644 index 000000000000..8515f95ad799 --- /dev/null +++ b/.github/workflows/publish-winget.yml @@ -0,0 +1,36 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO PUBLISH SCALA TO WINGET ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL PUBLISH TO WINGET THE MSI ### +### ### +### NOTE: ### +### - WE SHOULD KEEP IN SYNC THE https://github.com/dottybot/winget-pkgs REPOSITORY ### +################################################################################################### + + +name: Publish Scala to winget +run-name: Publish Scala ${{ inputs.version }} to winget + +on: + workflow_call: + inputs: + version: + required: true + type: string + secrets: + DOTTYBOT-TOKEN: + required: true + +jobs: + publish: + runs-on: windows-latest + steps: + - uses: vedantmgoyal9/winget-releaser@b87a066d9e624db1394edcd947f8c4e5a7e30cd7 + with: + identifier : Scala.Scala.3 + version : ${{ inputs.version }} + installers-regex: '\.msi$' + release-tag : ${{ inputs.version }} + fork-user : dottybot + token : ${{ secrets.DOTTYBOT-WINGET-TOKEN }} \ No newline at end of file diff --git a/.github/workflows/releases.yml 
b/.github/workflows/releases.yml index 4b75dd1b737d..a4977bc5ffd9 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -29,5 +29,12 @@ jobs: secrets: CONSUMER-KEY: ${{ secrets.SDKMAN_KEY }} CONSUMER-TOKEN: ${{ secrets.SDKMAN_TOKEN }} - + + publish-winget: + uses: ./.github/workflows/publish-winget.yml + with: + version: ${{ inputs.version }} + secrets: + DOTTYBOT-TOKEN: ${{ secrets.DOTTYBOT_WINGET_TOKEN }} + # TODO: ADD RELEASE WORKFLOW TO CHOCOLATEY AND OTHER PACKAGE MANAGERS HERE \ No newline at end of file From 6d5388bcf8613f7a951c7b3617717f4138ffb4a9 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Thu, 4 Jul 2024 15:01:11 +0200 Subject: [PATCH 255/827] Update .github/workflows/publish-winget.yml Co-authored-by: Piotr Chabelski --- .github/workflows/publish-winget.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-winget.yml b/.github/workflows/publish-winget.yml index 8515f95ad799..03ebc5d0fa7d 100644 --- a/.github/workflows/publish-winget.yml +++ b/.github/workflows/publish-winget.yml @@ -2,7 +2,7 @@ ### THIS IS A REUSABLE WORKFLOW TO PUBLISH SCALA TO WINGET ### ### HOW TO USE: ### ### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### -### - IT WILL PUBLISH TO WINGET THE MSI ### +### - IT WILL PUBLISH THE MSI TO WINGET ### ### ### ### NOTE: ### ### - WE SHOULD KEEP IN SYNC THE https://github.com/dottybot/winget-pkgs REPOSITORY ### From fc1ce142af7c8a36ea00de0531e5554eb0217c24 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 2 May 2024 15:36:19 -0700 Subject: [PATCH 256/827] Supplement structural givens doc --- docs/_docs/reference/contextual/givens.md | 41 ++++++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index bf018278c9fc..5499fc39b53c 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -88,6 +88,45 @@ given (using config: Config): Factory = MemoizingFactory(config) An alias given can have type parameters and context parameters just like any other given, but it can only implement a single type. +## Abstract Givens + +A given may be an abstract member, with the restriction that it must have an explicit name. + +```scala +trait HasOrd[T]: + given ord: Ord[T] +``` + +## More Structural Givens + +If an alias given instance is analogous to a lazy val, +and a structural given instance is analogous to an object, +albeit an object with an explicit type, +then a structural given may also be specified without an explicit type: + +```scala +class IntOrd extends Ord[Int]: + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given IntOrd() +``` + +Compare this syntax to: + +```scala +object intOrd extends IntOrd() +``` + +The empty parentheses are optional in the extends clause when defining a class, +but are required when defining a given. + +Further mixins are allowed as usual: + +```scala +given IntOrd() with OrdOps[Int] +``` + ## Given Macros Given aliases can have the `inline` and `transparent` modifiers. @@ -191,4 +230,4 @@ of given instances: - A _structural instance_ contains one or more types or constructor applications, followed by `with` and a template body that contains member definitions of the instance. - An _alias instance_ contains a type, followed by `=` and a right-hand side expression. -- An _abstract instance_ contains just the type, which is not followed by anything. 
+- An _abstract instance_ contains just the name and type, which is not followed by anything. From f2829c3fab28cc6ab47a5627abda855884476572 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 4 Jul 2024 19:15:59 +0200 Subject: [PATCH 257/827] Fix isomorphism tests of `AndOrType`s under non-empty `BinderPairs` (#21017) Before the changes, when comparing two `HKTypeLambda` result types, the list of binding pairs was lost when entering comparison of `AndOrType`s, which caused the `equals` to fail, and hence prevented hash-consing. Even though `M1` and `M2` in pos/i20858-min should still conform to one-another, we entered a deep-subtype comparison because of the order in which the TypeComparer does dealiasing of AppliedTypes, and comparison of MatchCases and AndTypes. Fix #20858 --- .../src/dotty/tools/dotc/core/Types.scala | 10 +++++++ tests/pos/i20858-min.scala | 10 +++++++ tests/pos/i20858/defns_1.scala | 27 +++++++++++++++++++ tests/pos/i20858/usages_2.scala | 2 ++ 4 files changed, 49 insertions(+) create mode 100644 tests/pos/i20858-min.scala create mode 100644 tests/pos/i20858/defns_1.scala create mode 100644 tests/pos/i20858/usages_2.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index efb353c4050c..62880b14cdeb 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3544,6 +3544,8 @@ object Types extends TypeUtils { else this match case tp: OrType => OrType.make(tp1, tp2, tp.isSoft) case tp: AndType => AndType.make(tp1, tp2, checkValid = true) + + override def hashIsStable: Boolean = tp1.hashIsStable && tp2.hashIsStable } abstract case class AndType(tp1: Type, tp2: Type) extends AndOrType { @@ -3589,6 +3591,10 @@ object Types extends TypeUtils { case that: AndType => tp1.eq(that.tp1) && tp2.eq(that.tp2) case _ => false } + + override protected def iso(that: Any, bs: BinderPairs) = that match + case that: AndType => tp1.equals(that.tp1, bs) && tp2.equals(that.tp2, bs) + case _ => false } final class CachedAndType(tp1: Type, tp2: Type) extends AndType(tp1, tp2) @@ -3741,6 +3747,10 @@ object Types extends TypeUtils { case that: OrType => tp1.eq(that.tp1) && tp2.eq(that.tp2) && isSoft == that.isSoft case _ => false } + + override protected def iso(that: Any, bs: BinderPairs) = that match + case that: OrType => tp1.equals(that.tp1, bs) && tp2.equals(that.tp2, bs) && isSoft == that.isSoft + case _ => false } final class CachedOrType(tp1: Type, tp2: Type, override val isSoft: Boolean) extends OrType(tp1, tp2) diff --git a/tests/pos/i20858-min.scala b/tests/pos/i20858-min.scala new file mode 100644 index 000000000000..9c47b04031e6 --- /dev/null +++ b/tests/pos/i20858-min.scala @@ -0,0 +1,10 @@ + +type M[F[_,_]] = Int match + case 0 => String & M[F] + +type M1 = M[[x,y] =>> x | y] +type M2 = M[[x,y] =>> x | y] + +def Test: Unit = + val x: M1 = ??? + val _: M2 = x // was error diff --git a/tests/pos/i20858/defns_1.scala b/tests/pos/i20858/defns_1.scala new file mode 100644 index 000000000000..7b4b84745b58 --- /dev/null +++ b/tests/pos/i20858/defns_1.scala @@ -0,0 +1,27 @@ +import scala.compiletime.* +import scala.deriving.* + +sealed trait ZIO[-R, +E, +A] +sealed abstract class ZLayer[-RIn, +E, +ROut] +object ZLayer: + def apply[RIn, E, ROut](zio: => ZIO[RIn, E, ROut]): ZLayer[RIn, E, ROut] = ??? 
+type URIO[-R, +A] = ZIO[R, Nothing, A] +type IAnyType[T <: Tuple] = Tuple.Fold[T, Any, [x, y] =>> x & y] +type UAnyType[T <: Tuple] = Tuple.Fold[T, Any, [x, y] =>> x | y] + + +trait AutoLayer[A]: + def zlayer(using + p: Mirror.ProductOf[A] + ): ZLayer[IAnyType[p.MirroredElemTypes], Nothing, A] + +object AutoLayer: + inline given derived[A](using p: Mirror.ProductOf[A]): AutoLayer[A] = { + val a: ZIO[IAnyType[p.MirroredElemTypes], Nothing, A] = ??? + new AutoLayer[A]: + override def zlayer(using + pp: Mirror.ProductOf[A] + ): ZLayer[IAnyType[pp.MirroredElemTypes], Nothing, A] = ZLayer { + a.asInstanceOf[ZIO[IAnyType[pp.MirroredElemTypes], Nothing, A]] + } + } \ No newline at end of file diff --git a/tests/pos/i20858/usages_2.scala b/tests/pos/i20858/usages_2.scala new file mode 100644 index 000000000000..3a05ba54e97a --- /dev/null +++ b/tests/pos/i20858/usages_2.scala @@ -0,0 +1,2 @@ + +case class TestService(port: Int) derives AutoLayer // was error From 2d0e37353defcec46206e9f0845c738286aabce5 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 5 Jul 2024 20:24:44 +0200 Subject: [PATCH 258/827] Do not propagate `TypeError`s of ops from `TypeComparer#tryAlso` In pos-deep-subtype/i21015.scala:30, we ask the TypeComparer if `M1[Int] <:< M1[A]` `isMatchingApply` first tries `isSubArgs` which succeeds, but then also checks if a weaker constraint is generated by `recur(tp1.superTypeNormalized, tp2.superTypeNormalized)`. The latter throws a `RecursionOverflow` which, before the changes, bypassed the former successful check, and failed the overall subtype test. Fix #21015 --- .../dotty/tools/dotc/core/TypeComparer.scala | 3 +- tests/pos-deep-subtype/i21015.scala | 36 +++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 tests/pos-deep-subtype/i21015.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index c53c2238a095..dca8bf206bac 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -1954,7 +1954,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // check whether `op2` generates a weaker constraint than `op1` val leftConstraint = constraint constraint = preConstraint - if !(op && subsumes(leftConstraint, constraint, preConstraint)) then + val res = try op catch case _: TypeError => false + if !(res && subsumes(leftConstraint, constraint, preConstraint)) then if constr != noPrinter && !subsumes(constraint, leftConstraint, preConstraint) then constr.println(i"CUT - prefer $leftConstraint over $constraint") constraint = leftConstraint diff --git a/tests/pos-deep-subtype/i21015.scala b/tests/pos-deep-subtype/i21015.scala new file mode 100644 index 000000000000..390462f19df4 --- /dev/null +++ b/tests/pos-deep-subtype/i21015.scala @@ -0,0 +1,36 @@ + +type Init[Coll[_], A, T <: Tuple] = T match + case EmptyTuple => A + case head *: rest => InitCons[Coll, A, head, rest] + +type InitCons[Coll[_], A, H, Rest <: Tuple] = H match + case Int => Init[Coll, Coll[A], Rest] + case _ => Unit + +def fillVector[A, T <: Tuple](dims: T)(x: => A): Init[Vector, A, T] = + dims match + case _: EmptyTuple => x + case (p : (head *: rest)) => + val (head *: rest) = p + head match + case size: Int => fillVector(rest)(Vector.fill(size)(x)) + case _ => () + + +object Minimization: + + type M1[A] = Int match + case 1 => M2[A] + + type M2[A] = Int match + case 2 => M1[Option[A]] + + def m1[A](x: A): 
M1[A] = ??? + + val _: M1[Int] = m1(1) // was error + val _: M1[Int] = m1[Int](1) // ok + val _: M1[Int] = + val x = m1(1) + x // ok + +end Minimization From 78b3f4aa9b43b6009ad08d4fb5ce5c9085e2ee6e Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 8 Jul 2024 15:22:57 +0200 Subject: [PATCH 259/827] Use pathing jars in cli commands --- dist/bin/common | 20 ++------------------ dist/bin/common.bat | 2 +- dist/bin/scalac.bat | 31 ++----------------------------- dist/bin/scaladoc | 11 ++--------- dist/bin/scaladoc.bat | 27 +-------------------------- project/Build.scala | 2 +- project/RepublishPlugin.scala | 14 +++++++++++--- 7 files changed, 20 insertions(+), 87 deletions(-) diff --git a/dist/bin/common b/dist/bin/common index 63e598d70d7e..2de8bdf9f99a 100644 --- a/dist/bin/common +++ b/dist/bin/common @@ -6,25 +6,9 @@ source "$PROG_HOME/bin/common-shared" # * The code below is for Dotty # *-------------------------------------------------*/ -load_classpath () { - command="$1" - psep_pattern="$2" - __CLASS_PATH="" - while IFS= read -r line || [ -n "$line" ]; do - # jna-5 only appropriate for some combinations - if ! [[ ( -n ${conemu-} || -n ${msys-}) && "$line" == "*jna-5*" ]]; then - if [ -n "$__CLASS_PATH" ]; then - __CLASS_PATH+="$psep_pattern" - fi - __CLASS_PATH+="$PROG_HOME/maven2/$line" - fi - done < "$PROG_HOME/etc/$command.classpath" - echo "$__CLASS_PATH" -} - compilerJavaClasspathArgs () { - toolchain="$(load_classpath "scala" "$PSEP")" - toolchain_extra="$(load_classpath "with_compiler" "$PSEP")" + toolchain="$PROG_HOME/lib/scala.jar" + toolchain_extra="$PROG_HOME/lib/with_compiler.jar" if [ -n "$toolchain_extra" ]; then toolchain+="$PSEP$toolchain_extra" diff --git a/dist/bin/common.bat b/dist/bin/common.bat index f9c35e432b36..510771d43b6e 100644 --- a/dist/bin/common.bat +++ b/dist/bin/common.bat @@ -38,6 +38,6 @@ if not defined _PROG_HOME ( set _EXITCODE=1 goto :eof ) -set "_ETC_DIR=%_PROG_HOME%\etc" +set "_LIB_DIR=%_PROG_HOME%\lib" set _PSEP=; diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index dbcbaf11b8e2..7ad368582127 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -88,17 +88,8 @@ goto :eof @rem output parameter: _JVM_CP_ARGS :compilerJavaClasspathArgs - -set "CP_FILE=%_ETC_DIR%\scala.classpath" -call :loadClasspathFromFile %CP_FILE% -set "__TOOLCHAIN=%_CLASS_PATH_RESULT%" - -set "CP_FILE=%_ETC_DIR%\with_compiler.classpath" -call :loadClasspathFromFile %CP_FILE% - -if defined _CLASS_PATH_RESULT ( - set "__TOOLCHAIN=%__TOOLCHAIN%%_PSEP%%_CLASS_PATH_RESULT%" -) +set "__TOOLCHAIN=%_LIB_DIR%\scala.jar" +set "__TOOLCHAIN=%__TOOLCHAIN%%_PSEP%%_LIB_DIR%\with_compiler.jar%" if defined _SCALA_CPATH ( set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" @@ -107,24 +98,6 @@ if defined _SCALA_CPATH ( ) goto :eof -@REM concatentate every line in "%_ARG_FILE%" with _PSEP -@REM arg 1 - file to read -:loadClasspathFromFile -set _ARG_FILE=%1 -set _CLASS_PATH_RESULT= -if exist "%_ARG_FILE%" ( - for /f "usebackq delims=" %%i in ("%_ARG_FILE%") do ( - set "_LIB=%_PROG_HOME%\maven2\%%i" - set "_LIB=!_LIB:/=\!" - if not defined _CLASS_PATH_RESULT ( - set "_CLASS_PATH_RESULT=!_LIB!" - ) else ( - set "_CLASS_PATH_RESULT=!_CLASS_PATH_RESULT!%_PSEP%!_LIB!" 
- ) - ) -) -goto :eof - @rem ######################################################################### @rem ## Cleanups diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 15bc0813f93a..f4ef37af00ee 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -36,6 +36,7 @@ CompilerMain=dotty.tools.dotc.Main DecompilerMain=dotty.tools.dotc.decompiler.Main ReplMain=dotty.tools.repl.Main ScriptingMain=dotty.tools.scripting.Main +JVM_CP_ARGS="$PROG_HOME/lib/scaladoc.jar" PROG_NAME=$CompilerMain @@ -52,12 +53,6 @@ addScrip() { script_args+=("'$1'") } -classpathArgs () { - CLASS_PATH="$(load_classpath "scaladoc" "$PSEP")" - - jvm_cp_args="-classpath \"$CLASS_PATH\"" -} - #for A in "$@" ; do echo "A[$A]" ; done ; exit 2 while [[ $# -gt 0 ]]; do @@ -79,12 +74,10 @@ case "$1" in esac done -classpathArgs - eval "\"$JAVACMD\"" \ ${JAVA_OPTS:-$default_java_opts} \ "${java_args[@]}" \ - "${jvm_cp_args-}" \ + -classpath "${JVM_CP_ARGS}" \ -Dscala.usejavacp=true \ "dotty.tools.scaladoc.Main" \ "${scala_args[@]}" \ diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index 16433a83f501..fe4055633e02 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -21,8 +21,6 @@ call :args %* @rem ######################################################################### @rem ## Main -call :classpathArgs - if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS% ) else ( set _JAVA_OPTS=%_DEFAULT_JAVA_OPTS% ) @@ -31,7 +29,7 @@ if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS% set "_JAVACMD=!_JAVACMD:%%=%%%%!" call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% ^ --classpath "%_CLASS_PATH%" ^ +-classpath "%_LIB_DIR%\scaladoc.jar" ^ -Dscala.usejavacp=true ^ dotty.tools.scaladoc.Main %_SCALA_ARGS% %_RESIDUAL_ARGS% if not %ERRORLEVEL%==0 ( @@ -103,29 +101,6 @@ goto :eof set _RESIDUAL_ARGS=%_RESIDUAL_ARGS% %~1 goto :eof -@rem output parameter: _CLASS_PATH -:classpathArgs -set "_ETC_DIR=%_PROG_HOME%\etc" -@rem keep list in sync with bash script `bin\scaladoc` ! -call :loadClasspathFromFile -goto :eof - -@REM concatentate every line in "%_ETC_DIR%\scaladoc.classpath" with _PSEP -:loadClasspathFromFile -set _CLASS_PATH= -if exist "%_ETC_DIR%\scaladoc.classpath" ( - for /f "usebackq delims=" %%i in ("%_ETC_DIR%\scaladoc.classpath") do ( - set "_LIB=%_PROG_HOME%\maven2\%%i" - set "_LIB=!_LIB:/=\!" - if not defined _CLASS_PATH ( - set "_CLASS_PATH=!_LIB!" - ) else ( - set "_CLASS_PATH=!_CLASS_PATH!%_PSEP%!_LIB!" 
- ) - ) -) -goto :eof - @rem ######################################################################### @rem ## Cleanups diff --git a/project/Build.scala b/project/Build.scala index 28146989e40c..54b4a6bf7801 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2134,7 +2134,7 @@ object Build { republishRepo := target.value / "republish", packResourceDir += (republishRepo.value / "bin" -> "bin"), packResourceDir += (republishRepo.value / "maven2" -> "maven2"), - packResourceDir += (republishRepo.value / "etc" -> "etc"), + packResourceDir += (republishRepo.value / "lib" -> "lib"), republishCommandLibs += ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), republishCommandLibs += diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index e4bf40545a6b..8b95c6423e68 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -213,16 +213,24 @@ object RepublishPlugin extends AutoPlugin { val classpaths = coursierFetch(coursierJar, log, csrCacheDir, localRepo, resolvedLocal.map(_.id.toString)) if (commandLibs.nonEmpty) { - IO.createDirectory(republishDir / "etc") + IO.createDirectory(republishDir / "lib") for ((command, libs) <- commandLibs) { val (negated, actual) = libs.partition(_.startsWith("^!")) val subtractions = negated.map(_.stripPrefix("^!")) def compose(libs: List[String]): List[String] = libs.map(fuzzyFind(classpaths, _)).reduceOption(_ ++ _).map(_.distinct).getOrElse(Nil) - + + // Compute the classpath entries val entries = compose(actual).diff(compose(subtractions)) - IO.write(republishDir / "etc" / s"$command.classpath", entries.mkString("\n")) + // Generate the MANIFEST for the pathing jar + val manifest = new java.util.jar.Manifest(); + manifest.getMainAttributes().put(java.util.jar.Attributes.Name.MANIFEST_VERSION, "1.0"); + manifest.getMainAttributes().put(java.util.jar.Attributes.Name.CLASS_PATH, entries.map(e => s"../maven2/$e").mkString(" ")) + // Write the pathing jar to the Disk + val file = republishDir / "lib" / s"$command.jar" + val jar = new java.util.jar.JarOutputStream(new java.io.FileOutputStream(file), manifest) + jar.close() } } From c9b9ad4832860059d623eab406c3de0eed8bbf20 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Jul 2024 14:13:45 +0200 Subject: [PATCH 260/827] Refine implicit priority change warnings Fixes #21036 Fixes #20572 --- .../dotty/tools/dotc/typer/Implicits.scala | 32 +++++++++++++------ tests/neg/given-triangle.check | 4 +++ tests/{warn => neg}/given-triangle.scala | 4 +-- tests/{warn => pos}/bson/Test.scala | 0 tests/{warn => pos}/bson/bson.scala | 0 tests/pos/i20572.scala | 7 ++++ tests/pos/i21036.scala | 16 ++++++++++ tests/run/given-triangle.scala | 2 +- tests/warn/bson.check | 10 ------ tests/warn/given-triangle.check | 6 ---- 10 files changed, 51 insertions(+), 30 deletions(-) create mode 100644 tests/neg/given-triangle.check rename tests/{warn => neg}/given-triangle.scala (73%) rename tests/{warn => pos}/bson/Test.scala (100%) rename tests/{warn => pos}/bson/bson.scala (100%) create mode 100644 tests/pos/i20572.scala create mode 100644 tests/pos/i21036.scala delete mode 100644 tests/warn/bson.check delete mode 100644 tests/warn/given-triangle.check diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 80f9b4f2fd31..36fed0b15d70 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1302,9 +1302,8 @@ 
trait Implicits: // A map that associates a priority change warning (between -source 3.4 and 3.6) // with the candidate refs mentioned in the warning. We report the associated - // message if both candidates qualify in tryImplicit and at least one of the candidates - // is part of the result of the implicit search. - val priorityChangeWarnings = mutable.ListBuffer[(TermRef, TermRef, Message)]() + // message if one of the critical candidates is part of the result of the implicit search. + val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() /** Compare `alt1` with `alt2` to determine which one should be chosen. * @@ -1319,11 +1318,16 @@ trait Implicits: * return new result with preferGeneral = true * 3.6 and higher: compare with preferGeneral = true * + * @param only2ndCritical If true only the second alternative is critical in case + * of a priority change. */ - def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = + def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, only2ndCritical: Boolean = false): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) def warn(msg: Message) = - priorityChangeWarnings += ((alt1.ref, alt2.ref, msg)) + val critical = + if only2ndCritical then alt2.ref :: Nil + else alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += ((critical, msg)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else @@ -1443,8 +1447,8 @@ trait Implicits: compareAlternatives(newCand, cand) > 0) else // keep only warnings that don't involve the failed candidate reference - priorityChangeWarnings.filterInPlace: (ref1, ref2, _) => - ref1 != cand.ref && ref2 != cand.ref + priorityChangeWarnings.filterInPlace: (critical, _) => + !critical.contains(cand.ref) rank(remaining, found, fail :: rfailures) case best: SearchSuccess => if (ctx.mode.is(Mode.ImplicitExploration) || isCoherent) @@ -1454,7 +1458,15 @@ trait Implicits: val newPending = if (retained eq found) || remaining.isEmpty then remaining else remaining.filterConserve(cand => - compareAlternatives(retained, cand) <= 0) + compareAlternatives(retained, cand, only2ndCritical = true) <= 0) + // Here we drop some pending alternatives but retain in each case + // `retained`. Therefore, it's a priorty change only if the + // second alternative appears in the final search result. Otherwise + // we have the following scenario: + // - 1st alternative, bit not snd appears in final result + // - Hence, snd was eliminated either here, or otherwise by a direct + // comparison later. + // - Hence, no change in resolution. 
rank(newPending, retained, rfailures) case fail: SearchFailure => // The ambiguity happened in the current search: to recover we @@ -1601,8 +1613,8 @@ trait Implicits: throw ex val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) - for (ref1, ref2, msg) <- priorityChangeWarnings do - if result.found.exists(ref => ref == ref1 || ref == ref2) then + for (critical, msg) <- priorityChangeWarnings do + if result.found.exists(critical.contains(_)) then report.warning(msg, srcPos) result end searchImplicit diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check new file mode 100644 index 000000000000..bf92efac17fd --- /dev/null +++ b/tests/neg/given-triangle.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/given-triangle.scala:14:18 ------------------------------------------------------------- +14 |@main def Test = f // error + | ^ + |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f diff --git a/tests/warn/given-triangle.scala b/tests/neg/given-triangle.scala similarity index 73% rename from tests/warn/given-triangle.scala rename to tests/neg/given-triangle.scala index ee4888ed1e06..9cc23104fcce 100644 --- a/tests/warn/given-triangle.scala +++ b/tests/neg/given-triangle.scala @@ -1,5 +1,3 @@ -//> using options -source 3.6-migration - class A class B extends A class C extends A @@ -13,4 +11,4 @@ def f(using a: A, b: B, c: C) = println(b.getClass) println(c.getClass) -@main def Test = f // warn +@main def Test = f // error diff --git a/tests/warn/bson/Test.scala b/tests/pos/bson/Test.scala similarity index 100% rename from tests/warn/bson/Test.scala rename to tests/pos/bson/Test.scala diff --git a/tests/warn/bson/bson.scala b/tests/pos/bson/bson.scala similarity index 100% rename from tests/warn/bson/bson.scala rename to tests/pos/bson/bson.scala diff --git a/tests/pos/i20572.scala b/tests/pos/i20572.scala new file mode 100644 index 000000000000..4ee4490c839c --- /dev/null +++ b/tests/pos/i20572.scala @@ -0,0 +1,7 @@ +//> using options -Werror +trait Writes[T] +trait Format[T] extends Writes[T] +given [T: List]: Writes[T] = null +given [T]: Format[T] = null + +val _ = summon[Writes[Int]] diff --git a/tests/pos/i21036.scala b/tests/pos/i21036.scala new file mode 100644 index 000000000000..1c98346e4ef3 --- /dev/null +++ b/tests/pos/i21036.scala @@ -0,0 +1,16 @@ +//> using options -source 3.5 -Werror +trait SameRuntime[A, B] +trait BSONWriter[T] +trait BSONHandler[T] extends BSONWriter[T] + +opaque type Id = String +object Id: + given SameRuntime[Id, String] = ??? + +given BSONHandler[String] = ??? +given [T: BSONHandler]: BSONHandler[List[T]] = ??? + +given opaqueWriter[T, A](using rs: SameRuntime[T, A], writer: BSONWriter[A]): BSONWriter[T] = ??? 
+ +val x = summon[BSONHandler[List[Id]]] // this doesn't emit warning +val y = summon[BSONWriter[List[Id]]] // this did emit warning diff --git a/tests/run/given-triangle.scala b/tests/run/given-triangle.scala index 5ddba8df8b7b..0b483e87f28c 100644 --- a/tests/run/given-triangle.scala +++ b/tests/run/given-triangle.scala @@ -1,4 +1,4 @@ -import language.future +import language.`3.6` class A class B extends A diff --git a/tests/warn/bson.check b/tests/warn/bson.check deleted file mode 100644 index 258ac4b4ff2c..000000000000 --- a/tests/warn/bson.check +++ /dev/null @@ -1,10 +0,0 @@ --- Warning: tests/warn/bson/Test.scala:5:60 ---------------------------------------------------------------------------- -5 |def typedMapHandler[K, V: BSONHandler] = stringMapHandler[V] // warn - | ^ - |Given search preference for bson.BSONWriter[Map[String, V]] between alternatives (bson.BSONWriter.mapWriter : [V²](using x$1: bson.BSONWriter[V²]): bson.BSONDocumentWriter[Map[String, V²]]) and (bson.BSONWriter.collectionWriter : - | [T, Repr <: Iterable[T]](using x$1: bson.BSONWriter[T], x$2: Repr ¬ Option[T]): bson.BSONWriter[Repr]) will change - |Current choice : the first alternative - |New choice from Scala 3.6: none - it's ambiguous - | - |where: V is a type in method typedMapHandler - | V² is a type variable diff --git a/tests/warn/given-triangle.check b/tests/warn/given-triangle.check deleted file mode 100644 index e849f9d4d642..000000000000 --- a/tests/warn/given-triangle.check +++ /dev/null @@ -1,6 +0,0 @@ --- Warning: tests/warn/given-triangle.scala:16:18 ---------------------------------------------------------------------- -16 |@main def Test = f // warn - | ^ - | Change in given search preference for A between alternatives (given_A : A) and (given_B : B) - | Previous choice : the second alternative - | New choice from Scala 3.6: the first alternative From 59a0f3638ad1e2417343aae9883042085dded68b Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Jul 2024 16:21:22 +0200 Subject: [PATCH 261/827] Fix -source for neg test --- tests/neg/given-triangle.check | 4 ++-- tests/neg/given-triangle.scala | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index bf92efac17fd..f548df0078de 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -1,4 +1,4 @@ --- [E172] Type Error: tests/neg/given-triangle.scala:14:18 ------------------------------------------------------------- -14 |@main def Test = f // error +-- [E172] Type Error: tests/neg/given-triangle.scala:15:18 ------------------------------------------------------------- +15 |@main def Test = f // error | ^ |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f diff --git a/tests/neg/given-triangle.scala b/tests/neg/given-triangle.scala index 9cc23104fcce..61273ef93925 100644 --- a/tests/neg/given-triangle.scala +++ b/tests/neg/given-triangle.scala @@ -1,3 +1,4 @@ +//> using -source 3.5 class A class B extends A class C extends A From 668daf15eed85a57affd93f19df4d7707f0264d6 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Jul 2024 16:53:27 +0200 Subject: [PATCH 262/827] Filter out more false positives in priority change warnings --- .../dotty/tools/dotc/typer/Implicits.scala | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 
36fed0b15d70..7f86b330d455 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1318,16 +1318,14 @@ trait Implicits: * return new result with preferGeneral = true * 3.6 and higher: compare with preferGeneral = true * + * @param disambiguate The call is used to disambiguate two successes, not for ranking. + * When ranking, we are always filtering out either > 0 or <= 0 results. + * In each case a priority change from 0 to -1 or vice versa makes no difference. * @param only2ndCritical If true only the second alternative is critical in case * of a priority change. */ - def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, only2ndCritical: Boolean = false): Int = + def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, disambiguate: Boolean = false, only2ndCritical: Boolean = false): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) - def warn(msg: Message) = - val critical = - if only2ndCritical then alt2.ref :: Nil - else alt1.ref :: alt2.ref :: Nil - priorityChangeWarnings += ((critical, msg)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else @@ -1336,6 +1334,12 @@ trait Implicits: if sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if cmp != prev then + def warn(msg: Message) = + if disambiguate || cmp > 0 || prev > 0 then + val critical = + if only2ndCritical then alt2.ref :: Nil + else alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += ((critical, msg)) def choice(c: Int) = c match case -1 => "the second alternative" case 1 => "the first alternative" @@ -1362,7 +1366,7 @@ trait Implicits: */ def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => - var diff = compareAlternatives(alt1, alt2) + var diff = compareAlternatives(alt1, alt2, disambiguate = true) assert(diff <= 0) // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens @@ -1463,7 +1467,7 @@ trait Implicits: // `retained`. Therefore, it's a priorty change only if the // second alternative appears in the final search result. Otherwise // we have the following scenario: - // - 1st alternative, bit not snd appears in final result + // - 1st alternative, but not snd appears in final result // - Hence, snd was eliminated either here, or otherwise by a direct // comparison later. // - Hence, no change in resolution. 
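For reference, the user-facing scenario these warnings target is the one exercised by the i21036 tests added a couple of commits below: a more specific and a more general given are both eligible, resolution up to Scala 3.5 picks the specific one, and the preferGeneral rules from Scala 3.6 pick the alternative matching the requested type. A minimal sketch in that shape (the trait and given names follow tests/warn/i21036a.scala; the val and the anonymous class bodies are illustrative only, not part of the patch):

//> using options -source 3.5
// sketch mirroring tests/warn/i21036a.scala
trait A
trait B extends A

given b: B = new B {}  // chosen up to Scala 3.5: the more specific alternative
given a: A = new A {}  // chosen from Scala 3.6: preferGeneral picks the requested type A

val chosen = summon[A] // under -source 3.5 this emits the "given search preference ... will change" warning
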
From 4bc7c10a90abf778cadb517c144f6e0564d1a994 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 5 Jul 2024 18:22:45 +0200 Subject: [PATCH 263/827] Fix -source for neg test (2) --- tests/neg/given-triangle.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/neg/given-triangle.scala b/tests/neg/given-triangle.scala index 61273ef93925..16aca7c44dee 100644 --- a/tests/neg/given-triangle.scala +++ b/tests/neg/given-triangle.scala @@ -1,4 +1,4 @@ -//> using -source 3.5 +//> using options -source 3.5 class A class B extends A class C extends A From 1474e69e86465ac6a3eb4f45462d6b428435df90 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 8 Jul 2024 19:36:27 +0200 Subject: [PATCH 264/827] Fix priority change logic for ranking As worked out in collaboration with @EugeneFlesselle --- .../dotty/tools/dotc/typer/Implicits.scala | 37 ++++++++----------- tests/warn/i21036a.check | 6 +++ tests/warn/i21036a.scala | 7 ++++ tests/warn/i21036b.check | 6 +++ tests/warn/i21036b.scala | 7 ++++ 5 files changed, 41 insertions(+), 22 deletions(-) create mode 100644 tests/warn/i21036a.check create mode 100644 tests/warn/i21036a.scala create mode 100644 tests/warn/i21036b.check create mode 100644 tests/warn/i21036b.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 7f86b330d455..dac0c0e78448 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1305,6 +1305,9 @@ trait Implicits: // message if one of the critical candidates is part of the result of the implicit search. val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() + def isWarnPriorityChangeVersion(sv: SourceVersion): Boolean = + sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` + /** Compare `alt1` with `alt2` to determine which one should be chosen. * * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1321,25 +1324,21 @@ trait Implicits: * @param disambiguate The call is used to disambiguate two successes, not for ranking. * When ranking, we are always filtering out either > 0 or <= 0 results. * In each case a priority change from 0 to -1 or vice versa makes no difference. - * @param only2ndCritical If true only the second alternative is critical in case - * of a priority change. 
*/ - def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, disambiguate: Boolean = false, only2ndCritical: Boolean = false): Int = + def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, disambiguate: Boolean = false): Int = def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else var cmp = comp(using searchContext()) val sv = Feature.sourceVersion - if sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` then + if isWarnPriorityChangeVersion(sv) then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) - if cmp != prev then + if disambiguate && cmp != prev then def warn(msg: Message) = - if disambiguate || cmp > 0 || prev > 0 then - val critical = - if only2ndCritical then alt2.ref :: Nil - else alt1.ref :: alt2.ref :: Nil - priorityChangeWarnings += ((critical, msg)) + val critical = alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += ((critical, msg)) + implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}, $disambiguate") def choice(c: Int) = c match case -1 => "the second alternative" case 1 => "the first alternative" @@ -1356,7 +1355,9 @@ trait Implicits: |Previous choice : ${choice(prev)} |New choice from Scala 3.6: ${choice(cmp)}""") cmp - else cmp + else cmp max prev + // When ranking, we keep the better of cmp and prev, which ends up retaining a candidate + // if it is retained in either version. else cmp end compareAlternatives @@ -1367,7 +1368,8 @@ trait Implicits: def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2, disambiguate = true) - assert(diff <= 0) // diff > 0 candidates should already have been eliminated in `rank` + assert(diff <= 0 || isWarnPriorityChangeVersion(Feature.sourceVersion)) + // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens else if diff == 0 && alt2.isExtension then @@ -1461,16 +1463,7 @@ trait Implicits: case retained: SearchSuccess => val newPending = if (retained eq found) || remaining.isEmpty then remaining - else remaining.filterConserve(cand => - compareAlternatives(retained, cand, only2ndCritical = true) <= 0) - // Here we drop some pending alternatives but retain in each case - // `retained`. Therefore, it's a priorty change only if the - // second alternative appears in the final search result. Otherwise - // we have the following scenario: - // - 1st alternative, but not snd appears in final result - // - Hence, snd was eliminated either here, or otherwise by a direct - // comparison later. - // - Hence, no change in resolution. 
+ else remaining.filterConserve(newCand => compareAlternatives(newCand, retained) >= 0) rank(newPending, retained, rfailures) case fail: SearchFailure => // The ambiguity happened in the current search: to recover we diff --git a/tests/warn/i21036a.check b/tests/warn/i21036a.check new file mode 100644 index 000000000000..673c01374ef3 --- /dev/null +++ b/tests/warn/i21036a.check @@ -0,0 +1,6 @@ +-- Warning: tests/warn/i21036a.scala:7:17 ------------------------------------------------------------------------------ +7 |val y = summon[A] // warn + | ^ + | Given search preference for A between alternatives (b : B) and (a : A) will change + | Current choice : the first alternative + | New choice from Scala 3.6: the second alternative diff --git a/tests/warn/i21036a.scala b/tests/warn/i21036a.scala new file mode 100644 index 000000000000..ab97429852d6 --- /dev/null +++ b/tests/warn/i21036a.scala @@ -0,0 +1,7 @@ +//> using options -source 3.5 +trait A +trait B extends A +given b: B = ??? +given a: A = ??? + +val y = summon[A] // warn \ No newline at end of file diff --git a/tests/warn/i21036b.check b/tests/warn/i21036b.check new file mode 100644 index 000000000000..ff7fdfd7a87c --- /dev/null +++ b/tests/warn/i21036b.check @@ -0,0 +1,6 @@ +-- Warning: tests/warn/i21036b.scala:7:17 ------------------------------------------------------------------------------ +7 |val y = summon[A] // warn + | ^ + | Change in given search preference for A between alternatives (b : B) and (a : A) + | Previous choice : the first alternative + | New choice from Scala 3.6: the second alternative diff --git a/tests/warn/i21036b.scala b/tests/warn/i21036b.scala new file mode 100644 index 000000000000..16dd72266613 --- /dev/null +++ b/tests/warn/i21036b.scala @@ -0,0 +1,7 @@ +//> using options -source 3.6-migration +trait A +trait B extends A +given b: B = ??? +given a: A = ??? 
+ +val y = summon[A] // warn \ No newline at end of file From fd7a463fbaa4fa5034eb1ec7da4af5f430dae011 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 9 Jul 2024 11:01:06 +0200 Subject: [PATCH 265/827] Add test for #20377 fixed in #20504 --- tests/pos/i20377.scala | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 tests/pos/i20377.scala diff --git a/tests/pos/i20377.scala b/tests/pos/i20377.scala new file mode 100644 index 000000000000..7a4c0fccfd7e --- /dev/null +++ b/tests/pos/i20377.scala @@ -0,0 +1,23 @@ +import language.experimental.namedTuples +import NamedTuple.{NamedTuple, AnyNamedTuple} + +// Repros for bugs or questions +class ClassToMap[A]() +abstract class ClassToFind[Rows <: AnyNamedTuple]: + def mapped: NamedTuple.Map[Rows, ClassToMap] + +given TDB: ClassToFind[(t1: Int, t2: String)] with + override def mapped = ( + t1 = ClassToMap[Int](), + t2 = ClassToMap[String]() + ) + +type TypeAlias = (t1: Int, t2: String) +class Repro1_Pass(using val testDB: ClassToFind[TypeAlias]) { + def query() = + testDB.mapped.t1 +} +class Repro1_Fail(using val testDB: ClassToFind[(t1: Int, t2: String)]) { + def query() = + testDB.mapped.t1 // fails to compile +} \ No newline at end of file From b33e4f3d9354bbfa580ea77c27cbe109a9e3b04a Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 8 Jul 2024 13:50:44 +0200 Subject: [PATCH 266/827] fix: don't use color codes for pattern match code action --- .../src/dotty/tools/dotc/reporting/messages.scala | 11 +++++++---- .../src/dotty/tools/dotc/transform/patmat/Space.scala | 4 ++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index d33b2c574318..1d906130d4e4 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -15,6 +15,8 @@ import printing.Formatting import ErrorMessageID.* import ast.Trees import config.{Feature, ScalaVersion} +import transform.patmat.Space +import transform.patmat.SpaceEngine import typer.ErrorReporting.{err, matchReductionAddendum, substitutableTypeSymbolsInScope} import typer.ProtoTypes.{ViewProto, SelectionProto, FunProto} import typer.Implicits.* @@ -856,12 +858,13 @@ extends Message(LossyWideningConstantConversionID): |Write `.to$targetType` instead.""" def explain(using Context) = "" -class PatternMatchExhaustivity(uncoveredCases: Seq[String], tree: untpd.Match)(using Context) +class PatternMatchExhaustivity(uncoveredCases: Seq[Space], tree: untpd.Match)(using Context) extends Message(PatternMatchExhaustivityID) { def kind = MessageKind.PatternMatchExhaustivity private val hasMore = uncoveredCases.lengthCompare(6) > 0 - val uncovered = uncoveredCases.take(6).mkString(", ") + val uncovered = uncoveredCases.take(6).map(SpaceEngine.display).mkString(", ") + private val casesWithoutColor = inContext(ctx.withoutColors)(uncoveredCases.map(SpaceEngine.display)) def msg(using Context) = val addendum = if hasMore then "(More unmatched cases are elided)" else "" @@ -889,12 +892,12 @@ extends Message(PatternMatchExhaustivityID) { val pathes = List( ActionPatch( srcPos = endPos, - replacement = uncoveredCases.map(c => indent(s"case $c => ???", startColumn)) + replacement = casesWithoutColor.map(c => indent(s"case $c => ???", startColumn)) .mkString("\n", "\n", "") ), ) List( - CodeAction(title = s"Insert missing cases (${uncoveredCases.size})", + CodeAction(title = s"Insert missing cases 
(${casesWithoutColor.size})", description = None, patches = pathes ) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 3ad13ec011b5..97816bd71b84 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -840,7 +840,7 @@ object SpaceEngine { if uncovered.nonEmpty then val deduped = dedup(uncovered) - report.warning(PatternMatchExhaustivity(deduped.map(display), m), m.selector) + report.warning(PatternMatchExhaustivity(deduped, m), m.selector) } private def reachabilityCheckable(sel: Tree)(using Context): Boolean = @@ -903,7 +903,7 @@ object SpaceEngine { def checkMatch(m: Match)(using Context): Unit = checkMatchExhaustivityOnly(m) if reachabilityCheckable(m.selector) then checkReachability(m) - + def checkMatchExhaustivityOnly(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then checkExhaustivity(m) } From e93430ff5a079b6493e30a4cef3e7b69338502ce Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 8 Jul 2024 19:18:51 +0200 Subject: [PATCH 267/827] Add support for Class-Path entries in Manifest --- .../dotc/classpath/ClassPathFactory.scala | 21 +++++++++++++++++-- compiler/src/dotty/tools/io/ClassPath.scala | 13 ++++++++---- dist/bin/scalac | 1 + dist/bin/scalac.bat | 2 +- dist/bin/scaladoc | 1 + dist/bin/scaladoc.bat | 1 + project/RepublishPlugin.scala | 2 +- 7 files changed, 33 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala index 0b66f339bf53..080f8d4e63d2 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala @@ -7,6 +7,7 @@ import dotty.tools.io.{AbstractFile, VirtualDirectory} import FileUtils.* import dotty.tools.io.ClassPath import dotty.tools.dotc.core.Contexts.* +import java.nio.file.Files /** * Provides factory methods for classpath. 
When creating classpath instances for a given path, @@ -52,14 +53,30 @@ class ClassPathFactory { // Internal protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = - for { + val files = for { file <- expandPath(path, expand) dir <- { def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None Option(AbstractFile.getDirectory(file)).orElse(asImage) } } - yield newClassPath(dir) + yield dir + + val expanded = + if scala.util.Properties.propOrFalse("scala.expandjavacp") then + for + file <- files + a <- ClassPath.expandManifestPath(file.absolutePath) + path = java.nio.file.Paths.get(a.toURI()).nn + if Files.exists(path) + yield + newClassPath(AbstractFile.getFile(path)) + else + Seq.empty + + files.map(newClassPath) ++ expanded + + end classesInPathImpl private def createSourcePath(file: AbstractFile)(using Context): ClassPath = if (file.isJarOrZip) diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index f77bc1efca91..01a3f2cc1870 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -152,13 +152,18 @@ object ClassPath { val baseDir = file.parent new Jar(file).classPathElements map (elem => - specToURL(elem) getOrElse (baseDir / elem).toURL + specToURL(elem, baseDir) getOrElse (baseDir / elem).toURL ) } - def specToURL(spec: String): Option[URL] = - try Some(new URI(spec).toURL) - catch case _: MalformedURLException | _: URISyntaxException => None + def specToURL(spec: String, basedir: Directory): Option[URL] = + try + val uri = new URI(spec) + if uri.isAbsolute() then Some(uri.toURL()) + else + Some(basedir.resolve(Path(spec)).toURL) + catch + case _: MalformedURLException | _: URISyntaxException => None def manifests: List[java.net.URL] = { import scala.jdk.CollectionConverters.EnumerationHasAsScala diff --git a/dist/bin/scalac b/dist/bin/scalac index d9bd21ca425b..a527d9767749 100755 --- a/dist/bin/scalac +++ b/dist/bin/scalac @@ -86,6 +86,7 @@ eval "\"$JAVACMD\"" \ ${JAVA_OPTS:-$default_java_opts} \ "${java_args[@]}" \ "-classpath \"$jvm_cp_args\"" \ + "-Dscala.expandjavacp=true" \ "-Dscala.usejavacp=true" \ "-Dscala.home=\"$PROG_HOME\"" \ "dotty.tools.MainGenericCompiler" \ diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index 7ad368582127..e2898bdc2890 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -24,7 +24,7 @@ call :compilerJavaClasspathArgs @rem we need to escape % in the java command path, for some reason this doesnt work in common.bat set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
-call "%_JAVACMD%" %_JAVA_ARGS% -classpath "%_JVM_CP_ARGS%" "-Dscala.usejavacp=true" "-Dscala.home=%_PROG_HOME%" dotty.tools.MainGenericCompiler %_SCALA_ARGS% +call "%_JAVACMD%" %_JAVA_ARGS% -classpath "%_JVM_CP_ARGS%" "-Dscala.usejavacp=true" "-Dscala.expandjavacp=true" "-Dscala.home=%_PROG_HOME%" dotty.tools.MainGenericCompiler %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1 goto end diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index f4ef37af00ee..0af5a2b55acb 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -78,6 +78,7 @@ eval "\"$JAVACMD\"" \ ${JAVA_OPTS:-$default_java_opts} \ "${java_args[@]}" \ -classpath "${JVM_CP_ARGS}" \ + -Dscala.expandjavacp=true \ -Dscala.usejavacp=true \ "dotty.tools.scaladoc.Main" \ "${scala_args[@]}" \ diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index fe4055633e02..b9e4820b006d 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -30,6 +30,7 @@ set "_JAVACMD=!_JAVACMD:%%=%%%%!" call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% ^ -classpath "%_LIB_DIR%\scaladoc.jar" ^ +-Dscala.expandjavacp=true ^ -Dscala.usejavacp=true ^ dotty.tools.scaladoc.Main %_SCALA_ARGS% %_RESIDUAL_ARGS% if not %ERRORLEVEL%==0 ( diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 8b95c6423e68..5611af798b33 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -220,7 +220,7 @@ object RepublishPlugin extends AutoPlugin { def compose(libs: List[String]): List[String] = libs.map(fuzzyFind(classpaths, _)).reduceOption(_ ++ _).map(_.distinct).getOrElse(Nil) - + // Compute the classpath entries val entries = compose(actual).diff(compose(subtractions)) // Generate the MANIFEST for the pathing jar From 22dce185306a7e6923859e7d45e2adfac2903f36 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 10 Jul 2024 22:10:51 +0200 Subject: [PATCH 268/827] expand classpath in scala_legacy --- dist/bin/scala_legacy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dist/bin/scala_legacy b/dist/bin/scala_legacy index bd69d40c2b97..18fc6d874e34 100755 --- a/dist/bin/scala_legacy +++ b/dist/bin/scala_legacy @@ -65,7 +65,7 @@ done # exec here would prevent onExit from being called, leaving terminal in unusable state compilerJavaClasspathArgs [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 -eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" +eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "-Dscala.expandjavacp=true" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" scala_exit_status=$? 
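The manifest Class-Path expansion introduced above only runs when the launchers pass -Dscala.expandjavacp=true, and relative manifest entries are resolved against the directory that contains the jar (the amended specToURL). A minimal standalone sketch of that resolution, outside the compiler (function name, paths, and the demo entry point are illustrative, not part of the patch):

import java.net.{URI, URL}
import java.nio.file.{Path, Paths}

// Absolute URIs (e.g. "file:///opt/libs/foo.jar") are taken as-is;
// relative entries (e.g. "../lib/scala3-library_3.jar") are resolved
// against the jar's directory, in the spirit of the amended specToURL.
def resolveClassPathEntry(spec: String, jarDir: Path): URL =
  val uri = new URI(spec)
  if uri.isAbsolute then uri.toURL
  else jarDir.resolve(spec).normalize.toUri.toURL

@main def demo(): Unit =
  // prints file:///opt/scala/lib/scala3-library_3.jar
  println(resolveClassPathEntry("../lib/scala3-library_3.jar", Paths.get("/opt/scala/bin")))
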
From b62b53ce0b02889cdd6e2a7be35c9192f7706bf1 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Thu, 4 Jul 2024 10:23:02 +0200 Subject: [PATCH 269/827] Set baseVersion to 3.6.0 instead of 3.6.0-RC1 --- project/Build.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 28146989e40c..4cfb4b081ab0 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -88,7 +88,9 @@ object Build { val referenceVersion = "3.4.2-RC1" - val baseVersion = "3.6.0-RC1" + val baseVersion = "3.6.0" + // Will be required by some automation later + val prereleaseVersion = s"$baseVersion-RC1" // LTS or Next val versionLine = "Next" @@ -169,9 +171,9 @@ object Build { if (isRelease) baseVersion else if (isNightly) - baseVersion + "-bin-" + VersionUtil.commitDate + "-" + VersionUtil.gitHash + "-NIGHTLY" + baseVersion + "-RC1-bin-" + VersionUtil.commitDate + "-" + VersionUtil.gitHash + "-NIGHTLY" else - baseVersion + "-bin-SNAPSHOT" + baseVersion + "-RC1-bin-SNAPSHOT" } val dottyNonBootstrappedVersion = { // Make sure sbt always computes the scalaBinaryVersion correctly From 8dbfcbe993dd1271a2eb5d062c3419f64b29d7b6 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 11 Jul 2024 14:16:05 +0200 Subject: [PATCH 270/827] Set reference version to 3.5.0-RC4 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index c729f6036985..ebbc1c977e01 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -86,7 +86,7 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.4.2-RC1" + val referenceVersion = "3.5.0-RC4" val baseVersion = "3.6.0" // Will be required by some automation later From 6c3c19f3e0a5c7de4169cb2b6e66d50755329b86 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 11 Jul 2024 14:56:13 +0200 Subject: [PATCH 271/827] Move experimental annotation sources back to shared library/src --- .../src-non-bootstrapped/scala/annotation/experimental.scala | 3 --- .../scala/annotation/experimental.scala | 0 2 files changed, 3 deletions(-) delete mode 100644 library/src-non-bootstrapped/scala/annotation/experimental.scala rename library/{src-bootstrapped => src}/scala/annotation/experimental.scala (100%) diff --git a/library/src-non-bootstrapped/scala/annotation/experimental.scala b/library/src-non-bootstrapped/scala/annotation/experimental.scala deleted file mode 100644 index e879b47e12ff..000000000000 --- a/library/src-non-bootstrapped/scala/annotation/experimental.scala +++ /dev/null @@ -1,3 +0,0 @@ -package scala.annotation - -final class experimental extends StaticAnnotation diff --git a/library/src-bootstrapped/scala/annotation/experimental.scala b/library/src/scala/annotation/experimental.scala similarity index 100% rename from library/src-bootstrapped/scala/annotation/experimental.scala rename to library/src/scala/annotation/experimental.scala From 9a17f4b7729d9bc72a908db2f8df9568c160dd65 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 11 Jul 2024 14:56:51 +0200 Subject: [PATCH 272/827] Move scala.runtime.TupledFunctions from `bootstrapped` to shared sources --- .../{src-bootstrapped => src}/scala/runtime/TupledFunctions.scala | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename library/{src-bootstrapped => src}/scala/runtime/TupledFunctions.scala (100%) diff --git a/library/src-bootstrapped/scala/runtime/TupledFunctions.scala b/library/src/scala/runtime/TupledFunctions.scala 
similarity index 100% rename from library/src-bootstrapped/scala/runtime/TupledFunctions.scala rename to library/src/scala/runtime/TupledFunctions.scala From 8f71e8f6c01bc0aef0165fcf4f382be89fc64d53 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 11 Jul 2024 14:57:48 +0200 Subject: [PATCH 273/827] Revert bootstrap specificic settings after upgrade of reference version --- project/Build.scala | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index ebbc1c977e01..7b48c57dfca3 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -617,7 +617,7 @@ object Build { // Settings shared between scala3-compiler and scala3-compiler-bootstrapped lazy val commonDottyCompilerSettings = Seq( // Note: bench/profiles/projects.yml should be updated accordingly. - Compile / scalacOptions ++= Seq("-Yexplicit-nulls"), + Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Wsafe-init"), // Use source 3.3 to avoid fatal migration warnings on scalajs-ir scalacOptions ++= Seq("-source", "3.3"), @@ -894,8 +894,6 @@ object Build { } lazy val nonBootstrappedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq( - // FIXME revert this to commonDottyCompilerSettings, when we bump reference version to 3.5.0 - scalacOptions += "-Ysafe-init", // packageAll packages all and then returns a map with the abs location packageAll := Def.taskDyn { // Use a dynamic task to avoid loops when loading the settings Def.task { @@ -923,8 +921,6 @@ object Build { ) lazy val bootstrappedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq( - // FIXME revert this to commonDottyCompilerSettings, when we bump reference version to 3.5.0 - scalacOptions += "-Wsafe-init", javaOptions ++= { val jars = packageAll.value Seq( @@ -1356,7 +1352,7 @@ object Build { BuildInfoPlugin.buildInfoScopedSettings(Test) ++ BuildInfoPlugin.buildInfoDefaultSettings - def presentationCompilerSettings(implicit mode: Mode) = { + lazy val presentationCompilerSettings = { val mtagsVersion = "1.3.2" Seq( libraryDependencies ++= Seq( @@ -1371,11 +1367,7 @@ object Build { ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings - // FIXME change this to just Seq("-Yexplicit-nulls, "-Wsafe-init") when reference is set to 3.5.0 - Compile / scalacOptions ++= (mode match { - case Bootstrapped => Seq("-Yexplicit-nulls", "-Wsafe-init") - case NonBootstrapped => Seq("-Yexplicit-nulls", "-Ysafe-init") - }), + Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Wsafe-init"), Compile / sourceGenerators += Def.task { val s = streams.value val cacheDir = s.cacheDirectory From ec5cbcc6b93f96755f7b4fa68a86cab845eca2f4 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Thu, 11 Jul 2024 17:34:44 +0200 Subject: [PATCH 274/827] Do not crash when typing a closure with unknown type, since it can occur for erroneous input --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 -- tests/neg/i20511-1.scala | 7 +++++++ tests/neg/i20511.scala | 8 ++++++++ 4 files changed, 16 insertions(+), 3 deletions(-) create mode 100644 tests/neg/i20511-1.scala create mode 100644 tests/neg/i20511.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index e28ba5fd669e..ba3f93a42b91 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ 
b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1725,7 +1725,7 @@ object Parsers { case arg => arg val args1 = args.mapConserve(sanitize) - + if in.isArrow || isPureArrow || erasedArgs.contains(true) then functionRest(args) else diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index dbc9818abf23..f5f974b33c88 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1977,8 +1977,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Polymorphic SAMs are not currently supported (#6904). EmptyTree case tp => - if !tp.isErroneous then - throw new java.lang.Error(i"internal error: closing over non-method $tp, pos = ${tree.span}") TypeTree(defn.AnyType) } else typed(tree.tpt) diff --git a/tests/neg/i20511-1.scala b/tests/neg/i20511-1.scala new file mode 100644 index 000000000000..03bd475ffafd --- /dev/null +++ b/tests/neg/i20511-1.scala @@ -0,0 +1,7 @@ +package pakiet + +def toppingPrice(size: Int): Double = ??? + +def crustPrice(crustType: Double): Double = ??? + +export toppingPrice.apply, crustPrice.unlift // error // error // error diff --git a/tests/neg/i20511.scala b/tests/neg/i20511.scala new file mode 100644 index 000000000000..657609536bf0 --- /dev/null +++ b/tests/neg/i20511.scala @@ -0,0 +1,8 @@ +package pakiet + +def toppingPrice(size: Int): Double = ??? + +def crustPrice(crustType: Double): Double = ??? + +export toppingPrice, crustPrice // error // error +val i = 1 // error From 2e91c88983e24beb660a75c49af1720ffc0cb640 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 12 Jul 2024 17:44:11 +0200 Subject: [PATCH 275/827] emit generatedNonLocalClass in backend when callback is not enabled --- compiler/src/dotty/tools/backend/jvm/CodeGen.scala | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index 2286ad6c2c25..c5b0ec0929b8 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -133,8 +133,15 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( if (ctx.compilerCallback != null) ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - if isLocal then - ctx.withIncCallback(_.generatedLocalClass(sourceFile, clsFile.jpath)) + ctx.withIncCallback: cb => + if isLocal then + cb.generatedLocalClass(sourceFile, clsFile.jpath) + else if !cb.enabled() then + // callback is not enabled, so nonLocalClasses were not reported in ExtractAPI + val fullClassName = atPhase(sbtExtractDependenciesPhase) { + ExtractDependencies.classNameAsString(claszSymbol) + } + cb.generatedNonLocalClass(sourceFile, clsFile.jpath, className, fullClassName) } } From 2e31fb3bfec1f29b470dfd162de49e599129b4af Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sat, 13 Jul 2024 13:44:58 +0200 Subject: [PATCH 276/827] Add the project's icon for Intellij --- .idea/icon.png | Bin 0 -> 80003 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 .idea/icon.png diff --git a/.idea/icon.png b/.idea/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..8280fd4bfc3fdcec03961d30dc51fe2caaff1b1e GIT binary patch literal 80003 zcmZs@bx<7b6E%uMaCb@25Xj>05Zpot?(Xg^?yd>$B!L77?zY%21P`{jJImrOm%P94 zyH&U9{;{=HQ`56OPj{c^obH*8{ir67gGq^rfPjFbsPN$v0sw@uUf|;G9#xlLRL*uD-A+B 
zr3~u%FL}%vM02V*1Coa;X}pSy2u_ovZx@KQP44uIBGyJBG z$o|=Tq2G-?mYGRuI+9GIBhTZ?_%Ucz0Q@lCc(t>>P3=yT_S|}XH!cLZ0SkchJVxAR zS8NIc`EwhqL~4#fZOGNVG`?F;_p9z_?-GDGe#@a==(99XN4mfZF$Y8gBC~%7jK7Mw zHVHjW=!1;E$~U8R{ ztQdb<%pJXg%=(x*a995O)e3ub+UG5n;H{r!UDxW~Ha0FMFDAc|oN>JWnSAbeYF~D~ zzDisPszP-fjsEA6@5~WsPU396``5qU;y)}JjeVCczVE>;tqn*fj!CN*@ zVR3WMKic@>$Ov5K;GxS)=*@D?$DqSNblRuA<>yq)xz$}9ox z>-jOL^g8&$NnOJGPNLlR@%G7G!-VI~NGJH!{_SXc1Yg~M-isZB1mO33Wop&OPmtx~ zqd;ScNM&S_`hckd%$=qnqHcb3*es2%&l*(ZBvB&p8ZR%<=HA3vZTzWC>GNX@nXLVx z3-nrqtLu88z4}z`2pU@bH23fP%T8pI_T6KXWoNY~ht5NP7x)J<_HnJMtf>e$4T~ji z@c&&T<4GFdo349tPjBz-?oV2GyRfWgt%mRdA0txLNbDYcP@U1C$A7`@p}84j5kDnT zH)(wkXj)u0(SFmmRi`C8EE~&(Oq`xq`1mI?CAAv$h(xquX67Y>u3s{!>wd+{%I8u- z>&+e8J8>6RsRPxY(%W{)Mm1+*oZXvg1it`=%K`%n)&I*nfq<7)Z7l@C=VykW*PYvM zm2>NtbI)d#oX=*5pWe!iYfXIa&FKA(Iot9Q$&*t3SO4y|$Y;%=@L@jLNzLcE7IVzx z<4mii5d7E0I-MNMN7v>K+|kWXEWP;@S`DyYXA@bJuOqEF>bo=imnKvV$nE#=?rjJ4 zKy`_bw#P%8_IeX?S-D7v)5TBCAO|Md?urG-*5N>vaOEi9(VWW$uzD8KhJ1)?_>3EH zI8_*QU7uNaJN%9>*vY2}s7|hF*(T^U-u@nJ)Nz+T+iG4`=XJabk}G#Wq%x?*{V$jU za0GZ$SJ3UgM8#^-U8dzD+dIQM?$m1~zvgMw;jp0H{tufTvK3oR8lmT8*){`AoI8Wk z$);WKytU1N7OwED4^*KqN zzh#$>@?`1ADo2Rr8n%ovfjcohEg-J_cPNEIRYsinC+Rn(UZJHN1=x;bu1@~~aYDHi zy4>ydOhp&4c0qO-cs;&PGsbaP7Yy?HiJ2I$SiKm`@V38rOjfSwKvSe#TGNea(m1rv z`<-A8o>*Hf+eBSq_cPPL5f;@>!{v3T72WLaOY#1s`Irh7zp)WNFJAt-Y~!`6RpWiF z{3BSf(*)ZP<5!F?GA<_O02oyU^!>z_-j|G-y`mf&kad`^Wxo{0MV*fpg%e__>}G!C zJ(0ZN9TI8(KVRQ1zaZ34lbf}n{VSe3s=oV6ak=WzMTkV)F`pKf!m=Pcsg_2AD*oq3k85}J*@Jj04XJ1G0+b><4&`qYo?`$&mYSLO3RCOhg z1nVL`cUhXu4rtYW4$#fCz=M!?#$2tEfJxT~1XiC5asF1($~&3n2gr_F)w4i)4B6vH zmTymw`ie=&HUX8!wOcgG-k`n9+JBh~Sqep$c)k^Yb)Apit5p{S9$2Bu}r}E$$mSD1$j@Bo$`HNcm9SBg>7M zq^PtKIpLrmOqbPc9r%(azve@$mx7<$o65>0stc(m)Z1MozOw}OA^1Ss$LEB_PTakUQjKx z;^pNT-QDaTUx+XFBp((M1$}6~#Ie4{nJe$cJttZ2KH0Mw)su2g>Xf+68Dklf)__Lsc5szCpXYOKn+j99kPGv(D5W* zbF}=jp{#8$ghn5CKePNc4!jaKT1!PgnuZc(&S3?kp%t=GUU49BCiWS~_LtnqqYuKP;yenK{a+LA zA;JtHH$W* zhrV5(-IBAhK_5Xpb?{|nysr4&CnZ=3XM2HVx)%xAv73=489K;+lq~;tY2v5euhSTk zf{i^NV*sI(fptrbaw-V zz~`@1Fvta))$t_kG3)obSZ@7j@-1^_bJz4{ekclY%onCMe+a_Is`zLY($jLRL{80$ z+ELHH$gICZYLY6k7T=N%Ue%WA-GLL zl$n1hbo>jF;pSKRC;zGirHI^p&dTJP`lo^A1|UoWJH9mj^R<(}U7XzrE3-YUGk*S) z<2UvZP-3eiq@{OuX}!HV5<0M9bHm9;UHSDDZCJPufo#*0IOmnk3%VIvpm*IW7y9mA z7cMOYy4!g)0~VIsgnh7rGL4((Rgiv9eM0}NnmX!_tQ|U!75}$1=^9FmTA{I*)R>F{di)0x>4qTia7!=btIC@QLwtTzDxhM}J7JTLCSTdN@J$$unG8`p)rq{P*!O zn_npGm4f%0=#Zud+1|^RSDWhqd=CR1S1$n%>4Nt&3Wb;RR=%H=nm5ik_+$n-t|Pah z-JMl)Aj=lnuzqHJ=Oxs@x|BOjh}bHT@&1lz1#{Vl-bC^`Js=SsC8rC^$jV4+?Zm4N^0wnQf%OyrQt(4Gv{aXP?{ z$C6(zz9giy<6~upk5hf341s9gI^}GHUr{}eDNi}+6g6?y8fjQRonJLi(LP0(Vv(8K z{5G$1Yft?W^BIHQ?2nbAIN91LEVKPT;`fA$wOwHHB(?BJ1iJvMEw|wv@!C8Ha})o+ zLN_j^@jXc|44B&g$|e!mc9|LIvtNNm@+-e9yQ@=1@%Qtx9?Z~N_?(GD)+A~vhQKcl zXQ$_MAPth^KG%k@6~ZZw{jkl4z@2KvvH1Sf6ajw z5~0>|yR5T8_NxgsrWQTGs1m!B6Y*Q?2{ADF6`zt=JhUxmUfi8b|D?bkL5tqig|@#* zO8yYZHj-%@buA0H6sK5lnG5#4uldJz*Q}tS@-LwLe)ayzf^J&p?VY=N@FJw0wUalf zn)^9oNz`Zc9WEJKS~tQsAHbzwsVUA{Qj6IvzelZpfZDzUj>RaE+3mZrwyN3e?&%(L zo+TxM0iDf81D`b}7{aPT@LsWm z|LQiIjn4tP82q#KL1U-pbJDRI$*;l*cnv*V4k?yv6_?>$A&mjwJDC0>XC z_=ne2saXyg^7BAYPDPd}PhuxUnoxt@7^Ov5nKD;LmQP_k25DgxmJLCBJ3kq_k#48? 
z0QA%bU9!??Idu1hR;&Qw?LI6vKF4Z4J_Q=zymLh^tqQwhyE=_1ybfh|Hr3U?Ov$P# zR>U*5x^vqn-=>r5`Ku_NGFDX2i|hPd#4zO5@h=l6wtt5uL&wjvsG~{R-Kq7l)r3Uy zLiqt|gnw92*im7Z8&P+&%BD@EQ{BA`L``22B{+u7bRg2?TJ;Lwt?2L%qB5W6V!paN z<~iXn_Nnib<5o{dAlj~}$Dmb4+pWp$z$&&d{2v`M+{JyE?7Z&s@^JUx-TC69AwtF) zIJ9d#?c%3+(dN0FHYPT_4c9Z7X6*cia{y>4GC`i%5gsp_tsf0-uTK-c#h_Xr&y-Ev!p3^ErOgjYp3Vak1Dmv z=wY&aRp^7+il3rjyz?~GVRn(Wdu=D1HB1HE7(~~;1R!=4ttY7#Fxg2M%R@M*wi?>Z zlpoghZ=@#<>t^}Ws2W~V^yJI>Adp^p}z!` zu68^gd7eKlAAf#6tb6u`mwiFHbBh?2DBBYDYt@-21GAP}9!7Z65BptQ+^2@f$RGA1 z%ac#5%YidE`*t15{=xQ}VpZ8L+4xT+iZ+M%+pjHGBWKU7=nJcR;czRZjos@a zCzn8Y;Q3VUD2@NzhtR(c=Rtw$?>3i@S-vE508{;?;tv*CPTum0&B`vMyM4Wwk925l zFsz5F3$#Wk6T40ZNbnFFkrkd5=DeP6#0>51iO_g3Ek1o~nN&ZG>N78t!t$dor=?dU zi~!hjq4MTb{}Xm`*?cgJ;@o|hbCh391Des>zTM!5E7>5IcuJ5>EwR;v=+d;!`t?d9xzF z-Xh+e#?tU^FO#Rq4d=EH3H1f*J0pyD$Gt_6GK1l)*Bp0N|D%v1$Lx5UOn?j5{cs;2 z-ooe5PLlWaoy|(-aSmYRH$47<)E4O8CO5r9nZftX!5e>f4ruPG6yK?ckd$8tTax@< zHb^FzOFR?8oC&&o>t8jI88%#N7_?nRr8rhbltEP-HRjIDG>i}Arb1@4rwpSZKC=XInU7Nn5-Sj>t`gjo$1(?sAfu;;q^rq)-6_B+qcGoE=d}UMxM-LKYn^$P0!3=r$P&g zkxQ$&xcPWWJTxW1lZ}CQ`Z4RZwGq%9L|~wBc3zX?F32Y)=Z(Zed6m$5D;leVP}0RXCr8WBb4g;bJtp&5C%7?ed&2 z)WW!<2O55gY;k^*Z6&8&*-17_gdQ||^&oPRuO6@wO}lCow3@vloA0P@nD+&v*+FKt zEHI#&{7hm@RAS~3eSKusU9pQ&go5b+OHJQL;|-O(bQ5gd`OWTcY*UXnA7}^83;`F! zay9g)-_sJknmDx3H5*{nn^D2#j@8)_%Axjkp_pz|%#!K8kDcW&(i)5sWu|=?|D&no zA@@wuC6Ckwc*opZC?k_u5(A>9;RZXZgdCd#4Cnn<89gLcCGJ6FQ9kt(;A+tLl(eNU$ zUoKFr{yD2(OiD5M{qhc7j-My<8nMgc6pQbw#k&GC66TP=0-O#w+Mn>^k}Ay|pr zy`)h)O4Un;yj0weQNh2iKJdpjzkPR(dL7Vpf~mgz7;sKt4CLg zwfZZUmZ-IEZ+AC>o%8urhx^6rvs%Pj{14=#PRT|XF3ESeF-#x>C zNg~b4=^(K}x8ZbMTDPr^gJWH-91HB~%cqNx0@jhMeV;8$v*@~dN)2s%99;?CeoF3w zZN;XGT;vMDCq&9bn(h`mmR~(*WpytvK5QKy9erAk$_-?#e!RJpPK}@xacMm03h5v! z`Y!03)tdU2R8MYz)0b*$Br;9>nh}Ng18|PFnBAS7T3tFe|CXqpLdF``qd2ot<=s;O zf%naK17vCYPTpjsp|X4L)ZT7s-hrw>uRYMx>5@0C)eA1eaaP1@I`wjpmc)I>#o#NB z>RR8L;BzA7B26q$2A$Vzwm~uH(Xq>k`4*F2rZOvcjom`tMdPuy2bJ7g%iw~+(<`C2 z8aJ~a=cC`nU5MEq{7FKW^&2{X_J(iEE()V?dz0xM=q>sCr7$5ccu~{8x=&G1$6dck z-IJsxtD_D%z$y$&5Ip4oa${4MidveI7mU*P~FDkIN_Ve2CeCs zQNOs+=9?Kso`OZ%xz7zXP7o_;%mGnBoACsSpe@i27C;C*700-YXnn)bB{X&wn#Vst zjmRy2ni)0pW{j-cmyOWEy0z-?H?#`8lh!MUEPA?!zZt28UqAMJPsleg!yJotVZi3F zu7=(_^QC8q_!SSl64(QJX(j`cj<#)+gDy&4fMmX2Ixt8pWhBvJj2=rFuKIRqfhHIo>_-?kRz~H%Ah=1rDLC9jC1_@$ z!6NFx)R`!0$f^;DiPvSSdtpY@7SP56t{Y>dg0>*VJQHM&W*VZhPKN#?<5cyDH?3>^ zS^U+&Bh~#q>qq?firNfSstI2gv@3rcc0s?U;AECZkY9?d-5zsgU#4FRV<6z(7yI`} zkM$_?U{2EQJ*exlHo0^6Z0Eq}4qhq#4XzycQG(%YD(uR^*Z*pK6rn{@fV{F5Qx#c6!rK>SxJ zG~YgPhMRINFsusv8Bq2W&HS$VWFQuhfk(AhOSbfl%KHT^R_@JsM6ji%pd;}a{hmZD z@!CA4;J&=qC26E4{$VpVmCANNhv+09(+BD-;2_okj`eYqYm|C#oYlU=v(mA-?kpBF z;(*Bj|0(uyQ@i%KL~QFBv>86T+7h(2WqH0{zkJ+xSH>k#8jN(p|G2P*vPYw@tGjrk zDcJ^4Xk@a0#Yf_xWx|*!fuBJ;j(&C8^~le_j>9cM5zbvX8rwn>q)+^xrEhNDsT(d~ zuC}<@-Q`_h6#WWOJtL+8!1IFL$jLXibZyzP+WyciE=3Pp`z74)O|bZk7<$*7Fa>ck zQMqw*{9;vO?9&hLYG0w+kMc{dM;Q5QX$rjC_&%NVIv1bUA{2dvo7O(DLVVqUk3ZBU z3lE63?XYu5KU2#KPoS`*7!=?2#?@Rp^*EWv^Io|cqF=(5sx8w$g^>TWTRmxqR?e*0GR>D%~HY`JfVL?$J; zxKwCej8@yL?=@#Xo(ES>p69xBz#bO)9eH_S%e#=l0JGRa2MGf&sgOll-!DMG`#i5x z=1!>*RX#KZ!b88oS$!Rhhv5(WFbCpZ{uuiDrpZdUrpIKvpOJ;RNk8xAWS4yqT^7=6 z&^ZI$ZA%x*0}tk%`u_4vpm2kLzf_};kofywb7C-ijwKOxArBWyXzzTNrx)R9!#L|t z7+wtr^x4!1U0?ur`j%+;h3+$}3+`g_k_C%DcQp)&)F>q(n^BOl@T`uvRk{AS`SNT= z>!&2^27>cj-=$7Wn*aa&;yy7a}VTRgo900jiBU{EFV?_t>4RRjir|*mWu1v)NwIeG zx|+Tgz|u*FrCOl^i1@=gUC`au(dt}g~Mo*ZBaXybB($j8`m<>Iud~h@ zrSW7p8}HAhIgAJdt(7z5E#Y-=pAYL%njGr>A(aoH^;!f%rl5!!=6TEaMAjv+cy@Hw ztTc~xbQ{+J(tPF&Oab%AGQlD5_(Le6!ZlxKuK+iWWC&{Vyn;enK$v}h>IJ1C-Oa_BH7&1`9WEYmmkcP~RxlLJ-x1!dI*)MB^m%ckUxETwmC@JPq{ 
z_F-P&>F?!JpAe#>`m^lwlnL&?E#NKnpJ`NIw zFfdGlMl&B+^IKWa1w8@|-fLH0M4u9!gZM-qvRHNiv;gQPGg`tE==jk{85C$a7*y zJl-KE4_(hD4I?CPS)Vh^n!>^jdB~bzY;C5T5H!P<9|--SUny#K)LNN>;&3u^9-^m; zq10pj$t9jmI8r6=>=()2URB_;HX{S3Qt(mM7_f;$ISu_ z5M5Ay%v%kd#EK5(G4p)NaRUdAA!;!=C1C1v!qf0*1ez(9En z2{UQxe~^=T8CV!uWS8vKf>PPEl@P%=6uKx(*e}@nWyxL;g<)F+&M9x+SgZ-TL zO_BU$LmiWRF%aGKI1;oZnsKwa+9BwSluV=?A-YK?0UdYE)l!*Nc4)9PVwXF@29=!S z|A^t9fQGX+6@+h=1_(8UyuS{jWJny!Yynb*zz0?h-=YnsyEY|sZIFI??|{Xf*J#`K zz+)6jkze#?vO)lL)A`&wLGs&$xmA61Bzk_KAE?za!4f@d$4~JIFF@6KZk@op=-kE| z_V6=7a$HpCYe8iLjj%hil^J(R2#UP37tV;Hvtj~<7ali`v9LS4w!gJ$He?`tBTp>v zPC;+7#k^lt@D0{?ATdE=Rtiz+Hk842_&pb%_g}+3leR0XLwd<3!PpvDA2nZ&DN&fw zqDfd>Ft8iiy-^oPe~~WhtG)+wT_c_8o`nv2&Lm4Ot~KU6R5~FHOa|Xgi99~Q zqxf#o(k>@QKrDe8bylxB!1BQtR#id!%q7$z@gon)vrBRO*Mb9)yV^>-1V3D7Rd6^4 zFW(qtm*7dv(6d+WTN!%qd8}RIio_Jo?|x**y+uiICmy~T60RQoC8LS-M*U02vF%m6 zi}7{3_*P@V@3osvIYWX(B`qAoD`m*vR-Oi=oG|gJ&VMqQqni0<~HHDtMVE;3z zFz+H4*t3h#nq_%3#R$t)H@S98YKY;}-_OthPkNXZCU{8Uis}Ada1dYgiRc zwwoDc=}wOVIm-vPJ6HTUEQMw_K_b$87}iC}qrmL~oejEhGC)vsH_U@6yRY7a7*X8z ze%FNJV{5CWr-?IH%#?j@G$a#4j+GEo37{r^^ea!~+Zha(+w*`B>me4PD=*ZtG3-h; z4+ixk`J=>8J2xJPHYILG+dPMPs1u^|2b70anS#Z*iQ1sKKf$qGvsO_@@f5NcMoU;j zcRl5YZV;Pi6w?~Cg!pQVX+D8cbH1_^5)T72XG|2{wSGgv2X1x`j8C92w%7U}t$6+2 zV5y5zHTZqCWiCKh-cb?!ad~2z-LB`*eBTj?(N4lqjtdu-!%bZg&arBwlRWAyq%+x+ zqH#Q}U5!N_b?@y5sri^5+tIIiub01&$>jP0St8Sdqy&LfOJc8^FJe~5ZH$aXDRcBemdkGC;R1|$Dy{&4eJ7b#*rp9Mt+yFgY59VuFk|2 ztY6t7l|IHiX$=>kGJL&>7>R-f`dv#lvm7R>x8SrMGCkjb756nCQ*r8j0)N8-G4;>8 z1>hdx`88?@=QN-(dG!z9XjLhbFV7pk#?J$t-jI1jmMlXb@yiY(V!y+`ENb9HJ1X8l zy%OTEj$i!0qXxDZF~zbD`*f<#CUd|-2I=SgQu7_8S+75T_30S^e|(Ky@-`U%Yq_Mw z`^fsjzw_eEK*KJUf$+dwZt(^?BlDm05mZm>xmzFR4RyJk0X!=d?eWpK>&8epuaJs-g zo+o^bLWG_DA()6zeAyqYfa}5>Or9XZZk&Wo4V07t!JIxEyD}6}M)B>y*H66dp#tOX&qDw9PYn5=J5(5(Jl< zugGo03f_=TL2$V0gB3==ub=p6%n)YT3=!h0QIfhx`moTX?m4hxH?YJu@jc-=A#_8f zua1{A$<;7b9#m~#ou%LCYP^NMp&a{hYH6P4_<^$tq;1`|!bs}7RTPhh4c%$TFXmVd z_9!@af&Z^S`hISxxRKsHe6^VLI3`tRofAH@93vhYqQ$m$+g2yK`JJVS$0vC5wc+?7 z{Ix}R-0Wr~X!eHE!R_fs3L#n|E%W_EUO@6YGLD|6ep5-^JDyq2@0WqKk*;#~VX=Yx zllSJ>)@h04qej!82D;3f;_2B$qTi&#P@CwgiypsNY!ZcA5J&lL{Dj;f(^2c|Lv{*o zw)=nVCFpWmJievI+nqan>#E)O@Sai531eXstjz&v#`1E)LjwAHhgri1s`vR|J;v3+ zB1UwOl~A`aM!MD_o$W1RefJ0(IV}ufA36WIuY92?7Y*d^AJWpL2ZyC6#&$!B#X0ek zXMiZ)h=QjOK*uOPhQD}53{M0)S{4$RE43=q^W0ftTk4*A{;}GAI@Gfolk?%(EYIEp|dJ zogk~B;zVPH)l4jke+pPpiS69wxX;iZBB!kTaA`P8bFk$zSk%{D zJ@DFM2(+mw_;q6CA8>o!pqaGZ+q9pQuQvra5Q3%Q88;4(Y3!$c`v_RC93UoVJJc8$ z3(WT}B=kcxU%?j7DKlvLJ0}a`PxWaNfT!U%ALk8zb3u3H_#kvo=IN_4k1>m<=w$av zpx+0Ow-^hOz*sqMh2@DUYHBGi_FQAn{rxr^Aj*k_iJ>z2I%PZ?HPp<6zlq~a<{8ZY z8(H_h6b=dih@Yn_VSj1SeELulkn-a^0yV*6CIrvMG<@=qK}S-W`WF>{4t-6_R+1fb z`ukT`C+pC@@oAPL)_KpR!1{kdO>jKK||+ z;I0dLV*zFEdjO3_s{r5c7i8;;U1p3Sz?gmYM_bf> ztj|vTZX1E2Gvj&wMNgfHOzg((<~d!Ci0JW_6F1}6j_V?`IBxc@nT9xfP$Bi_6S9NePOB_etDC^z{7a%IenIqL4#Dh!MQ@#xNT z7mEvA#~z!8lces*h>=K1G^Z_N%RO5d%I%xktH|u|-Z3hZ4yU;ufh)N?$nJu30nN!R zTVARTr4OxOW|mINNInsR$;S^OfN;;4FoTOO&b_$s;>J%O@`eXnIP!(^19}#`@p0+T?TXl1!k_A;v zO7(0y{d_fpzLvjIrvyXQpKkAbEO35}g9B9c4WpB9jZpuh7vE#zo*%mG77nY=j}GBr@Hqimw? 
z0Q$ayE~4&KNm7@Do|6gXwmryeVab?1Kj9#CQS}EyZ~%f3-Sg{|L=0(dBa(zSMB_2R zk}%ix{0AOJ497ZzB)P8*BbtI=v8zKV1Wpp06`z zEm;lwoNui4Slw}dm(&-7hVklf-{AyM{)}kJreUFAWK~g!>@gF02dHV{pn(#om@TP# zUuiA%1@#(Fb-Q|0rq2tDf^nyXgyqRAmH9a23_M4Z(?Q+ZkjPTtfEceWn9uat~5R_ z;X+&KK}jM=^7%JME(;PbGtQeN=VZuEzdRyaGJ>rdrKSI6aRaVc6WG_BS+36+4+x@3 z562M!8D<7=fQA;F<7oCCvcuh;rdf+^3i6cHwHJP$SAVJ$-et<-&m$kP)LQ)PjPsnv z;NHVUDTSo463Q4(-ys+T@U0s;6&c&<2h1M3k)}TC|3v2k%bEar*F7DRfUUu{?dv?J z7VRf2Pwv`kB2S#dRz1L~L^an}@19=4g&vfhfvA?x{%8AQ#m)7c&Cf~8V!uMRNa^^; zbFwZF_0h8)jrFTnS0eRZS~7J4hkIynNfu$n`Jp@qLis9qp6a{ZF!nYzlQg}m9ws%e zq5jO`V+6g`$SCpH?kqBj*WF;NE&dFA9oh<$4f!lx{na<2KS;jIzc8_jeu@dgH+TIe zsM1kxg)65P%zh9g9Z8<{@AZE+YlB7|UCw%i2Nw`gdwB1Y;rfZ;V45cj0ndJ74iXF$ zSK{tA0-m}at1L0>z}!J77bAJD>A_{D05y9smYx#TkQ9;kYR55)7^A655@EEx@kByB z@qn(qasq74QDVOH^K*Cr4Ydop;GwX~m;lST=@19YH{v0>-zb@H=uo=yo@r$U@mnJ) ze(&_nkA#O~%95^$*l0vVE_pmPgKFN208)%+%{Rzjp{ls5>K#(MWe_~jB*2w#@d2l$ zco5xgSBs@Cm}+{~{V~(iRrv1~F~>Ff>oL5963F;Rorj$8!xT5@W`s$H3cag}sUUKE z2Rdn%#Frk6M9S4Dy`SiDNvfDXoGm6+k<&dLanTr3QZllKY8x3iJ28)_c!$0#QJButvPlRc#ufn%C z{618$il0%m>w~Ux>c@}n8t299uE#0toQz#33@YvfPjQx463{lzKXP(STz2MC?~1)} ze+Z&|oe@N~MdO$vO_F$;)VkdPFN`dTuzpd-kEn0%Sw?nOF1&&1eX^`WGB9jK%+Po6*w2kjeQZWpj4fJL zvM*(NBIk6`Jgh_La~d`zhlhV3Mt(HUuN__d>7R==7q}@8N1ag#ty%FKRrvcCxj8wF z&;SBQ-574mONaDJtd}OSADdw$&#|K468_bv)^21QFA!LlDnwTOLOYDM%P?5bhhx&c zHVNe)nBOGZqN8oxy);>J@X4cb4Z9TDAMsy#_2>2$TZBw0w35rI5A&|>1M7!v_BJ;@ zcIH)csbpM(7WSwLx3SqR^Fy?1R0J*%b4(EP(_S5JKvQ=@qF3Ijiya0EvD>~Q9Z{@s z&?_jFFy8LX_-bJ!>3xow3!U;qi`o14(VdWg;$#Xxgb{d1hLCsN1TK0}aM>-LVS&$A zdM__YnKuO{jzVP4V+bkKH;MaRHt}<52kdg>Xo%B`x-y%D$Vz4Va7Nantw^2`{~&XI z=odPoG7%mwgaqeSKAw7;=Ghe5MTX_+6pGlp_j0E#{*-+h0A0rvqw}%R=!lRp#jY6c zmpVxNs1ee}w>3AR%&E4NiLQIh0KSTe*U9+mrIHXnb;#0&ko$V9qvf&Pw4=t=Y%|EM zTI0Ubl^_Q4utOz9LH>;GFg|U?o@a1xPVeHXhs#3mO0JhR=3a^wt|l#oGY8cs>28Ec z@EVg9CFQQvN+a-pNVSU_ek}XZn^4&Ag1qTVjA+S~wBstXz>0{MD+#F&k8pryl8SUM zm>66>px#8E>p$hb^jlkyy$XhFnJWUkg*)<0zL$UYDgm}e^WR!{g7_RCbj>_65ZagG z<&x6K`VoUpJW*f}tn%Ua_?q3iq2itIAaqfcX>J&V5*~IRN$Jv@n_lIAFa!Db)BVb7 zPW7e7?HagnlNmXpBdq6&309RS54pr73)x@`-Q+kw36q+58kR1(kscRBS#D=ulRvW! z6S*diN=G6JCer^mhtFTE)p9NM1}S1@yE`Z3rzkEB`ue18eG@QLT)tuI)KF5Ryk|x~ ztel;=p+7O=vcXuFOwJQmX(g1B#MiF}pyXgSlSy2$e%l^f*Nwwtk6HbYoolM#YMDaI zr%l(9=swS;7Uum8i7{v+g3PX*8($uM>xRteh;66ooeF9pE-(}ZA!wv~m7AMF6R)St zj_6RN>ERd|kCOX#*`eczAj2-vEtLq&oD_ZTVcRD^dZ4V3_wIlnW!nu{U65i&cCbK; zaD{kwVJO#koOAWTzw>F+E0ER?9W0a{u!PyEFgsQnkx4P0;7$+?Q-HVCG2^Edkd{W8 zxPQX%)5BP}rRFTA&WeT{h#|Ejs?1q26Aj=MoN4qI}p>=O8On0X4BSdVud282^07ERLlnh+~e2#GBpU;IUb0IXMJYJ)6i>U5|P^!V7P*5H*YS zgBtP{{HHz~{=e#DKab>}`WOm4rfd?zdKu9W?{Mqak*WRlua$u;l{5FHZVzb+E-i)g z%M3AM_eecN=wV_!=yfqRv0bO2y@U$k$b!=3HIw_BO5Mzx^#}($Fqe~`2Ove3MZS-g z(v0ToBN<2Cf+YNBpFx1RORjtC4M()3RpE_Z^iqxFbxU4L%I5H&BJ0PMoIeg1G*Tny z7R2~`LY-t9(edHl-^8WfyrUxJ+o$;`mf_0ub_EPDi*P`BMtYD5-dpVwPCD!Mei2i* zh5*!j+~-dKFTtyr%KUDTME zGPhhsGYikVsm0RqpSY``#r7{iD7CPDb9`EjhsT5C%=}jD;VM+q;D@~2>*dV8-WHy2 z*rFQ$D4`l_ilRbG{}y=ZL3`IMTc^;fIC?bSpD$k2eCe(qZ}!0*+?m|vHbWLVZ>ffd zwc-1*S}G`^7jydihd$-^JI+=n;jIlVZwzqvBWD@h^yUu~7N{rkN~Bu54Bnpm=44d0}s-zJ`^V>`6J@|N5^sxL^d z?Q=)+{NZ{v#LAeF#sO_-GN6~AIKu|c-cGgh@0yf{jgXHuB|NuLWRbaD(1@QXT&O4@ z%(VE%wA9=p_=-PCy{4jws1yeL{`nd^9tkn5qiB*zGWlFLE_80i|C$xE&p5n4l80}H zf+=V`HE%U|_>VQ_>g>xX%F()9P@h^=DBQH6fAAKk-8h`{{dkY1y5qZ(v+zn;YnK0} zC?iR#XntnW?^K3xYnxm;?g&(FJ#<2Ox&Y5kE`KjyJj(KjsMSA<#eyhsxJxs9>aW__ zB5O7OPHeiA5-R(#*ERC>t>*tbK_NURre^Nz9(rjppKOf(HRPVkLd~+olKRHKi>UL! 
za~6=)J!cSf?@uVyktMJcIAGb?pa<`Ic2_bDg3SvR^rx;9Q=Jl2%$&Lq82`cTUQxG* z;1|+PvOFpk+PPtKKEE*5i|S?#@+9#IkVh{xDz0bSC=DT@K*&i)%Qg;tTO}i*Aj(Bl zYStVrww5&16GdbCdsxD4&C=;*!tMDq(jypr@MVHWUCql_o7?9}D^6>-!*eHNz2B61 z*XRFNjXW6&rX;Pp@w0f>dMDS%9L*_-#c%=c7D^R&g!v(zOA-ysC)qc(MmI#kLTqI; z!K<;jWpfLAD49hYg(n4l-CIpMl2_F%g~V^Jy-9bjR%_ekw{h?d5l6WwR?%%~Os{}; zrW+EcFAqUG&gXi0hWiu|xn4Ba46B&gAD1o#}4lyww^%86evJ_L!O zm|x?@wl(i<$W6#cP62TkyBY@r7hz)=-I46y6C<3Xmj$Zt73t1CD^hco`O)xu{#ZPX_^3JlWq`hSP_PX%a2g#PKWA~wvcS& z@pa*_HL9QsP;Xn&il)IsDRk^R!gjuAAYV>w6&(#GguBz5Sh_;H1SuA2lRhTxLYH2} zpS3;UU3s~Fp?NJYtS(G;@FGm|ysXrc)94t_WN)KYFa+&Fmhaq^U#nH%yh`RENchCq z`7tg2wuY_6+}UVS`+xB%#H40wgNIDC_5<~E#vJ7@w#JJ$Aw9zlk(H2c)Nf?>Xb1l^f(tvO}~P0JC+|5+tiR+Brc zBcJ1G^Dfm0neV@y^?h--?C>+ocXaa>cI<)s}8ERi42$K0Jr_<=;`H8H-2U9*-3Vn9b7!EUOsthKV?i8DeZTQMRp<@;r z(Jw!CT#Y$`>mFPZi?N?@qH{Cai6hAN!~NoM5K)xagHf_#r*}U0Q#~Vw=AVG=|5uNU z|F2K7%Pu6Joxz!I@A5Xa*6IoORWHMPy=T2ZSxWifC5d*;*F3rN&P1|wAY8Wak5*>f zX@2h(>$xxad)6e@sj7NA`G;}FxA;IZ_C=|(GO53Z$xNMdQM)fW*?-9_JtWR6#aeudS^Z@*CR z;EBM<)=0#+;4tQksIyb=wBN31B$KBl3afGLDl#k8sy!n8qAODA$*uIfqh(rQKF}rq zWA(R!|FukVq0JoW2hRaD{OPdaZhg3oI5jDz&ngJe1P1s*+F26Y9+caR1cxzhNBn3n^4iB)aL6qG$ z=uDUefrYAeA#C&L@1h^~T4?QShiyuFMsB{ky=(f!b2Z5VE~={yj|Fn1v};pYr29<` zYMfiT1p&qAr0(F@hg*cbGd%4F|H`I|Hyoc%r$pu#N(Mzw4Etsq5IpuNEj+|FcC*yR74hS9>yys1d^S z;}eA~fPr5`Dl%Nwy>F!`7Fp=uu7UOB-C z>k_g_idX*mkkO2NW>gzxBj579XO1f9XgEvqgRfx>Rp5{!!;UqB?VfvAsGB)Gec9FH zV{DhLcoXkz2C@tPSG!4RC2zQ5JHwgv0OnqCtLZEL38@p^ygdN>qNlvkTXG0|6_Lp2 zbqxl};XN!+iYSV>jck+7Q96jL7MezyDTa>6Thx)~vPF{bCF0vAiAL?98G z(RUpQ$QHtjDKJXf?`Oy;_t@(XlXEn2G^U;PO;FAGm37~@UCQHV8?|R z4bDwXc@?;S@9CNDVsACaPrVs;+K(+qwk zqz|c%8%6{Cs)j5S=K|oH>jKpV0e{hUSdr_{IMCYFwvCAi;1%Py4&-K6_yxRGT-fJ! znu`^Ju3W$CP$(+*GAHpBIOEoJd6lU*F_F&{HSdH5 zUiYlqMjGC9uoVs9;_9p zud4gr$qcVA4SO#+yE)7i{>1MO zy~9agNKGBRgUNb4cqUfd`~9{n9NGcSFB_=`<4Iph#j}R61^!bC&lqg|`Vb5YHTM~V zTP^Mp66bxc@gt(qGNoOBX08qmIkz80L#~!3p`OY#FS+9#VyyXDqC967Xon>PPM=S9 zrLqDta7;&jI3LGefOuT!fDf@_gXEZWM>QVMQli zF1l`g0Q#WO zrQj|78K3k-ixK8w_4ZJFF9My^m5Y3(pK9@O$!)PlqD`zwPE|7_@Q*qzv0ers-_ zgY11^$*a+?dWBEGn>iL=omqHAx(>CTtU&o?Q3t4H`2{RE2|~P<9a%wFB=_xlMs$}q zES*&JO_5wBf5U5kB&Av>N!bdN2@t2|s)-GTi~pr)43PybecBW1I^;hlc$;%lCaiH) zcqzh?{MEahuGD>Ia|cDKaQ+*8Va<|j9pJ(bLp0SMw-XEfwd@_qeIX_g`%5=jCdutY zB^?;ne)aXveb0i#B5K%?v|M`65% zwd0s_top2by>>@E_5qto3Zn^LE)3P?Pd5Iuz?a3|_Ix6zYxo;}@N_VOnC#6Qs4qSa zc`Sgr_glLy&iB3pp{7Bq!~l@9EABh$qizl1(uHsm!V!@S5?ME38g$Hx6A|f`))1fm z4}_lcY^U4M6z2IwmZUY5bTb`W|F2-zye*bjSg>oyYL7POTKU!&>L0^{7^I-L{MOHR z#Z@QZmY3Bn*mTY3*Y_gZC7e_JxVT3G-95v*1(ph}^ASrv@9Geyv1)w1ketbnP83v# zATPhDv-FZr8Cly`-oTwN=siWw|6=kT-I1xahCH8<+XJ~@3X_`;R6Xvj5f;AjT0=Z6 ztX%K*OZSJBHNTdx<3}edso5&rqIhUL7?V_=_9TrcjwQX+v2DWfTS1rV$x-GL+U@6x zGkWO~>2|yWJl_Q-tYa`#EGYBa+Auy2+kgm)@kSD`l|1UXUqvV)n&&uzh{9mCvhAyp@mF6+ODO`QwVz zN1%&Low4=`asw5_Oq%f2drJipk*YAb?pzp#Zl8T5k``#1_DjT_C^DKjTn_ASFnzal zCLCY$v!Nq`yeTyrZHT66y;N8U7O_8%N~W`s*u}GH-ccV14@qbi^0~%aFw}{Rws_P$ z5ox^S#?n@-QLG$1#tDi0C&6nDd%Kmati=6|;7A?897t|%oA&Y|#z--3R}V z84%RlRPL>%lAy^CalZmyy-|b*^IkE*#J>o0iZ}9WE${Mf0peU&wzX_6e5^5FG zjs0Guhi;=QHTGmv|06x!Z4+H{6wYMyr85-iT2Hre?5(4QG?+#VXR^9`Uq(mw5>z;f zoe~EnQ)xYA;l8O|N;vb_jGTb(XzmuBXNP{FHiCHV-GRb>bAFmr0|t0}r->E3F&&v-aylx$Qu>Ydy|HPI^JD19e0I6e zF&09qPmO(IEAuK9Wi(!@KQ=VsnJd=bQGXK+L(hW1>&k4VmzPM=bsZ1d4~}YLMLg;B zaSDBDH=ks3WC$WM#PhG;))H4SJ7)uSAoqQ3!tI4gup8%x=#lk9UET^1$#%DqCDG=t zkLz>~u`slYRjA!j$Fk&WP8k_`zZ>RoH5j3siSI?ihoZJ5x$a^%^EpMx1Bm}%vv8}f zeir^_caQaIyYAlGmqcOQtLD##n9NYv#^gM0MZqR%X#Fx4PM>zk8jY(Qmm^4^OkG(nHAWqMx|Tk>(Jr5neb!XFsrPTB2w#;kGNg z|5l+_kh=>gQ2h1+I-F_cr(K8vwD~z{qHd}e*k&jOFDBiVH{ 
zam`C#Q@Xon!LTc(BmCl>^N#n-P{$GfaEre;`d)`tEny)|ok&$AJ6HyIa<`GDEazg` zO}V`JC_1XNOHvwKCkkHGzlVHtKkEZtq>jU6pT@3ku$e{VsGAC}qR?J>YpB25>duwt z>A3`|!e{CN%gLAxB6WWx81zvb=5fd0fur2@&v$|8o5sXDcK>AFlCRdl+aZ?kVigf3 zH-DVRq1lf9rLhC)rm;tXm(I(3XOMw5wYWMQ#G(*G8cI=8WK-l*!MJEHB-&_`W^bgY zR}$&x%OQU|qpGbyA5n#PD`x@O$sokgjY?NvFGd3$8lFM$Z_6*Z+B7|BAJIYv)f;x_C1ZC#nU(6Fzzu}v`%hTDJCua`g?p*&>%4G?GX z0Bb~i=iLl+L|yDiXv{q}V(wzOsng<44^!t1Iy>Vx_xsb~Wp3-=Mf`>Fw(D6e%W8rj zUTBW)UAE$O2BjIr`M;1;8}K0XCYY=B;WuiTN52t$%Uz_I#dyr;_3iGv@PO+KqrA)Z zH^KyXb&}?zknl)e=oJpOM2q3GhkTh8IU=%jXU@%#K!A#9+7OzlPhRm7b>2Y!i z{6)u&%*XJ%L?#hq-N=fk->eqP$~nH*eot0elCSHW@MOFBjy-D2oNn7+JclvX6`%~d zG~Yb`P{6WuKsV=vpzQc}i=hwrEQ94&qdIbZ`q}SbeYULzaxd+mfJMVQBVV0B+xni1 z+$xOLX{W)U!^MiZspBJ*i*{RPoJ`ekva@mkoLqT7R)dqv8pRU!%Hk{c@zCJg5}iGu zz+W%4$GE{4iwm*MqMN|9Rd+zGJ>@qRUD&m%5nN$@1PTQ93xQyd>Q|FgL1Qb?z)%bZmsf#-{qzNxnCoz9NZm} zWgRT9x1|};7!B}dBMA^j^vnW!c(d67;=&xHwR@NKJZ+m6aTPA`lHqdN!E)h7hZe2z26fMS<3(8Ll` zR9l5`Z*>3CJgBvP8GQ&w@8DprUyHv3R3F!+Np~?S)f)^aD1UN5)~!V>Tu#Tc#98o& zUpmfb0FEFhSvIu$a%ci?x`(d#@m)FAgpkei34rErh8A~OaX|FMjlLDhaL@(48|6=a zi%3WM8)GK+3WDJ1c*J`sNf)<@i`1iCApTQwv-9@o7r_yhiR3x}k$${tkKTx>g&>#u zvDc<3nQd`b6h-DUGWNE!`$q}|%Jn`?Hoi^B zuB$wFdWTkUa9xNB-XEp_O4{h zY0g5iHSJeoFM*dxp9{Llrw1$4;!X07IpWD7M1#2;)WZd_Y;OdLhaz6(8ht15N_=nj z77og^*Oyok4Eco4iMF7O9wOMMAj2Qs0r#3=hZ1v5h)d3J-SC{gcG39!8S#_)Qv6HX z)V#<*&ZC*7wi7a;x3@1DwfHI>%FE%5>LzRcH#H@KX1bfchOl8y{`|S`0Q;3e&og zHP0m%|5cJm{+?TNPkBPjS=DSNC?-#MXjiRw&_Z=eks4loEm_6#x1ipfEh^e_6KlGV z%*Khdn4)C=3=gK6@f?^p6V=6#UYZ{1ly>#aZ_UgfGOI>pLj%GYQ1LN%(rzak_R zE(ihpAc8t&QN!Enj@KIBtpxhq|Exz~XduXkb%dZDxTdF;>#Gp>jGy{>Ki1LMu`91U z*51m&%u2vMo|LmTy$3u{bNLXF9?zLemz6Plew9nmq9PH1 z=2Zi!%)2UeL=7})x2wYXDtkokO~f+|1~&9(4DbKXE3%lI>@24?4n20FW}XVBBhwaR~$JtwX&#o&$KO)Gq$rCD`f z{aCKc+IhaU8YfWxrliciQo7>mJ&O;`(7D9AxU@9Hjod!!XWiX<_Nd+9d6KpV0C= zRL}|98^7CdFux64YgiQ%-KZ;fIUM&%ZddSe>-GuBI;W@~sJ4Mj@c)GcrXG4&^IAHq+>7yX=eZKy#^W!~164epM zn-*mpw+>C(g~-+dEe!*14iO_P~0_m{^6 z{-+!PRwfF#r^bgVD2s$G?)gO>8RxFk~N(_W0<1A6yLI4R~=-- z+DCkMhHL~%l5=CtcH>tU7r{l$s0#Q)IKOml)RZ%O{@tZQl}H9*j`hUm zP9gy>Y3kU%pZ*M@kBM-ayZNuj2zNfFY*+d;vfOptb_OtqCy&@Mv+n8*(QqGo@}+72 zBJtSBh1blohECSc@w@?-Syt{fGAWN}o;X%QOc_=5R_t7sdTy~}p2;i{l+`0{hf4O< zLsoZkUMR!r^IC^GvDRJJ1_S-v4w3^2pik%3uArle0`cjLf8HKmv<@O&^^cAZic@3} z;&;0{xg?*ukHO~2c7;2_2-6V=*W5tOLqf+IZ=JE-Q8!f0}qva{jEgg|Jk#1R4dH zt_I*oSpD#S0HEEG>*EabN?kPHZ~aHbh92rBokf<&W9TNk;kp)<-)hq;REFE!s<_@l zt$k^Qw%jXlk;5ZaM#PH$1W^+u%(|y|>?^iy%8hYt|Ibx$mv|Nd`*}MaZG844(fyk; zky7%Gna4s)X2EA_>X$T8#IUHGqEePz*HjNhgY+Lm>BOUYKQhT|zaOuh{wOq;n>3DV zaG*8GtWop$`%L==iztFZ&boW8Y9Ozr9A;uNN17~g1hWvYq2h#1tXVa+1fV8Xwq;*V z5ez_;=d#6E>8*fdzG|h8S;ert#pmmT*jn%iIC4XTf-GT^5c@U_?EwlP$kd|wrHrW0|hFY{Yyu0D}?fzoz zTfCP)FWe1aAKSflq=XV$y`ec?p9Y1a)1oLrb7vJLc^wB}`ZC>s88uAdK>WJqKu;u7 z#DWhj5SSB}vGs9`5t!$)K;bz^^C3R>LiobP1c+q<|xCYhth{uCMd2aC9x(!J4YH`_VWAryqf1`@3soHyy+{O^9*s_F_m zI_(v=XRQ;lxdLSUMX~Qxh;=B!R8W)&Jxhn{&-0u*fAbLBJSAD5P7ht5TBkmb|1p2L z<+-7n-pG3MT+@^L-GX}957IWfJ8Jw!%EJg^KN2H?tu1`~OlTvI$(>!4_}!M@5IWoA zB)%1AB$N9lQTlB}YWd-jab0BVC+T!#Tu?@gh)vKjO4XM-DW2Ede_TxL z(FT9Rlci5v1cqwG$ba??Z1Lv|w4e~A31zACk;!ZmlC%g?QG~ojh2Pa12-;q!8L6LR zq-!61`?#O#P2?4R+G-?fkbXufKc`sw?gSi(&p@c9)nl~My8E5A=HhH%o5EKvOu2=l z;3mmM3vEuzM1xyo>D|Sk^r6~#R-l2_NO#n&TpQ1@b>FD+9m6pHgw# zo;j)FhV;vDd_4!|m=b2~`YWur-NvDxT{K<%J6%VkQ^&S%<-m^Q-4A>_&y9g~4`o7X zg2bIk-Mg;Z-L%b$C_?dmN$;o*>8O8KBtlDtuKSOUUbzF`)4xSiXP_a-NQRxkV zr%4bl>Lg@BHnT?Uv+4fXW8@P#t>yvBc+osKJ|#%QyI_sWz0Zyh(5;^bVCZR4q3s^b z$3&kZhlxop-{yXZ$!ElON^vHOZmB{1KKe}E!hO0)+}+rC0|;M=Yr+Yrl-*dFEOrm| za4AC|Qut_7L)*`41U@!Ces3ajbQ`Z*cJVgdVw{+$yo}A{0d#X_P!IvR;HZY)BkjKpu3W& 
zoSVZdsp$#aYEMLuoGh&}iEbeF$~p3(Vy}APYHhrOp^iD9ZBdDqj8AV^?SjP$9xW2^ zslQL)yeX;a5|5i$BzVAFmQH27Re?~dCG~ca}yHw>@pvvycuQEz2unJ+3H^Cd$FhmzPU2# zsPi@M`{PcY_a8g#>@Tp@5#iKQykF$mbEZg$j_rP)6(HhuVrNd|K8g;kP>2d=E6| zKQc7(`SBZ!KdK)X-`0t%?Yfmq0sFHLoJFOgX9u5n2bfNfw4XF4kFkO;Vv7bd9~iI4 zzMk$49Z-@OP%}%)VXS8SPB96u@=>d7@RysSuepSieae^CBYv&b%!T>wZcvyWSJ>fU zc)lgr8XWCc_2tBQhol`o?THwH-3Fs;HD=mzw7Pfp`Kx|xm?T*f3`xI@{Rsm-y))4{ zYJ>6c&;XKs2?j#vI;ls@5Dz_5`*WTvbo*W#IW^Il-I<6Ig#4jWM@o6)5q8G6wCVYl zi=EwnW83Z8IrRTHw*NiF%+4>F`iZ(64S6CsVCdj6BWImhm&6Y@@brY#mxGG7(Q*UcwLRM7sCDewXG*1Ybo)^{|p=A!sJ#hJX?ZQB09YR1^v9B6+q z{=xoC@dY4Haedc?;)j}{*yFwTGP>JDL`5NmwS)oR9bb5CsMya1r`oc8uLD!qi>{Hl zc-9}NytvEZ8qfb#(7E@zuZ?|813u2a{jVtKvtt#6&busd3GEIGtXuOjIo}ecE9FE< z?O}f|OmeLHjx<%p3*M@k|I_TT`9hBPBlAg@eWwlhW5yeoI!v(m1?b_Ff^lgliL1fc z<{S}rkD5vj1f*j3wXv<4h`Zi6-1CN6(N3$LB04}WM^L~z{MQ%_Pwoz>`22VO!u>Br zKAoZ3ZW^K9vArCbdjL+4K~|c;%g+?N*PjHhvt%+f%9Z9a06S*w_5TyTz8Z!-GFuaO z-Tkwsllyyb1hl65`{QpjhzxQ#f1qrvIB=vFx(&L7j;%WXoACYJ!ab=aI-^~&qUCVT zyki{thU}?!yz7KjF7~IE#VCq$s2trJ1iKJ4qo2=-;{Acm14+vC5161v!ZeMpfDCTE zPTqbIz#gwtp67k(a1Renw;nJ9{y=%$^K>Fqvw3cZ4LE@w zsa%$K3ipY!5t6URZ;ePm>XIDpS#FOwU&K|fhz6cDj8dkG@YU2}ZskN=1AyL|qxWNG{a z?=C-fuEr|}?2Qr+_?^ykb58PSfTFtL9&_AbP`oVm{%W&amQtHx=5vHm1|!M z>6U(<>5_5~@XKpEgx&?L8y-O>elM@iKCoTN7BMDY(SERoPGm*Hm8susONrC!lGnz` zT5$Y9KF+-XWo7;c&8~$j;l_cpb_PyE(!-t*JS~hcbiu-t*kew3)iGPP{o)iz$ z0?u_S^?e_9YYJd%8jnEuM%9J5^P1oxrv6pdmC5B<#2!*DiU7GOQkMfwy{83KJGx$lhg&mO^c5o!>zB4MVPCL?0B`ZR zA3u5#QrSoqmG(7}S65n8o~kVs51Zc(4kImF8N)EiygE zd^qlx4;)b&h|-cH=PU2Mi~7zLtMQ1v3EOsX)r_ORx1X(G1CE}KAbl(Bv_}u*0o!Cm zPYJ#&&eL{JotnWbJHit8jncBX@0ZsK?EhpJg2g(DMszKFZz>Qk<3Z-l3A&#<-&{+u ztx5f~HJMNM^^5c+YVPdsgqxS=MUAI=84~=56_!-EachTymR)s+VRrJ zVq9LATWz*!+gWYE0nS96Q&jLA(g8rc-CvbgGXt}B+W-FMI%tWebpAYZPTaNa%lNO) z>Eb^8iD>`rEYP8&3{1ZeeQP8t!33KqXw`=|`A>AFJkTPR+dx-z3tmBV-V=0sq@;%< zJr7`eg?z`=gNa{K+J7(P0)fZ&R3yeKpI;vlv6;MTC+WyAZ3MKJsgVb(I}=t=`&^<#)jHdP0EhduP9|4wZXbt2>9#n72oC8mj?c zB+aaC@0l3{*UrsNpoe)1>&+DdbCuc>!#`7A7_nMN;NZM@2Vg3)hn1;N4}W}I_KIu4 zHK?2hi;p|!WYgh(I{y~PSwm4`UF({&poD(C{<(_)`hd3cC)xGVWH+R!iuUJny4vi- zxg#;I#xbd6Gw=vQ);V-$)P*sFv3#eoJ`2)s4$=b){>;bLR{GJxQjyKSh zlqIeIbMGW(uac-e@!22V{@LtO^31UssmdoZ3csW}6LmCc5m{Xih8mt9-+5oSrB=-n zQ8aZYwDfVmsoQa5(GO?Q$%$CsT9D+t=y3tz-#;Cir88_w0bUrDD}`_OL_Cixw_<{ ztl0_ zUhpzs4{rKQa6Q2*QTPpM7hrJBhJ-h6E6-HPUUE%$2Yi`lcgYB|_6}Dn0+GsG>;TI9nEN zpN13;I*j#|efmv2jU>3~R-inue3s3x-?6k7m%*r1SX;UC)P(Wz9@n*xIqK<})vay_ z@BmU@RAMe(DUBROn?M!z8q_c8yxY7soB{~i*R6ls!nu#cZ%?l&LQ=5%O=M}aiIh?q zDM=xUu2O}KJ0nej^E)k%-@c&JCqEdPkJQji|E1w=*y$Q;SoAhVmbNiMxngWHqx-jO zXDCI%y1@$r1WSNCFa`4wlMN@;DWIP{Qn-ydWDAgQXbK>>q-#deHEXTQ-omfe+^-|A z5YrIx3$Gp?R~_ELwsdw5Be{&jjU<U=lK-L{EW2wDWz3bLEi|qslGr12|Mjxh}bE;blv3m42j|B_8Rmtw?s6j=$SNQ=q zXMPjy9@pdUw{y5-SBZSVkp`C>njJLGWVDN$&!%d?m#bHrX zd$EFgjUv_`-Y^R++-+ZtbBfo1-{;nVmq)6z8k??(U%2(SBcx9jvn;doCml$Q-t!$^ z*Gw${eJDA?-(7dMa+L1T>m_p81CsSvB8XHxgs(u9+&&N@ zH+Vw7T;Jo^Cyj)uWyIg`&rW}`s{`HSNVHZ* z)(AIQ?te3?2@-fNk@H~BgLA~DS%Mb^ONfun zYcwEAG+?fxR_PMLwznqt>&3aa#_v$ont9{GZG`kem171KE+5wsWfLvkq=Fvydep@C z6ZN5^d~IS_8(BNi6`glFtSi9FOs7z@MzgTUJm9*+>#)Z&N}XF|{sC$^;LfoZ<%}0K zV(Qs4yl4z@FMOk)u32NS{1?uqGDQYVnZ9gez&Rl(r0g@2`&3=~jTP51=yCfy4Pk7*JJgxSGLv36bd;lHa3-`O#2d3x zWilkv_Q)=`v+R!XGX@xZT5vLcc@-vDOFuZ{P~qB1dn8R37q}M|&^}mj>=4tw9}Cxi zSITSafaEGlcZ((xH@@Y^pk2QM=(Cwm7xYlFJg(R2`hOu)m-hMS7Qg(!+cvW~GbVpI zOu_-j?UqSL|6T;opvk=+lXFTy)3Vown^%yRF0fSrR;pbtu`_RiM67*H^howEe6LSL zq&J%Gl~@e64C{67dVyu2{Cvz?gSo8D&URfa|fT2u4HhTSiHBNzRG{5lb;`t#* zg#l@~Nsb}+GbSDJ>0m^w!O4DTWEuPWn-Y-10(MS+(|Dv~Zg!GB-TU~jV%9Fg?kKaE>+!Yi2%MVwr8W1{>5w7)Wu4yIs_Wz+XE 
zO_Xn9kqcKYw*0%hIafAof?Rlq`*p;>M4TJzO*qrTc$1sy*U_ZC*EM@v8Lx-rxG~Yf zKNPe8lzg%|mc?+2?3W6;)l)6M^IgMY%?^IqKFi;%S@h@&>If7q$0Mk!=^7q*3QC=A zq;W2ARG6qz{?cymcb*9p^SC@8F`Hos(6I2=AiDW>8*y>_-oEq4q-PWC8(p(9$oe8M zDmAcD6tj>i-IoLAA52|54C0P_=Xz?oDhtS3sbF*5UEi0RjH05E|b*t zK^>lp#P#l4Sg_&0cwR4mNUKbV^ZMYmZ3pk)Mv}Q0N7Kbu+|{--tSq0B-c$eJb$m`g zL0sX3Feldgd+_~4o9Z@cAizP*0gyF9CxN<|Sa-kPL=rUck{$$&(yE-Yh1DJ{XgP+|stcbR=5Oot0vpZU1Ld4f4N9HH101jg9U43mK1AnMpQ> z=k8bvTLzril(1isIAe{rzy~nrh%TjF84FH-RNk3Kdn}ms!r$PQ6q63Qdck%0x9(_< z)J|D@MtfykbE7^H=+9bi6%U_-b#>eZvZG6KNY}DP!+899rj;?7`>lzwpzOwi}gD6Wr%pa`5Pq zvdFo^g0hbbtD5hRH4xAo=_$WY<@LAXQ?UUjRuXp4Gerx0HN$iEBeK+xj{dv1f1da*G1esp#|v@fdO35 z4t8yzOJR}zs_TZ#CF$YUbTm~0R9RMi>+0tZg+gfW)yMc&tQKm1wnCqjw|TnjIiHfauq|Yz{cw-HlE?wyR42NCbEoSNH$Ud&_S&ZrmpJlzy8npat z(6+#5CM@V&ZiaePOHKN+?NahgipFouM)joXVsLdvP+~nh&0qxkmiM7_5kXuAX4kLh zY$}1{z!@`^vkD;l7m8K{LHz|mg8~rEyZ+5<=KT5{49DxI5WMmbhh4M%8pW=&;Vo<; zDYD19jpwdW+O`ob_G^TaS`JIKOW!)Wo43im?k_HwH`c$LXL6CNno>L3F0Q4X$Y(2F zqsl#PGKB1cGr%$2gnOI&HGYNLlVh{mW>~jIv_|=6cddtYe$o%>%&Xg%R^CH`*gyIi z-RvuqJDadiDMhy%!Gc$$;QfbsU-^LGTx?*swVBu2Ah99crip#whl4n_}bLAs^KU7Q7L zaL_Dkm5ff}Tia_VH&r6P6{beX zE@5Y8IGq~pirUqzl(&WrF+bI1`Wa60VY$3-*b8SR3!TSkWR%GNjQx&F_nKej4GorI z)>t{IxEa zG{uW}@x&Lv>=?$8+p@vI&hqFoOI}dpm&?S>n5jC_eKC^qdoMFxzbkpKjZO8yZSjab zdhprVc{IC|4f?r`&DeU*`Qq|k!~IpK#;I4*s>$uZFbt6%s7G@3TYuFH04!fW$a~mj zx)TTgCGD=vO$oCl0vl@I(}(G+@y^NRDFf9%I}ON<1bn~IWs1Y52Dve3LzT_Y#e?6} z-uB>m#MW&~su@|vJuiC+cf%~WWEf_BbK!xBeanK~@ zfr>-l`B)mEn6RQ2R?FNi4z-?-9%<7)M86+hUe28a3EUm8WA<%8uDf_^zjMXC13l4` zo2^kf7|Xw~HHphP9|&?3#aaT>vS?KgJG}SOEu6mYY^$O@-9`NVIhLO{m=+zA1C^c0 z5dD-mu$oX5Ddk5ns5^>uzTEg_usvw0&` z`90%e|ErmHeSC`NmMTa0B8SQfO~VT9$C=h08Bvi7w2v-*Rm%sLAp_BYF7%I?)SFWdWOPY*TryOSUeB86PffmY40LC>z2B_Pp79}apDS6V-`o4q5Dt0ec*mFB!ccRaO9j8{)Zq?;cJOi8wV)JYWi_BeF$iJon^`L_6Lkbq&dYJ^NYPHy&{ zrLlk9j*wFiAlIirveD#xExxNb8Zx;!dM8~zN~K0+l0?C+$~n!*PBcoW3$Zd$S~e0D znL7YitCIR4Qg~z^XY=`g*Hx4G@;fn~N?6~`HcFA~BfJSt-1PTuyKy-fgQ8h)Mi8v~ zA0y^T=7H0cJ|Bb&Uh82jH&G}oy4~#C=?nURk7|jBTfJ?6D9ShxT9WG*zeaa8TXm=| zMI&95)mW7F+R6qlxuwpBrYWANKaCmt6;VN`f5R48`n50$z{*BAz&yah0Ai_JINo~` zxKKvfk7r z2f%lH`|SVWdp{{19gwJx2Jhp3gIL*N6eM^#LAYH84JKETMI*N0P8=`E2%k$m(D_gG zPE@zDZ@aNc+XlsV8zT#*g+-gz8Dmd<)MZ;_dVo*5>0heI+2qF4Ru2dze*(l zm%|_7{%w&|O1w_wBB9RufbVIaOgVf#g5~lwEgE$>d+{m@;t4aa zv&-%s;Sub<*owGuRn4@r?VavTl~(^;`pPY32+y1>X-PjF4GBT~*qRfJ z?)p-&6CRm9ap6dN%PYsO!mTM{vgQMCSvFHIxjkNguP`fL&28546vqCH(CYd97; z+MH}CbI^pXV?L4<{ebK)>?JsFeKgi~`(ixz?O!|r!ayILe*x5aJapa%p{L{5wuOp@G-W~AUOM5+ zw+4T%=X6n6-)^M0A)_OTIOqxdvovP0)!XGQ(^vYUu&BgMF*fiduj- z=s7r_Qf*n4C3;+E%(`6pXX=Gq&fV}~W7I~2+XUIq-C6>zN!^zDFhDP$b*IOKX$&w* z=6s3OHzz0(U@EeE3>p>?mHDA+Pr6 z%gL(eV_NEf*u>v2^aOOrg<2cc&4_BE$JDJh9?a->HK7EAcTuGdf1iWz>KzX(tvkKP z=!Ms&Pgc1Wg5B@yW1KOYf*|*kJ`PgTsdd^zzs?J1?g{!CBgcUC^as5}x{56bdlS`C znp+UHR;#4lznWC})qrrG&*MJthm!|)*h$04VSvRegj}`M&@%!2uVx|i#d^u)cbN%| zMa^si^eQTSl2bo7-YU69aeum3WB)kx)Uv00PGb0+bGnB zjor|vzR*b25&EpW{YEnte7A;ei!fJ~2veLs*N4~12a+b3>(2CR**84mLuHTQvZw{e zD)y|--xetnFG5-`SZ}ZQzom31clF^H(rayqH4c6byf!CRG1eXV<>04oLP10|UPSh& z#;ebmjCgYU8k~+2t6oZLsy~`2GM$F_wFi27zru^kH!aMU@~oUFa+)U{U7t4bA>T3n zGww0<4B-0yahK^w8W+sRDbsMZ;u}e-a2>k|FQtGZ7!R| z^a`s3V&PUqV?@ku^7Y%6_yr4Q&&VtFd$x=1IuDmI=8SLD)Q@d0_g7Jft}h&Bb$4s| zn9O(y%rPaH>L)Pfx(lA}dz&~1s?3+`fR3+-NCJwHZpdZ_Y8}s2$hTODMsaRXTu&2VphfqpN_0e${$U{3V56m-#)+| z(p&G}##U+4hA2^Ebg-*Ga8l~+1dCAo?OB9FYR9vz*?31SvqG@RX^)6~V0do*fX3*Z zKI`-m?`0<53ODs#t#|vz$H(QCQ}m%k7X6O-0g)#TI#$FAC4c3E-gmw`Z??A2*LUib zM#ed=V+J6XHG6pw8F2PM#O`fD4p7@W&?Iaam<7WeOG_Uwg&epgx@8nC{)xe=i~Jyj<*7>esDZ)bbYWphSQu?_5!qxG8f}<>$tD 
znf&}2I4OxG{Zuvq{7}o+e$&{;@yoZ~2=4a-Ld{ey0p@<90%&;{Y3F^sgqINHb`oiS zn^#)rubz_h9ZX&dnHBZR7c3;<9D&g3opZ-WA{ZF#zUsbkV~CdxzYuPH`&d$4CU487 zSfd&rcaZG=VeBp2+EBZ7ZQPxf;I2i2yHl)q1ussaxI=Jvr??g=ZE<&p0L5F}U5WjULW>9$cN;}xbAC=^K7!LBPko(ONNeUGS3jC0VA#BOLRAY0!i)$pQj>9 z%aQoR&n@4nZFFLrV_9Vm!}R;X-7&c)3aWuyR1N%Gy>U*}aQ&r<52G9lJGPbujz*RR znD5Z}KJ=NxQP3a#rQ{0-CLza!8Bc02&gZWlPFDrfA(~08hiz;3x#&isUFWE%7hxBN zbDd899y>AQ7ee0oUH&Y}L8z1;A~X z!r-24VAu>~#1oE^?D=IdJ5J+&`-A4OqU-LCeQwaV!G-Ex zJIUbmR}!1AqJA4hG`*t{G=5Fipw=$j7dZ~u?sZ3iNI8?$?z}LvlW<0%fnHEsqs0Fh9!62n z!syy()aCo(`N-Yz*+n640SU4W@$io=ay<9dRP8_KA^7P|- zj7l_*G$}iSxS?I&&8v4>;L7d0M^D@Vk1vU(h(*d;5fGf2LyzeS@{mH@J`{ktK!tj8i7q zdAZ*+X-y>dSsNJ1-q-b7T#w=;hShU{>7?5wPg0tw*0sl!Zn7Yu%8ko`l3TP1KD2KX zQymbpHWj)~gQ&&mGJF!Ohp`N&Po@Ez-l6>ZL{TFfDE9M{Iy%4ig!@Wg@Md_8+BffO{&4v6j=Od6(jPU0|txEJu zKY=LD6HI-bTzT)Nh_Syzl6oS`g}%J96QMnsnS(oe_QQA_De#Gt-oKL63e41YiModh zoOr-MCPI%X>58z?ImxF@>wqQkKU|~MMn2o;RIpZXP}x6(R$iPPtrnY6;9)V z=?`3IsbxROnQ^%nRXE?d;gBa7@hnio%XWE6-G!T*uImQ$@rkOZTD-@6!riiQ+A`wh zSD~)E!UqM~k3_%9Ha9yb*#Y!pAFtnZpbi=L!1}G%q0RR2o!?}ySw73+uFKYtVx ztH4;qr=(Un(QP7C>l(^pW*%)u+A%irONvnOJ{YyPkVyQtuz17bvC9_UGUAk;*^=hm zCX-j={gsE6W~GVM5b3ic(!*qyl6WGScFoBRkZi*Ke z8TdlUZ6%bf<+AL(T%ww6A11#2M!qvYi0hu=;dxyG5blc?2GDp!^zUf+mtb(TFe!0a z{rnuicxNv1Oj#p^&ebDxlK#*jPx5@%1_?le$>PVNAksa^N_yBp`1Ua}c~4IfgL0LA z&NvBUjB;f=W#C53+!9X-p5Vdk@Bu%Euhq`?|sYN3|%D7Lg1H2G{tFUre7NcmLN(cPUp{AXRGov6D>1C|Qi+4f zxY|>t@nw9$4%v12MX9U(SeGVhrn667$Z|RnZsVipZ8K+hD?*JtR(Q-r+*t*J!8NV} z2xP~*@o{jVo1<27@6cS8n!*vg$!xu$$r#R!gU~(pr%GJ4TT@DT`^?mV(w1zfsd;a_ z>=S{s{KFtvS8l}Cvg_JlmPvab}!7fr~hanILSg}QAd=6O1}`|n6~nRp|yh<{>f z*S;w~rl|4{CY-Au7{4BPYJG}vx{`W)Sa$Y)Zg0NPzAOK1&3O?QacSuF-&A5 zysv6{yuM;=6oBClw42o$Wt2CaFg`kXV9f*DY};NFDu%1rHLhHL@gocfnz5Xy-r2FV zopint)CJ~)136*YNnzYd_5JuV1DqxLZi?c1^`>`5Tz#?e3`&=x-wU*S`j|7~Nqll_ z;)@&riy;RC2psW^ws}JE!-%n8JV#GE@ZSoI9I++6es@3RIxR^OA(f(0PSHVEm`VYx z>mYz&lq25qJd(M_Y7)2IXj`|OKuM1`AqIV80J7IBb8t&#c;j_OYiiuX{~&% z_lMzN_zL@7Iv2#8-4HU~!J`-Ymm$Y8&ohJIdrI@*cv{u&52Pin!#L$DY2v#rP&-wY zB53HigoJ$^r^*f6N2psfklVf0>j3M}s_Z*kGguHzwNBVomsRFgZ7jChA5e`FSwnK~ zh173_4!aI+pywVY5`BzC=aZfJ!&6$fyfwYk+3kNB;Q}12~ z4(S4r6%}ykxHTeu^@>^5xi@NGp2gKw#C<(PHnvM7w*?r4?kBORvn?RMDQ+@!y)L>T za+hXrD=GW2rPuYLQ3zz(G5HVhI^HqRdj=_U&gmE7oA$S~1l?MXznMQ+^jVTls;x2e zdf;G!nb+H)t11VNOpN0O9~nff|H9?uH9ncxCHw_mN5g^F91Tq1#(*WDhvPIX@ib&) z330r2Jo{p7m~Z}k1d~nvoCOeG)(>3tfGXn9$!(Kx7>-5U{rJCm*HLyc2SfclM6U`b zEaOu-oJ5_|Xl}(w*V51F3^x^NXzKhoJ}hr67BTY-Hg7sg?ME;5p*gsq3uVlQM1S3! zqcn_&?W1OS{6my8N9xPd+m@&{>1~jGRj&2$=rn^XFA^N`Z3*X|k6YQwl^emnijI>{ ztY|y7`u0j*zERa_^q&j1H@9_u_3puO*P$j`itqp8uCJu* z-jiAyOUh;GCT0=S2(!n3t|hlp=rzIkwOZF7#MY&`pV~cBp(==l#ysoTyzs-5k%u@R z&Xr5PGB4rsuk+u%rnxX7O-E0$-o52qo0Wn9Ymg2A91eJ)e0(w$`NFS&Eo(ujI@0x| z{Ggj!Of|A!&(i|eg?|D)8YkP4A-;tsNt#p-G)KMWP)q1{=&?nB;Vj|$E<9gDvI8J}`4zgbnIN-?-`mR3jkLs@{^qogz%sCNEr7YB8i^G zDv`!H&H|@hS3ug=KS~XJ^oeool`^!cI#~Pq)m8rQpu$H6&M8-CjRk*CyQW_}48mp; zusR{GSnsrx5043BlFjv085c)<6-tR0zg-p8)O8hhg&RJxq_Pf+B8q?2JLqTRsr@e< z&${Gi6V#xiqmkV!3>`OCh?bV z#Lx|20hkKw!&xnSfOHj`wMd;%k4V3`5I8GRzhu49WDJPKYYvag-x09FtRCsG-u~21 ztA5t^SI0wZXp(V1fOWa_|3H*m(O-5;l6GiBpxqKGB-vuomx%cz<8j(RjVyRd4|_>f zN+@t}^%rzqh~{BV9G>zb9gMHk>dPWYgoP^hvTuRch$;|aPr4e>_-US@=((af!|#5c z-s>@u>#AQ;9%F>FPayc-m`C0)*Gl=~^#xdDi&pfiaDjF*f#{uBEBeO3JlR&E)G07?EPF1XF8GxmA>8s3+|dGdV4 zHNeQTmq$EcDU%cIB`Y3q0?2k{w(!?`?$b-fHMNvdm?4^j%X)KwTr2K0iX*S8#! 
zY~*Iyy#uV!IF#bKM#U_WuUq+7MNZ;($VE4FP7`n*wiDJ@+$9-^dIaWJGtQpKwGu|6 z|4Q+0Q*$OlN!(Ly_pJMxcGW=_?lttar^&c-)P9Xifs0ENd+86pBE$Zg+{;RKTO_)J zk35UFOe|qJw-n|(cHPB0Q6R5!N&0H+?kFjbsA4iCnF(O=`ypoGKUC~kFONqi<@+Vm zx8%LbqaXe5lHx%KxaVjbqFdnw*S))Pv23=q9;0>OMw#x6s|}i^^EHA(OAa2=1jPst zX$=RGoVcQeGmha8373YNQ^@8TC3^zr^D?eiu*L>F(779rC&~bUP(Mv@$}LmgZ5ZVAoT)winA+-jPp{>Fm~$5P zHQ~gS750$y<;OU#<@ua)V;6?wfV!Kk;h%L<=YZ z#L&-TSB}Ok(4OOC7f%xjKt71d_gp%vZ5#i^*<~)vOR2%{h+VNZ+}=+=o+nmgfYFc+ z`$Cp^;?#DkLd}tzE)8Om{3t?k^*0B(pufz&8OMOkKl7u1!JxvBH*K*U>aIX3QT#w7 z{B9%*d+8w~upFOR?w8q`2f2`naEUZCT}D(&%XZ+wOuC#U832? zUh3Q~s!v^T6VC~;PTU6gU1ywGj_|lOGXV$DGX)0$$8RChbf6KWauaVC8yy$B+Uo3! zwSh{%jx3l*$4z=2{94ZI26Rx9YcNpU{`FZtQJc#z{n^WctAR=8vIedM1! zlY!kzHr63KbMwVCJLr)h@OZOnOM1$!gsUz-W1pDqy${-mZQPm?I}zeMoF>=*bML6n ztxwckwEdzfv74vAAZ}SsSRu6AOmCWJ_Im$!m@@iaAY8>VT4%=Zu#$b1aN4mV2|u>e z5q7O4ud-Tck?S|f5lR6n$s}r|?*631!c)6^HGIp8XEH0QQ-qC%Z3kaK(n8p5a^H`` z@oPryXNFKXyrFes_FsS)03SbpUeH0)_L34+8g9A`8S{7k+&9pEV}Jdub5clVfUN~t zT6I*_(%4Ulf;f1VX?zvEc*;}!i-47BAqkh{^4$EacstX}o(xGI9zisboUEk;o+PS& zVAy=zvG!lb!|4ykJ}VkK)Zq+Ur=_mCQX2dk32QvU{Ls@VweA=O!!keR@2cm;$`r&b`x=GtZc=0lG*3nf0ta*m^x&dKV ze(P33k$!EAFGfuq)26O(+0H+7499Y9)B4AP6R$>W&f0vO<1CGct*fEh=Y#9Ga;AFX zd^Cz5$?8a9IEB5TZ!?k?I-9|rkl!pBc)`@;SzT7Ts4(;~96ib2<3QV52K=j@$){NbIqsQQ%DOJV(XiW&-RB z6;km2RodDM!t0O@x@!Z|x2o2nB%908xJK25Bjl8hOQ~2&@I)iHcnyZ6+p)qqa zlchf9W6y&0hGXL`JZjY=w1_BMW(0=DoL(b;URkGUH!=*?r9<%Y<G<5@GB-RSlZ_%|tCSo7^j|`fzwWOAAvQp35Afjf+6tu@AwXMh@#$@!s(28(*cYu)Xbo)N zsOMkcbr+lL$+0TZrBOu05^|~#?Y#i=)|B2nbm6q(>fRab1%D4)T0JTa_&2ePoh}%6Z;!g zV*JPJ+*^d-I|XDV-7=!0Wc^)v)Y1iO8QO@!1ApU@SS=z*WFg= zr{2A)Qefm8{(VkZJZWPwkI$Xv2|vxalAC|aaJ%d`B~dFa6xkCIm_eZL<{B4X;ef?3 zEWYb16&C70P0NaQ-Z4+g3>8M;IZSr}4?K4?KWR(3#g-O=01ac7#iTM8yLifOdk!r` zEjEvxdyQv*9-3krDPR@iXSnY+ku5kH5$UNNDH?~s8{RW8SWHJ}$Pd*gM+Eg8d!TNhP0q3POea)L|& zOa!Gf;h|CVyM%WvO2Wefk1o7vt`XAp^40tFR2KA_pI z2I+>{3n*Z!;|z$*hYdcCwATRc9nU+uKEY>R_R~K}gg=KOSW@b3uA=>#M0bu9D+Nym zoN$dvw{1I8{a3qliQT)fw0^<@M_jj*nT~DiXq&{a3w6IXx8w3V3yr#=8p zj_QJ|;S7xoNKBgTGtKLUDyEU2KIGPz9>47v>yfQnU*g)+&Qkg=eNOpK6F+hIw)&5Ehw#<9v|d=&9`(vCH(uKSIG+3kh!TBk=Gp1| zGr?@v6llzrz>_DXPXHS{hg3SE3bJqLwAk;Bk|o}~ z-zn3+vsT-3d2Yv`rGKGs60G@d6*b+*jFkXQ>r(13Re`A+9yzVV<5& zHM0E0dajyjyPi1M=HihMS9zjPdis%lnxlqICQ{(V>a6Y;2MXXDXP*S(0D?gm^ZAI) zray#h38?v;#+F5s9|bC-4PHGsP}GSXR4%Lw;IMfxEkN6v2XIB(r-%bIfREbuLO0On z=|2fX{G0VZ3FPE&0vYNeWfT**F8ENgWwTevIF-%r=XQ+V7#jQ>~c+-pt{bgI*{a;!gE1O-87nv0VT{LBD=Q5oA8%0-%(5qo(^R;c0+!=CmW0X$zT^u^f{iI(JY&!H9 z7_zpranf%#;x`8+!|InsC{-XqsE}+{SK?R{MYIDg^Q)dE*7S}|Mr*|v(wi*mUP<9E zke%J6;{$%dU7rqRy>HJBssZ!4MAc}b0)yJkO4U(6JHSiSrkS&( z01;EWO)p`o2ti33ZMcT{tyUA9X1(Nd6{^~%Id_dvjh|S$$4lO()4=jm=upb&X75j# z$TvJ#F&SfVEdI(X6EeyQaJFOV_!nj^`2T}ho55k$3I8X|`cd|WJcovsKDi&RN+nnK z31u6w_Kuc4Kg>KFzPUv456qh5UzoKyu`8B5mg3nMm+@}=S~D9AmyQS=4`|PA1UxJk zkd6J3?bJchs`F!?_B#dw{30iUiPb~qLlD%Md5*)KwQCP7wwOWtBVU}%yYBl6j!A*K zX4Kg+;Fti{a&f2Q<8#k~iBLGjdPF(7)=Jb$J}W0l)g`|z+I4Yl81u-Zf9T;`|L!!8cY5*VMAp;ChiZaAtpXKxy z-dN0XRyA17p|JQhJT))JP+n9E+mzR)*q3nk%GNltf5RmN+adURL@affNI|ksosJD~ z@zo=n{w}5e1{BxH5DDsxLn>r0y`lf@hGh&C=WGUB`7yIwDPQ^HtWl-gQd zn{6IB@Yi#lzMB8VwPPtkdMS>mFgPl^yk*=aKFZ&mT5x%`gsyI%Nlg~yW{iiuHU8M> zD7L`=Ppa4;N}+JQFvyLtH1bOeqZai1J;QMx>3m2mrF$_#b|ZNYerJsGAw@)jBXUco z1MN-_i(M1`f(~pzSz{)Nt1Q7oO10D19>Ve_T5r z|HHK-iDuK{l-)V1!WD=!m=Dn{S%{9A+FmI-87ckjd^XRKz9NU|LWj#cZa8Gwb#IS% z^g=(u<$;8Tw^ySoMtM)7QRs->UvrWqLh+xn#3GZs|FplQ-_dn5a)8tm&Z#ao0aZY< zm!bpL58Quc>v&fYQ+lKCn%<2ygZ3Qhg4zg%|AY%t3NiE8$7i-aZdbjJ~qd zF%-WE5T3=kBqIe2zO|A|y4;;CYGN?)X39!VrLLjk)8vcqJcg$WUdbJcKYdBx-@ZiT z;orW5EyvFVE2?B0``TeRZHTa5yf4ok>Q-5C2l!6Q7hcD?)#ZS;D^KYcfiF%ZK};kq 
zz(q~ZW#^CB&I6@OjKh)DrUJ~pB?y-fLXa_<+$Sm+1lj!)FP??f%+We;sK$X5qSjm) zg-*IGE$c!ZZR_p`1^M1ma>jqjk>Td#aq%~wvnBYlBa(3xrG~oNnba_J)L)wPDl&Vf zW|$TTmw>V@z?-Z%9jTsAQl8I+w}f`Oj@4SFxcty(DNW1Md^dY}e#YcXMm}>c6SruMNi5wY!~&6zi3c+gKSeL)Hls30<5mc- zL&ca`C_I8eZ=g}gEIw_VaDxQ(b|KLK-{J^5bIDM@#`xPy?dEYjHIrDIy^2P%;>CtDk zIq0#a@wii-g9=MtGD2MF%6t5R(ekhN^MzyJI+k_Z@U5iu z;h&xanHU!3_&<_|Yo#(}L#OMkP+(}P$uIZ&-Phea<-i)CdPy@d#6+4vK3y`3298MX z7Or+BYYE$BK+|N_-&7cNJUVb0rr5NCC`0m$L7ccl@W@s7Z zyO(LBe@r`vbJN#lKV@d|nx*^kx1(g&7x{NP0*V-z+e{I0LicVq&CL`94Z{)DR5LtM zYFrVBZU?Z(4^L-abv|)gu-P-Zi5+!fuXH1A-RCf=s6f2Wc##B z0d*GTC#uXEJH~7+AR4i$opDaAeFE0g&kwEuj^9E%0UK)(>D9>FN_)zCvbo<}ds>o5 zHx__Do8R2sJ`R(AUN0Va8`8N<6#2_Wu6IiXrRbS5@YA-L|3~3*s$|sgw~u5jB)7!x ztABDDLc$nbmPj*n>Ei>?-`Hl9bc2y(3mjVb;L^{HZ~Gw?CcD%NNy9eTHcyeNT<9J@ z7#_?N)Yw~TqfiT4-JeE}rFMI`1=-;FV{DAI_Y9-OzMz6zD2&rS_E(Q-M3kJYpT|xc zm&v4pp1!8qY6rLCc2@Qu@#mi^M0&#jIghotbqHnxpuPZJ=v+CLK5W&(u~Jn?XE~I* zNG|j$lXoHCFTNhPv9!5B3+(z$^j(=)l!SJ6toa;Ii#54q2*y{@Jf%P%s)pagi;xd` zL7bH>Imsb4EtfsS7^?!dh>h4XZKxmT`)QD8e|`CX&mSE4g!WNV)My{EBY17`I*%un zXYdCHMJL9qVA`Fb_1d8rWOY~q6B4ZuQ$y3GKM5pWm`hSPCS8(($SJ|`M}Twtm%o37 zmgvGNi`xog3~}x52gJ-cj8mIC%e=x(cS_z|_Td=5`9K1LYSc%=K zRGfy5BM|y;XqO>6{`*+d*Tg92Kbg{MjhlTn7{M&?G6eEX?T|=i%;Hxw#Mtm(>F4$E zVfsH^2wbs4x01!q=N$20T}bapYg^6JC_{8k=~lbTPJsBsi3aR#G0JMVoteN$S__8| zWrloHi@j}c85*AdkwN1%`{@roLFD(&7&=FR*FE+P$RlJ)dWUyam}}?i-Y&4qTfbF= z?8wzO=P$69spJ@?nKy`D6=j?7iNY^0&P(FL@+sCfyRNq0ggD?N1RJ=8V6t|m(bM`} zv;SS=%F@k*uW_|9E?W-TCQ)lC$KXr!{Pz62$fYpj$$6`d1J582NH{7io{45q0$7*hxCh(>gE(t5{LOAwxK%ck266cB!%ZmP2{edcEh3PA@o?4f| zSmhI5{WZMMuN7!{J_lemJF+`e+;o3boXrZQz1~OpdAgjrpZJr&5#sg1_I+5tu~T+P z3Fl$7i4Qr8gpYbTa{^##0pMa32r1&i4bsP?4A|&h`mr#~361M_rpQE~jCoRBwgy_e z{0y+}UrU6t+`x7nB|G5}1pC?!u#!c!PcMHyF^@UiF&yUNofeDzYRPRmTV1!bDkW12 zBcZ>RCxGxaK7%jR>2jw8pSXR5W_%r)1>q2mb0UMt!@p;1`O-;H$1sBb%@s9N>>^k8MKsz)pU8}lo!4CpAb37zg2 z*K~sf!V&UzW-7-$JcIoHPX?)rpEln?__z%EbjG`tNf_0(YfMB_am74oO`QGVPql%N z9#$|0z{lE}79X#Wq*sb03X}5K7#NjuWQsVwf!*OKSx~Srp7NRiwi-PtQu@fN{xN`4eS^HwOSBB za-ef@^mGg-V-x4J80G$=uDUC-PT`YHl$!rIzsvE(vW|O5*u#K)DO)WLYE+jlY`pdf8BUF?&>VMw!r6_Y~tg_9}!BTrjbAg9ahd z6)m9xUDb+vj_5m8;14lA`_Qud+p9Hb>TbzIla@8X=IWbFHCkPkuRP@-6)KchNaUJm zV}}dh-ka8Z%sK9PK6_3MF#PT{Ow3AA_@&b_ZslQQB}h`I@N8j!%8Gwbe-Gd{K54~) z2oe|8LrW15z^~h}Sws}vpNnaR(5_|a4Qh~ieCkw!SQwTKaQi0j$VUJ2rsI*}dg#%! 
z2+WN=rOc4q?SnOx~PXl6YowM)_;2zu1;Z)re@pq>S|AIqzX^kHC{A6{7`7doS z6`4OT7kx<7vz$SLm)nR9F)L!bF?8sU5fp#+pS7;C{}zJo##4deDW$qUPed?k%_$ox zh#0o}iA}r)H)w|$iLzVC~nA7_E+L@=BV9ZMk}8@X&DnFZxt1Oj5$1sxizIAvdP z7XOEI6Kw$1MLD**83%@DLw|WEcojg*AZHUs9fgTxo;bHSYPva8Y1nd}@3$Fa3@xea zy6`kYYi#tT(IwOW4Ef03L8T+(>Jf9NuU-pyBvzanwHh@GHSWxGnZj7aA1>`z zgT^C%5Vq@Zl~ti=d${pPi!97FmePqOM1OVL_|C!BcGgdkb?vO0ts*RX+lbI;0sswC zeRwv;2_z~^m?W1F^#QaXW8t`Csba>E>NSP88E@|!fS+mxqE;zO_`clQNAWyV8Mg!s zn6_}1e0YuI&I1@Pe8z2<)n>A>d_*WF=qe*5ov;OK#f@cu6IV6d*LCr2?Y0Y6M6@!w zBloCJyXeeFCKGqTJI0J5Ptb1D*S?Weg&6CnZC!q>(WiT~`Uw^f`xKCGJdRf`(yPmP z_o;Uy)1|LgJtr&0VEc!QXDynA3D4$EybMONR7rSWmZ4_$0<+{)>$;NmM{)5F-GGXx zkEGciR8Py8Au230Bms{Gamp8<9Nos-4y1=3|1cJR#;785)2i9vxhc9PJl~2EQMumw z{tvtMj*H+LRKr8ZSpJ4IuH}w|?u1RpK~}VFWnlvaV7a*O>58-cn#*izU&N7)aU7?Lx(0r^noy?fc@XS6oKGjHdb$$3HK5ssW> zEvU{zt+F0w*Bp0FVo$cmvjFT8dN4}je#m&9u;Abm7PLg=aXz}>U{lC{ zE5No+=l5wK$7k%%e*0zH~m1}-{{z0+yse+*9#Fw>@i@Ci1SN-VKFD!(Ox<@Nf zVMSN!d@i(WLTssulq9%%_>D-oj-2G1lf>%JsT4I04s|l8!nn!)%(vCrzNXdc=J%^H zt15{OI~--!8%aM>YUW~VLL8c(EYUG~p-gcQ*kSUG{gjS4(TMSjy{ok`|3uW*T}V9U*f$CX@s{Wmkoc)c}u6 zYE)YvpkYxM49$mPPuiY~g1b>s6I!uEliv1^JTwI^49%svLu1?d2CQI1d0oP)wDqDbVZZb9irL`z{ZKG-9o~gFxw=>WSZLJRWv@#5gx&xrHZ`} zunu3qzl87{-bkX(aX=KMnKExFg1BDEl5$NS2P}|YY*1v!);}oem+iT}o#T6w`v!~2DlcOw-yv1NkT~DnSbTdZ z$94OF)Vbw%;ipBzcCnw`;<1-)7aY6t1e&UkFAH|Z6{xfAHvxyY-z^TiEc76bho zb?H$EL_RDj4J|b3GQm6Q+=C7PsTdUHmrvL{E(Pw{iLOm7N3?Yv$?P-3=~!F)-*-Fj zo|is7Q0XLrC_94}nMdqw(>CI+^sKKv@%MCKsTxoBRWLtYBS%+6J}Q~vl;Pu=uW2=a z4sY7TfiJT7NNrftNMi{{BP1(uU4zf>fE_;pSd>|S(N8>8uGD}D0VCI9*NTVPyQ9P9 zCTwMBq9gr=N*hyv(-b8q{$ggBXnM()T-m!c`gAhC9R_t*x{6eXX8tm0{YRX`2yR=mVU^cfBgTlVICFAX(o&fArm4ZkMMS z)dX*>qauw(vkH~M)to(+dew1z^eRSj4ExZsfgrfwdU#>u{_e_g)p_SfmG3$1eypn{ z$Oy)2EL$68Q|FbhG-e$1D?9YPW5)ezDE&w~_iE-QRrclyF1+>~fMh|9=}z81ULSQc zdgTU8q^!0ONcIiq_lK49*oQDL?8s1`X&-eA;6q*J2F3yG^Q_#v0YNkMuSL$~M~!1=mAluzv!(y?9bCE|InZj|>MEv`c&*p$_1njC!gBl8kJrqSiW7a^Iz^9o6-xFf{ z2Zeih^V^a1e#zcw4ez!P9O`z>#2PNJVX&&KaguTq17b$VX1!rZA0Dj*pJ34cbC~jP zpsAQYXqijk~W+F+~gL1jh)+pt?k~wE=-%ABuYZJ%-%+jTL$7eZ}MfU|GUSyV_ zd4t{CBI@fWF7a9N{G>C%gq1T=gHJ?)EKfxfUB0*I36egy9W2JA!b$@d^|Lo~C$QW1 zUDv>;lhe-0S)rwTKSilY0`z*l2|IA3a>e0lsy-s0XF*Kd@Nd%&Hz=|7fqmV*VH`>2 zhVdJ9qhUOGW4ZYqKF@NYvwHBm$yMw-K)q^uiB)QOeIA7Xs5 zWFScE0B{fw*9-h270uV>zc$q#1xJdwhkCa1O({pasgK)IiCHr07bwh0ZU(S__)jN* z5=PKsvyo~%y${-}?Bx^NTX{&l&V~$Dntu~Zo{n~-zuh%Yxbw^dZJZe-NR~q7zf{dX zTWadRsDh@JLeB!cPTE4XdQ@K*@-k}8fR|MKq06Np!apf((z%EI;8gNF*njcC!0H=lcsT1_1|3x}gITqn;H< z&HDgAtqwbiN}~w$R;hgIsK>LQ}sAG0HpCcz&CMO2gN}(kB zGHH?Qjl){a7n88xRy@)c+fb3 zyW7gn%qyVO<8~8lZ_X?u09!e5D_Kq3`~=kp%!plMXil=%xK8m3X4Cv;b)(m5Lu|l3 zC-MQJV^a{lUrbVo`5Z&2b1Bd$TN&2do zb|-hrC~ZF0W8(BNCNc1M0H~>*%QG{UYMG)32VSYHdtP9@GO&ngcq;JF((`pNlqF8_ z3oh5bc~-fcUqP_30Da&p6RNBpFcGuHMc#tkZCF0p4y^f{-}Rk}UmtCtPjvF18So?~ z7)v}J2HdTA-)=-);4=Yf-4QQ>u#S;4S;Ib*3%Y>4=}L^RTnj8i7LP+c8*GOTuhu-^ zPiox{>%1Q8;qChC@VVSIcSM;zP!)cLj?!Jg7^#6u{M^fqnx z}WOk>~@eyHX= zXngNU1Did~|4IGC$tX64)L|*004{7-zmhni|2fYrIY1{hP$@rpZSNcND73P=MlHq- zm`M-ES#&8JB>62PIXNqSsOM6`|Kqt3Hp8KK9i<db{)?WAAH?Drv>gFnATZNrEO{7ZQoH4-p`Q&}RXbM`+7d@4Uc0ZMh;VGs8 zQLz?n4xG^fw$ldLd=fh2gmi)C71L{wSwv(pI!RB&@YBp_nY^1Ck3l!%3~FdY1eIHl zaanuX#rKth&(o~ix)(lp2}FX{nG&jqpNrU^_nRSa0U3wFUP$&f2A~OZ5Cbx>>iK1I zj$+(*);_U#zhM>p^%Kw)0^i*{)HyMBN{}cmyy|!5GE6WwoBF<^Q92oL2xljCotW?~P80GbElnKVXiw>~sJciK9YIv#!rLOHu{T-Wz z@%C|2DJ28wy-R}Y@&fXU>hN8cX1qt|wtgROr%0i46peh9lB|ykk=f>)l1UXssU9o4u#u&j*EN<;jOWt&0E;g7j`w*v|M(r4y!#$ z$oJnkLw}*)p63g>@;eO4ZCAs$pMzE6U&Dr>Ca|M6$gePWy}*yC~a 
zcNJ_;;^aE_716%B!MBBpw{dKRS4;o_ia8<6=;Vu2g6uz_Ks_j{n(`{32@wm1?X{!Tj6mFphxCu3&Slg||Z3x?4tZkhL1b#QOa{R<}BAqJ$N@dR~ zTijDIqJ#ydu{Y%Up99&2&E}KAQOP*J#SeY$sD08!HW&wz-$#ANV!Wg(-2x*_kU|WE zpIj1_d9&X_vk{QD#7!*hT;*-X(}*>X-|(TD9E+!UQUI(iastkkEaF@hiaM(=`S)I0t@nmhw&+tWd`LN3 za^hJtQ*P+Cu}`%RY$`zcszhMx^`k+y2oA9tKrF-*ENMJO`l`Fb9#%KHT^{4VwXEW= z1`FLozZZ&FdsA~iPCP+PJ#+i%VrZ92d@8Tq&FeHbe-#e!6uTvG+?#B>b&JijO)jJS z2n$;9J9KG!ax=wht*nvpwoC2%y=x7cT)J{6EdO?x1U|jYM+cW`OHHQ-ul+iFfcNA3 zH0oWcYj3k$nq>d@=Rjb+G%e^o;@|TZofG4=k=GPDo}!^df$T;f1wU zM}X1WvhAEZQrkdv`y=DwF0DV+OSj1M-rJS}xol6A92)!a`Qh4~n8UZZ70$Kw^2yi* z#~{_3WUjk#&6GU8$y3@C*<%{{rHrZeIH4f>+%cq#FLox3ROS=3)ZN`XowwvVD5R!; zWIQ69S#6f{cR|5dtf(pjzaKO@3Oo%Qk3D;Il$u;`G@1j)6q2kLKB9L}sX8|H+xc!u z&&ZILu|>>|MPSfW9C^2IuD*a>H%M{0qtE)%6YPcFH+1W%>+@)KY9?l=nod0v6GD4J zF5u?o7kozM|LXf2+?hhY%49CK!(_%98L(&|^oB1}IFI$1s-EAMOve>P>18W7eRRMq z!+T#Wa-^j|8LhWj>s;YZkf<5_eE(IN<^W@fwclqBpopC@uj4^vPcMAL>!^AydqxUz z9|zq=v0?dMrySnB(8Vtz-yO^G%ecoxA|ib4$J~jb_SY|mh$VI&x0;jV#GN(y4IoII3#=ULgQQql+HdA>N z#5{}GLUk&u z`~siVysHd*&Re53k_k$%PM-Z~b-rKm(E)NeII(YK`6PW>PCXTI3|Er8xwjJYX}hDE zoWN}pGM4q8=(w^7CKK(^ZvBDHvi8F2sJ9QnCpk)P`^?}=&9TayKay}w`kC6D-c=ZMY&W)>nk&^Kx+TnQ59(QQ<9bOrBI4Om(|LDvrr>-0VfE@0%CBp>ReHWh2xD4kEVdhKiG_*!o5i;_tw`}h40 z*NTciMf#?X82D~7YRjqlOMcST*XWs|7q1icN4jag2mBUU|8a+2kJ}CFrUMfdD_LVLz3zUHfIY*lXTGQkf zu#6|xjd|5_I|@>=Ce!Hzzo(34amocjGnV3M^hSBG50+jq z2iK^gV@xPvQ{OlEyj3r}L@cqp-6Y=_TijKPv)9TBv*3r^L}p z;>-iqO%%C3{yWGeBsE`f9KgBfx90w=%1lb_cxx(hP_e-DdQ5TA${v0;!mJ3lGgG0} z_|!+{lRNu8GsK95)x&CD8p=W7e(i{_=)_m$a7o#$f76_Y81zp+dE9p9Db_*8dp&?> z4`7U6?-L&GZyz8xi#m@KRiRqUlsm8Xx=!Z=A{_{RGVK!XN{HdFWLkKVWV!ETJ1n`+ zI)4cl*bcwrIcsj*r^(1FxVVu3B^vahc{_C!W`5>Wum6auf!l)wdj346@GH7+XPC8$ z%6I7IHfewMfXkCk=d%vcX-dZzOW#-Q$IV8E-d*8}l~nbf9s9kF<`54?&tvVLa%y z_my~9Caw{EHI>xXJ>#99Tl8aHN=qfjC*f!ooMK}jy%ZYS8Hu9ai&lAr(ovC{V)vF7 zcq>w|tl;Vp|FBIf^+D#M(vv{PAoG$diT~C4+>cEA{lpjO9-RNSN^{U!yVsjz9)NAM7(Xr}@ zDvp-MuHHjaF}Hf86m%$HHyBWM0D5v6*eZXQj_8vLBHW|Vov+79RFzcZ`M^xp-B%TB z)?QF~p33!5R&=p=m5>k~+JDmcTN5#KXYsIR14izaKFWs^6|p+zNzd#`fE%{I{IqW5 z_GPnT^Nv+fIPa*oxvf8#AyC^+mP>czQwZfjP1iw%!6Ih}?)M+As9T)isj8^jZ5APK}R=8^P$A>m@GS7CM3 z^Rv|JhkTm;jq#^Srhvk?S;`%`qk^tD8YTzXB(-98gq9c;uKACcq2(gGP?w5G_#@3V zV$QQRUFFwW6Y_erXT5Ya+OC)W|^~k zEF`PIdx^X0egojK+fN_*yD%zpZNp~S0?pL24hDJq=){lcNnP+if`@ggHy4jmVSbQp zmTE*ZM8?f+LTW}M;q@ZiSca~%aCTk3uNq68o|PZjnXopJ)9FwhB`rZ=<9U;psMXGA z|Eydz#PE*)h~_k%^>S;V8Li3Yw#Wy5tMf6VW%V(z7c>RDutEM3l1DTdr(VGHquZ9+ zvr-awiOK4Aff*dXNljHGWgum+m=W-*eDw8PK(X>q1Hu|RZQ5`uTpd8cX3Bk!tKHCr z#YTkh(dx;MNP@Kw~tdTGZ3eyuwtYLvUj^w zE9G!~Si1z-K000cS#Z+p>XZr9S^Ej|yiJc~Tke$;uEIODO_*+9mEqeWiK zu76kV=3biqSnTY5l<&~|(B3Jrdj=%?R)nKj0CXubQYS2UmZq&ByPCv^JUA?e2J-O@ zlhtW7|G?^G8sHp&&f`At$IDGhrZ#r8eN6LyGA$BO)_1Vy`uP5q-TQcjTE^;L2>Lxg zZ8axrDm{N*X2CO_c7Ko4_Hf6PaMAM`mmA0~_Q!@@0Y|6VeGK|%?^ZUvfDg7Vu>|%i z($C4r)N8@nDGXy{!r%UTM}OwighEbbCa#5pt1M9D%ysPZe(E{M9z;$ftavEFM-*0eH`%k7tzXah_2fOpX5loC z{E`236$YhCtX<#mbTA`hXmu_kJObvVdCYfPBZkisz(HEz) z!mHh@C3OeZ*EZ=Qyf^Amtof{|L7WP$1_lTCvGkw-+rR40#O#Ge{8*sE{;MMWK=jg- zFBrMCOtR(TOAHa-CJT#f2nGXFL&K(&HI@tRkgVU{-xE$|&bdR%_YYbDD;ocEj_gC767l`?@P7tQ&KjraJl zl`zcPbNCJF?Hg#KVrJ2d&K2atNIgVixw;t*Tz_=C!q@~GFK=*vg&2CKH9&W$wX!>< z8D~W%q{ATIGbNeV_IZg;JECRj`U{4$tbh~3+qfC}_~H}PDMc;jYAdTq{X^3w2G23Usib5S-Q_aFkdyTIWzr)cgoMy$pF-4}~me!Lcgd zu?;lVve~%Sdy|u7l_e4J>zoAzKC>SgZ!FRf5~9-=o%iXkt~UG4RbuXi`B2+RY{e%o@XvUUil`q` z*EzF7s?8hM-7_J7&AIL9F`T_jTKJ?I!QC0MH~AQn)~d#y9<(}B8%KVO{_oE2XjNJ$>pA8I8IGmOgf|BJJM`7 za!;~JD@89wQk1llv&8n{0OSkM1g0L=UyJ23JMfTjB_s7$su=O*fvy=FCgBzo?wLNWo>!pMA zH(+SgK=SAH)*?>xaw=1khH-JForY2J>M`BaW%i7Gt(;RB!Kr)~Sz-C{crEpDYyUp- 
zcSgDyEi}tUEf4LUghx{nrHb(L<&{*zO&Be!VqC$3-lEyV(9lRKOIA~8*0=S%MnTFg z*pd*(e8bVSl`Z#RQJ}iwv9W6ZK7a-pTndL6j^V346^73F&{1Z7NPH{_Aw>Y_6hc5R z?!zCh6o*dsk;0BqFijmI4cGhB@Tg+xylsWp+h`AUsQ77jGaOYb)4^{v8>jm@Tzgp^ zO?|?EBSdW`mp`S0R9y<7iW8e0qOdQNUl8AvIT>TmW&t_A@>^7|zCX(+4C2}AY>}A= za);(YK0tfQt;e=?BI9EA2~;}d@6JQ#=Bb`W3L(c-mYMt$=nhJ3r`VQ&)y;#0sw!m6m{qP&!2gR#_bHb zrd%q8=tNF7mUv#&bJemxb-&W6bt%Tuq+wOMzbGvaQb7q&4B;bQ-G&&J2^5kV<5)i3 zd7r5UxLJ%xi_Tf+eM$%2Itw&Vxqs4Txqx@;Y#g$Gf91FHesTqY+;}p5z*&(QgW6rN zuxsCq9Jm`?^&-p8-2{?f;8Hy({lTQj8qO(a)YlKs(Anlzf_@pl}bmi2pY1~8d*cq&qkc6*ETDe6&M&&@5oqqX*XZ^ zwZfJJM%jXRhYQv)Cy@Bge5J9hN$maY%!O7fkIzkiCm8y$)A_i#y}z)8bdDrkeF?GQ zH$8U*M;>TLRUa!pw%smbdDliO8`l@Bvn%a2EE}&4z%k^yjd{)C-I<>YUWa`?JpZ2V zT-GN7GDNBMpWx)qvfx1QTweJ-%qFzlxOkIpQ^_^`!=o*;yDlfSn4Ft<+jMraYadNE z7DHNAmO3A9qoN=z58MUT-$N7`%nmb)2DIylC|-F3V!1Uazkz30<((?XigY{Kkf=~t z4r{7(=|-h#ab;;mLWtv+6GF4qU$kedox%I!(iN_@tp%%shoz<0-(nAMX9#-{uW)aV zUS9{VE;at8oZNmwmQl&`A-gjiXNSlq!~-68Xjin?@q)iw1`Y9|_=XL63NtdeMCiM- zLus*+jcA)^2fithR#^%teRgc}bEe$!Atpc%5gb|KXzO_lZ@PH^KU{_n(2Z9iVveE8 zxtDiccRkZb4=qS3ZNUSx?l*XBtE@qHtW~QV*N~~qMLLchRv)jMn@cbixMag zdt1TT=I~?KuwPM7K_2hcb0F(eCMPTV#xGnI4~4Q>ux#gfM+7tBugNA;E7ool*QUA9Dk7xKxIu1Pp*TFAC7&j5dJ9{;Ly>~ z3}lRTuFuop{ew>Yi?svaW~6t=udolcFFeRAm6R$GXRb1jX$SF{=?gCH%=C8#~n;!<1{@;V+457g)7AOg|ZEQhgn@ z9?!C6SdhFm&JPwtg?9uUm0l_u&Marssj@ z>p-d#+)ttg&Fl1viwvpIO1&CeO`Vdu_13+4#oHaIlDek}nR-04m+NfMYnhOpyx+5{Ajxd2!&x+OEc{Nev?7iUA zw>^3sq$+M14)2KLxpY^-=`~!2C4+$SImTXthJ@y`pe)3Lj2q(Qv?qusUCc)}+DEro z9qJ|*>iMp_6w3K%9%&A#I6{$V3-a_D4uO31f#h~{ryxi;xw;tv?@0jP$s$`U4Tdu4 zF9KAMfoYt`P1ulOr!CHe3t-~iQy@pN47H70d37K9b8R7t7&n))QlZl)jbeQ-H z982e*&_OS&WyheB2b))4%MEVMCP$A6)Erl94RDNB-ikVg!$WK~!2ZR%h%xvz)y(6K zn)l(uZT}yZbIoP7ZO`|#I*IJI3A==mHV=eAE!#%;ObKrC z`Ssp-V^Vd^ZNoMjFs8nzF3XJ{Llwuf+UFP7*DiD1l@R$!%|V7vd$w0kQ1HRTrjtXJ zmC%cuiXRgq!-=mQJAU2j9URtF3O`Aj?%AV^~4;Xn~uM5N^kJ zbOKx?SSW@E3w>n8GnSP; zl938}$YTDu*!HsLNAJ3u#;;JuA+gyecRVUp0b8`^~%G2 zZu@(=)9X(XDYBpBOJf{Xc5a-Y>F+j|-zps9)_ul2oC4-b~WCPV{`={4bEiSzgkK?cTq|ouwpQ*IUoA$+UYiF?sPKxJ9*1|7JXb#qo%Sa z!|~G(!;xW6_AccT1tQhh8VHXa*Gn4J{Q>jry(_;D+-ib*Mnu=G^;w7MiQ08ptA%UmIauykr}-*;jMx0 ztVsv-;nnPPn3=hLsqf$%(E3<%{fT)8I~)2(LXCUV6P(a*YH|OFgeuYZ(gGYTSqC zm%kfQzJ^XdB1IIBxhFEf<*GTG~e3fD#z$bQnRU#09a-dB0e_G2~V?ikBL znrc51(1ezLyNr1xbH9TeiDJTJklSV&<$T@mG1RtB8Xmdmz@vy|NxI8L0|k|%lEa^n zk7mY}>v4HZ%%`zEK%F)>G9e>A7GO)rbW;7G#9BRvBk{uf#1lrGxxDY|{LaJ&G9l(} zxZD|zU46io{+|l97^FMb$vf}snn+DPtSu#K(VAynoNlmCRO$7QVb6>841P%628N^! 
zZN4Vh+x+!l_MotTsCN(0`J`gvavuY^Z>Ht-Z0wXcE`&%u4tI*AJARPZ4aK?LWeYfT zJdl2{SzUyA&zuK;AFQw@@fPWP=asUqHABt zknfT{_vqE_x~%DO<$3@I`fT?oELHD>zEay3yq5yM4I|ufIP&cFcPCKvGV9g3WPQt~ z4$!J7qhU23nN#Yq4r`#LPs%Y|SN4b-b}@OS?fl7R;dO3j4?yl=h{*3eL88HlxOaEJ z?`Ndq??XWLb)jyBNZQS6>(rX_t2WgFfPW49_(2RLETWHq)IiZne%*-vS2tZ%jBWSR z;Pnsg`xR}w;QzV6btVNm3=J86lsaZoiGO&lS&2sgatOi*9$vfY{Bl8f0iT& z;$aFd6q{=lba0l%=$mDWuy`>Wwja8)+7%4)@2a|~BCIRn8O~Ed*~pIqK6dmJ;mSEP z*j)knMfX%*%|DXB{lmAQX35+YPc1zS6pQP71AS z;>Gji+N=eX%Q)e*RmpT*F8RJ=3M=9Q{t)GTBexn7ZB{Mx?yXh83}yx2PJpmlw?;}n za3yG#?l7M4vf1%<``_jH)7o$YX>YLLtPwO0CN2r5D-WTmn;Z#$TslHrW{aeajfCtv zTi(j#rm0(RyST|=XO;UHElI%}Yk;gQP6rMw44RNG8$qjp#b9OjCY^nY||^ z709pqF4B$qq?wi72At)Qq4}%2$1Y*@i+)@~#%#vE$HIBfY_4r)U|nS>PvB+0totX% z$mY$*o8A{jZVvM)HSoHm+EC8X=0$Uf<2#vSG{Ob9TdGJY=enLqr4xaSNDlK0v5J9S z7hk%Mb>$&+kx6n2G2`MZQY)vi`C3FwXc*t##GK7whstqn3KbH#hxq<}`8w;R_rOGg z+dltsz>mf2pOo(%LQw*Z3!lwiiT_|7q7qwbbQ2|+IF2%cRxoVhQ8F&tmekG$KF^(l z?OIZ3eCc9lJsGn#G{qfNnQgt|VDEp|)K3F=8%rMfjg5)ux2jZ)bzqVD(A(oF^lbPl zuJqYZ%Q=?qh;PjbKRmsBo4{))7P(_q8=`;+@)oPEJ7ng?u8Zv-OGoDx27}D0931DTA(c;COm9{ew@0(0&9Lby^KX`1) zmTu)vCFJ(qczsm|pdt^urMP&$SZ21r9@-R3OTWOg3+aVbq7`2FX zWi2{wGJR}Ld!X`Y>AkzPl^czYBvF`RGIpEhT$hj!N0|W7dH(@-)=%d8@d$cX%aiW4 zzv^%@8%}jEjOYD5A9Ryl3-=Ovyo=+UF+y3i!QCC4j)}ACKV_n2OM~!n>NGM_6Y>8v zbFqP?K_Z(f7-O``N}0?7v}v#aqYl=I*=i`Pizo~IE02D^gneGN(|yvMnXkDZtFG{{ zzw9T3g@9kMrm^W0G>0q8yYVU?%kjNPbE&V*5i&^ z?G@etLa|eR!Sw4l5dA?E%Dr6Iw){i~t@BlrU^0Q)-S&5WcKO>SwZb-&3juc*mv?Q} zcSc{X8sS&oN7A%u9`E2fxsVS4It`C^e&elzM^oB&DMr7)&J>y~%~f~~TVE;9*!J}| zhWYEEJ zFX_tM;Smz*Z*m<;7y0%W)dVZYT(Z!6tlLdo&rBVmnV#I5-F$_QZl!XcbHAlJeSs5K zqhRV?0GW(DSa=V2jau^0j$y_1_Z93fZV)$vaUPItd`k{8bga0jNIDOi?1 z%ow(L*$kMGVGNR!d^8W$WiJDk@`{qRn?5hAoQ~7gPA&B<&Cq|hwmHuIcXkK^&)GAV z4^<49H`$R?l7XY=Tg@Cn0m03@Ww@lv0qRvq zsw1rRxu@Or$ra+yRm)Z(*sy?p=gMQJ*zNXx3Acu#cVfN}{IYQZ6mH|A)wKW!VVazE zFK)Yga;fm%aaNm*sqOub4}5rF)1-R!ac5{!P{&%OF*b9TVHCBRp(yO93|}&z*(NW* zqn~Jk1Ue#&oEuQAAMh4mA{&4=1)7|-*ESsAOh+sb9`OUGCbr&V-7-$iws4$Pc7k5r z(A`D0P=ndt%n>y*0y^&PB(-LDLu1c&l}p_Do2p_(he0$Aexdd1jZFGQxbD{$AJ1PFA_|b%_RY4lF^x@^Z%Hf=sf_vIR+Kv zwfyu9fymhQeisO9mXT*|RYQo4QQW=CgqyK(Tr z#_D6iIl0v8hWbpvB~U84^X0+8F2pz{LjA|D`kk;G=c+W5ltPvPR+Y66QTTm7Tsx}C zxcIXN7YDwPg&-MDRo*p4<79B%I^~F_5q|~lumkbZbIV@8l$tJTRQ|_~_`rU#VL+=J z2Z^b6ed%7=EEfiST#LNarNy#bd91%2I(b)-Gn)s+k|uYGowNa9n87vSWEX9q-DW$L ze2@;=@Q*@gdDAm*P4b*TnSuFCb+zFr?Vhe z_1j>$e8(G#A4|!+ecldJ61-c=^e!v>?{@z~`9!2`3S1Q+EuXg@D#d3B4H{V<`7K;( z7b`-)!see2ZkGCYhz-3QQv(Lt*5eFT;!q4P*YaUi2dCqzhJD^^umrzVtld@IzjlYL zqQENedL$_KFRYjP{>Y$~-L|{aGyb0XR?X4h!F=;;uGiF|xOv9bzhuY$*4y~LWQh(j zeB^apXxZ{GM%B9pv9g0_hTnbPeTk3|ft&a^7t2><5z3JYvo>nIk62oCD|hlBEwEt z?ZBl9&w_h5yJ}<)((4Vra62%v2a(5m|KvTj?8ZTcD`B$dy|CRk4SKo{)SDl0Z z<$L@XMqdSC#|6dCxenr=`ZBB(`jLpW>#KnNi=?VZXbA z;TG=3EUCCvJqvt6R$gop1Q*$y(plTWo|-n)Hu$%P^J>5dbD*8%W~|@Aj?IH}F};mk zTNnmylexv5L%_)VZ>VtlSz@F&Cjil7wp95WL7M*F;>mG5VQ90~yu*H)&f3-`clymG z3;JWI{_52>ri@G(VtF+iXL*VGja5}#Rtf%D2|>1?Y9wvB2S5}(;K&~mK~oQLFz5+J zkY;(rPEqEd zV(jvQd_@1S#{WB5u`Ro4V?0yoK6#1$%-XDTLa9Wg>8b&^uINeJ?MNTalk?$cYM<0YAUKJGuqcVt7`r&g+B(k zYSx~h(G+iA{9ekF{L@=PmKH%*#W&za%v{!0jMyBhob|BHN_GjsX$B~OsZ@p+@cc~i zv-Sl~F2)#}LgM+n?9rk)9|wvL{7GfF1;+p@wxFIv3q<0a-g3C(myHQND@kHcnU$H) z%Eufx9zLHP()8=Lm7Z8cv`_0~YdKi$`h_2zukW%pgAylLgTZ)W^68e|Ci+Bsd7bl8 zyK&=j>RQ?mqW%cSfA}?ryN12xzRk zMLt*$guX+8962h&Cg)gqZLN@;kDhylWI~ag{uAUef1iwFU8}+w$`7Sg-Sa-`Ymke2 zM{L6c>IFeFYln@;k0r+y6lnpnV4%s)LKL8xk5*+G*J!`J(~lMS+)V@BVTbVOFpOrE zzv>H(X{)19`PGU_`t>1Hu@$@v4n8jTAm(^xdS)z@$s|g~#Yi9COKVuG#hsGaQ}&`y zbw4<&=6FOjNfPi7{17tNtJ8ock{U|bD6cUsmd^oYdO5Irp1O{eQlTufr{q(+OL-?J 
zI>7h+x$eM(qS0ql4yH$dVaJI8U(zxKs%-?ROkvVJr1edGU+;`E(uoLo8pXz_WYe(C zwRDf>Tvwc~r!@B2UVq)XrmPZUo7qd~QMzKUHy?a2qu(X3rC+74t$8pPLXv;*o|rpm zvmN>JJXa0%_nD4>m2|98Qb7piA2om_#q1W-7;7-3o#4El zTi*Bft7>QIS6NN|jA77i-eJ=lV*z7pSxZ}vYg=@OF_Kr6bB$j%iP7A*^M-vAce{hs z%~zq5R8OZ{+E&z{#+Gp{W|yc|1_|`-614qOwNS>NFuBO?mHtPSmSH>fh+l)eOwXy` zewKL=NFiFV1Qs+Reg3XQbHvy2yUGi*eq4L$weyO%zJbf0;Q;r5;#>1XGrVC5>Iuwj}x|g>0UdI(NJ0G<_N_b`Emm z8lV@CiywFXZ1%OUel1A0h58b;yuRX~i9iIvx;g8Mt5aV=BGu4Tqhe`~TB%Hn>|)Q> zsRx5xm2&rK?#P0C~lm>!?;r{9r&S`hRmjid3wLgC{}@V+46?3g9T zm%d)Z+O}}1w6xcECNY)uEn>DP5x8tQj-qeU-i}VB=Y2!+sS2v}o_bFC{z)%0+5@}# z-!}HJg1>ARwKAS$WK_O#1ulOL(S-if$iMZvn%4HWK?kX)ghxw<{{NTz;Y|IneBILo zYS{sSeZ4KUt@$KqCJw);D2)Gc&%)TKYYcMT#yBSM0`=oS?xugt$*@O-NVg1n6|rnz zkp$OMi)bk#5Z%H@n44mZ176(wJD2n<^|F0|Tm3eYn6)eF^NxSh*c}iei!;WwL3PL9 zqDc|;_nr;pWtDqEN+b32HPZr;&)1>j*1m1pRm2`9-;?>QklK%Ce(L+!eQKM#p5k=l zE+M;0Pvu92x?8$OZwDcXicKuj>h;*uHpZbo?0)6{SU(CS&x|$wNy_w+?y-Fa;9N%} zZ@fNjabTO@rQ^j^e4>TACR0c5dEkgBWSdrt5n{i7#cBV==DvC9)PPye;+8R)-2T^U zrO|xJ)g)Bx{e9QVgoD;jUr}PNO0l-TDF{?dPO9tYns+Y^dl&Dp?wKB4!#=%A`}WKH z+?Xm-rr#egAMh>y8{qv@-_48{W5tBIF55FhRci?)Mfs;u&asIK zzaZNJ5Qkh>b$Yi{hd6urah{jhM^XZSz z9LTyIsVkxVd={a55fk;`XV=PAP6gsEa{t`ozs?WOG_(+59Iy21in99zj)wuCccM|_`AKv8FjYEFGDvrK#%%M8voaH!Y4o}v%>FI2qv);%uz}XQ|*r+V{ z6$T)C?5@w%FIG;N{Xc}{0Ny(BLHBXW5)E{IlS5zkD_oPNcJdfIcLVl+Uk_raaT52o zZd3>EzUA`9GCtpmB+RnzK;}yD z+ij7{RqE)KAj%y7)eB3{IHnC-GfgV8H#x1xq;_`p10!?fnm%?)w}fzj zP(shh0Mt=3|8LB$+zb<_zFvJKx_q_%7|2b zK2@djQhg=tuS$RUN`uzCmok~T?7Ft&RiP%cJ%8)3p1s9+AJr!|shl0jJ z#;R3tcU+*${V$(uYPHAqir!O`0IyB?e^j4fCe$PdEzzibp^YcXEN0(?^Rw_pyMqLb z9yd>EFe15Mx}k$6(OG-ky@a|T1(@}x@bsVdwjHXZ4ja0hD?_iE0 z%=t{T_Vyb}?^Bk?S>_l5dZP1t+1*bjju;6UrGHT$3bQEJ@}Qd>OM(*Aq?KZorSGVM zeN?=YZo%M4-|(r~t`xawxi}$V?vplucp~XkqQEJrkd!LV&K^i9M4hc*XVtqQ_mYJ) z@6$h~k7$e*4yA0eJL8c&uilRR^T;4-eSQ||Odj`@9-j-$+RX_HPF(6_gKz+1Q`bGv zfeyGsaQ@R_)5B%lqbY^%SyT$K+`sMZ=la5Xj{e3oH?gm4>t44w2!BUB@5s}wi=x#} z!JbyIO(l9arMkv4sA0U_S}h$+Q|l>-4k3|2**CTJq5V#+!{rq!^N5M14K+@GZ#_)@ zQBt%Dx@Njoun*_lJV~PdVYUc>zu%)n;3NArOg_nLQ*YDO()mu8WV+qTWb{b=!n}J1 z`5>syqs^mB=%3dbqRjooPAiFh2&&MY6boObS-tc&t>x_y3iYH#iau9hkmX;|$NGOo zANmCAA?)K<`ftviOV%T+*JH?DS}K+OFn2LU^FvndXn)ld-p4!GQfJfE^w2Wi31d-x z?20t38^;s@y76!d2Dg4A_?A_>AY}GGk-H-Ep@0L=*{j~~Mm8R>+SIxv<`fP3*eC}U z)y=L|=|ox>bdT#B(BI#EnJ?@GZ$i%|U|NCru|2*u=I1}4Py5;*3_SJk2QQPb(Qzk? 
zW3|tIsG9$1GRLF;gejI9pu~y8$ORw<4pF&fT7Wrx_TkxLCFt z!A%(R@#5zcO8u7er)4;kc>7(q`XkX1a6EXxHE0g`}&a?2ob8M^ja_Hz9@6)d)0&X<~20JV#X56hWo#$Rn|BhFD|{;0RBwURG5n)_acMCU!j3 zekHOdyS_Lu>9+D&i5LuQjE@U)HPRqih$lslY+Hvf7)r#j`cf%G3_a}o}4(d(b< z-n|;6!|HnG{-qiGD_?j$Q@d^U^WlGLcDAM@bR)s=N9M#Sge%Ha=E4mmE0a89ktXO}7tCJqW$SxC6j<|KgV+NFi?QpIlv6fayoE z%&~P2u}{XQJh8JcRB6X%254eayiW5cPoE9_fM7F9kKT%72|luXWhzPJ@>k*#UP;e) z&=d^o|9bC&!f-5#j!iCYgq%}gGo&&$k5nPb-Db_a-XLuRO~M*W%@hGc(k))XY{@JV zBo$k(8n~tUuOTG_rizJkp^tQ~?i-|-hVjXXpZ;Np9e$UrcGNwO%l1KZonvE(#w&&|n( ziGH`p>Yrd;mF_#3C` zI&Sqj|#%~E_k+Hdse`0fi#WxN$r^^{~(HPhF0}(Q6D{Obfl-djA0A8L@ zKmBo+wYT>}>d4ldsi_?%nk+^e?}^0q>-y!IW-P*%cd0}P*D%6K8kxV8=rb=4#JZ0dRT0^1{g%HpA&#!s+6 z|D40FaWzA_lW$<`Luk{o1xG`U74P*Q>Y=ds^1x45{`a*jOQ0cwFSJ^!rIx!uhcYH- zw@VH{?1c14>mL-i&wNw#i0}IJs>S%&u{_bdCvzz(nVkC9F)iI{n9qDBnSHKT>`I_3<7V@Dc&xlGLJFFnCBA4}be_1U;t12p>2t_oQ?nMUpDko3)BQVZ$RlSB zPiIV~Tt>ZXdI>0JW#6PzpTdhNRNhXN zPVXwfnmt>>r7E041`cbjb;5VyxE$+48_b@`8?adYJe1|8TA?3>u%3aB3rlNVbX|oD zl$V*;b|BLz$4JEN#f0cwy>F&nF9DgHkQq%K2C;{gSP3pgzyl@mmMKWIc$q7#F`*x^ z`-u+u>3S#alJ=3Z&)sDy^Cb%>qg|^0ItVW&`njk2uW`p}=MOR4L-EoLmuLoZ?Ku9I zxtTX_g~f*)b+{*(xj)%(!{fs%S4TW}X%XF~z5s{^z@xS4WppEa!DdpL1Bl)nxtW}S zCzx(mgTX{Gt;|8EJwOHg;Vk_8V;IX`Ej1z>J2KD3NKf-9xO$FN;H>ROYeeoUq~sdS zruEC!qQbIXf^kM_dJueRo7JA^KCyNAm!XSDxkzQ|iZ z?lAa?j~wX6YDr7b7!8-lXcmc&#V*Jv-}8wtKNt4Vo=stH!rdZ3go!ly3ID zL=EoYB!Q^%3(eu7S?r%(;b9OKWmS*P_{nhiI92X?P9@l>V%Qo#oaYIg+&Pel;Jk0A zGme889X(TM?@dOb1IvkNx}5$YbwuuUoB#FuxO(h2{)}JG3o37y?cxTE97%pNlGC4% zvNS2l$NPzj)AzRSd;;;+YMHQKf0@O#m5C^LP?}x%wy!&={|Q?cEj0!EO6Be4nE9@wlk}gk0Csa)M!m+3TMike3(Ci_Xx2W-7a1U&rbVucauKeyS1j&K^ zTwl}PR;Y5NWo{aDvxo~7t%+T7F@I0bB*pcHs&zew=(LLH_nDZnoR|P%9&% zK;$Z8NSz;B-ho2Ko}y+cQ}&{|?mlbA?fUTk({ z&>LMvb3}J&iSVfB?y+$CEG!Rb|2XhMeU8Ov`h^JRs1!GVd;PkKzd#FGm3R8wTaoC@ ziwfiQd(|U_$(7X%nnE@8?|%XnxI8dFMsfkMkr=1}DB`{D8G5c!B& z>i@KLmQig++t$XVxCV-QTims{I}~>u3It7XYjG*=7BpzSwCCLW z*}M$y6C#oCIClH(G<9~!ax{uzu-UOHlq z14eUIkka@M4%8El12s92I!eB_n7TwEqvFN#NDg~~aQ*r#ANt*Bgqm#wZBS-HgxL~A z;c(&;n_p}j=}6g8Q7WpgW8-c<)L#Ow4M_tRR=(>u4l#hLH6AU-Iqg;+_9^s32(r|6 zuj*)%(63)^FTbjiG*#y7{7o2~Yrxm>MFc~q!NHJ@MQAdZa(lix8`W{@|CTbC!&qea{~CyxSmM@ z^BJ{|xj0T@Bp6NwrB+VCa!B#mmF9T(3kB7m4oqKeg{BaM2AZmhoYi+Zm_s5XT7<-U z*)P@~uNX$7=t5a0m_l=j6xcv*3_b}=sDF{5D|#p>8nwA=oijoC>r>$XowDfRmoo6^~!8w0B}C{F8Zd?6x< z31z!w`~`rHrf4}5-8&U$+jsyc?Q<)1%lhW`M*+8phBR!3cHgRk@}cbciI-_mfAz;3 z&fY2xk==5pXV3uUIrM#;dvRYz5y&Y@8t<4_;3Ie5jIg*hy=a{Ix&iOhUJqAGu?WSd zuYxhuT~vs_Sf|sgu3}HJ!gD;XTr{|_zCeUoE=6|=$!>fB#~&sZ?Yfw)V|GyXRX2{) z-YK#%nb-NLw8$nE?U3J8T~>Ixi~}`Mv$^wJ$YDObT%4QxqQUE*_p?(Fet1CeJuENL z*T*4v;m$Vr4To)Q-cDGvqL~F9tya8B=t7IbvU42cRemgBXb*s~ zv*t1d8|RKE*JT%P+I#TrU`IH_eZHjlX%@-FKG|!d;r!+tjs=x_&mx|vBMny;YG?tL z(r;9UC+b_=I`1*dAKNQ?$bpP77q^)wkDdP-A4Gtp&t}_E@?T?($A>vR2bEvG4OMW^D_`+Rx!at5^Ex+4Tx(EwAxw8D)8)nwwH@L^Nu z(*q9%8j6b;Sv2#rvAwtzf^7*?XcV1&NDN92T3>mfK)K7*GEmAmDoL`* zOUl+h>5x4}|8h;GZA44XW>}0+ro1s!4X!?@Y)(F4$v#RJyh5o_vtD};6tQZcAnlR# zf94Qpes9xnOExtSi6;Y-rBJ_XFjmz<2-h~TRK*NN&;rc&!W2!~O62nm2C2PY`JTJQJIGXBBdY~m- zcJMRb=E2yx*lwr*T>kXUan{Z?>qh2BvH>%2aOj`R1ajuktBtH{72&PTzRd6Oj`3k0 zdZ9h}3$0-^c)jHV=|1J z!jKPdAn_@mK%MB|sZbckBfQhQi&4c8a`>=%@2T=nb0l*TBxo~sX{N^CS?jwGu5^@C zLm8Ag`U>|cLlJXXyDcgP?Ku3c;)I^rYyYZ_0~v(KOb0A54Ge+x-hwr&UE%!s*#0_sA&=ti+m#w{2fD2U#dE!UB6 zevlj^;faV<^8qD6D$S2Gm&jA-g($jF=PBvW0zcYE||8ZFMpSiGaw|gY; zYjpFwTNAt1-4xC!xbBb~M&*JdLudXM85+0HKZ{dvqyB>FDdK$4(fu~f?k2~duPWQ# z7PX~q*Hal6v^#^YLA(P*UEn}DA!U>wpfJQH1U66SuNGDXvARm_VrtY2J2FE^X(;Mh z7sml&Ygc^#;f1~qpqy7;XIra(UDeJPY6b^}3jYI!GKsE7&`;)Tyk%jrA2QAiO%sIg z|0~B->mBCHr9y5p_pqi$Dn-|LFN)vl>19wyVC)#K17k1(A 
z*-tP64mzK+guYwea52j4s#=VofXM=fg4o-@j;mO`3zAMt{|&Il{?r1p zJ;fh;Mjh->t#YUyoEYf_?QRtZ$li#1P;+vG^3X^tFDFrb6BMwa5)yMQYowe*jTh&5 zF(OugZkOn+M>*X^`AjtU+&My(6?`c6-QqvQP;~=&PtoRdmW)^Vr^6398A(VPSf6Fx zYObc6X;xT>t7z@bg%pe%PB9Vuc6dxT8%83ktSzgcvjK>oIMidnA7&QWYi1Jwq#VvG zDrTpN4nOLNlJDTY1{IqJTJ!y6TlI2g7X^)z5S3=9AQRW9p_H}qtC6|Wo?RRMX|WL(0UdQN`d z(ER0_#{g)|rSEgA;>~o%iDdqsZ`JUd!hBrLJ^k0p&+?wBLH219^5`c0pvn83MwEpe zGd@pcGkiPf#sS7sGC&icPg@>JQ2QLHnP#dO*Z1LifaXV5 z>t}y@-0s0E51FxJ8p1~I*f2E~?4H1AEJ;X44@facW9*&Je)Y zyLv_ZF%5|*OltHQ7CZ?EyDU5b;vJV$Eu;E5qUU{0dA;}kI7)R>9`rMfk)>-x`4HrG zaB}6tvs~2J?VRvWeaK^?2X0@%BbTg2QE7yF4ee-qGfmwWzNQEitZC!aZEJsg1ILCA ztn<>SP}*v6?c!&aPBTL6aJo1lGHrcC7~>G4cSgso@8drua0@(MV+GvR^FxB<5XlNH zUl53^yn1W{mkUkLhzQ&wxdz$nV}e%tvL+$!p}6C3`$wI?Fv`-qH;nQw7$erdT=Bk0 z|17IHLA0Qu)tI#9^KFH5Lo*Nmazjbr+|WkeiR^u+>Hi>mv^X{wmXK^r4Lv@q#FN>2 zs$3}3q-8`GiiOo#^O#motH1phdhuV&ApI^!e zE=HPT(dogu^{x9h(dkzcS4uJAsNfP0xfXe|%QsWF9 z{13Ay z&VAEHo$WO8p#G_KO?rswRTesRKN*sjgL9~5`|an9LeLhk9_~;TxF6VY_Ur`PfGC7r zDY0I=Bg!l?A|48d<9C+1@Ls}oNfSy_v6!Qo#-|FnE)g2-`OEDQ^a1>=&s-bT{R7Y9 zf0{j&EM(Q%cCNHri_!frwI}>ceQN+;b&k(U&1*y8?FyXQBlOIMhpu6)mXm2I#`NJO z#k?aoUZG8U3Nvq^&rs#7rq*3Dpq|e2k{GT5P`;6aIsZ0mrKuJwC6(JV;T|t0B=O|l z^{r?lTPPE%Vj8M#!F8&r<&A9-aNRCT_m#xgL920US?}JW4^Hi&eja~2&-uKxEb(v@ zhJPgX*XrON_)8ZxAU#336P~;23pb4H`~6p=gqzp<%}9vf`raOYm^9x|HG^ZY)=jL; zHhBD=RlAE~eK{h%VzQkTiP!lE!g+J6{|t|TmuXe56PMY-<^s(|tx4bI`d)SCzk}jPPfq6Y_gB5tP z{inXRS=xIegv*j*cgsy^z!J4Br@%`J=Z)nh42eEaS}>H=w_-Awun?J}J*W_kVkET%6-kK)Lzk?U3>z0v09xwyws1-IbctV8x!_b^ zu3dS69{O606M!6tSvV*ex`r?l72%CoX)4>FGPFN<$e@u*Va~Iu{R4@%`R4m&F~kMD zz)gd=d2$h=b>21d`3>^DM+6O?p$_>e8c(-gCRPJ$xDtv^>5Z z3lr9jfqiN{q14zMa*s=d#?Br>TQN9oW6aMgxTW>osr=j`g}2i}6Gf2}@}b9WixL=b zTthw{(NNZ|UPj`<4r`x}20Sh=I~ZOme9Pw}ksF+=U%qF|O7g9*^g&~N8{C?z2GZ$C zY)^Zp8hCDNpjy{{5X;0!eJ6a9ro#F8m(tygXWi<1le?L_Uzx83vBNfcBcEy)SqL9m z@v>Bk2s}Ww>{?5x!A|i&iwMCjD;k;w=im*;f+PZ)s_Cf4t#j28-dn?81E12eTBuYB zHZQN9ZNq}9;R8>%S07LMN;k@4$5(0n30&-Q_6f=tuBX#Cpskh?q!Y)<#Gc%cAQynS z1Uv>h?e_7o_KDNw#`y_uo)x&*`QE9lA*Rl_w7V+~!HtpdJ5R%RnKjN7ym~mUj^+^6 z^(+-eE!Z!m!qJ4un;i^jYGdfQGw{v-^j#&neFzk`mxm@NPZPZB)S<=iA)uU$ zBVTkUNAvzggC`NL^@GwQ*IYR%rP| z);{$&?fSz>e=N{M`+_o|!-ceyBwLXas|v|jB% zlQk#4{GATPkN)hfqwsT}*h7>!r-TAb`pUUU1RX#EA-_Xd5X4;5zx)Im+zk1 zVK>Wf>j1~FF*txH+RtLtJ(an=N7$D`C@fW2>|s!D6Yu2ekrjEp42~8xamD)Agmut| ztbfX@%wh1npewAQ3;d4bV?C|LoCcl80gO7&cl|8NuQ%p)f4@gCQ2#E_HzS52Yrq+6!KfB%s~`T- z?$)fx#A^6?f3dUmGWle!#u;64f-zY7+@zmhLBy@YZ8eV_pV?&Hu`71P z4cX39jLwg$DHE zy@1W2vgwhxWY=Q$KnoCZ-3fEugxOTo~qJkDjAcX zi|Hs&=rA;UL=XB|va4*KiDAM-bzxxXinJ^g@H%*iCr%5S>x~+T((d5SPi+AvWy&gr zqGEf;I^SjzA7~KVLm|Ijs@f`^Goku^i(M?}glJ(Nzjn&wltqgGju~&CSF|5c(>J@P zL6NmBAj6sMM=_fLk;8hr&|^k5%qeSN`e%2KdtrJdqBAwMx$b@Y8AHu_QV8qzdlY>u z_luJ6D_oX4m!_f{0~)pBSFz6BDwm<=2Dcl!a1p~oh51YPbFegRyEi4n*0!#v8&<$i ze6);2NLq0;X(p$U#_+q(%Y~PCt~IoG@iH)5?>S{{4{(9d2K_|DEh;*0{x(0GBu1cV z91!EU56_xBJLvPWk=Fw_8U*47DWgHw$x|IbN`H>*R;?2m_$7?TOt!d4%q_NP$}*iz z(#51M`_?YEAmkzY&{D7y82B_Kan}d<+EIEE7|l|v`|a4UW|1yu_XxcA<^DY4Ah}C{ zx>9VThk9bun#h4I=?i&eDo_zdH#+U#DNz%Qe&6T5Kuz>HDLd^(ZZ4biJHcQHvN$TU zUZ6mDIzFT%GCrKz9ql7q7@>-Lc$vEzh?u`9n^xK>_ig?X)j*m6%$rUG%I@+JO z&y3JXq;gVNX<(>NAlN^+F*?tY47(iM*bEE4-IKWe*%l~lBpyZFD2Rh+rip5><(n7H zzM%I_qCPVpFtqW@5%M>46mxydOnMn;@R!O1pIk6jcit0?eowEWr(3@7d1X6`3c1_! 
zj4kFU!5*%hNX*iB9%ROU8nFxt_p*XOnZ5RZtv9P+-m~ZJx+Hhh`Wv=2!e>s-(Z^F> z{DQzixTp=W4OIh2g1EZ_VY#w`M+tXm?vFg$6hz6bi{@}~&IN1qlEUJe2i z1-ycKijQFjgAk}6Jo5(nS>ZX^{O%($r;`feyQ$8!3qnIoudXaV4PrCO`mFX&<~3NLbWsPj?4Z>-OMt^zZ!bmrpI_UlIo}KeD&y z#J#qSG-VvNF3lMYadup=(6W|ebtj^m_HWW=f%P7g6%ob6#2H(FP4rkGxquHaT(?aP zx?X)RM+1|DnNEFXcP<2sHzAQmXlcKvOMe?h$q77EC;>QZELH)T^BS6t#d5WfK}co9 zvm5vb5C#hYLKnlVdRVXA5l5v7&z&P88O55{h_4rQWS8YvZD?3IYd&^uQW8_}e{#3= z`b6Z~OxcZgQu$c=GXtiJ_TCNN6lkMa7_X5L^Nu~T9V3GXMAS0T!Kpzb(a?}8F80x} z!A#owTPq4%SG1BGJ_Qg2%`EVDKK64{wbxn@rHmd#gigQ?dM^i2mP^1Wv|*#Q(Fko= zBmzHRKkVr`!ruyoAOBi^7ci(+frl5i z;eT>2VH&iR3!DBKc39D!G|Reb(xVQjm@<^(=M2m?HmPjNM-dMuPHi0%dK>slOrlk$ zb{^~6EsRocIetDU6bp9qQv&|~Gk19#a8RFb9Id+-vm(5$vT11gj9d6PqXdw;EHYKj zmAHua8Qoy#Wf}S6w;X~Zal!g3UuF}+a6NfU-;ZJ z@hW8aaFsI03=j+wE!a&Yl7K`ax_J0Ez>d8Z(JSnPTiT8Zad69`T4Psfl7VGHq1*Rw z1w*%2C2kl0@K2J6?^dQH)sqh8bSUD#6|wp9D@bcG0W|kg!=rB|r?TFf)6{EC3Fg}& zahgMQe&;o}Lg=ZwHGvsnvr^4z5T?w~VPgS&x1r6i@p)JbP<`^0?9QYgyn%=vuq<9F z#`dhmEMCC316<)<-u&aAkah0G!uCtF?%W?F_XnO%+{5yW-u>){X0tggWxVA z0a8bwurGe9ERV2d1wecSxC8Rj@#%x94awdz0*Nk;`w0<8_fto)52OX z`PeUs4-O0P@PPi|!~;m&?{mT*UC`Wb{#D_?d|y!bX^7RDm9LX`lzJ*7c_4D#Pt$Au!b7wgQvC(Er6O+A6A zu%M`E@u3Yw9&fsnA2hZs0Zox{U>SS+K~`p$UHeR$E?hU1^wu53=2k>FS@nI(Rs5GA zpzqVH*$B}2<*Av|(k5O3>d$ZWW`cpmPHVp3@VQy8iU=j>1Mbxbo}e2`JIQbF8-Z5= zHhw|yT;GM&Q0dxDsQPnpr4N+n?r^`a%~5tEJ|kDa)4XYLLI!~7YT3l0uD2~BkZjkd zT8zSw*0$cqiG=e&GuJ&g_u?J@hwsLZv$KM6PU#MNY|#k2pVX)}Gi^MnOc@wgnra#mAE7iExMb@R>=0;T>z}@1tx04m9Vf5Z z65ao!eglSIg1`Ygv-%70`e=}tTOo&S2_7YXtSV-?Ksa=0^UnNnVsW?LFIy&q7|X8Y zM+8C$*rAXNpIH8pL_KVUp>>QoslMHcAkicu6#0WBU9jIrM%nwEtp$h@lYqm(_bOgJ55tTyUEpS)N(` zt1o~#PXXIic-aH;z|S|4ocBEqtK%Gm9a_);FGMxl>kA~dCJOTc`bJ15gyvIBut+I! zgn9N@8n_Qf*)_kr^!#Lq*Y?lyE*R{@K>8F{i1B-6amcz+?eR6Gq17NsWtM|;tpIqE zRAm$WFUd1@CnF3qXdJU_K6@=FoLjlgCbLWAxgZ!e0P#OzIR$n;9{4>U+jXNIH!qm< zObD`27>`~v+l?_7@(Fa{={yBubQMQ(K8=ljxvPFmuJwYAJ$!&Suu+~wRxX5d5T7i2 zsm)PBtkz65?&<_(p6J|1 z#FvCVgPEQ-a@460@v%%if0Ukk6Em=)0zNs?S(3SJfsaytoAC6<3Yb6Ei)O70n5&8N*{GD#+kLKOwiptr9x@Yg-BxL}ijHd&S>1RV})Ae7F8eF#> zlTjQ>e&x!Ik{VRiX&JQy#h7{#Q51R}Qb2pt&wekr?`x&?e3Lg3R!2z(gZ`x!&l%*RznyQy}9oECmcy0=i{wQ>O#PWzoQnPclVEjJfZ zhiy|P9{V=1QbL@+A~A)M{E+y4p80fo=nhdidsqN>`#%PSoqZ>22)XF=TZfhMBD1(-HLk15rFEn!9iQi?Tqbl&g0oG>Q`O$a zL;T&!%DHC0|Df$zb!hU)bZ+k&1_(U`Z7l7HLoX#@wI1!QqK(LR0xzhd(qfT4tlYV< zLx@Fs)Cw46ba*z%;EhEhcIN%?*ao~{K5L0Q_sX}qVcwC|+zbCLTDxHxtm3-Vg!ff^ ziXFOAvC3tViFuya=Tky9+%ZKxlGk>hvM@_IycntkJ+ui1KyXevSZpO88D$&LHMpJQ z&h^^}ejD5E6A-X9f0fZYNyX_Je0raFR2X(P343&7mG1x;Q=1GFRtIo()zuS!L`Wo*fYngjB9ewx)z{RaCR#RE$%`MoaHO|^ChcrV^PPaI{@RR@D zM79^m8hkhCLAaM-GkGR#caxjuW(Ue$3_88ArRT;%Pxz3J?%RXRHMbjW<4-E@5F8bX zq~h&0=8|3(h&v--6RZ6-iDW>}WJvB?vai*$CXri~XC>~cRSx~@U`5gjUN{G|+m${D znX??j=6ZI1^mK;lJTtmKR&rq=vjCD}<-JIt36jYb$xnA_`}IATr?I4D{pEIR$vdlw zc-5CrZV#H}6M2WtSBMNBJnTLek%ebk9a7b^LF4Z~B{96Cj%%chPm^JqV6x@8kTQE2 zpR$C-O^|WqZK7AAfphlg^2(`g`1s}+@8R~I^Q3IBU#@2YnS!IBoLx$C_|ADv2d zMMeB{G?$V2`(`iG%fHbe<_{B9a%C4w6=mc5W%`DD$9{PAEsxNGR^&&dH=c|Zro;C6 z%iKe?rjD4_Bff?e!5^J-LcqFnO$r0CPz>*|=&w!;dqp2a_1&4252-kG%C3*?N z+b{+r5a&+xkxukvh*o=?)y+WMm)t@yW+`bB5fVfxxNG@y%vV{^HbWAbN$iiv(lQqY zIwfN?ElU06da*;f4)ndyhkEwKrOGF2tS{ukNFkwpTk2b!JC^HoD~r(Bsi?RUr+LKPy)fVAv;9Z0uoM(9}-u^(mlP!ptJX+0w*hNncM`O7nNMp zob#0$nV3W-;q+_JlVT&v(X!D+qilMc4KeLD`w%upYE!$+8Xh1n>=#L!83E*`nMV|h zTjFk78Z>+`mX`plq*T~QU5#unc@>gh5tDTzS?jN7n95a;#3$F7hw;_LY_pJJhr8fo zs3`jDD$@_uWqolld(l6p94mX2t~q1OoMyPhp;U1~UtC;36{dL8+U{FP&4WbdMroK) zZRwn=xq6jh3`%fxC`kh}UUxjEHLM~J==l$Gp?%vTg!_%9Z zdYYfs-r*2^#nr|srs7kDGJ|t&(UwOVvU*OSv(fE?kq-qoJ{hjEK5(P8osRlkA z(U!s&hJjuv=OgS1Iuu|oUZsE(lF$>UjwH9GR5!1Ms@-#1d<<+^sTe#$Dg0^t_Wgn{ 
[GIT binary patch data (base85-encoded) omitted]

From d3a999ba58c9127b98c76d69c8519a69f02697a9 Mon Sep 17 00:00:00 2001
From: Hamza REMMAL
Date: Thu, 4 Jul 2024 10:23:02 +0200
Subject: [PATCH 277/827] Set baseVersion to 3.6.0 instead of 3.6.0-RC1

---
 project/Build.scala | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/project/Build.scala b/project/Build.scala
index 54b4a6bf7801..c729f6036985 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -88,7 +88,9 @@ object Build {
   val referenceVersion = "3.4.2-RC1"

-  val baseVersion = "3.6.0-RC1"
+  val baseVersion = "3.6.0"
+  // Will be required by some automation later
+  val prereleaseVersion = s"$baseVersion-RC1"

   // LTS or Next
   val versionLine = "Next"
@@ -169,9 +171,9 @@ object Build {
     if (isRelease)
       baseVersion
     else if (isNightly)
-      baseVersion + "-bin-" + VersionUtil.commitDate + "-" + VersionUtil.gitHash + "-NIGHTLY"
+      baseVersion + "-RC1-bin-" + VersionUtil.commitDate + "-" + VersionUtil.gitHash + "-NIGHTLY"
     else
-      baseVersion + "-bin-SNAPSHOT"
+      baseVersion + "-RC1-bin-SNAPSHOT"
   }
   val dottyNonBootstrappedVersion = {
     // Make sure sbt always computes the scalaBinaryVersion correctly

From 83a59515b3fc9c248a648ed41aba722b5aaf1a5d Mon Sep 17 00:00:00 2001
From: Hamza REMMAL
Date: Thu, 11 Jul 2024 13:17:46 +0200
Subject: [PATCH 278/827] Use sbt-native-packager instead of sbt-pack

---
 .github/workflows/build-msi.yml              | 27 +++
 .github/workflows/launchers.yml              |  2 +-
 dist/LICENSE.rtf                             | 41 +++++
 pkgs/msi/README.md                           |  9 +
 project/Build.scala                          | 72 +++++---
 project/DistributionPlugin.scala             | 50 ++++++
 project/RepublishPlugin.scala                | 161 ++++++++----------
 project/plugins.sbt                          |  4 +-
 project/scripts/native-integration/bashTests |  4 +-
 .../scripts/native-integration/winTests.bat  |  2 +-
 10 files changed, 247 insertions(+), 125 deletions(-)
 create mode 100644 .github/workflows/build-msi.yml
 create mode 100644 dist/LICENSE.rtf
 create mode 100644 pkgs/msi/README.md
 create mode 100644 project/DistributionPlugin.scala

diff --git a/.github/workflows/build-msi.yml b/.github/workflows/build-msi.yml
new file mode 100644
index 000000000000..548cc4b9baa8
--- /dev/null
+++ b/.github/workflows/build-msi.yml
@@ -0,0 +1,27 @@
+
+name: Build the MSI Package
+
+on:
+  pull_request:
+
+jobs:
+  build:
+    runs-on: windows-latest
+    env:
+      RELEASEBUILD: yes
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+      - 
uses: actions/setup-java@v3 + with: + distribution: 'adopt' + java-version: '8' + cache: 'sbt' + - name: Build MSI package + run: sbt 'dist-win-x86_64/Windows/packageBin' + - name: Upload MSI Artifact + uses: actions/upload-artifact@v4 + with: + name: scala.msi + path: ./dist/win-x86_64/target/windows/scala.msi diff --git a/.github/workflows/launchers.yml b/.github/workflows/launchers.yml index 818e3b72b06b..036b4f2966e8 100644 --- a/.github/workflows/launchers.yml +++ b/.github/workflows/launchers.yml @@ -90,7 +90,7 @@ jobs: distribution: 'temurin' cache: 'sbt' - name: Build the launcher command - run: sbt "dist-win-x86_64/pack" + run: sbt "dist-win-x86_64/Universal/stage" - name: Run the launcher command tests run: './project/scripts/native-integration/winTests.bat' shell: cmd diff --git a/dist/LICENSE.rtf b/dist/LICENSE.rtf new file mode 100644 index 000000000000..c2c7feee2921 --- /dev/null +++ b/dist/LICENSE.rtf @@ -0,0 +1,41 @@ +{\rtf1\ansi\ansicpg1252\deff0\nouicompat\deflang1033 +{\fonttbl{\f0\fswiss\fcharset0 Arial;}} +{\*\generator Riched20 10.0.18362}\viewkind4\uc1 +\pard\sa200\sl276\slmult1\b\f0\fs32 Apache License\par +\b0\fs28 Version 2.0, January 2004\par +\ul http://www.apache.org/licenses/\ulnone\par +\pard\sa200\sl276\slmult1\b\fs24 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\par +\pard\sa200\sl276\slmult1\b0\fs20 1. Definitions.\par +\pard\sa200\sl276\slmult1 "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.\par + "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.\par + "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50\%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.\par + "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.\par + "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.\par + "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.\par + "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).\par + "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.\par + "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."\par + "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.\par +\pard\sa200\sl276\slmult1\b 2. Grant of Copyright License. \b0 Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.\par +\pard\sa200\sl276\slmult1\b 3. Grant of Patent License. \b0 Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.\par +\pard\sa200\sl276\slmult1\b 4. Redistribution. 
\b0 You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:\par + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and\par + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and\par + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and\par + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.\par + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.\par +\pard\sa200\sl276\slmult1\b 5. Submission of Contributions. \b0 Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.\par +\pard\sa200\sl276\slmult1\b 6. Trademarks. \b0 This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.\par +\pard\sa200\sl276\slmult1\b 7. Disclaimer of Warranty. \b0 Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.\par +\pard\sa200\sl276\slmult1\b 8. Limitation of Liability. 
\b0 In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.\par +\pard\sa200\sl276\slmult1\b 9. Accepting Warranty or Additional Liability. \b0 While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.\par +\pard\sa200\sl276\slmult1\qc\b END OF TERMS AND CONDITIONS\par +\pard\sa200\sl276\slmult1\b0\fs20 APPENDIX: How to apply the Apache License to your work.\par +\pard\sa200\sl276\slmult1 To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.\par + Copyright [yyyy] [name of copyright owner]\par + Licensed under the Apache License, Version 2.0 (the "License");\par + you may not use this file except in compliance with the License.\par + You may obtain a copy of the License at\par +\pard\sa200\sl276\slmult1\li720 \ul http://www.apache.org/licenses/LICENSE-2.0\ulnone\par +\pard\sa200\sl276\slmult1 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.\par +} diff --git a/pkgs/msi/README.md b/pkgs/msi/README.md new file mode 100644 index 000000000000..7904ef383277 --- /dev/null +++ b/pkgs/msi/README.md @@ -0,0 +1,9 @@ + +## Important information + +- We can only build `msi` packages with stable version number (no RCs, nor Nightlies). +Example of the error message when building with an RC + +``` +error CNDL0108 : The Product/@Version attribute's value, '3.5.1-RC1', is not a valid version. 
Legal version values should look like 'x.x.x.x' where x is an integer from 0 to 65534 +``` diff --git a/project/Build.scala b/project/Build.scala index c729f6036985..86da7eeee959 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1,27 +1,31 @@ import java.io.File import java.nio.file._ - import Process._ import Modes._ import ScaladocGeneration._ import com.jsuereth.sbtpgp.PgpKeys -import sbt.Keys._ -import sbt._ +import sbt.Keys.* +import sbt.* import complete.DefaultParsers._ import pl.project13.scala.sbt.JmhPlugin import pl.project13.scala.sbt.JmhPlugin.JmhKeys.Jmh +import com.typesafe.sbt.packager.Keys._ +import com.typesafe.sbt.packager.MappingsHelper.directory +import com.typesafe.sbt.packager.universal.UniversalPlugin +import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport.Universal +import com.typesafe.sbt.packager.windows.WindowsPlugin +import com.typesafe.sbt.packager.windows.WindowsPlugin.autoImport.Windows import sbt.Package.ManifestAttributes import sbt.PublishBinPlugin.autoImport._ import dotty.tools.sbtplugin.RepublishPlugin import dotty.tools.sbtplugin.RepublishPlugin.autoImport._ import sbt.plugins.SbtPlugin import sbt.ScriptedPlugin.autoImport._ -import xerial.sbt.pack.PackPlugin -import xerial.sbt.pack.PackPlugin.autoImport._ import xerial.sbt.Sonatype.autoImport._ import com.typesafe.tools.mima.plugin.MimaPlugin.autoImport._ import org.scalajs.sbtplugin.ScalaJSPlugin import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._ + import sbtbuildinfo.BuildInfoPlugin import sbtbuildinfo.BuildInfoPlugin.autoImport._ import sbttastymima.TastyMiMaPlugin @@ -2130,25 +2134,27 @@ object Build { ) lazy val commonDistSettings = Seq( - packMain := Map(), publishArtifact := false, - packGenerateMakefile := false, republishRepo := target.value / "republish", - packResourceDir += (republishRepo.value / "bin" -> "bin"), - packResourceDir += (republishRepo.value / "maven2" -> "maven2"), - packResourceDir += (republishRepo.value / "lib" -> "lib"), - republishCommandLibs += - ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), - republishCommandLibs += - ("with_compiler" -> List("scala3-staging", "scala3-tasty-inspector", "^!scala3-interfaces", "^!scala3-compiler", "^!scala3-library", "^!tasty-core")), - republishCommandLibs += - ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-tasty-inspector", "scaladoc")), - Compile / pack := republishPack.value, + Universal / packageName := packageName.value, + // ======== + Universal / stage := (Universal / stage).dependsOn(republish).value, + Universal / packageBin := (Universal / packageBin).dependsOn(republish).value, + Universal / packageZipTarball := (Universal / packageZipTarball).dependsOn(republish).value, + // ======== + Universal / mappings ++= directory(republishRepo.value / "bin"), + Universal / mappings ++= directory(republishRepo.value / "maven2"), + Universal / mappings ++= directory(republishRepo.value / "lib"), + Universal / mappings += (republishRepo.value / "VERSION") -> "VERSION", + // ======== + republishCommandLibs += ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), + republishCommandLibs += ("with_compiler" -> List("scala3-staging", "scala3-tasty-inspector", "^!scala3-interfaces", "^!scala3-compiler", "^!scala3-library", "^!tasty-core")), + republishCommandLibs += ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", 
"scala3-tasty-inspector", "scaladoc")), ) lazy val dist = project.asDist(Bootstrapped) + .settings(packageName := "scala3-" + dottyVersion) .settings( - packArchiveName := "scala3-" + dottyVersion, republishBinDir := baseDirectory.value / "bin", republishCoursier += ("coursier.jar" -> s"https://github.com/coursier/coursier/releases/download/v$coursierJarVersion/coursier.jar"), @@ -2157,9 +2163,9 @@ object Build { ) lazy val `dist-mac-x86_64` = project.in(file("dist/mac-x86_64")).asDist(Bootstrapped) + .settings(packageName := (dist / packageName).value + "-x86_64-apple-darwin") .settings( republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-x86_64-apple-darwin", republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += @@ -2167,9 +2173,9 @@ object Build { ) lazy val `dist-mac-aarch64` = project.in(file("dist/mac-aarch64")).asDist(Bootstrapped) + .settings(packageName := (dist / packageName).value + "-aarch64-apple-darwin") .settings( republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-aarch64-apple-darwin", republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += @@ -2177,21 +2183,36 @@ object Build { ) lazy val `dist-win-x86_64` = project.in(file("dist/win-x86_64")).asDist(Bootstrapped) + .enablePlugins(WindowsPlugin) // TO GENERATE THE `.msi` installer + .settings(packageName := (dist / packageName).value + "-x86_64-pc-win32") .settings( republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-win32", republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishExtraProps += ("cli_version" -> scalaCliLauncherVersion), - mappings += (republishRepo.value / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), republishLaunchers += ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersionWindows/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") ) + .settings( + Universal / mappings += (republishRepo.value / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), + ) + .settings( + Windows / name := "scala", + Windows / mappings := (Universal / mappings).value, + Windows / packageBin := (Windows / packageBin).dependsOn(republish).value, + Windows / wixFiles := (Windows / wixFiles).dependsOn(republish).value, + maintainer := "The Scala Programming Language", + packageSummary := s"Scala $dottyVersion", + packageDescription := """Installer for the Scala Programming Language""", + wixProductId := "74ED19C3-74FE-4ABA-AF30-55A06B6322A9", + wixProductUpgradeId := "3E5A1A82-CA67-4353-94FE-5BDD400AF66B", + wixProductLicense := Some(dist.base / "LICENSE.rtf") + ) lazy val `dist-linux-x86_64` = project.in(file("dist/linux-x86_64")).asDist(Bootstrapped) + .settings(packageName := (dist / packageName).value + "-x86_64-pc-linux") .settings( republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-linux", republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += @@ -2199,9 +2220,9 @@ object Build { ) lazy val `dist-linux-aarch64` = 
project.in(file("dist/linux-aarch64")).asDist(Bootstrapped) + .settings(packageName := (dist / packageName).value + "-aarch64-pc-linux") .settings( republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-aarch64-pc-linux", republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += @@ -2339,7 +2360,7 @@ object Build { settings(scala3PresentationCompilerBuildInfo) def asDist(implicit mode: Mode): Project = project. - enablePlugins(PackPlugin, RepublishPlugin). + enablePlugins(UniversalPlugin, RepublishPlugin). withCommonSettings. settings(commonDistSettings). dependsOn( @@ -2435,7 +2456,6 @@ object ScaladocConfigs { } lazy val DefaultGenerationConfig = Def.task { - def distLocation = (dist / Compile / pack).value DefaultGenerationSettings.value } diff --git a/project/DistributionPlugin.scala b/project/DistributionPlugin.scala new file mode 100644 index 000000000000..473ecd8378af --- /dev/null +++ b/project/DistributionPlugin.scala @@ -0,0 +1,50 @@ +import com.typesafe.sbt.packager.Keys.stage +import com.typesafe.sbt.packager.universal.UniversalPlugin +import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport.Universal +import sbt.* + +/** + * @author Hamza REMMAL (https://github.com/hamzaremmal/) + */ +object DistributionPlugin extends AutoPlugin { + + override def trigger = allRequirements + + override def requires = + super.requires && UniversalPlugin // Require the Universal Plugin to + + object autoImport { + val `universal_project` = settingKey[Project]("???") + val `linux-aarch64_project` = settingKey[Project]("???") + val `linux-x86_64_project` = settingKey[Project]("???") + val `mac-aarch64_project` = settingKey[Project]("???") + val `win-x86_64_project` = settingKey[Project]("???") + + + // ========================== TASKS TO GENERATE THE FOLDER PACKAGE ============================ + val `pack-universal` = + taskKey[File]("Generate the package with the universal binaries (folder)") + val `pack_linux-aarch64` = + taskKey[File]("Generate the package with the linux-aarch64 binaries (folder)") + val `pack_linux-x86_64` = + taskKey[File]("Generate the package with the linux-x86_64 binaries (folder)") + val `pack_mac-aarch64` = + taskKey[File]("Generate the package with the mac-aarch64 binaries (folder)") + val `pack_mac-x86_64` = + taskKey[File]("Generate the package with the mac-x86_64 binaries (folder)") + val `pack_win-x86_64` = + taskKey[File]("Generate the package with the linux-x86_64 binaries (folder)") + } + + import autoImport.* + + override def projectSettings = Def.settings( + `pack-universal` := (`universal_project` / Universal./(stage)).value , + `pack_linux-aarch64` := ???, + `pack_linux-x86_64` := ???, + `pack_mac-aarch64` := ???, + `pack_mac-x86_64` := ???, + `pack_win-x86_64` := ??? 
+ ) + +} diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 5611af798b33..ff469e8fda56 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -1,22 +1,18 @@ package dotty.tools.sbtplugin -import sbt._ -import xerial.sbt.pack.PackPlugin -import xerial.sbt.pack.PackPlugin.autoImport.{packResourceDir, packDir} -import sbt.Keys._ +import com.typesafe.sbt.packager.universal.UniversalPlugin +import sbt.* +import sbt.Keys.* import sbt.AutoPlugin import sbt.PublishBinPlugin -import sbt.PublishBinPlugin.autoImport._ +import sbt.PublishBinPlugin.autoImport.* import sbt.io.Using -import sbt.util.CacheImplicits._ +import sbt.util.CacheImplicits.* -import scala.collection.mutable import java.nio.file.Files - import java.nio.file.attribute.PosixFilePermission -import java.nio.file.{Files, Path} - -import scala.jdk.CollectionConverters._ +import java.nio.file.Path +import scala.jdk.CollectionConverters.* /** This local plugin provides ways of publishing a project classpath and library dependencies to * .a local repository */ @@ -53,7 +49,7 @@ object RepublishPlugin extends AutoPlugin { } override def trigger = allRequirements - override def requires = super.requires && PublishBinPlugin && PackPlugin + override def requires = super.requires && PublishBinPlugin && UniversalPlugin object autoImport { val republishProjectRefs = taskKey[Seq[ProjectRef]]("fetch the classpath deps from the project.") @@ -346,6 +342,65 @@ object RepublishPlugin extends AutoPlugin { allLaunchers.toSet } + private def generateVersionFile() = Def.task[Unit] { + import scala.util.Try + import java.time.format.DateTimeFormatterBuilder + import java.time.format.SignStyle + import java.time.temporal.ChronoField.* + import java.time.ZoneId + import java.time.Instant + import java.time.ZonedDateTime + import java.time.ZonedDateTime + import java.util.Locale + import java.util.Date + + val base: File = new File(".") // Using the working directory as base for readability + val s = streams.value + val log = s.log + val progVersion = version.value + val distDir = republishRepo.value + + def write(path: String, content: String) { + val p = distDir / path + IO.write(p, content) + } + + val humanReadableTimestampFormatter = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 2) + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2) + .appendLiteral(' ') + .appendValue(HOUR_OF_DAY, 2) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .appendOffset("+HHMM", "Z") + .toFormatter(Locale.US) + + // Retrieve build time + val systemZone = ZoneId.systemDefault().normalized() + val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(new Date().getTime), systemZone) + val buildTime = humanReadableTimestampFormatter.format(timestamp) + + // Check the current Git revision + val gitRevision: String = Try { + if ((base / ".git").exists()) { + log.info("[republish] Checking the git revision of the current project") + sys.process.Process("git rev-parse HEAD").!! 
+ } else { + "unknown" + } + }.getOrElse("unknown").trim + + + // Output the version number and Git revision + write("VERSION", s"version:=${progVersion}\nrevision:=${gitRevision}\nbuildTime:=${buildTime}\n") + } + override val projectSettings: Seq[Def.Setting[_]] = Def.settings( republishCoursierDir := republishRepo.value / "coursier", republishLaunchers := Seq.empty, @@ -470,88 +525,8 @@ object RepublishPlugin extends AutoPlugin { val artifacts = republishClasspath.value val launchers = republishFetchLaunchers.value val extraProps = republishWriteExtraProps.value + val versionFile = generateVersionFile().value cacheDir }, - republishPack := { - val cacheDir = republish.value - val s = streams.value - val log = s.log - val distDir = target.value / packDir.value - val progVersion = version.value - - IO.createDirectory(distDir) - for ((path, dir) <- packResourceDir.value) { - val target = distDir / dir - IO.copyDirectory(path, target) - } - - locally { - // everything in this block is copied from sbt-pack plugin - import scala.util.Try - import java.time.format.DateTimeFormatterBuilder - import java.time.format.SignStyle - import java.time.temporal.ChronoField.* - import java.time.ZoneId - import java.time.Instant - import java.time.ZonedDateTime - import java.time.ZonedDateTime - import java.util.Locale - import java.util.Date - val base: File = new File(".") // Using the working directory as base for readability - - // Copy explicitly added dependencies - val mapped: Seq[(File, String)] = mappings.value - log.info("[republish] Copying explicit dependencies:") - val explicitDepsJars = for ((file, path) <- mapped) yield { - log.info(file.getPath) - val dest = distDir / path - IO.copyFile(file, dest, true) - dest - } - - def write(path: String, content: String) { - val p = distDir / path - IO.write(p, content) - } - - val humanReadableTimestampFormatter = new DateTimeFormatterBuilder() - .parseCaseInsensitive() - .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral('-') - .appendValue(MONTH_OF_YEAR, 2) - .appendLiteral('-') - .appendValue(DAY_OF_MONTH, 2) - .appendLiteral(' ') - .appendValue(HOUR_OF_DAY, 2) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2) - .appendOffset("+HHMM", "Z") - .toFormatter(Locale.US) - - // Retrieve build time - val systemZone = ZoneId.systemDefault().normalized() - val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(new Date().getTime), systemZone) - val buildTime = humanReadableTimestampFormatter.format(timestamp) - - // Check the current Git revision - val gitRevision: String = Try { - if ((base / ".git").exists()) { - log.info("[republish] Checking the git revision of the current project") - sys.process.Process("git rev-parse HEAD").!! 
- } else { - "unknown" - } - }.getOrElse("unknown").trim - - - // Output the version number and Git revision - write("VERSION", s"version:=${progVersion}\nrevision:=${gitRevision}\nbuildTime:=${buildTime}\n") - } - - - distDir - } ) } diff --git a/project/plugins.sbt b/project/plugins.sbt index 59e58007a4a0..96f767726315 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,8 +12,6 @@ addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1") -addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.17") - addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") @@ -21,3 +19,5 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0") + +addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.7.6") diff --git a/project/scripts/native-integration/bashTests b/project/scripts/native-integration/bashTests index 5fb77355238c..c71e81ac183b 100755 --- a/project/scripts/native-integration/bashTests +++ b/project/scripts/native-integration/bashTests @@ -19,7 +19,7 @@ die () { exit 1 } -PROG_HOME="$DIST_DIR/target/pack" +PROG_HOME="$DIST_DIR/target/universal/stage" SOURCE="$ROOT/tests/pos/HelloWorld.scala" SOURCE_VERSION="$ROOT/project/scripts/native-integration/reportScalaVersion.scala" @@ -42,7 +42,7 @@ clear_cli_dotfiles() # *---------------*/ # build the distribution -"$SBT" "$DIST_PROJECT/pack" +"$SBT" "$DIST_PROJECT/Universal/stage" SCALA_VERSION="" # iterate through lines in VERSION_SRC diff --git a/project/scripts/native-integration/winTests.bat b/project/scripts/native-integration/winTests.bat index a85b2c8c2531..18e406423ebd 100755 --- a/project/scripts/native-integration/winTests.bat +++ b/project/scripts/native-integration/winTests.bat @@ -2,7 +2,7 @@ setlocal @rem paths are relative to the root project directory -set "_PREFIX=dist\win-x86_64\target\pack" +set "_PREFIX=dist\win-x86_64\target\universal\stage" set "_SOURCE=tests\pos\HelloWorld.scala" set "_OUT_DIR=out" From 9b3bc6bf89e355fe93bc361c2ad274df49b084e7 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 1 Jul 2024 18:52:36 +0200 Subject: [PATCH 279/827] Adapt the CI workflow to test the msi --- .github/workflows/build-msi.yml | 23 ++++++++++++++++------- .github/workflows/ci.yaml | 5 +++++ 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-msi.yml b/.github/workflows/build-msi.yml index 548cc4b9baa8..8e3ac5eeb78b 100644 --- a/.github/workflows/build-msi.yml +++ b/.github/workflows/build-msi.yml @@ -1,19 +1,28 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO BUILD SCALA MSI ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL UPLOAD TO GITHUB THE MSI FILE FOR SCALA UNDER THE 'scala.msi' NAME ### +### ### +### NOTE: ### +### - WE SHOULD BUILD SCALA USING JAVA 8 ### +################################################################################################### name: Build the MSI Package on: - pull_request: + workflow_call: + +env: + # NECESSARY FLAG TO CORRECTLY CONFIGURE THE VERSION FOR SCALA + RELEASEBUILD: yes jobs: build: runs-on: windows-latest - env: - RELEASEBUILD: yes steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: 
actions/setup-java@v4 with: distribution: 'adopt' java-version: '8' diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2747830fb7d6..8081d64ccf48 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1008,3 +1008,8 @@ jobs: WORKFLOW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} with: filename: .github/workflows/issue_nightly_failed.md + + build-msi-package: + uses: ./.github/workflows/build-msi.yml + if : github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]') + # TODO: ADD A JOB THAT DEPENDS ON THIS TO TEST THE MSI From 04dd84300f96c5ccc62b3534fcbfacc900acd085 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 2 Jul 2024 11:35:58 +0200 Subject: [PATCH 280/827] Adapt the scripts to sbt-native-packager --- .github/workflows/ci.yaml | 8 +-- README.md | 2 +- bin/common | 6 +-- .../tools/scripting/ClasspathTests.scala | 2 +- .../dotty/tools/scripting/ScriptTestEnv.scala | 8 +-- project/DistributionPlugin.scala | 50 ------------------- project/scripts/bootstrappedOnlyCmdTests | 10 ++-- project/scripts/buildScalaBinary | 2 +- project/scripts/cmdScaladocTests | 2 +- project/scripts/winCmdTests | 2 +- project/scripts/winCmdTests.bat | 2 +- 11 files changed, 22 insertions(+), 72 deletions(-) delete mode 100644 project/DistributionPlugin.scala diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8081d64ccf48..74c4741c1ab4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -87,8 +87,8 @@ jobs: run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - name: Test - # DON'T add dist/pack! - # Adding dist/pack bootstraps the compiler + # DON'T add dist/Universal/stage! + # Adding dist/Universal/stage bootstraps the compiler # which undermines the point of these tests: # to quickly run the tests without the cost of bootstrapping # and also to run tests when the compiler doesn't bootstrap @@ -231,7 +231,7 @@ jobs: shell: cmd - name: build binary - run: sbt "dist-win-x86_64/pack" & bash -version + run: sbt "dist-win-x86_64/Universal/stage" & bash -version shell: cmd - name: cygwin tests @@ -271,7 +271,7 @@ jobs: uses: actions/checkout@v4 - name: build binary - run: sbt "dist-win-x86_64/pack" + run: sbt "dist-win-x86_64/Universal/stage" shell: cmd - name: Test diff --git a/README.md b/README.md index 7a2bda3f8073..7410c914a898 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ To try it in your project see also the [Getting Started User Guide](https://docs Building a Local Distribution ============================= -1. `sbt dist/packArchive` +1. `sbt dist/Universal/packageBin` 2. Find the newly-built distributions in `dist/target/` Code of Conduct diff --git a/bin/common b/bin/common index 37b2ebd1ff93..ad179412f590 100755 --- a/bin/common +++ b/bin/common @@ -13,14 +13,14 @@ shift # Mutates $@ by deleting the first element ($1) source "$ROOT/bin/common-platform" # Marker file used to obtain the date of latest call to sbt-back -version="$ROOT/$DIST_DIR/target/pack/VERSION" +version="$ROOT/$DIST_DIR/target/universal/stage/VERSION" # Create the target if absent or if file changed in ROOT/compiler new_files="$(find "$ROOT/compiler" \( -iname "*.scala" -o -iname "*.java" \) -newer "$version" 2> /dev/null)" if [ ! -f "$version" ] || [ ! -z "$new_files" ]; then echo "Building Dotty..." 
- (cd $ROOT && sbt "$DIST_PROJECT/pack") + (cd $ROOT && sbt "$DIST_PROJECT/Universal/stage") fi -"$ROOT/$DIST_DIR/target/pack/bin/$target" "$@" +"$ROOT/$DIST_DIR/target/universal/stage/bin/$target" "$@" diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index a946e509aeb3..0244e208af3c 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -67,7 +67,7 @@ class ClasspathTests: (hashbangJars.toSet -- packlibJars.toSet , "only in hashbang classpath") } // verify that the script hasbang classpath setting was effective at supplementing the classpath - // (a minimal subset of jars below dist*/target/pack/lib are always be in the classpath) + // (a minimal subset of jars below dist*/target/universal/stage/lib are always be in the classpath) val missingClasspathEntries = if hashbangClasspathJars.size != packlibJars.size then printf("packlib dir [%s]\n", packlibDir) printf("hashbangClasspathJars: %s\n", hashbangJars.map { _.relpath.norm }.mkString("\n ", "\n ", "")) diff --git a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala index dd1cc04bb58a..771c3ba14af0 100644 --- a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala +++ b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala @@ -16,7 +16,7 @@ import scala.jdk.CollectionConverters.* /** * Common Code for supporting scripting tests. * To override the path to the bash executable, set TEST_BASH= - * To specify where `dist[*]/target/pack/bin` resides, set TEST_CWD= + * To specify where `dist[*]/target/universal/stage/bin` resides, set TEST_CWD= * Test scripts run in a bash env, so paths are converted to forward slash via .norm. 
*/ object ScriptTestEnv { @@ -48,7 +48,7 @@ object ScriptTestEnv { } lazy val nativePackDir: Option[String] = { - def nativeDir(os: String, arch: String) = Some(s"dist/$os-$arch/target/pack") + def nativeDir(os: String, arch: String) = Some(s"dist/$os-$arch/target/universal/stage") def nativeOs(os: String) = archNorm match case arch @ ("aarch64" | "x86_64") => nativeDir(os, arch) case _ => None @@ -61,7 +61,7 @@ object ScriptTestEnv { def jvmPackDir() = println("warning: unknown OS architecture combination, defaulting to JVM launcher.") - "dist/target/pack" + "dist/target/universal/stage" def packDir: String = nativePackDir.getOrElse(jvmPackDir()) @@ -302,7 +302,7 @@ object ScriptTestEnv { // use optional TEST_BASH if defined, otherwise, bash must be in PATH // envScalaHome is: - // dist[*]/target/pack, if present + // dist[*]/target/universal/stage, if present // else, SCALA_HOME if defined // else, not defined lazy val envScalaHome = diff --git a/project/DistributionPlugin.scala b/project/DistributionPlugin.scala deleted file mode 100644 index 473ecd8378af..000000000000 --- a/project/DistributionPlugin.scala +++ /dev/null @@ -1,50 +0,0 @@ -import com.typesafe.sbt.packager.Keys.stage -import com.typesafe.sbt.packager.universal.UniversalPlugin -import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport.Universal -import sbt.* - -/** - * @author Hamza REMMAL (https://github.com/hamzaremmal/) - */ -object DistributionPlugin extends AutoPlugin { - - override def trigger = allRequirements - - override def requires = - super.requires && UniversalPlugin // Require the Universal Plugin to - - object autoImport { - val `universal_project` = settingKey[Project]("???") - val `linux-aarch64_project` = settingKey[Project]("???") - val `linux-x86_64_project` = settingKey[Project]("???") - val `mac-aarch64_project` = settingKey[Project]("???") - val `win-x86_64_project` = settingKey[Project]("???") - - - // ========================== TASKS TO GENERATE THE FOLDER PACKAGE ============================ - val `pack-universal` = - taskKey[File]("Generate the package with the universal binaries (folder)") - val `pack_linux-aarch64` = - taskKey[File]("Generate the package with the linux-aarch64 binaries (folder)") - val `pack_linux-x86_64` = - taskKey[File]("Generate the package with the linux-x86_64 binaries (folder)") - val `pack_mac-aarch64` = - taskKey[File]("Generate the package with the mac-aarch64 binaries (folder)") - val `pack_mac-x86_64` = - taskKey[File]("Generate the package with the mac-x86_64 binaries (folder)") - val `pack_win-x86_64` = - taskKey[File]("Generate the package with the linux-x86_64 binaries (folder)") - } - - import autoImport.* - - override def projectSettings = Def.settings( - `pack-universal` := (`universal_project` / Universal./(stage)).value , - `pack_linux-aarch64` := ???, - `pack_linux-x86_64` := ???, - `pack_mac-aarch64` := ???, - `pack_mac-x86_64` := ???, - `pack_win-x86_64` := ??? 
- ) - -} diff --git a/project/scripts/bootstrappedOnlyCmdTests b/project/scripts/bootstrappedOnlyCmdTests index 11c35a7028cc..93263baa122a 100755 --- a/project/scripts/bootstrappedOnlyCmdTests +++ b/project/scripts/bootstrappedOnlyCmdTests @@ -15,13 +15,13 @@ echo "testing scala.quoted.Expr.run from sbt scala" grep -qe "val a: scala.Int = 3" "$tmp" # setup for `scalac`/`scala` script tests -"$SBT" "$DIST_PROJECT/pack" +"$SBT" "$DIST_PROJECT/universal/stage" -echo "capturing scala version from $DIST_DIR/target/pack/VERSION" -IFS=':=' read -ra versionProps < "$ROOT/$DIST_DIR/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps +echo "capturing scala version from $DIST_DIR/target/universal/stage/VERSION" +IFS=':=' read -ra versionProps < "$ROOT/$DIST_DIR/target/universal/stage/VERSION" # temporarily set IFS to ':=' to split versionProps [ ${#versionProps[@]} -eq 3 ] && \ [ ${versionProps[0]} = "version" ] && \ - [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/$DIST_DIR/target/pack/VERSION" + [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/$DIST_DIR/target/universal/stage/VERSION" scala_version=${versionProps[2]} # check that `scalac` compiles and `scala` runs it @@ -77,7 +77,7 @@ echo "testing sbt scalac with suspension" clear_out "$OUT" "$SBT" "scala3-compiler-bootstrapped/scalac -d $OUT tests/pos-macros/macros-in-same-project-1/Bar.scala tests/pos-macros/macros-in-same-project-1/Foo.scala" > "$tmp" -# echo ":quit" | ./$DIST_DIR/target/pack/bin/scala # not supported by CI +# echo ":quit" | ./$DIST_DIR/target/universal/stage/bin/scala # not supported by CI echo "testing ./bin/scaladoc" clear_out "$OUT1" diff --git a/project/scripts/buildScalaBinary b/project/scripts/buildScalaBinary index 7fc5275e5d8d..9451dbdd2a07 100755 --- a/project/scripts/buildScalaBinary +++ b/project/scripts/buildScalaBinary @@ -9,4 +9,4 @@ SBT="$ROOT/project/scripts/sbt" # if run on CI source "$ROOT/bin/common-platform" # build the scala/scalac/scaladoc binary, where scala is native for the current platform. 
-"$SBT" "$DIST_PROJECT/pack" +"$SBT" "$DIST_PROJECT/Universal/stage" diff --git a/project/scripts/cmdScaladocTests b/project/scripts/cmdScaladocTests index 06353af693f1..b54789032ad2 100755 --- a/project/scripts/cmdScaladocTests +++ b/project/scripts/cmdScaladocTests @@ -20,7 +20,7 @@ SOURCE_LINKS_REPOSITORY="scala/scala3" SOURCE_LINKS_VERSION="${GITHUB_SHA:-$DOTTY_BOOTSTRAPPED_VERSION}" "$SBT" "scaladoc/generateTestcasesDocumentation" > "$tmp" 2>&1 || echo "generated testcases project with sbt" -dist/target/pack/bin/scaladoc \ +dist/target/universal/stage/bin/scaladoc \ -d "$OUT1" \ -project "scaladoc testcases" \ -source-links:out/bootstrap/scala2-library-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/scala-library-src=github://scala/scala/v"${STDLIB_VERSION}"#src/library \ diff --git a/project/scripts/winCmdTests b/project/scripts/winCmdTests index fe6a43c7f68f..dbdaed218558 100644 --- a/project/scripts/winCmdTests +++ b/project/scripts/winCmdTests @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -e -PREFIX="dist/win-x86_64/target/pack" +PREFIX="dist/win-x86_64/target/universal/stage" SOURCE="tests/pos/HelloWorld.scala" $PREFIX/bin/scalac @project/scripts/options "$SOURCE" $PREFIX/bin/scalac -d out "$SOURCE" diff --git a/project/scripts/winCmdTests.bat b/project/scripts/winCmdTests.bat index 903f74d7ab98..097c05839205 100644 --- a/project/scripts/winCmdTests.bat +++ b/project/scripts/winCmdTests.bat @@ -2,7 +2,7 @@ setlocal @rem paths are relative to the root project directory -set "_PREFIX=dist\win-x86_64\target\pack" +set "_PREFIX=dist\win-x86_64\target\universal\stage" set "_SOURCE=tests\pos\HelloWorld.scala" set "_OUT_DIR=out" set "_SITE_DIR=_site" From af1175420982edb067eecc033bb90d3ca809bae8 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Wed, 3 Jul 2024 22:02:00 +0200 Subject: [PATCH 281/827] Fix issue with scaladoc --- project/Build.scala | 1 + project/scripts/bootstrappedOnlyCmdTests | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 86da7eeee959..49ad419265ca 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2456,6 +2456,7 @@ object ScaladocConfigs { } lazy val DefaultGenerationConfig = Def.task { + def distLocation = (dist / Universal / stage).value DefaultGenerationSettings.value } diff --git a/project/scripts/bootstrappedOnlyCmdTests b/project/scripts/bootstrappedOnlyCmdTests index 93263baa122a..6f5c75ceb922 100755 --- a/project/scripts/bootstrappedOnlyCmdTests +++ b/project/scripts/bootstrappedOnlyCmdTests @@ -15,7 +15,7 @@ echo "testing scala.quoted.Expr.run from sbt scala" grep -qe "val a: scala.Int = 3" "$tmp" # setup for `scalac`/`scala` script tests -"$SBT" "$DIST_PROJECT/universal/stage" +"$SBT" "$DIST_PROJECT/Universal/stage" echo "capturing scala version from $DIST_DIR/target/universal/stage/VERSION" IFS=':=' read -ra versionProps < "$ROOT/$DIST_DIR/target/universal/stage/VERSION" # temporarily set IFS to ':=' to split versionProps From 7c996e5a0d14475c2781544429fc409aa4455017 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Thu, 11 Jul 2024 15:31:23 +0200 Subject: [PATCH 282/827] Update the native-package plugin --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 96f767726315..bb0693ced132 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -20,4 +20,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0") 
-addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.7.6") +addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.10.0") From a0f502e5819c8b664f398b161e2d75aef3f35c7a Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 15 Jul 2024 11:42:16 +0200 Subject: [PATCH 283/827] Address review --- project/Build.scala | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 49ad419265ca..09080bba370d 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2201,12 +2201,13 @@ object Build { Windows / mappings := (Universal / mappings).value, Windows / packageBin := (Windows / packageBin).dependsOn(republish).value, Windows / wixFiles := (Windows / wixFiles).dependsOn(republish).value, - maintainer := "The Scala Programming Language", - packageSummary := s"Scala $dottyVersion", - packageDescription := """Installer for the Scala Programming Language""", - wixProductId := "74ED19C3-74FE-4ABA-AF30-55A06B6322A9", - wixProductUpgradeId := "3E5A1A82-CA67-4353-94FE-5BDD400AF66B", - wixProductLicense := Some(dist.base / "LICENSE.rtf") + // Additional information: https://wixtoolset.org/docs/schema/wxs/package/ + maintainer := "The Scala Programming Language", // The displayed maintainer of the package + packageSummary := s"Scala $dottyVersion", // The displayed name of the package + packageDescription := """Installer for the Scala Programming Language""", // The displayed description of the package + wixProductId := "*", // Unique ID for each generated MSI; will change for each generated msi + wixProductUpgradeId := "3E5A1A82-CA67-4353-94FE-5BDD400AF66B", // Unique ID to identify the package; used to manage the upgrades + wixProductLicense := Some(dist.base / "LICENSE.rtf") // Link to the LICENSE to show during the installation (keep in sync with ../LICENSE) ) lazy val `dist-linux-x86_64` = project.in(file("dist/linux-x86_64")).asDist(Bootstrapped) From 604816a39fbf08e4556ab64dedc38be8951e6893 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 16 Jul 2024 19:46:43 +0200 Subject: [PATCH 284/827] add test to assert classes are still reported --- .../test/xsbt/ProductsSpecification.scala | 31 +++++++++++++++++-- .../xsbt/ScalaCompilerForUnitTesting.scala | 17 +++++++--- sbt-bridge/test/xsbti/TestCallback.scala | 4 +++ 3 files changed, 45 insertions(+), 7 deletions(-) diff --git a/sbt-bridge/test/xsbt/ProductsSpecification.scala b/sbt-bridge/test/xsbt/ProductsSpecification.scala index b13defecc4cc..f268818f2d8b 100644 --- a/sbt-bridge/test/xsbt/ProductsSpecification.scala +++ b/sbt-bridge/test/xsbt/ProductsSpecification.scala @@ -23,12 +23,39 @@ class ProductsSpecification { val output = compiler.compileSrcsToJar(src) val srcFile = output.srcFiles.head val products = output.analysis.productClassesToSources.filter(_._2 == srcFile).keys.toSet - + def toPathInJar(className: String): Path = Paths.get(s"${output.classesOutput}!${className.replace('.', File.separatorChar)}.class") val expected = Set("example.A", "example.A$B", "example.A$C$1").map(toPathInJar) assertEquals(products, expected) } + @Test + def extractNonLocalClassesNoInc = { + val src = + """package example + | + |class A { + | class B + | def foo = + | class C + |}""".stripMargin + val output = compiler.compileSrcsNoInc(src) + val srcFile = output.srcFiles.head + val (srcNames, binaryNames) = output.analysis.classNames(srcFile).unzip // non local class names + + assertFalse(output.analysis.enabled()) // inc phases are disabled + 
assertTrue(output.analysis.apis.isEmpty) // extract-api did not run + assertTrue(output.analysis.usedNamesAndScopes.isEmpty) // extract-dependencies did not run + + // note that local class C is not included, classNames only records non local classes + val expectedBinary = Set("example.A", "example.A$B") + assertEquals(expectedBinary, binaryNames.toSet) + + // note that local class C is not included, classNames only records non local classes + val expectedSrc = Set("example.A", "example.A.B") + assertEquals(expectedSrc, srcNames.toSet) + } + private def compiler = new ScalaCompilerForUnitTesting -} \ No newline at end of file +} diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index fd125f25560b..400bcd369e27 100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -135,11 +135,15 @@ class ScalaCompilerForUnitTesting { * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. */ - def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil, compileToJar: Boolean = false): CompileOutput = { + def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil, compileToJar: Boolean = false, incEnabled: Boolean = true): CompileOutput = { val temp = IO.createTemporaryDirectory - val analysisCallback = new TestCallback + val (forceSbtArgs, analysisCallback) = + if (incEnabled) + (Seq("-Yforce-sbt-phases"), new TestCallback) + else + (Seq.empty, new TestCallbackNoInc) val testProgress = new TestCompileProgress - val classesOutput = + val classesOutput = if (compileToJar) { val jar = new File(temp, "classes.jar") jar.createNewFile() @@ -174,7 +178,7 @@ class ScalaCompilerForUnitTesting { bridge.run( virtualSrcFiles, new TestDependencyChanges, - Array("-Yforce-sbt-phases", "-classpath", classesOutputPath, "-usejavacp", "-d", classesOutputPath) ++ maybeSourcePath, + (forceSbtArgs ++: Array("-classpath", classesOutputPath, "-usejavacp", "-d", classesOutputPath)) ++ maybeSourcePath, output, analysisCallback, new TestReporter, @@ -193,6 +197,10 @@ class ScalaCompilerForUnitTesting { compileSrcs(List(srcs.toList)) } + def compileSrcsNoInc(srcs: String*): CompileOutput = { + compileSrcs(List(srcs.toList), incEnabled = false) + } + def compileSrcsToJar(srcs: String*): CompileOutput = compileSrcs(List(srcs.toList), compileToJar = true) @@ -202,4 +210,3 @@ class ScalaCompilerForUnitTesting { new TestVirtualFile(srcFile.toPath) } } - diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala index 3398590b169a..9f6df75d84f0 100644 --- a/sbt-bridge/test/xsbti/TestCallback.scala +++ b/sbt-bridge/test/xsbti/TestCallback.scala @@ -11,6 +11,10 @@ import DependencyContext._ import java.{util => ju} import ju.Optional +class TestCallbackNoInc extends TestCallback { + override def enabled(): Boolean = false +} + class TestCallback extends AnalysisCallback2 { case class TestUsedName(name: String, scopes: ju.EnumSet[UseScope]) From d0091b799f822281b36f7eaacfa2572c29188e6b Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Sat, 8 Jun 2024 14:05:50 +0100 Subject: [PATCH 285/827] Add Chocolatey package code --- pkgs/README.md | 15 +++++++ pkgs/chocolatey/README.md | 10 +++++ pkgs/chocolatey/icon.svg | 30 +++++++++++++ pkgs/chocolatey/scala.nuspec | 25 +++++++++++ pkgs/chocolatey/tools/chocolateyInstall.ps1 | 44 +++++++++++++++++++ 
pkgs/chocolatey/tools/chocolateyUninstall.ps1 | 21 +++++++++ 6 files changed, 145 insertions(+) create mode 100644 pkgs/README.md create mode 100644 pkgs/chocolatey/README.md create mode 100644 pkgs/chocolatey/icon.svg create mode 100644 pkgs/chocolatey/scala.nuspec create mode 100644 pkgs/chocolatey/tools/chocolateyInstall.ps1 create mode 100644 pkgs/chocolatey/tools/chocolateyUninstall.ps1 diff --git a/pkgs/README.md b/pkgs/README.md new file mode 100644 index 000000000000..86b0dc6b6fe6 --- /dev/null +++ b/pkgs/README.md @@ -0,0 +1,15 @@ +
# Configuration for Chocolatey
+ +Official support for Chocolatey started by the release of Scala 3.6.0 + +> [!IMPORTANT] +> This folder contains the templates to generate the configuration for Chocolatey. +> The `scala.nuspec` and `chocolateyInstall.ps1` files needs to be rewritten by changing the following placeholders: +> - @LAUNCHER_VERSION@ : Placeholder for the current scala version to deploy +> - @LAUNCHER_URL@ : Placeholder for the URL to the windows zip released on GitHub + +## Important information + +- How to create a *Chocolatey* package: https://docs.chocolatey.org/en-us/create/create-packages/ +- Guidelines to follow for the package icon: https://docs.chocolatey.org/en-us/create/create-packages/#package-icon-guidelines +- `.nuspec` format specification: https://learn.microsoft.com/en-gb/nuget/reference/nuspec diff --git a/pkgs/chocolatey/README.md b/pkgs/chocolatey/README.md new file mode 100644 index 000000000000..fac301082bac --- /dev/null +++ b/pkgs/chocolatey/README.md @@ -0,0 +1,10 @@ +
# Configuration for Chocolatey
+ +Official support for Chocolatey started by the release of Scala 3.6.0 + +> [!IMPORTANT] +> This folder contains the templates to generate the configuration for Chocolatey. +> The `scala.nuspec` and `chocolateyInstall.ps1` files needs to be rewritten by changing the following placeholders: +> - @LAUNCHER_VERSION@ : Placeholder for the current scala version to deploy +> - @LAUNCHER_URL@ : Placeholder for the URL to the windows zip released on GitHub +> - @LAUNCHER_SHA256@ : Placeholder for the SHA256 of the msi file released on GitHub diff --git a/pkgs/chocolatey/icon.svg b/pkgs/chocolatey/icon.svg new file mode 100644 index 000000000000..0ccb404b5624 --- /dev/null +++ b/pkgs/chocolatey/icon.svg @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/pkgs/chocolatey/scala.nuspec b/pkgs/chocolatey/scala.nuspec new file mode 100644 index 000000000000..2fff36a83d5b --- /dev/null +++ b/pkgs/chocolatey/scala.nuspec @@ -0,0 +1,25 @@ + + + + scala + @LAUNCHER_VERSION@ + Scala + scala + scala + scala + Scala + Official release of the Scala Programming Language on Chocolatey. + https://github.com/scala/scala3/tree/main/pkgs/chocolatey + https://github.com/scala/scala3 + https://scala-lang.org/ + https://github.com/scala/scala3/issues + © 2002-2024, LAMP/EPFL + https://cdn.jsdelivr.net/gh/scala/scala3@version/pkgs/chocolatey/icon.svg + https://github.com/scala/scala3/blob/main/LICENSE + true + https://github.com/scala/scala3/releases + + + + + diff --git a/pkgs/chocolatey/tools/chocolateyInstall.ps1 b/pkgs/chocolatey/tools/chocolateyInstall.ps1 new file mode 100644 index 000000000000..dc8874942561 --- /dev/null +++ b/pkgs/chocolatey/tools/chocolateyInstall.ps1 @@ -0,0 +1,44 @@ +$ErrorActionPreference = 'Stop'; + +$unzipLocation = Split-Path -Parent $MyInvocation.MyCommand.Definition # Get the root of chocolatey folder +$unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageName)" # Append the package's name +$unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageVersion)" # Append the package's version + +# Configure the installation arguments +$packageArgs = @{ + packageName = 'scala' + Url64 = '@LAUNCHER_URL@' + UnzipLocation = $unzipLocation +} + +## In case we are running in the CI, add the authorisation header to fetch the zip +## See: https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact +if ($env:DOTTY_CI_INSTALLATION) { + Write-Host "Installing the Chocolatey package in Scala 3's CI" + $packageArgs += @{ + Options = @{ + Headers = @{ + Accept = 'application/vnd.github+json' + Authorization = "Bearer $env:DOTTY_CI_INSTALLATION" + } + } + } +} + +Install-ChocolateyZipPackage @packageArgs + +# Find the path to the bin directory to create the shims +if($env:DOTTY_CI_INSTALLATION) { + $scalaBinPath = Join-Path $unzipLocation 'bin' # Update this path if the structure inside the ZIP changes +} else { + $extractedDir = Get-ChildItem -Path $unzipLocation | Where-Object { $_.PSIsContainer } | Select-Object -First 1 + $scalaBinPath = Join-Path $unzipLocation $extractedDir | Join-Path -ChildPath 'bin' +} + +# Iterate through the .bat files in the bin directory and create shims +Write-Host "Creating shims for .bat file from $scalaBinPath" +Get-ChildItem -Path $scalaBinPath -Filter '*.bat' | ForEach-Object { + $file = $_.FullName + Write-Host "Creating shim for $file..." 
+ Install-BinFile -Name $_.BaseName -Path $file +} diff --git a/pkgs/chocolatey/tools/chocolateyUninstall.ps1 b/pkgs/chocolatey/tools/chocolateyUninstall.ps1 new file mode 100644 index 000000000000..387914af5d09 --- /dev/null +++ b/pkgs/chocolatey/tools/chocolateyUninstall.ps1 @@ -0,0 +1,21 @@ +$ErrorActionPreference = 'Stop'; + +$unzipLocation = Split-Path -Parent $MyInvocation.MyCommand.Definition # Get the root of chocolatey folder +$unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageName)" # Append the package's name +$unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageVersion)" # Append the package's version + +# Find the path to the bin directory to create the shims +if($env:DOTTY_CI_INSTALLATION) { + $scalaBinPath = Join-Path $unzipLocation 'bin' # Update this path if the structure inside the ZIP changes + } else { + $extractedDir = Get-ChildItem -Path $unzipLocation | Where-Object { $_.PSIsContainer } | Select-Object -First 1 + $scalaBinPath = Join-Path $unzipLocation $extractedDir | Join-Path -ChildPath 'bin' + } + +# Iterate through the .bat files in the bin directory and remove shims +Write-Host "Removing shims for .bat file from $scalaBinPath" +Get-ChildItem -Path $scalaBinPath -Filter '*.bat' | ForEach-Object { + $file = $_.FullName + Write-Host "Removing shim for $file..." + Uninstall-BinFile -Name $_.BaseName -Path $file +} From b67e1861935752a228710b7f69b98651438c60b2 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Wed, 17 Jul 2024 14:35:20 +0200 Subject: [PATCH 286/827] Add workflow for Chocolatey --- .github/workflows/build-chocolatey.yml | 48 ++++++++++ .github/workflows/build-sdk.yml | 106 +++++++++++++++++++++++ .github/workflows/ci.yaml | 20 +++++ .github/workflows/publish-chocolatey.yml | 39 +++++++++ .github/workflows/releases.yml | 19 ++++ .github/workflows/test-chocolatey.yml | 51 +++++++++++ 6 files changed, 283 insertions(+) create mode 100644 .github/workflows/build-chocolatey.yml create mode 100644 .github/workflows/build-sdk.yml create mode 100644 .github/workflows/publish-chocolatey.yml create mode 100644 .github/workflows/test-chocolatey.yml diff --git a/.github/workflows/build-chocolatey.yml b/.github/workflows/build-chocolatey.yml new file mode 100644 index 000000000000..d1326c19ed87 --- /dev/null +++ b/.github/workflows/build-chocolatey.yml @@ -0,0 +1,48 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO BUILD SCALA WITH CHOCOLATEY ### +### HOW TO USE: ### +### ### +### NOTE: ### +### ### +################################################################################################### + + +name: Build 'scala' Chocolatey Package +run-name: Build 'scala' (${{ inputs.version }}) Chocolatey Package + +on: + workflow_call: + inputs: + version: + required: true + type: string + url: + required: true + type: string + +jobs: + build: + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + - name: Replace the version placeholder + uses: richardrigutins/replace-in-files@v2 + with: + files: ./pkgs/chocolatey/scala.nuspec + search-text: '@LAUNCHER_VERSION@' + replacement-text: ${{ inputs.version }} + - name: Replace the URL placeholder + uses: richardrigutins/replace-in-files@v2 + with: + files: ./pkgs/chocolatey/tools/chocolateyInstall.ps1 + search-text: '@LAUNCHER_URL@' + replacement-text: ${{ inputs.url }} + - name: Build the Chocolatey package (.nupkg) + run: choco pack ./pkgs/chocolatey/scala.nuspec --out ./pkgs/chocolatey + - 
name: Upload the Chocolatey package to GitHub + uses: actions/upload-artifact@v4 + with: + name: scala.nupkg + path: ./pkgs/chocolatey/scala.${{ inputs.version }}.nupkg + if-no-files-found: error + \ No newline at end of file diff --git a/.github/workflows/build-sdk.yml b/.github/workflows/build-sdk.yml new file mode 100644 index 000000000000..935e19c7d90c --- /dev/null +++ b/.github/workflows/build-sdk.yml @@ -0,0 +1,106 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO BUILD THE SCALA LAUNCHERS ### +### HOW TO USE: ### +### - THSI WORKFLOW WILL PACKAGE THE ALL THE LAUNCHERS AND UPLOAD THEM TO GITHUB ARTIFACTS ### +### ### +### NOTE: ### +### - SEE THE WORFLOW FOR THE NAMES OF THE ARTIFACTS ### +################################################################################################### + + +name: Build Scala Launchers +run-name: Build Scala Launchers + +on: + workflow_call: + inputs: + java-version: + type : string + required : true + outputs: + universal-id: + description: ID of the `universal` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.universal-id }} + linux-x86_64-id: + description: ID of the `linux x86-64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.linux-x86_64-id }} + linux-aarch64-id: + description: ID of the `linux aarch64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.linux-aarch64-id }} + mac-x86_64-id: + description: ID of the `mac x86-64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.mac-x86_64-id }} + mac-aarch64-id: + description: ID of the `mac aarch64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.mac-aarch64-id }} + win-x86_64-id: + description: ID of the `win x86-64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.win-x86_64-id }} + + +jobs: + build: + runs-on: ubuntu-latest + outputs: + universal-id : ${{ steps.universal.outputs.artifact-id }} + linux-x86_64-id : ${{ steps.linux-x86_64.outputs.artifact-id }} + linux-aarch64-id: ${{ steps.linux-aarch64.outputs.artifact-id }} + mac-x86_64-id : ${{ steps.mac-x86_64.outputs.artifact-id }} + mac-aarch64-id : ${{ steps.mac-aarch64.outputs.artifact-id }} + win-x86_64-id : ${{ steps.win-x86_64.outputs.artifact-id }} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ inputs.java-version }} + cache : sbt + - name: Build and pack the SDK (universal) + run : ./project/scripts/sbt dist/Universal/stage + - name: Build and pack the SDK (linux x86-64) + run : ./project/scripts/sbt dist-linux-x86_64/Universal/stage + - name: Build and pack the SDK (linux aarch64) + run : ./project/scripts/sbt dist-linux-aarch64/Universal/stage + - name: Build and pack the SDK (mac x86-64) + run : ./project/scripts/sbt dist-mac-x86_64/Universal/stage + - name: Build and pack the SDK (mac aarch64) + run : ./project/scripts/sbt dist-mac-aarch64/Universal/stage + - name: Build and pack the SDK (win x86-64) + run : ./project/scripts/sbt dist-win-x86_64/Universal/stage + - name: Upload zip archive to GitHub Artifact (universal) + uses: actions/upload-artifact@v4 + id : universal + with: + path: ./dist/target/universal/stage + name: scala3-universal + - name: Upload zip archive to GitHub Artifact (linux x86-64) + uses: 
actions/upload-artifact@v4 + id : linux-x86_64 + with: + path: ./dist/linux-x86_64/target/universal/stage + name: scala3-x86_64-pc-linux + - name: Upload zip archive to GitHub Artifact (linux aarch64) + uses: actions/upload-artifact@v4 + id : linux-aarch64 + with: + path: ./dist/linux-aarch64/target/universal/stage + name: scala3-aarch64-pc-linux + - name: Upload zip archive to GitHub Artifact (mac x86-64) + uses: actions/upload-artifact@v4 + id : mac-x86_64 + with: + path: ./dist/mac-x86_64/target/universal/stage + name: scala3-x86_64-apple-darwin + - name: Upload zip archive to GitHub Artifact (mac aarch64) + uses: actions/upload-artifact@v4 + id : mac-aarch64 + with: + path: ./dist/mac-aarch64/target/universal/stage + name: scala3-aarch64-apple-darwin + - name: Upload zip archive to GitHub Artifact (win x86-64) + uses: actions/upload-artifact@v4 + id : win-x86_64 + with: + path: ./dist/win-x86_64/target/universal/stage + name: scala3-x86_64-pc-win32 + \ No newline at end of file diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 74c4741c1ab4..8b98448a98bd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1013,3 +1013,23 @@ jobs: uses: ./.github/workflows/build-msi.yml if : github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]') # TODO: ADD A JOB THAT DEPENDS ON THIS TO TEST THE MSI + + build-sdk-package: + uses: ./.github/workflows/build-sdk.yml + with: + java-version: 8 + + build-chocolatey-package: + uses: ./.github/workflows/build-chocolatey.yml + needs: [ build-sdk-package ] + with: + version: 3.6.0-local # TODO: FIX THIS + url : https://api.github.com/repos/scala/scala3/actions/artifacts/${{ needs.build-sdk-package.outputs.win-x86_64-id }}/zip + + test-chocolatey-package: + uses: ./.github/workflows/test-chocolatey.yml + with: + version : 3.6.0-local # TODO: FIX THIS + java-version: 8 + if: github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_chocolatey]') + needs: [ build-chocolatey-package ] diff --git a/.github/workflows/publish-chocolatey.yml b/.github/workflows/publish-chocolatey.yml new file mode 100644 index 000000000000..3b31728a50ba --- /dev/null +++ b/.github/workflows/publish-chocolatey.yml @@ -0,0 +1,39 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO PUBLISH SCALA TO CHOCOLATEY ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL PUBLISH TO CHOCOLATEY THE MSI ### +### ### +### NOTE: ### +### - WE SHOULD KEEP IN SYNC THE NAME OF THE MSI WITH THE ACTUAL BUILD ### +### - WE SHOULD KEEP IN SYNC THE URL OF THE RELEASE ### +### - IT ASSUMES THAT THE `build-chocolatey` WORKFLOW WAS EXECUTED BEFORE ### +################################################################################################### + + +name: Publish Scala to Chocolatey +run-name: Publish Scala ${{ inputs.version }} to Chocolatey + +on: + workflow_call: + inputs: + version: + required: true + type: string + secrets: + # Connect to https://community.chocolatey.org/profiles/scala + # Accessible via https://community.chocolatey.org/account + API-KEY: + required: true + +jobs: + publish: + runs-on: windows-latest + steps: + - name: Fetch the Chocolatey package from GitHub + uses: actions/download-artifact@v4 + with: + name: scala.nupkg + - name: Publish the package to Chocolatey + run: choco push scala.nupkg --source https://push.chocolatey.org/ --api-key ${{ 
secrets.API-KEY }} + \ No newline at end of file diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index a4977bc5ffd9..f0edbac4127a 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -37,4 +37,23 @@ jobs: secrets: DOTTYBOT-TOKEN: ${{ secrets.DOTTYBOT_WINGET_TOKEN }} + build-chocolatey: + uses: ./.github/workflows/build-chocolatey.yml + with: + version: ${{ inputs.version }} + url : 'https://github.com/scala/scala3/releases/download/${{ inputs.version }}/scala3-${{ inputs.version }}-x86_64-pc-win32.zip' + test-chocolatey: + uses: ./.github/workflows/test-chocolatey.yml + needs: build-chocolatey + with: + version : ${{ inputs.version }} + java-version: 8 + publish-chocolatey: + uses: ./.github/workflows/publish-chocolatey.yml + needs: [ build-chocolatey, test-chocolatey ] + with: + version: ${{ inputs.version }} + secrets: + API-KEY: ${{ secrets.CHOCOLATEY_KEY }} + # TODO: ADD RELEASE WORKFLOW TO CHOCOLATEY AND OTHER PACKAGE MANAGERS HERE \ No newline at end of file diff --git a/.github/workflows/test-chocolatey.yml b/.github/workflows/test-chocolatey.yml new file mode 100644 index 000000000000..b6ca9bf74b12 --- /dev/null +++ b/.github/workflows/test-chocolatey.yml @@ -0,0 +1,51 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO TEST SCALA WITH CHOCOLATEY ### +### HOW TO USE: ### +### ### +### NOTE: ### +### ### +################################################################################################### + +name: Test 'scala' Chocolatey Package +run-name: Test 'scala' (${{ inputs.version }}) Chocolatey Package + +on: + workflow_call: + inputs: + version: + required: true + type: string + java-version: + required: true + type : string + +env: + CHOCOLATEY-REPOSITORY: chocolatey-pkgs + DOTTY_CI_INSTALLATION: ${{ secrets.GITHUB_TOKEN }} + +jobs: + test: + runs-on: windows-latest + steps: + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ inputs.java-version }} + - name: Download the 'nupkg' from GitHub Artifacts + uses: actions/download-artifact@v4 + with: + name: scala.nupkg + path: ${{ env.CHOCOLATEY-REPOSITORY }} + - name : Install the `scala` package with Chocolatey + run : choco install scala --source "${{ env.CHOCOLATEY-REPOSITORY }}" --pre # --pre since we might be testing non-stable releases + shell: pwsh + - name : Test the `scala` command + run : scala --version + shell: pwsh + - name : Test the `scalac` command + run : scalac --version + - name : Test the `scaladoc` command + run : scaladoc --version + - name : Uninstall the `scala` package + run : choco uninstall scala + \ No newline at end of file From 3e782384e83d8165ac6ba7330d795143ea41d510 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Wed, 17 Jul 2024 14:59:55 +0200 Subject: [PATCH 287/827] feat: Implement completions for named tuple fields (#21202) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit closes scala#20478 --------- Co-authored-by: Jędrzej Rochala <48657087+rochala@users.noreply.github.com> --- .../tools/dotc/interactive/Completion.scala | 43 +++++++++++++++-- .../tools/languageserver/CompletionTest.scala | 10 ++++ .../pc/tests/completion/CompletionSuite.scala | 47 +++++++++++++++++++ 3 files changed, 96 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 
7882d635f84a..1395d9b80b53 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -32,6 +32,8 @@ import dotty.tools.dotc.core.Names import dotty.tools.dotc.core.Types import dotty.tools.dotc.core.Symbols import dotty.tools.dotc.core.Constants +import dotty.tools.dotc.core.TypeOps +import dotty.tools.dotc.core.StdNames /** * One of the results of a completion query. @@ -200,7 +202,8 @@ object Completion: private def computeCompletions( pos: SourcePosition, - mode: Mode, rawPrefix: String, + mode: Mode, + rawPrefix: String, adjustedPath: List[tpd.Tree], untpdPath: List[untpd.Tree], matches: Option[Name => Boolean] @@ -442,9 +445,17 @@ object Completion: def selectionCompletions(qual: tpd.Tree)(using Context): CompletionMap = val adjustedQual = widenQualifier(qual) - implicitConversionMemberCompletions(adjustedQual) ++ - extensionCompletions(adjustedQual) ++ - directMemberCompletions(adjustedQual) + val implicitConversionMembers = implicitConversionMemberCompletions(adjustedQual) + val extensionMembers = extensionCompletions(adjustedQual) + val directMembers = directMemberCompletions(adjustedQual) + val namedTupleMembers = namedTupleCompletions(adjustedQual) + + List( + implicitConversionMembers, + extensionMembers, + directMembers, + namedTupleMembers + ).reduce(_ ++ _) /** Completions for members of `qual`'s type. * These include inherited definitions but not members added by extensions or implicit conversions @@ -516,6 +527,30 @@ object Completion: .toSeq .groupByName + /** Completions for named tuples */ + private def namedTupleCompletions(qual: tpd.Tree)(using Context): CompletionMap = + def namedTupleCompletionsFromType(tpe: Type): CompletionMap = + val freshCtx = ctx.fresh.setExploreTyperState() + inContext(freshCtx): + tpe.namedTupleElementTypes + .map { (name, tpe) => + val symbol = newSymbol(owner = NoSymbol, name, EmptyFlags, tpe) + val denot = SymDenotation(symbol, NoSymbol, name, EmptyFlags, tpe) + name -> denot + } + .toSeq + .filter((name, denot) => include(denot, name)) + .groupByName + + val qualTpe = qual.typeOpt + if qualTpe.isNamedTupleType then + namedTupleCompletionsFromType(qualTpe) + else if qualTpe.derivesFrom(defn.SelectableClass) then + val pre = if !TypeOps.isLegalPrefix(qualTpe) then Types.SkolemType(qualTpe) else qualTpe + val fieldsType = pre.select(StdNames.tpnme.Fields).dealias.simplified + namedTupleCompletionsFromType(fieldsType) + else Map.empty + /** Completions from extension methods */ private def extensionCompletions(qual: tpd.Tree)(using Context): CompletionMap = def asDefLikeType(tpe: Type): Type = tpe match diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index d64bb44c1a5d..887c7a983729 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -1723,4 +1723,14 @@ class CompletionTest { .completion(m5, Set()) .completion(m6, Set()) + @Test def namedTupleCompletion: Unit = + code"""|import scala.language.experimental.namedTuples + | + |val person: (name: String, city: String) = + | (name = "Jamie", city = "Lausanne") + | + |val n = person.na$m1 + |""" + .completion(m1, Set(("name", Field, "String"))) + } diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala 
b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index f660baa6af6d..f281f42d9db3 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -1983,3 +1983,50 @@ class CompletionSuite extends BaseCompletionSuite: |val foo: SomeClass |""".stripMargin, ) + + @Test def `namedTuple completions` = + check( + """|import scala.language.experimental.namedTuples + |import scala.NamedTuple.* + | + |val person = (name = "Jamie", city = "Lausanne") + | + |val n = person.na@@""".stripMargin, + "name: String", + filter = _.contains("name") + ) + + @Test def `Selectable with namedTuple Fields member` = + check( + """|import scala.language.experimental.namedTuples + |import scala.NamedTuple.* + | + |class NamedTupleSelectable extends Selectable { + | type Fields <: AnyNamedTuple + | def selectDynamic(name: String): Any = ??? + |} + | + |val person2 = new NamedTupleSelectable { + | type Fields = (name: String, city: String) + |} + | + |val n = person2.na@@""".stripMargin, + """|name: String + |selectDynamic(name: String): Any + """.stripMargin, + filter = _.contains("name") + ) + + @Test def `Selectable without namedTuple Fields mamber` = + check( + """|class NonNamedTupleSelectable extends Selectable { + | def selectDynamic(name: String): Any = ??? + |} + | + |val person2 = new NonNamedTupleSelectable {} + | + |val n = person2.na@@""".stripMargin, + """|selectDynamic(name: String): Any + """.stripMargin, + filter = _.contains("name") + ) From 5af4908c86f92d328c4cc98a7fbba3050528e4d8 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Wed, 17 Jul 2024 17:35:54 +0200 Subject: [PATCH 288/827] Add support for checksums in Chocolatey --- .github/workflows/build-chocolatey.yml | 13 ++++++++++-- .github/workflows/build-sdk.yml | 22 +++++++++++++++------ .github/workflows/ci.yaml | 1 + .github/workflows/releases.yml | 13 ++++++++++++ pkgs/chocolatey/tools/chocolateyInstall.ps1 | 8 +++++--- 5 files changed, 46 insertions(+), 11 deletions(-) diff --git a/.github/workflows/build-chocolatey.yml b/.github/workflows/build-chocolatey.yml index d1326c19ed87..9de87d8e5ad6 100644 --- a/.github/workflows/build-chocolatey.yml +++ b/.github/workflows/build-chocolatey.yml @@ -15,10 +15,13 @@ on: inputs: version: required: true - type: string + type : string url: required: true - type: string + type : string + digest: + required: true + type : string jobs: build: @@ -37,6 +40,12 @@ jobs: files: ./pkgs/chocolatey/tools/chocolateyInstall.ps1 search-text: '@LAUNCHER_URL@' replacement-text: ${{ inputs.url }} + - name: Replace the CHECKSUM placeholder + uses: richardrigutins/replace-in-files@v2 + with: + files: ./pkgs/chocolatey/tools/chocolateyInstall.ps1 + search-text: '@LAUNCHER_SHA256@' + replacement-text: ${{ inputs.digest }} - name: Build the Chocolatey package (.nupkg) run: choco pack ./pkgs/chocolatey/scala.nuspec --out ./pkgs/chocolatey - name: Upload the Chocolatey package to GitHub diff --git a/.github/workflows/build-sdk.yml b/.github/workflows/build-sdk.yml index 935e19c7d90c..0233403894fb 100644 --- a/.github/workflows/build-sdk.yml +++ b/.github/workflows/build-sdk.yml @@ -36,18 +36,22 @@ on: win-x86_64-id: description: ID of the `win x86-64` package from GitHub Artifacts (Authentication Required) value : ${{ jobs.build.outputs.win-x86_64-id }} + win-x86_64-digest: + description: The SHA256 of the uploaded artifact (`win x86-64`) + value : ${{ 
jobs.build.outputs.win-x86_64-digest }} jobs: build: runs-on: ubuntu-latest outputs: - universal-id : ${{ steps.universal.outputs.artifact-id }} - linux-x86_64-id : ${{ steps.linux-x86_64.outputs.artifact-id }} - linux-aarch64-id: ${{ steps.linux-aarch64.outputs.artifact-id }} - mac-x86_64-id : ${{ steps.mac-x86_64.outputs.artifact-id }} - mac-aarch64-id : ${{ steps.mac-aarch64.outputs.artifact-id }} - win-x86_64-id : ${{ steps.win-x86_64.outputs.artifact-id }} + universal-id : ${{ steps.universal.outputs.artifact-id }} + linux-x86_64-id : ${{ steps.linux-x86_64.outputs.artifact-id }} + linux-aarch64-id : ${{ steps.linux-aarch64.outputs.artifact-id }} + mac-x86_64-id : ${{ steps.mac-x86_64.outputs.artifact-id }} + mac-aarch64-id : ${{ steps.mac-aarch64.outputs.artifact-id }} + win-x86_64-id : ${{ steps.win-x86_64.outputs.artifact-id }} + win-x86_64-digest: ${{ steps.win-x86_64-digest.outputs.digest }} steps: - uses: actions/checkout@v4 - uses: actions/setup-java@v4 @@ -103,4 +107,10 @@ jobs: with: path: ./dist/win-x86_64/target/universal/stage name: scala3-x86_64-pc-win32 + - name: Compute SHA256 of the uploaded artifact (win x86-64) + id : win-x86_64-digest + run : | + curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -o artifact.zip -L https://api.github.com/repos/scala/scala3/actions/artifacts/${{ steps.win-x86_64.outputs.artifact-id }}/zip + echo "digest=$(sha256sum artifact.zip | cut -d " " -f 1)" >> "$GITHUB_OUTPUT" + \ No newline at end of file diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8b98448a98bd..d4583847c438 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1025,6 +1025,7 @@ jobs: with: version: 3.6.0-local # TODO: FIX THIS url : https://api.github.com/repos/scala/scala3/actions/artifacts/${{ needs.build-sdk-package.outputs.win-x86_64-id }}/zip + digest : ${{ needs.build-sdk-package.outputs.win-x86_64-digest }} test-chocolatey-package: uses: ./.github/workflows/test-chocolatey.yml diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index f0edbac4127a..ab921ec588d2 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -37,11 +37,24 @@ jobs: secrets: DOTTYBOT-TOKEN: ${{ secrets.DOTTYBOT_WINGET_TOKEN }} + compute-digest: + runs-on: ubuntu-latest + outputs: + digest: ${{ steps.digest.outputs.digest }} + steps: + - name: Compute the SHA256 of scala3-${{ inputs.version }}-x86_64-pc-win32.zip in GitHub Release + id: digest + run: | + curl -o artifact.zip -L https://github.com/scala/scala3/releases/download/${{ inputs.version }}/scala3-${{ inputs.version }}-x86_64-pc-win32.zip + echo "digest=$(sha256sum artifact.zip | cut -d " " -f 1)" >> "$GITHUB_OUTPUT" + build-chocolatey: uses: ./.github/workflows/build-chocolatey.yml + needs: compute-digest with: version: ${{ inputs.version }} url : 'https://github.com/scala/scala3/releases/download/${{ inputs.version }}/scala3-${{ inputs.version }}-x86_64-pc-win32.zip' + digest : ${{ needs.compute-digest.outputs.digest }} test-chocolatey: uses: ./.github/workflows/test-chocolatey.yml needs: build-chocolatey diff --git a/pkgs/chocolatey/tools/chocolateyInstall.ps1 b/pkgs/chocolatey/tools/chocolateyInstall.ps1 index dc8874942561..3117efadaf0e 100644 --- a/pkgs/chocolatey/tools/chocolateyInstall.ps1 +++ b/pkgs/chocolatey/tools/chocolateyInstall.ps1 @@ -6,9 +6,11 @@ $unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageVersion)" # # Configure the installation arguments $packageArgs = @{ - packageName = 'scala' - Url64 = 
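For reference, the digest wired through these workflows is a plain SHA-256 over the launcher zip, as produced by the `sha256sum` step above and substituted for @LAUNCHER_SHA256@ / Checksum64. A small Scala sketch of the same computation (illustrative only; the file name `artifact.zip` is just an example):

    import java.nio.file.{Files, Paths}
    import java.security.MessageDigest

    // Hex-encoded SHA-256 of a file, equivalent to `sha256sum artifact.zip | cut -d " " -f 1`.
    def sha256Hex(path: String): String =
      val bytes  = Files.readAllBytes(Paths.get(path))
      val digest = MessageDigest.getInstance("SHA-256").digest(bytes)
      digest.map(b => f"${b & 0xff}%02x").mkString

    @main def printDigest(): Unit =
      println(sha256Hex("artifact.zip")) // the value that ends up in Checksum64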
'@LAUNCHER_URL@' - UnzipLocation = $unzipLocation + packageName = 'scala' + Url64 = '@LAUNCHER_URL@' + UnzipLocation = $unzipLocation + Checksum64 = '@LAUNCHER_SHA256@' + ChecksumType64 = 'SHA256' } ## In case we are running in the CI, add the authorisation header to fetch the zip From 0e71645b410c8e39fc4b88900e5de4b11fbd139e Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 5 Jun 2024 13:40:33 +0200 Subject: [PATCH 289/827] More robust level handling --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 49 +++++++++- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 57 +++++------ .../dotty/tools/dotc/cc/CheckCaptures.scala | 32 ++++--- compiler/src/dotty/tools/dotc/cc/Setup.scala | 96 +++++++++++-------- .../tools/dotc/printing/PlainPrinter.scala | 2 +- .../dotty/tools/dotc/transform/Recheck.scala | 2 +- tests/neg-custom-args/captures/levels.check | 2 +- .../neg-custom-args/captures/outer-var.check | 2 +- tests/neg-custom-args/captures/reaches.check | 2 +- tests/neg-custom-args/captures/vars.check | 4 +- tests/printing/dependent-annot.check | 7 +- 11 files changed, 152 insertions(+), 103 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index c272183b6dfb..88f5e7d52867 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -14,6 +14,7 @@ import tpd.* import StdNames.nme import config.Feature import collection.mutable +import CCState.* private val Captures: Key[CaptureSet] = Key() @@ -64,11 +65,47 @@ class CCState: */ var levelError: Option[CaptureSet.CompareResult.LevelError] = None + private var curLevel: Level = outermostLevel + private val symLevel: mutable.Map[Symbol, Int] = mutable.Map() + +object CCState: + + opaque type Level = Int + + val undefinedLevel: Level = -1 + + val outermostLevel: Level = 0 + + /** The level of the current environment. Levels start at 0 and increase for + * each nested function or class. -1 means the level is undefined. 
+ */ + def currentLevel(using Context): Level = ccState.curLevel + + inline def inNestedLevel[T](inline op: T)(using Context): T = + val ccs = ccState + val saved = ccs.curLevel + ccs.curLevel = ccs.curLevel.nextInner + try op finally ccs.curLevel = saved + + inline def inNestedLevelUnless[T](inline p: Boolean)(inline op: T)(using Context): T = + val ccs = ccState + val saved = ccs.curLevel + if !p then ccs.curLevel = ccs.curLevel.nextInner + try op finally ccs.curLevel = saved + + extension (x: Level) + def isDefined: Boolean = x >= 0 + def <= (y: Level) = (x: Int) <= y + def nextInner: Level = if isDefined then x + 1 else x + + extension (sym: Symbol)(using Context) + def ccLevel: Level = ccState.symLevel.getOrElse(sym, -1) + def recordLevel() = ccState.symLevel(sym) = currentLevel end CCState /** The currently valid CCState */ def ccState(using Context) = - Phases.checkCapturesPhase.asInstanceOf[CheckCaptures].ccState + Phases.checkCapturesPhase.asInstanceOf[CheckCaptures].ccState1 class NoCommonRoot(rs: Symbol*)(using Context) extends Exception( i"No common capture root nested in ${rs.mkString(" and ")}" @@ -339,6 +376,12 @@ extension (tp: Type) case _ => tp + def level(using Context): Level = + tp match + case tp: TermRef => tp.symbol.ccLevel + case tp: ThisType => tp.cls.ccLevel.nextInner + case _ => undefinedLevel + extension (cls: ClassSymbol) def pureBaseClass(using Context): Option[Symbol] = @@ -423,9 +466,7 @@ extension (sym: Symbol) || sym.is(Method, butNot = Accessor) /** The owner of the current level. Qualifying owners are - * - methods other than constructors and anonymous functions - * - anonymous functions, provided they either define a local - * root of type caps.Capability, or they are the rhs of a val definition. + * - methods, other than accessors * - classes, if they are not staticOwners * - _root_ */ diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index f78ed1a91bd6..5db8dadf5b66 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -16,6 +16,7 @@ import util.{SimpleIdentitySet, Property} import typer.ErrorReporting.Addenda import util.common.alwaysTrue import scala.collection.mutable +import CCState.* /** A class for capture sets. Capture sets can be constants or variables. * Capture sets support inclusion constraints <:< where <:< is subcapturing. @@ -55,10 +56,14 @@ sealed abstract class CaptureSet extends Showable: */ def isAlwaysEmpty: Boolean - /** An optional level limit, or NoSymbol if none exists. All elements of the set - * must be in scopes visible from the level limit. + /** An optional level limit, or undefinedLevel if none exists. All elements of the set + * must be at levels equal or smaller than the level of the set, if it is defined. */ - def levelLimit: Symbol + def level: Level + + /** An optional owner, or NoSymbol if none exists. Used for diagnstics + */ + def owner: Symbol /** Is this capture set definitely non-empty? 
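As a rough standalone model of the level discipline introduced here (all names below are invented for illustration; the real logic lives in CCState and in CaptureSet.Var's levelOK check): a set created at some level only accepts references defined at that level or further out.

    object LevelModel:
      type Level = Int
      final case class Ref(name: String, level: Level)

      final class SetVar(val owner: String, val level: Level):
        private var elems = Set.empty[Ref]
        // Mirrors the levelOK idea: an element is accepted only if it was
        // defined at the set's level or further out (ref.level <= this.level).
        def tryInclude(ref: Ref): Boolean =
          if ref.level <= level then { elems += ref; true } else false

    @main def levelDemo(): Unit =
      import LevelModel.*
      val a = SetVar(owner = "a", level = 1)          // capture set of an outer variable
      println(a.tryInclude(Ref("cap1", level = 1)))   // true: same level
      println(a.tryInclude(Ref("cap3", level = 2)))   // false: defined in a nested scope, would escape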
*/ final def isNotEmpty: Boolean = !elems.isEmpty @@ -239,9 +244,7 @@ sealed abstract class CaptureSet extends Showable: if this.subCaptures(that, frozen = true).isOK then that else if that.subCaptures(this, frozen = true).isOK then this else if this.isConst && that.isConst then Const(this.elems ++ that.elems) - else Var( - this.levelLimit.maxNested(that.levelLimit, onConflict = (sym1, sym2) => sym1), - this.elems ++ that.elems) + else Var(initialElems = this.elems ++ that.elems) .addAsDependentTo(this).addAsDependentTo(that) /** The smallest superset (via <:<) of this capture set that also contains `ref`. @@ -411,7 +414,9 @@ object CaptureSet: def withDescription(description: String): Const = Const(elems, description) - def levelLimit = NoSymbol + def level = undefinedLevel + + def owner = NoSymbol override def toString = elems.toString end Const @@ -431,7 +436,7 @@ object CaptureSet: end Fluid /** The subclass of captureset variables with given initial elements */ - class Var(directOwner: Symbol, initialElems: Refs = emptySet)(using @constructorOnly ictx: Context) extends CaptureSet: + class Var(override val owner: Symbol = NoSymbol, initialElems: Refs = emptySet, val level: Level = undefinedLevel, underBox: Boolean = false)(using @constructorOnly ictx: Context) extends CaptureSet: /** A unique identification number for diagnostics */ val id = @@ -440,9 +445,6 @@ object CaptureSet: //assert(id != 40) - override val levelLimit = - if directOwner.exists then directOwner.levelOwner else NoSymbol - /** A variable is solved if it is aproximated to a from-then-on constant set. */ private var isSolved: Boolean = false @@ -516,12 +518,10 @@ object CaptureSet: private def levelOK(elem: CaptureRef)(using Context): Boolean = if elem.isRootCapability then !noUniversal else elem match - case elem: TermRef if levelLimit.exists => - var sym = elem.symbol - if sym.isLevelOwner then sym = sym.owner - levelLimit.isContainedIn(sym.levelOwner) - case elem: ThisType if levelLimit.exists => - levelLimit.isContainedIn(elem.cls.levelOwner) + case elem: TermRef if level.isDefined => + elem.symbol.ccLevel <= level + case elem: ThisType if level.isDefined => + elem.cls.ccLevel.nextInner <= level case ReachCapability(elem1) => levelOK(elem1) case MaybeCapability(elem1) => @@ -599,8 +599,8 @@ object CaptureSet: val debugInfo = if !isConst && ctx.settings.YccDebug.value then ids else "" val limitInfo = - if ctx.settings.YprintLevel.value && levelLimit.exists - then i"" + if ctx.settings.YprintLevel.value && level.isDefined + then i"" else "" debugInfo ++ limitInfo @@ -619,13 +619,6 @@ object CaptureSet: override def toString = s"Var$id$elems" end Var - /** Variables that represent refinements of class parameters can have the universal - * capture set, since they represent only what is the result of the constructor. - * Test case: Without that tweak, logger.scala would not compile. - */ - class RefiningVar(directOwner: Symbol)(using Context) extends Var(directOwner): - override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context) = this - /** A variable that is derived from some other variable via a map or filter. 
*/ abstract class DerivedVar(owner: Symbol, initialElems: Refs)(using @constructorOnly ctx: Context) extends Var(owner, initialElems): @@ -654,7 +647,7 @@ object CaptureSet: */ class Mapped private[CaptureSet] (val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context) - extends DerivedVar(source.levelLimit, initial.elems): + extends DerivedVar(source.owner, initial.elems): addAsDependentTo(initial) // initial mappings could change by propagation private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] @@ -751,7 +744,7 @@ object CaptureSet: */ final class BiMapped private[CaptureSet] (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) - extends DerivedVar(source.levelLimit, initialElems): + extends DerivedVar(source.owner, initialElems): override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = if origin eq source then @@ -785,7 +778,7 @@ object CaptureSet: /** A variable with elements given at any time as { x <- source.elems | p(x) } */ class Filtered private[CaptureSet] (val source: Var, p: Context ?=> CaptureRef => Boolean)(using @constructorOnly ctx: Context) - extends DerivedVar(source.levelLimit, source.elems.filter(p)): + extends DerivedVar(source.owner, source.elems.filter(p)): override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = if accountsFor(elem) then @@ -815,7 +808,7 @@ object CaptureSet: extends Filtered(source, !other.accountsFor(_)) class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using Context) - extends Var(cs1.levelLimit.minNested(cs2.levelLimit), elemIntersection(cs1, cs2)): + extends Var(initialElems = elemIntersection(cs1, cs2)): addAsDependentTo(cs1) addAsDependentTo(cs2) deps += cs1 @@ -905,7 +898,7 @@ object CaptureSet: if ctx.settings.YccDebug.value then printer.toText(trace, ", ") else blocking.show case LevelError(cs: CaptureSet, elem: CaptureRef) => - Str(i"($elem at wrong level for $cs in ${cs.levelLimit})") + Str(i"($elem at wrong level for $cs at level ${cs.level.toString})") /** The result is OK */ def isOK: Boolean = this == OK @@ -1148,6 +1141,6 @@ object CaptureSet: i""" | |Note that reference ${ref}$levelStr - |cannot be included in outer capture set $cs which is associated with ${cs.levelLimit}""" + |cannot be included in outer capture set $cs""" end CaptureSet diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index e41f32cab672..c36b0cbf552e 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -19,6 +19,7 @@ import transform.{Recheck, PreRecheck, CapturedVars} import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} +import CCState.* import StdNames.nme import NameKinds.{DefaultGetterName, WildcardParamName, UniqueNameKind} import reporting.trace @@ -191,7 +192,7 @@ class CheckCaptures extends Recheck, SymTransformer: if Feature.ccEnabled then super.run - val ccState = new CCState + val ccState1 = new CCState // Dotty problem: Rename to ccState ==> Crash in ExplicitOuter class CaptureChecker(ictx: Context) extends Rechecker(ictx): @@ -311,7 +312,7 @@ class CheckCaptures extends Recheck, SymTransformer: def capturedVars(sym: Symbol)(using Context): CaptureSet = myCapturedVars.getOrElseUpdate(sym, if sym.ownersIterator.exists(_.isTerm) - then 
CaptureSet.Var(sym.owner) + then CaptureSet.Var(sym.owner, level = sym.ccLevel) else CaptureSet.empty) /** For all nested environments up to `limit` or a closed environment perform `op`, @@ -592,6 +593,9 @@ class CheckCaptures extends Recheck, SymTransformer: tree.srcPos) super.recheckTypeApply(tree, pt) + override def recheckBlock(tree: Block, pt: Type)(using Context): Type = + inNestedLevel(super.recheckBlock(tree, pt)) + override def recheckClosure(tree: Closure, pt: Type, forceDependent: Boolean)(using Context): Type = val cs = capturedVars(tree.meth.symbol) capt.println(i"typing closure $tree with cvs $cs") @@ -695,13 +699,14 @@ class CheckCaptures extends Recheck, SymTransformer: val localSet = capturedVars(sym) if !localSet.isAlwaysEmpty then curEnv = Env(sym, EnvKind.Regular, localSet, curEnv) - try checkInferredResult(super.recheckDefDef(tree, sym), tree) - finally - if !sym.isAnonymousFunction then - // Anonymous functions propagate their type to the enclosing environment - // so it is not in general sound to interpolate their types. - interpolateVarsIn(tree.tpt) - curEnv = saved + inNestedLevel: + try checkInferredResult(super.recheckDefDef(tree, sym), tree) + finally + if !sym.isAnonymousFunction then + // Anonymous functions propagate their type to the enclosing environment + // so it is not in general sound to interpolate their types. + interpolateVarsIn(tree.tpt) + curEnv = saved /** If val or def definition with inferred (result) type is visible * in other compilation units, check that the actual inferred type @@ -771,7 +776,8 @@ class CheckCaptures extends Recheck, SymTransformer: checkSubset(thisSet, CaptureSet.empty.withDescription(i"of pure base class $pureBase"), selfType.srcPos, cs1description = " captured by this self type") - super.recheckClassDef(tree, impl, cls) + inNestedLevelUnless(cls.is(Module)): + super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -823,9 +829,9 @@ class CheckCaptures extends Recheck, SymTransformer: val saved = curEnv tree match case _: RefTree | closureDef(_) if pt.isBoxedCapturing => - curEnv = Env(curEnv.owner, EnvKind.Boxed, CaptureSet.Var(curEnv.owner), curEnv) + curEnv = Env(curEnv.owner, EnvKind.Boxed, CaptureSet.Var(curEnv.owner, level = currentLevel), curEnv) case _ if tree.hasAttachment(ClosureBodyValue) => - curEnv = Env(curEnv.owner, EnvKind.ClosureResult, CaptureSet.Var(curEnv.owner), curEnv) + curEnv = Env(curEnv.owner, EnvKind.ClosureResult, CaptureSet.Var(curEnv.owner, level = currentLevel), curEnv) case _ => val res = try @@ -995,7 +1001,7 @@ class CheckCaptures extends Recheck, SymTransformer: val saved = curEnv curEnv = Env( curEnv.owner, EnvKind.NestedInOwner, - CaptureSet.Var(curEnv.owner), + CaptureSet.Var(curEnv.owner, level = currentLevel), if boxed then null else curEnv) try val (eargs, eres) = expected.dealias.stripCapturing match diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 0175d40c186c..2c0cdfb7b129 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -16,6 +16,7 @@ import Synthetics.isExcluded import util.Property import printing.{Printer, Texts}, Texts.{Text, Str} import collection.mutable +import CCState.* /** Operations accessed from CheckCaptures */ trait SetupAPI: @@ -189,7 +190,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: val getterType = mapInferred(refine = false)(tp.memberInfo(getter)).strippedDealias RefinedType(core, getter.name, - 
CapturingType(getterType, CaptureSet.RefiningVar(ctx.owner))) + CapturingType(getterType, + new CaptureSet.Var(ctx.owner): + override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context) = this + )) .showing(i"add capture refinement $tp --> $result", capt) else core @@ -402,14 +406,17 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if isExcluded(meth) then return - inContext(ctx.withOwner(meth)): - paramss.foreach(traverse) - transformResultType(tpt, meth) - traverse(tree.rhs) - //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") + meth.recordLevel() + inNestedLevel: + inContext(ctx.withOwner(meth)): + paramss.foreach(traverse) + transformResultType(tpt, meth) + traverse(tree.rhs) + //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") case tree @ ValDef(_, tpt: TypeTree, _) => val sym = tree.symbol + sym.recordLevel() val defCtx = if sym.isOneOf(TermParamOrAccessor) then ctx else ctx.withOwner(sym) inContext(defCtx): transformResultType(tpt, sym) @@ -426,13 +433,19 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed case tree: TypeDef if tree.symbol.isClass => - inContext(ctx.withOwner(tree.symbol)): - traverseChildren(tree) + val sym = tree.symbol + sym.recordLevel() + inNestedLevelUnless(sym.is(Module)): + inContext(ctx.withOwner(sym)) + traverseChildren(tree) case tree @ SeqLiteral(elems, tpt: TypeTree) => traverse(elems) tpt.rememberType(box(transformInferredType(tpt.tpe))) + case tree: Block => + inNestedLevel(traverseChildren(tree)) + case _ => traverseChildren(tree) postProcess(tree) @@ -531,36 +544,37 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tree: TypeDef => tree.symbol match case cls: ClassSymbol => - val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - def innerModule = cls.is(ModuleClass) && !cls.isStatic - val selfInfo1 = - if (selfInfo ne NoType) && !innerModule then - // if selfInfo is explicitly given then use that one, except if - // self info applies to non-static modules, these still need to be inferred - selfInfo - else if cls.isPureClass then - // is cls is known to be pure, nothing needs to be added to self type - selfInfo - else if !cls.isEffectivelySealed && !cls.baseClassHasExplicitSelfType then - // assume {cap} for completely unconstrained self types of publicly extensible classes - CapturingType(cinfo.selfType, CaptureSet.universal) - else - // Infer the self type for the rest, which is all classes without explicit - // self types (to which we also add nested module classes), provided they are - // neither pure, nor are publicily extensible with an unconstrained self type. 
- CapturingType(cinfo.selfType, CaptureSet.Var(cls)) - val ps1 = inContext(ctx.withOwner(cls)): - ps.mapConserve(transformExplicitType(_)) - if (selfInfo1 ne selfInfo) || (ps1 ne ps) then - val newInfo = ClassInfo(prefix, cls, ps1, decls, selfInfo1) - updateInfo(cls, newInfo) - capt.println(i"update class info of $cls with parents $ps selfinfo $selfInfo to $newInfo") - cls.thisType.asInstanceOf[ThisType].invalidateCaches() - if cls.is(ModuleClass) then - // if it's a module, the capture set of the module reference is the capture set of the self type - val modul = cls.sourceModule - updateInfo(modul, CapturingType(modul.info, selfInfo1.asInstanceOf[Type].captureSet)) - modul.termRef.invalidateCaches() + inNestedLevelUnless(cls.is(Module)): + val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo + def innerModule = cls.is(ModuleClass) && !cls.isStatic + val selfInfo1 = + if (selfInfo ne NoType) && !innerModule then + // if selfInfo is explicitly given then use that one, except if + // self info applies to non-static modules, these still need to be inferred + selfInfo + else if cls.isPureClass then + // is cls is known to be pure, nothing needs to be added to self type + selfInfo + else if !cls.isEffectivelySealed && !cls.baseClassHasExplicitSelfType then + // assume {cap} for completely unconstrained self types of publicly extensible classes + CapturingType(cinfo.selfType, CaptureSet.universal) + else + // Infer the self type for the rest, which is all classes without explicit + // self types (to which we also add nested module classes), provided they are + // neither pure, nor are publicily extensible with an unconstrained self type. + CapturingType(cinfo.selfType, CaptureSet.Var(cls, level = currentLevel)) + val ps1 = inContext(ctx.withOwner(cls)): + ps.mapConserve(transformExplicitType(_)) + if (selfInfo1 ne selfInfo) || (ps1 ne ps) then + val newInfo = ClassInfo(prefix, cls, ps1, decls, selfInfo1) + updateInfo(cls, newInfo) + capt.println(i"update class info of $cls with parents $ps selfinfo $selfInfo to $newInfo") + cls.thisType.asInstanceOf[ThisType].invalidateCaches() + if cls.is(ModuleClass) then + // if it's a module, the capture set of the module reference is the capture set of the self type + val modul = cls.sourceModule + updateInfo(modul, CapturingType(modul.info, selfInfo1.asInstanceOf[Type].captureSet)) + modul.termRef.invalidateCaches() case _ => case _ => end postProcess @@ -672,11 +686,11 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: /** Add a capture set variable to `tp` if necessary, or maybe pull out * an embedded capture set variable from a part of `tp`. 
*/ - def addVar(tp: Type, owner: Symbol)(using Context): Type = + private def addVar(tp: Type, owner: Symbol)(using Context): Type = decorate(tp, addedSet = _.dealias.match - case CapturingType(_, refs) => CaptureSet.Var(owner, refs.elems) - case _ => CaptureSet.Var(owner)) + case CapturingType(_, refs) => CaptureSet.Var(owner, refs.elems, level = currentLevel) + case _ => CaptureSet.Var(owner, level = currentLevel)) def setupUnit(tree: Tree, recheckDef: DefRecheck)(using Context): Unit = setupTraverser(recheckDef).traverse(tree)(using ctx.withPhase(thisPhase)) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index c06b43cafe17..71ebb7054000 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -15,7 +15,7 @@ import util.SourcePosition import scala.util.control.NonFatal import scala.annotation.switch import config.{Config, Feature} -import cc.{CapturingType, RetainingType, CaptureSet, ReachCapability, MaybeCapability, isBoxed, levelOwner, retainedElems, isRetainsLike} +import cc.{CapturingType, RetainingType, CaptureSet, ReachCapability, MaybeCapability, isBoxed, retainedElems, isRetainsLike} class PlainPrinter(_ctx: Context) extends Printer { diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 79dfe3393578..f025c9e9369f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -264,7 +264,7 @@ abstract class Recheck extends Phase, SymTransformer: def recheckClassDef(tree: TypeDef, impl: Template, sym: ClassSymbol)(using Context): Type = recheck(impl.constr) - impl.parentsOrDerived.foreach(recheck(_)) + impl.parents.foreach(recheck(_)) recheck(impl.self) recheckStats(impl.body) sym.typeRef diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index a5f8d73ccf7a..479a231a0404 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -12,6 +12,6 @@ | Required: box (x$0: String) ->? String | | Note that reference (cap3 : CC^), defined in method scope - | cannot be included in outer capture set ? of value r which is associated with method test2 + | cannot be included in outer capture set ? of value r | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/outer-var.check b/tests/neg-custom-args/captures/outer-var.check index b9f1f57be769..d57d615cda64 100644 --- a/tests/neg-custom-args/captures/outer-var.check +++ b/tests/neg-custom-args/captures/outer-var.check @@ -32,7 +32,7 @@ | Required: () ->{p} Unit | | Note that reference (q : Proc), defined in method inner - | cannot be included in outer capture set {p} of variable y which is associated with method test + | cannot be included in outer capture set {p} of variable y | | longer explanation available when compiling with `-explain` -- Error: tests/neg-custom-args/captures/outer-var.scala:16:53 --------------------------------------------------------- diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index a1c5a56369e9..ccd9e891380b 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -12,7 +12,7 @@ | Required: box List[box () ->{xs*} Unit]^? 
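The updated check files around here all diagnose the same underlying situation. Roughly, code along these lines (a sketch in the spirit of the `vars`/`levels` test cases, not the literal test sources) is rejected, because a capability defined in a nested scope would have to flow into a capture set created further out:

    import language.experimental.captureChecking

    class Cap extends caps.Capability

    def test(cap1: Cap) =
      var a: String ->{cap1} String = x => x
      def scope(cap3: Cap) =
        def g(x: String): String = { val _ = cap3; x }  // g captures cap3
        a = g  // error: (cap3 : Cap), defined in method scope, cannot be
               // included in the outer capture set {cap1} of variable a
      ()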
| | Note that reference (f : File^), defined in method $anonfun - | cannot be included in outer capture set {xs*} of value cur which is associated with method runAll1 + | cannot be included in outer capture set {xs*} of value cur | | longer explanation available when compiling with `-explain` -- Error: tests/neg-custom-args/captures/reaches.scala:35:6 ------------------------------------------------------------ diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index 22d13d8e26e7..e2d817f2d8bd 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -4,7 +4,7 @@ | reference (cap3 : Cap) is not included in the allowed capture set {cap1} of variable a | | Note that reference (cap3 : Cap), defined in method scope - | cannot be included in outer capture set {cap1} of variable a which is associated with method test + | cannot be included in outer capture set {cap1} of variable a -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:23:8 ------------------------------------------ 23 | a = g // error | ^ @@ -12,7 +12,7 @@ | Required: (x$0: String) ->{cap1} String | | Note that reference (cap3 : Cap), defined in method scope - | cannot be included in outer capture set {cap1} of variable a which is associated with method test + | cannot be included in outer capture set {cap1} of variable a | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:25:12 ----------------------------------------- diff --git a/tests/printing/dependent-annot.check b/tests/printing/dependent-annot.check index a8a7e8b0bfee..f2dd0f702884 100644 --- a/tests/printing/dependent-annot.check +++ b/tests/printing/dependent-annot.check @@ -11,12 +11,7 @@ package { def f(y: C, z: C): Unit = { def g(): C @ann([y,z : Any]*) = ??? - val ac: - (C => Array[String]) - { - def apply(x: C): Array[String @ann([x : Any]*)] - } - = ??? + val ac: (x: C) => Array[String @ann([x : Any]*)] = ??? 
val dc: Array[String] = ac.apply(g()) () } From 5b9d305fc55adcda119d22bdd2189174f507bdcf Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 5 Jun 2024 13:42:20 +0200 Subject: [PATCH 290/827] Add existential capabilities, 2nd draft --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 29 ++ .../src/dotty/tools/dotc/cc/CaptureSet.scala | 2 + .../dotty/tools/dotc/cc/CheckCaptures.scala | 13 +- .../src/dotty/tools/dotc/cc/Existential.scala | 366 ++++++++++++++++++ compiler/src/dotty/tools/dotc/cc/Setup.scala | 31 +- .../src/dotty/tools/dotc/config/Config.scala | 2 +- .../dotty/tools/dotc/core/Definitions.scala | 13 +- .../src/dotty/tools/dotc/core/NameKinds.scala | 1 + .../dotty/tools/dotc/core/TypeComparer.scala | 97 ++++- .../tools/dotc/printing/RefinedPrinter.scala | 4 +- library/src/scala/caps.scala | 6 + tests/neg-custom-args/captures/reaches.check | 8 +- tests/neg-custom-args/captures/reaches.scala | 7 +- tests/neg/cc-ex-conformance.scala | 25 ++ tests/new/test.scala | 9 +- tests/pos/cc-ex-unpack.scala | 18 + 16 files changed, 580 insertions(+), 51 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/cc/Existential.scala create mode 100644 tests/neg/cc-ex-conformance.scala create mode 100644 tests/pos/cc-ex-unpack.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 88f5e7d52867..080577b5773f 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -26,6 +26,8 @@ object ccConfig: */ inline val allowUnsoundMaps = false + val useExistentials = false + /** If true, use `sealed` as encapsulation mechanism instead of the * previous global retriction that `cap` can't be boxed or unboxed. */ @@ -532,6 +534,33 @@ object ReachCapability extends AnnotatedCapability(defn.ReachCapabilityAnnot) */ object MaybeCapability extends AnnotatedCapability(defn.MaybeCapabilityAnnot) +/** Offers utility method to be used for type maps that follow aliases */ +trait ConservativeFollowAliasMap(using Context) extends TypeMap: + + /** If `mapped` is a type alias, apply the map to the alias, while keeping + * annotations. If the result is different, return it, otherwise return `mapped`. + * Furthermore, if `original` is a LazyRef or TypeVar and the mapped result is + * the same as the underlying type, keep `original`. This avoids spurious differences + * which would lead to spurious dealiasing in the result + */ + protected def applyToAlias(original: Type, mapped: Type) = + val mapped1 = mapped match + case t: (TypeRef | AppliedType) => + val t1 = t.dealiasKeepAnnots + if t1 eq t then t + else + // If we see a type alias, map the alias type and keep it if it's different + val t2 = apply(t1) + if t2 ne t1 then t2 else t + case _ => + mapped + original match + case original: (LazyRef | TypeVar) if mapped1 eq original.underlying => + original + case _ => + mapped1 +end ConservativeFollowAliasMap + /** An extractor for all kinds of function types as well as method and poly types. 
* @return 1st half: The argument types or empty if this is a type function * 2nd half: The result type diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 5db8dadf5b66..cf803e47eca0 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -14,6 +14,7 @@ import printing.{Showable, Printer} import printing.Texts.* import util.{SimpleIdentitySet, Property} import typer.ErrorReporting.Addenda +import TypeComparer.canSubsumeExistentially import util.common.alwaysTrue import scala.collection.mutable import CCState.* @@ -172,6 +173,7 @@ sealed abstract class CaptureSet extends Showable: x.info match case x1: CaptureRef => x1.subsumes(y) case _ => false + case x: TermParamRef => canSubsumeExistentially(x, y) case _ => false /** {x} <:< this where <:< is subcapturing, but treating all variables diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index c36b0cbf552e..e6e091cd5897 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -612,10 +612,10 @@ class CheckCaptures extends Recheck, SymTransformer: mdef.rhs.putAttachment(ClosureBodyValue, ()) case _ => - // Constrain closure's parameters and result from the expected type before - // rechecking the body. openClosures = (mdef.symbol, pt) :: openClosures try + // Constrain closure's parameters and result from the expected type before + // rechecking the body. val res = recheckClosure(expr, pt, forceDependent = true) if !isEtaExpansion(mdef) then // If closure is an eta expanded method reference it's better to not constrain @@ -699,7 +699,7 @@ class CheckCaptures extends Recheck, SymTransformer: val localSet = capturedVars(sym) if !localSet.isAlwaysEmpty then curEnv = Env(sym, EnvKind.Regular, localSet, curEnv) - inNestedLevel: + inNestedLevel: // TODO: needed here? 
try checkInferredResult(super.recheckDefDef(tree, sym), tree) finally if !sym.isAnonymousFunction then @@ -920,8 +920,7 @@ class CheckCaptures extends Recheck, SymTransformer: case expected @ defn.FunctionOf(args, resultType, isContextual) if defn.isNonRefinedFunction(expected) => actual match - case RefinedType(parent, nme.apply, rinfo: MethodType) - if defn.isFunctionNType(actual) => + case defn.RefinedFunctionOf(rinfo: MethodType) => depFun(args, resultType, isContextual, rinfo.paramNames) case _ => expected case _ => expected @@ -1132,12 +1131,12 @@ class CheckCaptures extends Recheck, SymTransformer: * @param sym symbol of the field definition that is being checked */ override def checkSubType(actual: Type, expected: Type)(using Context): Boolean = - val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) + val expected1 = alignDependentFunction(addOuterRefs(/*Existential.strip*/(expected), actual), actual.stripCapturing) val actual1 = val saved = curEnv try curEnv = Env(clazz, EnvKind.NestedInOwner, capturedVars(clazz), outer0 = curEnv) - val adapted = adaptBoxed(actual, expected1, srcPos, covariant = true, alwaysConst = true) + val adapted = adaptBoxed(/*Existential.strip*/(actual), expected1, srcPos, covariant = true, alwaysConst = true) actual match case _: MethodType => // We remove the capture set resulted from box adaptation for method types, diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala new file mode 100644 index 000000000000..0dba1a62e7ed --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/Existential.scala @@ -0,0 +1,366 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* +import CaptureSet.IdempotentCaptRefMap +import StdNames.nme +import ast.tpd.* +import Decorators.* +import typer.ErrorReporting.errorType +import NameKinds.exSkolemName +import reporting.Message + +/** + +Handling existentials in CC: + + - We generally use existentials only in function and method result types + - All occurrences of an EX-bound variable appear co-variantly in the bound type + +In Setup: + + - Convert occurrences of `cap` in function results to existentials. Precise rules below. + - Conversions are done in two places: + + + As part of mapping from local types of parameters and results to infos of methods. + The local types just use `cap`, whereas the result type in the info uses EX-bound variables. + + When converting functions or methods appearing in explicitly declared types. + Here again, we only replace cap's in fucntion results. + + - Conversion is done with a BiTypeMap in `Existential.mapCap`. + +In adapt: + + - If an EX is toplevel in actual type, replace its bound variable + occurrences with `cap`. + +Level checking and avoidance: + + - Environments, capture refs, and capture set variables carry levels + + + levels start at 0 + + The level of a block or template statement sequence is one higher than the level of + its environment + + The level of a TermRef is the level of the environment where its symbol is defined. + + The level of a ThisType is the level of the statements of the class to which it beloongs. + + The level of a TermParamRef is currently -1 (i.e. TermParamRefs are not yet checked using this system) + + The level of a capture set variable is the level of the environment where it is created. + + - Variables also carry info whether they accept `cap` or not. 
Variables introduced under a box + don't, the others do. + + - Capture set variables do not accept elements of level higher than the variable's level + - We use avoidance to heal such cases: If the level-incorrect ref appears + + covariantly: widen to underlying capture set, reject if that is cap and the variable does not allow it + + contravariantly: narrow to {} + + invariantly: reject with error + +In cv-computation (markFree): + + - Reach capabilities x* of a parameter x cannot appear in the capture set of + the owning method. They have to be widened to dcs(x), or, where this is not + possible, it's an error. + +In well-formedness checking of explicitly written type T: + + - If T is not the type of a parameter, check that no cap occurrence or EX-bound variable appears + under a box. + +Subtype rules + + - new alphabet: existentially bound variables `a`. + - they can be stored in environments Gamma. + - they are alpha-renamable, usual hygiene conditions apply + + Gamma |- EX a.T <: U + if Gamma, a |- T <: U + + Gamma |- T <: EX a.U + if there exists a capture set C consisting of capture refs and ex-bound variables + bound in Gamma such that Gamma |- T <: [a := C]U + +Representation: + + EX a.T[a] is represented as a dependent function type + + (a: Exists) => T[a] + + where Exists is defined in caps like this: + + sealed trait Exists extends Capability + + The defn.RefinedFunctionOf extractor will exclude existential types from + its results, so only normal refined functions match. + + Let `boundvar(ex)` be the TermParamRef defined by the existential type `ex`. + +Subtype checking algorithm, general scheme: + + Maintain two structures in TypeComparer: + + openExistentials: List[TermParamRef] + assocExistentials: Map[TermParamRef, List[TermParamRef]] + + `openExistentials` corresponds to the list of existential variables stored in the environment. + `assocExistentials` maps existential variables bound by existentials appearing on the right + to the value of `openExistentials` at the time when the existential on the right was dropped. + +Subtype checking algorithm, steps to add for tp1 <:< tp2: + + If tp1 is an existential EX a.tp1a: + + val saved = openExistentials + openExistentials = boundvar(tp1) :: openExistentials + try tp1a <:< tp2 + finally openExistentials = saved + + If tp2 is an existential EX a.tp2a: + + val saved = assocExistentials + assocExistentials = assocExistentials + (boundvar(tp2) -> openExistentials) + try tp1 <:< tp2a + finally assocExistentials = saved + + If tp2 is an existentially bound variable: + assocExistentials(tp2).isDefined + && (assocExistentials(tp2).contains(tp1) || tp1 is not existentially bound) + +Existential source syntax: + + Existential types are usually not written in source, since we still allow the `^` + syntax that can express most of them more concisely (see below for translation rules). + But we should also allow to write existential types explicitly, even if it ends up mainly + for debugging. To express them, we use the encoding with `Exists`, so a typical + expression of an existential would be + + (x: Exists) => A ->{x} B + + Existential types can only appear at the top level of the result type + of a function or method. + +Restrictions on Existential Types: + + - An existential capture ref must be the only member of its set. This is + intended to model the idea that existential variables effectively range + over capture sets, not capture references. But so far our calculus + and implementation do not yet accommodate first-class capture sets.
+ - Existential capture refs must appear co-variantly in their bound type + + So the following would all be illegal: + + EX x.C^{x, io} // error: multiple members + EX x.() => EX y.C^{x, y} // error: multiple members + EX x.C^{x} ->{x} D // error: contra-variant occurrence + EX x.Set[C^{x}] // error: invariant occurrence + +Expansion of ^: + + We expand all occurrences of `cap` in the result types of functions or methods + to existentially quantified types. Nested scopes are expanded before outer ones. + + The expansion algorithm is then defined as follows: + + 1. In a result type, replace every occurrence of ^ with a fresh existentially + bound variable and quantify over all variables such introduced. + + 2. After this step, type aliases are expanded. If aliases have aliases in arguments, + the outer alias is expanded before the aliases in the arguments. Each time an alias + is expanded that reveals a `^`, apply step (1). + + 3. The algorithm ends when no more alieases remain to be expanded. + + Examples: + + - `A => B` is an alias type that expands to `(A -> B)^`, therefore + `() -> A => B` expands to `() -> EX c. A ->{c} B`. + + - `() => Iterator[A => B]` expands to `() => EX c. Iterator[A ->{c} B]` + + - `A -> B^` expands to `A -> EX c.B^{c}`. + + - If we define `type Fun[T] = A -> T`, then `() -> Fun[B^]` expands to `() -> EX c.Fun[B^{c}]`, which + dealiases to `() -> EX c.A -> B^{c}`. + + - If we define + + type F = A -> Fun[B^] + + then the type alias expands to + + type F = A -> EX c.A -> B^{c} +*/ +object Existential: + + type Carrier = RefinedType + + def openExpected(pt: Type)(using Context): Type = pt.dealias match + case Existential(boundVar, unpacked) => + val tm = new IdempotentCaptRefMap: + val cvar = CaptureSet.Var(ctx.owner) + def apply(t: Type) = mapOver(t) match + case t @ CapturingType(parent, refs) if refs.elems.contains(boundVar) => + assert(refs.isConst && refs.elems.size == 1, i"malformed existential $t") + t.derivedCapturingType(parent, cvar) + case t => + t + openExpected(tm(unpacked)) + case _ => pt + + def toCap(tp: Type)(using Context) = tp.dealias match + case Existential(boundVar, unpacked) => + unpacked.substParam(boundVar, defn.captureRoot.termRef) + case _ => tp + + /** Replace all occurrences of `cap` in parts of this type by an existentially bound + * variable. If there are such occurrences, or there might be in the future due to embedded + * capture set variables, create an existential with the variable wrapping the type. + * Stop at function or method types since these have been mapped before. + */ + def mapCap(tp: Type, fail: Message => Unit)(using Context): Type = + var needsWrap = false + + class Wrap(boundVar: TermParamRef) extends BiTypeMap, ConservativeFollowAliasMap: + def apply(t: Type) = // go deep first, so that we map parts of alias types before dealiasing + mapOver(t) match + case t1: TermRef if t1.isRootCapability => + if variance > 0 then + needsWrap = true + boundVar + else + val varianceStr = if variance < 0 then "contra" else "in" + fail(em"cap appears in ${varianceStr}variant position in $tp") + t1 + case t1 @ FunctionOrMethod(_, _) => + // These have been mapped before + t1 + case t1 @ CapturingType(_, _: CaptureSet.Var) => + if variance > 0 then needsWrap = true // the set might get a cap later. 
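To make the surface syntax discussed in this comment concrete, here is a sketch of an explicitly written existential result type, assuming the caps.Exists encoding added by this patch is accepted in source as described above (the names mkFn, A and B are invented for illustration):

    import language.experimental.captureChecking
    import caps.Exists

    trait A
    trait B

    // The result captures some capability `x` that is existentially bound at
    // the arrow; per the expansion rules above, this is roughly what a result
    // type `A => B` elaborates to.
    def mkFn(): (x: Exists) => A ->{x} B = ???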
+ t1 + case t1 => + applyToAlias(t, t1) + + lazy val inverse = new BiTypeMap with ConservativeFollowAliasMap: + def apply(t: Type) = mapOver(t) match + case t1: TermParamRef if t1 eq boundVar => defn.captureRoot.termRef + case t1 @ FunctionOrMethod(_, _) => t1 + case t1 => applyToAlias(t, t1) + def inverse = Wrap.this + override def toString = "Wrap.inverse" + end Wrap + + if ccConfig.useExistentials then + val wrapped = apply(Wrap(_)(tp)) + if needsWrap then wrapped else tp + else tp + end mapCap + + def mapCapInResult(tp: Type, fail: Message => Unit)(using Context): Type = + def mapCapInFinalResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = mapCapInFinalResult(tp.resultType)) + case _ => + mapCap(tp, fail) + tp match + case tp: MethodOrPoly => + mapCapInFinalResult(tp) + case defn.FunctionNOf(args, res, contextual) => + tp.derivedFunctionOrMethod(args, mapCap(res, fail)) + case _ => tp + + def strip(tp: Type)(using Context) = tp match + case Existential(_, tpunpacked) => tpunpacked + case _ => tp + + def skolemize(tp: Type)(using Context) = tp.widenDealias match // TODO needed? + case Existential(boundVar, unpacked) => + val skolem = tp match + case tp: CaptureRef if tp.isTracked => tp + case _ => newSkolemSym(boundVar.underlying).termRef + val tm = new IdempotentCaptRefMap: + var deep = false + private inline def deepApply(t: Type): Type = + val saved = deep + deep = true + try apply(t) finally deep = saved + def apply(t: Type) = + if t eq boundVar then + if deep then skolem.reach else skolem + else t match + case defn.FunctionOf(args, res, contextual) => + val res1 = deepApply(res) + if res1 ne res then defn.FunctionOf(args, res1, contextual) + else t + case defn.RefinedFunctionOf(mt) => + mt.derivedLambdaType(resType = deepApply(mt.resType)) + case _ => + mapOver(t) + tm(unpacked) + case _ => tp + end skolemize + + def newSkolemSym(tp: Type)(using Context): TermSymbol = // TODO needed? + newSymbol(ctx.owner.enclosingMethodOrClass, exSkolemName.fresh(), Synthetic, tp) +/* + def fromDepFun(arg: Tree)(using Context): Type = arg.tpe match + case RefinedType(parent, nme.apply, info: MethodType) if defn.isNonRefinedFunction(parent) => + info match + case info @ MethodType(_ :: Nil) + if info.paramInfos.head.derivesFrom(defn.Caps_Capability) => + apply(ref => info.resultType.substParams(info, ref :: Nil)) + case _ => + errorType(em"Malformed existential: dependent function must have a singgle parameter of type caps.Capability", arg.srcPos) + case _ => + errorType(em"Malformed existential: dependent function type expected", arg.srcPos) +*/ + private class PackMap(sym: Symbol, rt: RecType)(using Context) extends DeepTypeMap, IdempotentCaptRefMap: + def apply(tp: Type): Type = tp match + case ref: TermRef if ref.symbol == sym => TermRef(rt.recThis, defn.captureRoot) + case _ => mapOver(tp) + + /** Unpack current type from an existential `rt` so that all references bound by `rt` + * are recplaced by `ref`. + */ + private class OpenMap(rt: RecType, ref: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap: + def apply(tp: Type): Type = + if isExBound(tp, rt) then ref else mapOver(tp) + + /** Is `tp` a reference to the bound variable of `rt`? 
*/ + private def isExBound(tp: Type, rt: Type)(using Context) = tp match + case tp @ TermRef(RecThis(rt1), _) => (rt1 eq rt) && tp.symbol == defn.captureRoot + case _ => false + + /** Open existential, replacing the bund variable by `ref` */ + def open(rt: RecType, ref: Type)(using Context): Type = OpenMap(rt, ref)(rt.parent) + + /** Create an existential type `ex c.` so that all references to `sym` in `tp` + * become references to the existentially bound variable `c`. + */ + def fromSymbol(tp: Type, sym: Symbol)(using Context): RecType = + RecType(PackMap(sym, _)(tp)) + + def isExistentialMethod(mt: TermLambda)(using Context): Boolean = mt.paramInfos match + case (info: TypeRef) :: rest => info.symbol == defn.Caps_Exists && rest.isEmpty + case _ => false + + def isExistentialVar(ref: CaptureRef)(using Context) = ref match + case ref: TermParamRef => isExistentialMethod(ref.binder) + case _ => false + + def unapply(tp: Carrier)(using Context): Option[(TermParamRef, Type)] = + tp.refinedInfo match + case mt: MethodType + if isExistentialMethod(mt) && defn.isNonRefinedFunction(tp.parent) => + Some(mt.paramRefs.head, mt.resultType) + case _ => None + + def apply(mk: TermParamRef => Type)(using Context): MethodType = + MethodType(defn.Caps_Exists.typeRef :: Nil): mt => + mk(mt.paramRefs.head) + + /** Create existential if bound variable appear in result */ + def wrap(mk: TermParamRef => Type)(using Context): Type = + val mt = apply(mk) + if mt.isResultDependent then mt.toFunctionType() else mt.resType +end Existential diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 2c0cdfb7b129..9f33ad4e7fcb 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -14,6 +14,7 @@ import transform.{PreRecheck, Recheck}, Recheck.* import CaptureSet.{IdentityCaptRefMap, IdempotentCaptRefMap} import Synthetics.isExcluded import util.Property +import reporting.Message import printing.{Printer, Texts}, Texts.{Text, Str} import collection.mutable import CCState.* @@ -241,6 +242,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: val rinfo1 = apply(rinfo) if rinfo1 ne rinfo then rinfo1.toFunctionType(alwaysDependent = true) else tp + case Existential(_, unpacked) => + // drop the existential, the bound variables will be replaced by capture set variables + apply(unpacked) case tp: MethodType => tp.derivedLambdaType( paramInfos = mapNested(tp.paramInfos), @@ -256,13 +260,19 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: end apply end mapInferred - mapInferred(refine = true)(tp) + try mapInferred(refine = true)(tp) + catch case ex: AssertionError => + println(i"error while mapping inferred $tp") + throw ex end transformInferredType private def transformExplicitType(tp: Type, tptToCheck: Option[Tree] = None)(using Context): Type = val expandAliases = new DeepTypeMap: override def toString = "expand aliases" + def fail(msg: Message) = + for tree <- tptToCheck do report.error(msg, tree.srcPos) + /** Expand $throws aliases. This is hard-coded here since $throws aliases in stdlib * are defined with `?=>` rather than `?->`. * We also have to add a capture set to the last expanded throws alias. I.e. 
@@ -288,7 +298,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: CapturingType(fntpe, cs, boxed = false) else fntpe - private def recur(t: Type): Type = normalizeCaptures(mapOver(t)) + private def recur(t: Type): Type = + Existential.mapCapInResult(normalizeCaptures(mapOver(t)), fail) def apply(t: Type) = t match @@ -383,7 +394,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: try transformTT(tpt, boxed = !ccConfig.allowUniversalInBoxed && sym.is(Mutable, butNot = Method), - // types of mutable variables are boxed in pre 3.3 codee + // types of mutable variables are boxed in pre 3.3 code exact = sym.allOverriddenSymbols.hasNext, // types of symbols that override a parent don't get a capture set TODO drop ) @@ -476,11 +487,14 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: else tree.tpt.knownType def paramSignatureChanges = tree.match - case tree: DefDef => tree.paramss.nestedExists: - case param: ValDef => param.tpt.hasRememberedType - case param: TypeDef => param.rhs.hasRememberedType + case tree: DefDef => + tree.paramss.nestedExists: + case param: ValDef => param.tpt.hasRememberedType + case param: TypeDef => param.rhs.hasRememberedType case _ => false + // A symbol's signature changes if some of its parameter types or its result type + // have a new type installed here (meaning hasRememberedType is true) def signatureChanges = tree.tpt.hasRememberedType && !sym.isConstructor || paramSignatureChanges @@ -515,7 +529,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: else SubstParams(prevPsymss, prevLambdas)(resType) if sym.exists && signatureChanges then - val newInfo = integrateRT(sym.info, sym.paramSymss, localReturnType, Nil, Nil) + val newInfo = + Existential.mapCapInResult( + integrateRT(sym.info, sym.paramSymss, localReturnType, Nil, Nil), + report.error(_, tree.srcPos)) .showing(i"update info $sym: ${sym.info} = $result", capt) if newInfo ne sym.info then val updatedInfo = diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index ee8ed4b215d7..e8a234ff821f 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -229,7 +229,7 @@ object Config { inline val reuseSymDenotations = true /** If `checkLevelsOnConstraints` is true, check levels of type variables - * and create fresh ones as needed when bounds are first entered intot he constraint. + * and create fresh ones as needed when bounds are first entered into the constraint. * If `checkLevelsOnInstantiation` is true, allow level-incorrect constraints but * fix levels on type variable instantiation. 
*/ diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 1f0a673f90b1..3ee532ccfbaa 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -15,7 +15,7 @@ import Comments.{Comment, docCtx} import util.Spans.NoSpan import config.Feature import Symbols.requiredModuleRef -import cc.{CaptureSet, RetainingType} +import cc.{CaptureSet, RetainingType, Existential} import ast.tpd.ref import scala.annotation.tailrec @@ -993,6 +993,7 @@ class Definitions { @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") @tu lazy val Caps_Capability: ClassSymbol = requiredClass("scala.caps.Capability") @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") + @tu lazy val Caps_Exists = requiredClass("scala.caps.Exists") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") @@ -1189,11 +1190,17 @@ class Definitions { /** Matches a refined `PolyFunction`/`FunctionN[...]`/`ContextFunctionN[...]`. * Extracts the method type type and apply info. + * Will NOT math an existential type encoded as a dependent function. */ def unapply(tpe: RefinedType)(using Context): Option[MethodOrPoly] = tpe.refinedInfo match - case mt: MethodOrPoly - if tpe.refinedName == nme.apply && isFunctionType(tpe.parent) => Some(mt) + case mt: MethodType + if tpe.refinedName == nme.apply + && isFunctionType(tpe.parent) + && !Existential.isExistentialMethod(mt) => Some(mt) + case mt: PolyType + if tpe.refinedName == nme.apply + && isFunctionType(tpe.parent) => Some(mt) case _ => None end RefinedFunctionOf diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index 74d440562824..a6348304c4d7 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -332,6 +332,7 @@ object NameKinds { val InlineScrutineeName: UniqueNameKind = new UniqueNameKind("$scrutinee") val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") + val exSkolemName: UniqueNameKind = new UniqueNameKind("$exSkolem") // TODO needed? val UniqueExtMethName: UniqueNameKind = new UniqueNameKindWithUnmangle("$extension") diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index dca8bf206bac..c5b3611463b0 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -47,6 +47,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling monitored = false GADTused = false opaquesUsed = false + openedExistentials = Nil + assocExistentials = Map.empty recCount = 0 needsGc = false if Config.checkTypeComparerReset then checkReset() @@ -65,6 +67,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Indicates whether the subtype check used opaque types */ private var opaquesUsed: Boolean = false + /** In capture checking: The existential types that are open because they + * appear in an existential type on the left in an enclosing comparison. 
+ */ + private var openedExistentials: List[TermParamRef] = Nil + + /** In capture checking: A map from existential types that are appear + * in an existential type on the right in an enclosing comparison. + * Each existential gets mapped to the opened existentials to which it + * may resolve at this point. + */ + private var assocExistentials: Map[TermParamRef, List[TermParamRef]] = Map.empty + private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance @@ -326,14 +340,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling isSubPrefix(tp1.prefix, tp2.prefix) || thirdTryNamed(tp2) else - ( (tp1.name eq tp2.name) + (tp1.name eq tp2.name) && !sym1.is(Private) && tp2.isPrefixDependentMemberRef && isSubPrefix(tp1.prefix, tp2.prefix) && tp1.signature == tp2.signature && !(sym1.isClass && sym2.isClass) // class types don't subtype each other - ) || - thirdTryNamed(tp2) + || thirdTryNamed(tp2) case _ => secondTry end compareNamed @@ -345,7 +358,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp2: ProtoType => isMatchedByProto(tp2, tp1) case tp2: BoundType => - tp2 == tp1 || secondTry + tp2 == tp1 + || existentialVarsConform(tp1, tp2) + || secondTry case tp2: TypeVar => recur(tp1, typeVarInstance(tp2)) case tp2: WildcardType => @@ -547,6 +562,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if reduced.exists then recur(reduced, tp2) && recordGadtUsageIf { MatchType.thatReducesUsingGadt(tp1) } else thirdTry + case Existential(boundVar, tp1unpacked) => + compareExistentialLeft(boundVar, tp1unpacked, tp2) case _: FlexType => true case _ => @@ -628,6 +645,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling thirdTryNamed(tp2) case tp2: TypeParamRef => compareTypeParamRef(tp2) + case Existential(boundVar, tp2unpacked) => + compareExistentialRight(tp1, boundVar, tp2unpacked) case tp2: RefinedType => def compareRefinedSlow: Boolean = val name2 = tp2.refinedName @@ -1420,20 +1439,21 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling canConstrain(param2) && canInstantiate(param2) || compareLower(bounds(param2), tyconIsTypeRef = false) case tycon2: TypeRef => - isMatchingApply(tp1) || - byGadtBounds || - defn.isCompiletimeAppliedType(tycon2.symbol) && compareCompiletimeAppliedType(tp2, tp1, fromBelow = true) || { - tycon2.info match { - case info2: TypeBounds => - compareLower(info2, tyconIsTypeRef = true) - case info2: ClassInfo => - tycon2.name.startsWith("Tuple") && - defn.isTupleNType(tp2) && recur(tp1, tp2.toNestedPairs) || - tryBaseType(info2.cls) - case _ => - fourthTry - } - } || tryLiftedToThis2 + isMatchingApply(tp1) + || byGadtBounds + || defn.isCompiletimeAppliedType(tycon2.symbol) + && compareCompiletimeAppliedType(tp2, tp1, fromBelow = true) + || tycon2.info.match + case info2: TypeBounds => + compareLower(info2, tyconIsTypeRef = true) + case info2: ClassInfo => + tycon2.name.startsWith("Tuple") + && defn.isTupleNType(tp2) + && recur(tp1, tp2.toNestedPairs) + || tryBaseType(info2.cls) + case _ => + fourthTry + || tryLiftedToThis2 case tv: TypeVar => if tv.isInstantiated then @@ -1470,12 +1490,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling inFrozenGadt { isSubType(bounds1.hi.applyIfParameterized(args1), tp2, approx.addLow) } } && recordGadtUsageIf(true) - !sym.isClass && { defn.isCompiletimeAppliedType(sym) && compareCompiletimeAppliedType(tp1, tp2, 
fromBelow = false) || { recur(tp1.superTypeNormalized, tp2) && recordGadtUsageIf(MatchType.thatReducesUsingGadt(tp1)) } || tryLiftedToThis1 - } || byGadtBounds + } + || byGadtBounds case tycon1: TypeProxy => recur(tp1.superTypeNormalized, tp2) case _ => @@ -2769,6 +2789,40 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false } + private def compareExistentialLeft(boundVar: TermParamRef, tp1unpacked: Type, tp2: Type)(using Context): Boolean = + val saved = openedExistentials + try + openedExistentials = boundVar :: openedExistentials + recur(tp1unpacked, tp2) + finally + openedExistentials = saved + + private def compareExistentialRight(tp1: Type, boundVar: TermParamRef, tp2unpacked: Type)(using Context): Boolean = + val saved = assocExistentials + try + assocExistentials = assocExistentials.updated(boundVar, openedExistentials) + recur(tp1, tp2unpacked) + finally + assocExistentials = saved + + def canSubsumeExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context): Boolean = + Existential.isExistentialVar(tp1) + && assocExistentials.get(tp1).match + case Some(xs) => !Existential.isExistentialVar(tp2) || xs.contains(tp2) + case None => false + + /** Are tp1, tp2 termRefs that can be linked? This should never be called + * normally, since exietential variables appear only in capture sets + * which are in annotations that are ignored during normal typing. The real + * work is done in CaptureSet#subsumes which calls linkOK directly. + */ + private def existentialVarsConform(tp1: Type, tp2: Type) = + tp2 match + case tp2: TermParamRef => tp1 match + case tp1: CaptureRef => canSubsumeExistentially(tp2, tp1) + case _ => false + case _ => false + protected def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = refs1.subCaptures(refs2, frozen) @@ -3236,6 +3290,9 @@ object TypeComparer { def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true)(using Context): Type = comparing(_.lub(tp1, tp2, canConstrain = canConstrain, isSoft = isSoft)) + def canSubsumeExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context) = + comparing(_.canSubsumeExistentially(tp1, tp2)) + /** The least upper bound of a list of types */ final def lub(tps: List[Type])(using Context): Type = tps.foldLeft(defn.NothingType: Type)(lub(_,_)) diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 0c6e36c8f18f..9852dfc1170d 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -564,7 +564,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case SingletonTypeTree(ref) => toTextLocal(ref) ~ "." 
~ keywordStr("type") case RefinedTypeTree(tpt, refines) => - toTextLocal(tpt) ~ " " ~ blockText(refines) + if defn.isFunctionSymbol(tpt.symbol) && tree.hasType && !printDebug + then changePrec(GlobalPrec) { toText(tree.typeOpt) } + else toTextLocal(tpt) ~ blockText(refines) case AppliedTypeTree(tpt, args) => if (tpt.symbol == defn.orType && args.length == 2) changePrec(OrTypePrec) { toText(args(0)) ~ " | " ~ atPrec(OrTypePrec + 1) { toText(args(1)) } } diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 808bdba34e3f..840601f1622d 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -22,6 +22,12 @@ import annotation.experimental */ extension (x: Any) def reachCapability: Any = x + /** A trait to allow expressing existential types such as + * + * (x: Exists) => A ->{x} B + */ + sealed trait Exists extends Capability + object unsafe: extension [T](x: T) diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index ccd9e891380b..45c1776d8c43 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -34,15 +34,15 @@ | that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Id | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:60:27 -------------------------------------- -60 | val f1: File^{id*} = id(f) // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:61:27 -------------------------------------- +61 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ | Found: File^{id, f} | Required: File^{id*} | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:77:5 ------------------------------------------------------------ -77 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * +-- Error: tests/neg-custom-args/captures/reaches.scala:78:5 ------------------------------------------------------------ +78 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) | ^^^^^^ | Reach capability cap and universal capability cap cannot both | appear in the type [B](f: ((box A ->{ps*} A, box A ->{ps*} A)) => B): List[B] of this expression diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index de5e4362cdf2..6a5ffd51c2c6 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -55,9 +55,10 @@ def test = def attack2 = val id: File^ -> File^ = x => x + // val id: File^ -> EX C.File^C val leaked = usingFile[File^{id*}]: f => - val f1: File^{id*} = id(f) // error + val f1: File^{id*} = id(f) // error, since now id(f): File^ f1 class List[+A]: @@ -74,6 +75,4 @@ def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = z => g(f(z)) def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = - ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * - - + ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) diff --git a/tests/neg/cc-ex-conformance.scala b/tests/neg/cc-ex-conformance.scala new file mode 100644 index 000000000000..9cfdda43c764 --- /dev/null +++ b/tests/neg/cc-ex-conformance.scala @@ -0,0 +1,25 @@ +import language.experimental.captureChecking +import caps.{Exists, Capability} + +class C + +type EX1 = () => (c: Exists) => 
(C^{c}, C^{c}) + +type EX2 = () => (c1: Exists) => (c2: Exists) => (C^{c1}, C^{c2}) + +type EX3 = () => (c: Exists) => () => C^{c} + +type EX4 = () => () => (c: Exists) => C^{c} + +def Test = + val ex1: EX1 = ??? + val ex2: EX2 = ??? + val _: EX1 = ex1 + val _: EX2 = ex1 // ok + val _: EX1 = ex2 // ok + + val ex3: EX3 = ??? + val ex4: EX4 = ??? + val _: EX4 = ex3 // ok + val _: EX4 = ex4 + val _: EX3 = ex4 // error diff --git a/tests/new/test.scala b/tests/new/test.scala index 16a823547553..18644422ab06 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -2,8 +2,9 @@ import language.experimental.namedTuples type Person = (name: String, age: Int) -def test = - val bob = (name = "Bob", age = 33): (name: String, age: Int) +trait A: + type T + +class B: + type U =:= A { type T = U } - val silly = bob match - case (name = n, age = a) => n.length + a diff --git a/tests/pos/cc-ex-unpack.scala b/tests/pos/cc-ex-unpack.scala new file mode 100644 index 000000000000..ae9b4ea5d805 --- /dev/null +++ b/tests/pos/cc-ex-unpack.scala @@ -0,0 +1,18 @@ +import language.experimental.captureChecking +import caps.{Exists, Capability} + +class C + +type EX1 = (c: Exists) -> (C^{c}, C^{c}) + +type EX2 = () -> (c1: Exists) -> (c2: Exists) -> (C^{c1}, C^{c2}) + +type EX3 = () -> (c: Exists) -> () -> C^{c} + +type EX4 = () -> () -> (c: Exists) -> C^{c} + +def Test = + def f = + val ex1: EX1 = ??? + val c1 = ex1 + c1 From b375d97b9bb23b2575c4f7c6134c271d347f632f Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 6 Jun 2024 19:47:28 +0200 Subject: [PATCH 291/827] Generalize isAlwaysEmpty and streamline isBoxedCaptured - isBoxedCaptured no longer requires the construction of intermediate capture sets. - isAlwaysEmpty is also true for solved variables that have no elements --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 10 +++++++++- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 080577b5773f..40e271707b26 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -197,7 +197,15 @@ extension (tp: Type) getBoxed(tp) /** Is the boxedCaptureSet of this type nonempty? */ - def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty + def isBoxedCapturing(using Context): Boolean = + tp match + case tp @ CapturingType(parent, refs) => + tp.isBoxed && !refs.isAlwaysEmpty || parent.isBoxedCapturing + case tp: TypeRef if tp.symbol.isAbstractOrParamType => false + case tp: TypeProxy => tp.superType.isBoxedCapturing + case tp: AndType => tp.tp1.isBoxedCapturing && tp.tp2.isBoxedCapturing + case tp: OrType => tp.tp1.isBoxedCapturing || tp.tp2.isBoxedCapturing + case _ => false /** If this type is a capturing type, the version with boxed statues as given by `boxed`. 
* If it is a TermRef of a capturing type, and the box status flips, widen to a capturing diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index cf803e47eca0..9b0afbf3567e 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -459,7 +459,7 @@ object CaptureSet: var deps: Deps = emptySet def isConst = isSolved - def isAlwaysEmpty = false + def isAlwaysEmpty = isSolved && elems.isEmpty def isMaybeSet = false // overridden in BiMapped From f9ddc717b20f268bcff650454dfeef76f7477a2a Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 7 Jun 2024 11:59:27 +0200 Subject: [PATCH 292/827] Refine class refinements - Use a uniform criterion when to add them - Don't add them for @constructorOnly or @cc.untrackedCaptures arguments @untrackedCaptures is a new annotation --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 46 +++++-------------- .../dotty/tools/dotc/cc/CheckCaptures.scala | 7 +-- compiler/src/dotty/tools/dotc/cc/Setup.scala | 5 +- .../dotty/tools/dotc/core/Definitions.scala | 1 + library/src/scala/caps.scala | 5 ++ 5 files changed, 24 insertions(+), 40 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 40e271707b26..d8c567f145d4 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -467,41 +467,17 @@ extension (sym: Symbol) && !sym.allowsRootCapture && sym != defn.Caps_unsafeBox && sym != defn.Caps_unsafeUnbox - - /** Does this symbol define a level where we do not want to let local variables - * escape into outer capture sets? - */ - def isLevelOwner(using Context): Boolean = - sym.isClass - || sym.is(Method, butNot = Accessor) - - /** The owner of the current level. Qualifying owners are - * - methods, other than accessors - * - classes, if they are not staticOwners - * - _root_ - */ - def levelOwner(using Context): Symbol = - def recur(sym: Symbol): Symbol = - if !sym.exists || sym.isRoot || sym.isStaticOwner then defn.RootClass - else if sym.isLevelOwner then sym - else recur(sym.owner) - recur(sym) - - /** The outermost symbol owned by both `sym` and `other`. if none exists - * since the owning scopes of `sym` and `other` are not nested, invoke - * `onConflict` to return a symbol. - */ - def maxNested(other: Symbol, onConflict: (Symbol, Symbol) => Context ?=> Symbol)(using Context): Symbol = - if !sym.exists || other.isContainedIn(sym) then other - else if !other.exists || sym.isContainedIn(other) then sym - else onConflict(sym, other) - - /** The innermost symbol owning both `sym` and `other`. - */ - def minNested(other: Symbol)(using Context): Symbol = - if !other.exists || other.isContainedIn(sym) then sym - else if !sym.exists || sym.isContainedIn(other) then other - else sym.owner.minNested(other.owner) + && !defn.isPolymorphicAfterErasure(sym) + + def isRefiningParamAccessor(using Context): Boolean = + sym.is(ParamAccessor) + && { + val param = sym.owner.primaryConstructor.paramSymss + .nestedFind(_.name == sym.name) + .getOrElse(NoSymbol) + !param.hasAnnotation(defn.ConstructorOnlyAnnot) + && !param.hasAnnotation(defn.UntrackedCapturesAnnot) + } extension (tp: AnnotatedType) /** Is this a boxed capturing type? 
*/ diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index e6e091cd5897..87d37b61941a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -550,8 +550,8 @@ class CheckCaptures extends Recheck, SymTransformer: var allCaptures: CaptureSet = if core.derivesFromCapability then CaptureSet.universal else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do - val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol - if getter.termRef.isTracked && !getter.is(Private) then + val getter = cls.info.member(getterName).suchThat(_.isRefiningParamAccessor).symbol + if !getter.is(Private) && getter.termRef.isTracked then refined = RefinedType(refined, getterName, argType) allCaptures ++= argType.captureSet (refined, allCaptures) @@ -764,7 +764,8 @@ class CheckCaptures extends Recheck, SymTransformer: val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") checkSubset(localSet, thisSet, tree.srcPos) // (2) for param <- cls.paramGetters do - if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) + && !param.hasAnnotation(defn.UntrackedCapturesAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) for pureBase <- cls.pureBaseClass do // (4) def selfType = impl.body diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 9f33ad4e7fcb..0851d8063d13 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -69,9 +69,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case _ => foldOver(x, tp) def apply(tp: Type): Boolean = apply(false, tp) - if symd.isAllOf(PrivateParamAccessor) + if symd.symbol.isRefiningParamAccessor + && symd.is(Private) && symd.owner.is(CaptureChecked) - && !symd.hasAnnotation(defn.ConstructorOnlyAnnot) && containsCovarRetains(symd.symbol.originDenotation.info) then symd.flags &~ Private else symd.flags @@ -186,6 +186,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if !defn.isFunctionClass(cls) && cls.is(CaptureChecked) => cls.paramGetters.foldLeft(tp) { (core, getter) => if atPhase(thisPhase.next)(getter.termRef.isTracked) + && getter.isRefiningParamAccessor && !getter.is(Tracked) then val getterType = diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 3ee532ccfbaa..88de8e66054e 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1053,6 +1053,7 @@ class Definitions { @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") @tu lazy val UncheckedCapturesAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedCaptures") + @tu lazy val UntrackedCapturesAnnot: ClassSymbol = requiredClass("scala.caps.untrackedCaptures") @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") @tu lazy val BeanGetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanGetter") @tu lazy val BeanSetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanSetter") diff --git a/library/src/scala/caps.scala 
b/library/src/scala/caps.scala index 840601f1622d..46702271474a 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -28,6 +28,11 @@ import annotation.experimental */ sealed trait Exists extends Capability + /** This should go into annotations. For now it is here, so that we + * can experiment with it quickly between minor releases + */ + final class untrackedCaptures extends annotation.StaticAnnotation + object unsafe: extension [T](x: T) From 045801247565b0d6f8c13d893959a1b478a862ca Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 7 Jun 2024 13:00:38 +0200 Subject: [PATCH 293/827] Improve handling of no-cap-under-box/unbox errors - Improve error messages - Better propagation of @uncheckedCaptures - -un-deprecacte caps.unsafeUnbox and friends. --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 65 +++++++++++++------ compiler/src/dotty/tools/dotc/cc/Setup.scala | 8 ++- library/src/scala/caps.scala | 9 +-- 3 files changed, 53 insertions(+), 29 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 87d37b61941a..6e4a10efe607 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -12,7 +12,7 @@ import ast.{tpd, untpd, Trees} import Trees.* import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPairsChecker} import typer.Checking.{checkBounds, checkAppliedTypesIn} -import typer.ErrorReporting.{Addenda, err} +import typer.ErrorReporting.{Addenda, NothingToAdd, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.{Recheck, PreRecheck, CapturedVars} @@ -22,7 +22,7 @@ import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult import CCState.* import StdNames.nme import NameKinds.{DefaultGetterName, WildcardParamName, UniqueNameKind} -import reporting.trace +import reporting.{trace, Message} /** The capture checker */ object CheckCaptures: @@ -866,7 +866,10 @@ class CheckCaptures extends Recheck, SymTransformer: } checkNotUniversal(parent) case _ => - if !ccConfig.allowUniversalInBoxed && needsUniversalCheck then + if !ccConfig.allowUniversalInBoxed + && !tpe.hasAnnotation(defn.UncheckedCapturesAnnot) + && needsUniversalCheck + then checkNotUniversal(tpe) super.recheckFinish(tpe, tree, pt) end recheckFinish @@ -884,6 +887,17 @@ class CheckCaptures extends Recheck, SymTransformer: private inline val debugSuccesses = false + type BoxErrors = mutable.ListBuffer[Message] | Null + + private def boxErrorAddenda(boxErrors: BoxErrors) = + if boxErrors == null then NothingToAdd + else new Addenda: + override def toAdd(using Context): List[String] = + boxErrors.toList.map: msg => + i""" + | + |Note that ${msg.toString}""" + /** Massage `actual` and `expected` types before checking conformance. 
* Massaging is done by the methods following this one: * - align dependent function types and add outer references in the expected type @@ -893,7 +907,8 @@ class CheckCaptures extends Recheck, SymTransformer: */ override def checkConformsExpr(actual: Type, expected: Type, tree: Tree, addenda: Addenda)(using Context): Type = var expected1 = alignDependentFunction(expected, actual.stripCapturing) - val actualBoxed = adapt(actual, expected1, tree.srcPos) + val boxErrors = new mutable.ListBuffer[Message] + val actualBoxed = adapt(actual, expected1, tree.srcPos, boxErrors) //println(i"check conforms $actualBoxed <<< $expected1") if actualBoxed eq actual then @@ -907,7 +922,8 @@ class CheckCaptures extends Recheck, SymTransformer: actualBoxed else capt.println(i"conforms failed for ${tree}: $actual vs $expected") - err.typeMismatch(tree.withType(actualBoxed), expected1, addenda ++ CaptureSet.levelErrors) + err.typeMismatch(tree.withType(actualBoxed), expected1, + addenda ++ CaptureSet.levelErrors ++ boxErrorAddenda(boxErrors)) actual end checkConformsExpr @@ -991,7 +1007,7 @@ class CheckCaptures extends Recheck, SymTransformer: * * @param alwaysConst always make capture set variables constant after adaptation */ - def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, covariant: Boolean, alwaysConst: Boolean)(using Context): Type = + def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, covariant: Boolean, alwaysConst: Boolean, boxErrors: BoxErrors)(using Context): Type = /** Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation * @param boxed if true we adapt to a boxed expected type @@ -1008,8 +1024,8 @@ class CheckCaptures extends Recheck, SymTransformer: case FunctionOrMethod(eargs, eres) => (eargs, eres) case _ => (aargs.map(_ => WildcardType), WildcardType) val aargs1 = aargs.zipWithConserve(eargs): - adaptBoxed(_, _, pos, !covariant, alwaysConst) - val ares1 = adaptBoxed(ares, eres, pos, covariant, alwaysConst) + adaptBoxed(_, _, pos, !covariant, alwaysConst, boxErrors) + val ares1 = adaptBoxed(ares, eres, pos, covariant, alwaysConst, boxErrors) val resTp = if (aargs1 eq aargs) && (ares1 eq ares) then actualShape // optimize to avoid redundant matches else actualShape.derivedFunctionOrMethod(aargs1, ares1) @@ -1057,22 +1073,26 @@ class CheckCaptures extends Recheck, SymTransformer: val criticalSet = // the set which is not allowed to have `cap` if covariant then captures // can't box with `cap` else expected.captureSet // can't unbox with `cap` - if criticalSet.isUniversal && expected.isValueType && !ccConfig.allowUniversalInBoxed then + def msg = em"""$actual cannot be box-converted to $expected + |since at least one of their capture sets contains the root capability `cap`""" + def allowUniversalInBoxed = + ccConfig.allowUniversalInBoxed + || expected.hasAnnotation(defn.UncheckedCapturesAnnot) + || actual.widen.hasAnnotation(defn.UncheckedCapturesAnnot) + if criticalSet.isUniversal && expected.isValueType && !allowUniversalInBoxed then // We can't box/unbox the universal capability. Leave `actual` as it is - // so we get an error in checkConforms. This tends to give better error + // so we get an error in checkConforms. Add the error message generated + // from boxing as an addendum. This tends to give better error // messages than disallowing the root capability in `criticalSet`. 
+ if boxErrors != null then boxErrors += msg if ctx.settings.YccDebug.value then println(i"cannot box/unbox $actual vs $expected") actual else - if !ccConfig.allowUniversalInBoxed then + if !allowUniversalInBoxed then // Disallow future addition of `cap` to `criticalSet`. - criticalSet.disallowRootCapability { () => - report.error( - em"""$actual cannot be box-converted to $expected - |since one of their capture sets contains the root capability `cap`""", - pos) - } + criticalSet.disallowRootCapability: () => + report.error(msg, pos) if !insertBox then // unboxing //debugShowEnvs() markFree(criticalSet, pos) @@ -1109,13 +1129,15 @@ class CheckCaptures extends Recheck, SymTransformer: * * @param alwaysConst always make capture set variables constant after adaptation */ - def adapt(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = + def adapt(actual: Type, expected: Type, pos: SrcPos, boxErrors: BoxErrors)(using Context): Type = if expected == LhsProto || expected.isSingleton && actual.isSingleton then actual else val normalized = makeCaptureSetExplicit(actual) - val widened = improveCaptures(normalized.widenDealias, actual) - val adapted = adaptBoxed(widened.withReachCaptures(actual), expected, pos, covariant = true, alwaysConst = false) + val widened = improveCaptures(normalized.widen.dealiasKeepAnnots, actual) + val adapted = adaptBoxed( + widened.withReachCaptures(actual), expected, pos, + covariant = true, alwaysConst = false, boxErrors) if adapted eq widened then normalized else adapted.showing(i"adapt boxed $actual vs $expected ===> $adapted", capt) end adapt @@ -1137,7 +1159,8 @@ class CheckCaptures extends Recheck, SymTransformer: val saved = curEnv try curEnv = Env(clazz, EnvKind.NestedInOwner, capturedVars(clazz), outer0 = curEnv) - val adapted = adaptBoxed(/*Existential.strip*/(actual), expected1, srcPos, covariant = true, alwaysConst = true) + val adapted = + adaptBoxed(/*Existential.strip*/(actual), expected1, srcPos, covariant = true, alwaysConst = true, null) actual match case _: MethodType => // We remove the capture set resulted from box adaptation for method types, diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 0851d8063d13..35f22538f074 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -394,7 +394,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def transformResultType(tpt: TypeTree, sym: Symbol)(using Context): Unit = try transformTT(tpt, - boxed = !ccConfig.allowUniversalInBoxed && sym.is(Mutable, butNot = Method), + boxed = + sym.is(Mutable, butNot = Method) + && !ccConfig.allowUniversalInBoxed + && !sym.hasAnnotation(defn.UncheckedCapturesAnnot), // types of mutable variables are boxed in pre 3.3 code exact = sym.allOverriddenSymbols.hasNext, // types of symbols that override a parent don't get a capture set TODO drop @@ -405,7 +408,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: val addDescription = new TypeTraverser: def traverse(tp: Type) = tp match case tp @ CapturingType(parent, refs) => - if !refs.isConst then refs.withDescription(i"of $sym") + if !refs.isConst && refs.description.isEmpty then + refs.withDescription(i"of $sym") traverse(parent) case _ => traverseChildren(tp) diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 46702271474a..5ae5b860f501 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -43,22 +43,19 @@ import 
annotation.experimental def unsafeAssumePure: T = x /** If argument is of type `cs T`, converts to type `box cs T`. This - * avoids the error that would be raised when boxing `*`. + * avoids the error that would be raised when boxing `cap`. */ - @deprecated(since = "3.3") def unsafeBox: T = x /** If argument is of type `box cs T`, converts to type `cs T`. This - * avoids the error that would be raised when unboxing `*`. + * avoids the error that would be raised when unboxing `cap`. */ - @deprecated(since = "3.3") def unsafeUnbox: T = x extension [T, U](f: T => U) /** If argument is of type `box cs T`, converts to type `cs T`. This - * avoids the error that would be raised when unboxing `*`. + * avoids the error that would be raised when unboxing `cap`. */ - @deprecated(since = "3.3") def unsafeBoxFunArg: T => U = f end unsafe From 24506ae51bdedc0376485fc23167dffcc2cca2df Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 7 Jun 2024 13:27:17 +0200 Subject: [PATCH 294/827] Go back to original no cap in box/unbox restrictions We go back to the original lifetime restriction that box/unbox cannot apply to universal capture sets, and drop the later restriction that type variable instantiations may not deeply capture cap. The original restriction is proven to be sound and is probably expressive enough when we add reach capabilities. This required some changes in tests and also in the standard library. The original restriction is in place for source <= 3.2 and >= 3.5. Source 3.3 and 3.4 use the alternative restriction on type variable instances. Some neg tests have not been brought forward to 3.4. They are all in tests/neg-customargs/captures and start with //> using options -source 3.4 We need to look at these tests one-by-one and analyze whether the new 3.5 behavior is correct. 
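
For illustration, here is a minimal sketch of the kind of code whose status flips
with this change. It is adapted from the i15923-cases test added in this patch;
the per-source-version behavior is taken from the summary above and is not
independently verified here.

    import language.experimental.captureChecking

    trait Cap { def use(): Int }
    type Id[X] = [T] -> (op: X => T) -> T
    def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x)

    def foo(x: Id[Cap^]) =
      x(_.use()) // error again under source <= 3.2 and >= 3.5 (was OK under the
                 // sealed policy of 3.3/3.4): the application needs a box/unbox
                 // conversion on a type capturing the universal capability `cap`

With the original restriction restored, such cases are reported at the box/unbox
site rather than at a type variable instantiation, as the "error, was OK under
sealed policy" comments in the updated tests below indicate.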
--- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 4 +- .../src/scala/collection/Iterator.scala | 4 +- .../src/scala/collection/SeqView.scala | 12 +++-- .../immutable/LazyListIterable.scala | 53 ++++++++++--------- .../captures/box-adapt-cases.scala | 2 +- .../neg-custom-args/captures/capt-test.scala | 4 +- tests/neg-custom-args/captures/capt1.check | 40 +++++++------- tests/neg-custom-args/captures/capt1.scala | 2 + .../captures/effect-swaps-explicit.check | 22 ++++---- .../captures/effect-swaps-explicit.scala | 2 + tests/neg-custom-args/captures/filevar.scala | 2 +- tests/neg-custom-args/captures/i15749.scala | 15 ++++++ tests/neg-custom-args/captures/i15772.check | 8 +-- tests/neg-custom-args/captures/i15922.scala | 2 + .../captures/i15923-cases.scala | 7 +++ tests/neg-custom-args/captures/i16114.scala | 2 + .../captures/i19330-alt2.scala | 2 + tests/neg-custom-args/captures/i19330.scala | 2 + .../captures/lazylists-exceptions.check | 5 +- tests/neg-custom-args/captures/levels.check | 8 +-- tests/neg-custom-args/captures/levels.scala | 2 + .../neg-custom-args/captures/outer-var.check | 30 ++++++----- tests/neg-custom-args/captures/reaches.check | 28 +++++----- tests/neg-custom-args/captures/reaches.scala | 2 + tests/neg-custom-args/captures/real-try.check | 52 +++++++++--------- tests/neg-custom-args/captures/real-try.scala | 2 + tests/neg-custom-args/captures/try.check | 16 +++--- tests/neg-custom-args/captures/try.scala | 4 +- .../captures}/unsound-reach-2.scala | 2 + .../captures}/unsound-reach-3.scala | 2 + .../captures}/unsound-reach-4.check | 4 +- .../captures}/unsound-reach-4.scala | 2 + .../captures/unsound-reach.check | 12 +++++ .../captures}/unsound-reach.scala | 2 +- .../captures/vars-simple.check | 9 ++-- tests/neg-custom-args/captures/vars.check | 16 +++--- tests/neg-custom-args/captures/vars.scala | 2 + tests/neg/unsound-reach.check | 5 -- tests/pos-custom-args/captures/casts.scala | 4 ++ .../captures/filevar-expanded.scala | 3 +- tests/pos-custom-args/captures/i15749.scala | 4 +- .../captures/i15923-cases.scala | 4 -- tests/pos-custom-args/captures/i15925.scala | 5 +- tests/pos-custom-args/captures/levels.scala | 23 ++++++++ .../captures/unsafe-captures.scala | 8 +++ .../captures/untracked-captures.scala | 34 ++++++++++++ .../colltest5/CollectionStrawManCC5_1.scala | 2 +- 47 files changed, 310 insertions(+), 167 deletions(-) create mode 100644 tests/neg-custom-args/captures/i15749.scala create mode 100644 tests/neg-custom-args/captures/i15923-cases.scala rename tests/{neg => neg-custom-args/captures}/unsound-reach-2.scala (89%) rename tests/{neg => neg-custom-args/captures}/unsound-reach-3.scala (89%) rename tests/{neg => neg-custom-args/captures}/unsound-reach-4.check (55%) rename tests/{neg => neg-custom-args/captures}/unsound-reach-4.scala (85%) create mode 100644 tests/neg-custom-args/captures/unsound-reach.check rename tests/{neg => neg-custom-args/captures}/unsound-reach.scala (83%) delete mode 100644 tests/neg/unsound-reach.check create mode 100644 tests/pos-custom-args/captures/casts.scala create mode 100644 tests/pos-custom-args/captures/levels.scala create mode 100644 tests/pos-custom-args/captures/unsafe-captures.scala create mode 100644 tests/pos-custom-args/captures/untracked-captures.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index d8c567f145d4..7a8ed7b3651a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ 
-32,7 +32,9 @@ object ccConfig: * previous global retriction that `cap` can't be boxed or unboxed. */ def allowUniversalInBoxed(using Context) = - Feature.sourceVersion.isAtLeast(SourceVersion.`3.3`) + Feature.sourceVersion.stable == SourceVersion.`3.3` + || Feature.sourceVersion.stable == SourceVersion.`3.4` + //|| Feature.sourceVersion.stable == SourceVersion.`3.5` // drop `//` if you want to test with the sealed type params strategy end ccConfig diff --git a/scala2-library-cc/src/scala/collection/Iterator.scala b/scala2-library-cc/src/scala/collection/Iterator.scala index 58ef4beb930d..4d1b0ed4ff95 100644 --- a/scala2-library-cc/src/scala/collection/Iterator.scala +++ b/scala2-library-cc/src/scala/collection/Iterator.scala @@ -1008,7 +1008,7 @@ object Iterator extends IterableFactory[Iterator] { def newBuilder[A]: Builder[A, Iterator[A]] = new ImmutableBuilder[A, Iterator[A]](empty[A]) { override def addOne(elem: A): this.type = { elems = elems ++ single(elem); this } - } + }.asInstanceOf // !!! CC unsafe op /** Creates iterator that produces the results of some element computation a number of times. * @@ -1160,7 +1160,7 @@ object Iterator extends IterableFactory[Iterator] { @tailrec def merge(): Unit = if (current.isInstanceOf[ConcatIterator[_]]) { val c = current.asInstanceOf[ConcatIterator[A]] - current = c.current + current = c.current.asInstanceOf // !!! CC unsafe op currentHasNextChecked = c.currentHasNextChecked if (c.tail != null) { if (last == null) last = c.last diff --git a/scala2-library-cc/src/scala/collection/SeqView.scala b/scala2-library-cc/src/scala/collection/SeqView.scala index 34405e06eedb..c7af0077ce1a 100644 --- a/scala2-library-cc/src/scala/collection/SeqView.scala +++ b/scala2-library-cc/src/scala/collection/SeqView.scala @@ -186,12 +186,14 @@ object SeqView { } @SerialVersionUID(3L) - class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^, + class Sorted[A, B >: A] private (underlying: SomeSeqOps[A]^, private[this] val len: Int, ord: Ordering[B]) extends SeqView[A] { outer: Sorted[A, B]^ => + private var myUnderlying: SomeSeqOps[A]^{underlying} = underlying + // force evaluation immediately by calling `length` so infinite collections // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord) @@ -221,10 +223,10 @@ object SeqView { val res = { val len = this.len if (len == 0) Nil - else if (len == 1) List(underlying.head) + else if (len == 1) List(myUnderlying.head) else { val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] - underlying.copyToArray(arr) + myUnderlying.copyToArray(arr) java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it // is safe because: @@ -238,12 +240,12 @@ object SeqView { } } evaluated = true - underlying = null + myUnderlying = null res } private[this] def elems: SomeSeqOps[A]^{this} = { - val orig = underlying + val orig = myUnderlying if (evaluated) _sorted else orig } diff --git a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala index ac24995e6892..2f7b017a6729 100644 --- a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala +++ b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala @@ -24,6 +24,7 @@ import scala.language.implicitConversions 
import scala.runtime.Statics import language.experimental.captureChecking import annotation.unchecked.uncheckedCaptures +import caps.untrackedCaptures /** This class implements an immutable linked list. We call it "lazy" * because it computes its elements only when they are needed. @@ -245,7 +246,7 @@ import annotation.unchecked.uncheckedCaptures * @define evaluatesAllElements This method evaluates all elements of the collection. */ @SerialVersionUID(3L) -final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^) +final class LazyListIterable[+A] private(@untrackedCaptures lazyState: () => LazyListIterable.State[A]^) extends AbstractIterable[A] with Iterable[A] with IterableOps[A, LazyListIterable, LazyListIterable[A]] @@ -253,6 +254,8 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy with Serializable { import LazyListIterable._ + private var myLazyState = lazyState + @volatile private[this] var stateEvaluated: Boolean = false @inline private def stateDefined: Boolean = stateEvaluated private[this] var midEvaluation = false @@ -264,11 +267,11 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements") } midEvaluation = true - val res = try lazyState() finally midEvaluation = false + val res = try myLazyState() finally midEvaluation = false // if we set it to `true` before evaluating, we may infinite loop // if something expects `state` to already be evaluated stateEvaluated = true - lazyState = null // allow GC + myLazyState = null // allow GC res } @@ -755,7 +758,7 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy * The iterator returned by this method mostly preserves laziness; * a single element ahead of the iterator is evaluated. */ - override def grouped(size: Int): Iterator[LazyListIterable[A]] = { + override def grouped(size: Int): Iterator[LazyListIterable[A]]^{this} = { require(size > 0, "size must be positive, but was " + size) slidingImpl(size = size, step = size) } @@ -765,12 +768,12 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy * The iterator returned by this method mostly preserves laziness; * `size - step max 1` elements ahead of the iterator are evaluated. 
*/ - override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = { + override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]]^{this} = { require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") slidingImpl(size = size, step = step) } - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] = + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]]^{this} = if (knownIsEmpty) Iterator.empty else new SlidingIterator[A](this, size = size, step = step) @@ -996,7 +999,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var elem: A = null.asInstanceOf[A] var found = false @@ -1013,7 +1016,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { val marker = Statics.pfMarker val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased @@ -1032,7 +1035,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var it: Iterator[B]^{ll, f} = null var itHasNext = false @@ -1056,7 +1059,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric var iRef = n // val iRef = new IntRef(n) newLL { var rest = restRef // var rest = restRef.elem @@ -1073,7 +1076,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { 
var rest = restRef // var rest = restRef.elem while (!rest.isEmpty && p(rest.head)) { @@ -1086,8 +1089,8 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - var scoutRef: LazyListIterable[A]^{ll*} = ll // same situation + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var scoutRef: LazyListIterable[A]^{ll} = ll // same situation var remainingRef = n // val remainingRef = new IntRef(n) newLL { var scout = scoutRef // var scout = scoutRef.elem @@ -1236,33 +1239,35 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { */ def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A] - private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { - override def hasNext: Boolean = !lazyList.isEmpty + private class LazyIterator[+A](lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { + private var myLazyList = lazyList + override def hasNext: Boolean = !myLazyList.isEmpty override def next(): A = - if (lazyList.isEmpty) Iterator.empty.next() + if (myLazyList.isEmpty) Iterator.empty.next() else { - val res = lazyList.head - lazyList = lazyList.tail + val res = myLazyList.head + myLazyList = myLazyList.tail res } } - private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int) + private class SlidingIterator[A](lazyList: LazyListIterable[A]^, size: Int, step: Int) extends AbstractIterator[LazyListIterable[A]] { + private var myLazyList = lazyList private val minLen = size - step max 0 private var first = true def hasNext: Boolean = - if (first) !lazyList.isEmpty - else lazyList.lengthGt(minLen) + if (first) !myLazyList.isEmpty + else myLazyList.lengthGt(minLen) def next(): LazyListIterable[A] = { if (!hasNext) Iterator.empty.next() else { first = false - val list = lazyList - lazyList = list.drop(step) + val list = myLazyList + myLazyList = list.drop(step) list.take(size) } } @@ -1281,7 +1286,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { import LazyBuilder._ private[this] var next: DeferredState[A] = _ - private[this] var list: LazyListIterable[A] = _ + @uncheckedCaptures private[this] var list: LazyListIterable[A]^ = _ clear() diff --git a/tests/neg-custom-args/captures/box-adapt-cases.scala b/tests/neg-custom-args/captures/box-adapt-cases.scala index 3dac26a98318..681d699842ed 100644 --- a/tests/neg-custom-args/captures/box-adapt-cases.scala +++ b/tests/neg-custom-args/captures/box-adapt-cases.scala @@ -4,7 +4,7 @@ def test1(): Unit = { type Id[X] = [T] -> (op: X => T) -> T val x: Id[Cap^] = ??? 
- x(cap => cap.use()) // was error, now OK + x(cap => cap.use()) // error, OK under sealed } def test2(io: Cap^): Unit = { diff --git a/tests/neg-custom-args/captures/capt-test.scala b/tests/neg-custom-args/captures/capt-test.scala index 80ee1aba84e1..b202a14d0940 100644 --- a/tests/neg-custom-args/captures/capt-test.scala +++ b/tests/neg-custom-args/captures/capt-test.scala @@ -20,8 +20,8 @@ def handle[E <: Exception, R <: Top](op: (CT[E] @retains(caps.cap)) => R)(handl catch case ex: E => handler(ex) def test: Unit = - val b = handle[Exception, () => Nothing] { // error + val b = handle[Exception, () => Nothing] { (x: CanThrow[Exception]) => () => raise(new Exception)(using x) - } { + } { // error (ex: Exception) => ??? } diff --git a/tests/neg-custom-args/captures/capt1.check b/tests/neg-custom-args/captures/capt1.check index 0e99d1876d3c..3d0ed538b2e5 100644 --- a/tests/neg-custom-args/captures/capt1.check +++ b/tests/neg-custom-args/captures/capt1.check @@ -1,52 +1,52 @@ --- Error: tests/neg-custom-args/captures/capt1.scala:4:11 -------------------------------------------------------------- -4 | () => if x == null then y else y // error +-- Error: tests/neg-custom-args/captures/capt1.scala:6:11 -------------------------------------------------------------- +6 | () => if x == null then y else y // error | ^ | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} | of an enclosing function literal with expected type () -> C --- Error: tests/neg-custom-args/captures/capt1.scala:7:11 -------------------------------------------------------------- -7 | () => if x == null then y else y // error +-- Error: tests/neg-custom-args/captures/capt1.scala:9:11 -------------------------------------------------------------- +9 | () => if x == null then y else y // error | ^ | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} | of an enclosing function literal with expected type Matchable --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:14:2 ----------------------------------------- -14 | def f(y: Int) = if x == null then y else y // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:16:2 ----------------------------------------- +16 | def f(y: Int) = if x == null then y else y // error | ^ | Found: (y: Int) ->{x} Int | Required: Matchable -15 | f +17 | f | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:21:2 ----------------------------------------- -21 | class F(y: Int) extends A: // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:23:2 ----------------------------------------- +23 | class F(y: Int) extends A: // error | ^ | Found: A^{x} | Required: A -22 | def m() = if x == null then y else y -23 | F(22) +24 | def m() = if x == null then y else y +25 | F(22) | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:26:2 ----------------------------------------- -26 | new A: // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:28:2 ----------------------------------------- +28 | new A: // error | ^ | Found: A^{x} | Required: A -27 | def m() = if x == null then y else y +29 | def m() = if x == null then y else y | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/capt1.scala:32:12 
------------------------------------------------------------- -32 | val z2 = h[() -> Cap](() => x) // error // error +-- Error: tests/neg-custom-args/captures/capt1.scala:34:12 ------------------------------------------------------------- +34 | val z2 = h[() -> Cap](() => x) // error // error | ^^^^^^^^^^^^ | Sealed type variable X cannot be instantiated to () -> box C^ since | the part box C^ of that type captures the root capability `cap`. | This is often caused by a local capability in an argument of method h | leaking as part of its result. --- Error: tests/neg-custom-args/captures/capt1.scala:32:30 ------------------------------------------------------------- -32 | val z2 = h[() -> Cap](() => x) // error // error +-- Error: tests/neg-custom-args/captures/capt1.scala:34:30 ------------------------------------------------------------- +34 | val z2 = h[() -> Cap](() => x) // error // error | ^ | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} | of an enclosing function literal with expected type () -> box C^ --- Error: tests/neg-custom-args/captures/capt1.scala:34:12 ------------------------------------------------------------- -34 | val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // error +-- Error: tests/neg-custom-args/captures/capt1.scala:36:12 ------------------------------------------------------------- +36 | val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^ | Sealed type variable X cannot be instantiated to box () ->{x} Cap since | the part Cap of that type captures the root capability `cap`. diff --git a/tests/neg-custom-args/captures/capt1.scala b/tests/neg-custom-args/captures/capt1.scala index 48c4d889bf8d..cad0bad4ba56 100644 --- a/tests/neg-custom-args/captures/capt1.scala +++ b/tests/neg-custom-args/captures/capt1.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import annotation.retains class C def f(x: C @retains(caps.cap), y: C): () -> C = diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.check b/tests/neg-custom-args/captures/effect-swaps-explicit.check index 8c4d1f315fd8..47559ab97568 100644 --- a/tests/neg-custom-args/captures/effect-swaps-explicit.check +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.check @@ -1,29 +1,29 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:62:8 ------------------------- -61 | Result: -62 | Future: // error, type mismatch +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:64:8 ------------------------- +63 | Result: +64 | Future: // error, type mismatch | ^ | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] | Required: Result[Future[T], Nothing] -63 | fr.await.ok +65 | fr.await.ok |-------------------------------------------------------------------------------------------------------------------- |Inline stack trace |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from effect-swaps-explicit.scala:39 -39 | boundary(Ok(body)) + |This location contains code that was inlined from effect-swaps-explicit.scala:41 +41 | boundary(Ok(body)) | ^^^^^^^^ -------------------------------------------------------------------------------------------------------------------- | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: 
tests/neg-custom-args/captures/effect-swaps-explicit.scala:72:10 ------------------------ -72 | Future: fut ?=> // error: type mismatch +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:74:10 ------------------------ +74 | Future: fut ?=> // error: type mismatch | ^ | Found: Future[box T^?]^{fr, lbl} | Required: Future[box T^?]^? -73 | fr.await.ok +75 | fr.await.ok | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:66:15 --------------------------------------------- -66 | Result.make: //lbl ?=> // error, escaping label from Result +-- Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:68:15 --------------------------------------------- +68 | Result.make: //lbl ?=> // error, escaping label from Result | ^^^^^^^^^^^ |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.scala b/tests/neg-custom-args/captures/effect-swaps-explicit.scala index 052beaab01b2..7474e1711b34 100644 --- a/tests/neg-custom-args/captures/effect-swaps-explicit.scala +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) object boundary: final class Label[-T] // extends caps.Capability diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index 0d9cbed164e3..2859f4c5e826 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -6,7 +6,7 @@ class File: class Service: var file: File^ = uninitialized // error - def log = file.write("log") + def log = file.write("log") // error, was OK under sealed def withFile[T](op: (l: caps.Capability) ?-> (f: File^{l}) => T): T = op(using caps.cap)(new File) diff --git a/tests/neg-custom-args/captures/i15749.scala b/tests/neg-custom-args/captures/i15749.scala new file mode 100644 index 000000000000..c5b59042085a --- /dev/null +++ b/tests/neg-custom-args/captures/i15749.scala @@ -0,0 +1,15 @@ +class Unit +object unit extends Unit + +type Top = Any^ + +type LazyVal[T] = Unit => T + +class Foo[T](val x: T) + +// Foo[□ Unit => T] +type BoxedLazyVal[T] = Foo[LazyVal[T]] + +def force[A](v: BoxedLazyVal[A]): A = + // Γ ⊢ v.x : □ {cap} Unit -> A + v.x(unit) // error: (unbox v.x)(unit), was ok under the sealed policy \ No newline at end of file diff --git a/tests/neg-custom-args/captures/i15772.check b/tests/neg-custom-args/captures/i15772.check index 0f8f0bf6eac5..58582423b101 100644 --- a/tests/neg-custom-args/captures/i15772.check +++ b/tests/neg-custom-args/captures/i15772.check @@ -25,11 +25,11 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:34 --------------------------------------- 33 | val boxed2 : Observe[C]^ = box2(c) // error | ^ - | Found: box C^ - | Required: box C{val arg: C^?}^? + | Found: C^ + | Required: box C{val arg: C^?}^ | - | Note that the universal capability `cap` - | cannot be included in capture set ? 
+ | Note that C^ cannot be box-converted to box C{val arg: C^?}^ + | since at least one of their capture sets contains the root capability `cap` | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:44:2 ---------------------------------------- diff --git a/tests/neg-custom-args/captures/i15922.scala b/tests/neg-custom-args/captures/i15922.scala index 974870cd769c..89bf91493fcd 100644 --- a/tests/neg-custom-args/captures/i15922.scala +++ b/tests/neg-custom-args/captures/i15922.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to force sealed encapsulation checking) trait Cap { def use(): Int } type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) diff --git a/tests/neg-custom-args/captures/i15923-cases.scala b/tests/neg-custom-args/captures/i15923-cases.scala new file mode 100644 index 000000000000..83cfa554e8b9 --- /dev/null +++ b/tests/neg-custom-args/captures/i15923-cases.scala @@ -0,0 +1,7 @@ +trait Cap { def use(): Int } +type Id[X] = [T] -> (op: X => T) -> T +def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) + +def foo(x: Id[Cap^]) = { + x(_.use()) // error, was OK under sealed policy +} diff --git a/tests/neg-custom-args/captures/i16114.scala b/tests/neg-custom-args/captures/i16114.scala index d363bb665dc3..ec04fe9c9827 100644 --- a/tests/neg-custom-args/captures/i16114.scala +++ b/tests/neg-custom-args/captures/i16114.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) trait Cap { def use(): Int; def close(): Unit } def mkCap(): Cap^ = ??? diff --git a/tests/neg-custom-args/captures/i19330-alt2.scala b/tests/neg-custom-args/captures/i19330-alt2.scala index b49dce4b71ef..86634b45dbe3 100644 --- a/tests/neg-custom-args/captures/i19330-alt2.scala +++ b/tests/neg-custom-args/captures/i19330-alt2.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.captureChecking trait Logger diff --git a/tests/neg-custom-args/captures/i19330.scala b/tests/neg-custom-args/captures/i19330.scala index 8acb0dd8f66b..5fbdc00db311 100644 --- a/tests/neg-custom-args/captures/i19330.scala +++ b/tests/neg-custom-args/captures/i19330.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to force sealed encapsulation checking) import language.experimental.captureChecking trait Logger diff --git a/tests/neg-custom-args/captures/lazylists-exceptions.check b/tests/neg-custom-args/captures/lazylists-exceptions.check index 3095c1f2f4f9..4a8738118609 100644 --- a/tests/neg-custom-args/captures/lazylists-exceptions.check +++ b/tests/neg-custom-args/captures/lazylists-exceptions.check @@ -1,9 +1,8 @@ -- Error: tests/neg-custom-args/captures/lazylists-exceptions.scala:36:2 ----------------------------------------------- 36 | try // error | ^ - | result of `try` cannot have type LazyList[Int]^ since - | that type captures the root capability `cap`. - | This is often caused by a locally generated exception capability leaking as part of its result. + | The expression's type LazyList[Int]^ is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. 
37 | tabulate(10) { i => 38 | if i > 9 then throw Ex1() 39 | i * i diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index 479a231a0404..2dae3ec3bbc6 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -1,12 +1,12 @@ --- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------ -17 | val _ = Ref[String => String]((x: String) => x) // error +-- Error: tests/neg-custom-args/captures/levels.scala:19:13 ------------------------------------------------------------ +19 | val _ = Ref[String => String]((x: String) => x) // error | ^^^^^^^^^^^^^^^^^^^^^ | Sealed type variable T cannot be instantiated to box String => String since | that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Ref | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/levels.scala:22:11 --------------------------------------- -22 | r.setV(g) // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/levels.scala:24:11 --------------------------------------- +24 | r.setV(g) // error | ^ | Found: box (x: String) ->{cap3} String | Required: box (x$0: String) ->? String diff --git a/tests/neg-custom-args/captures/levels.scala b/tests/neg-custom-args/captures/levels.scala index b28e87f03ef7..4709fd80d9b8 100644 --- a/tests/neg-custom-args/captures/levels.scala +++ b/tests/neg-custom-args/captures/levels.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) class CC def test1(cap1: CC^) = diff --git a/tests/neg-custom-args/captures/outer-var.check b/tests/neg-custom-args/captures/outer-var.check index d57d615cda64..ee32c3ce03f2 100644 --- a/tests/neg-custom-args/captures/outer-var.check +++ b/tests/neg-custom-args/captures/outer-var.check @@ -1,8 +1,8 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:11:8 ------------------------------------- 11 | x = q // error | ^ - | Found: (q : Proc) - | Required: () ->{p, q²} Unit + | Found: box () ->{q} Unit + | Required: box () ->{p, q²} Unit | | where: q is a parameter in method inner | q² is a parameter in method test @@ -12,14 +12,17 @@ 12 | x = (q: Proc) // error | ^^^^^^^ | Found: Proc - | Required: () ->{p, q} Unit + | Required: box () ->{p, q} Unit + | + | Note that () => Unit cannot be box-converted to box () ->{p, q} Unit + | since at least one of their capture sets contains the root capability `cap` | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:13:9 ------------------------------------- 13 | y = (q: Proc) // error | ^^^^^^^ | Found: Proc - | Required: () ->{p} Unit + | Required: box () ->{p} Unit | | Note that the universal capability `cap` | cannot be included in capture set {p} of variable y @@ -28,17 +31,20 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:14:8 ------------------------------------- 14 | y = q // error | ^ - | Found: (q : Proc) - | Required: () ->{p} Unit + | Found: box () ->{q} Unit + | Required: box () ->{p} Unit | | Note that reference (q : Proc), defined in method inner | cannot be included in outer capture set {p} of variable y | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/outer-var.scala:16:53 
--------------------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:16:65 ------------------------------------ 16 | var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable A cannot be instantiated to box () => Unit since - | that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of method apply - | leaking as part of its result. + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: scala.collection.mutable.ListBuffer[box () => Unit] + | Required: box scala.collection.mutable.ListBuffer[box () ->? Unit]^? + | + | Note that the universal capability `cap` + | cannot be included in capture set ? of variable finalizeActions + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index 45c1776d8c43..f20dbdf311ad 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -1,12 +1,12 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:21:11 -------------------------------------- -21 | cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:23:11 -------------------------------------- +23 | cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} | ^^^^^^^^^^^^^^^^^^^^^^^ | Found: List[box () ->{f} Unit] | Required: List[box () ->{xs*} Unit] | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:32:7 --------------------------------------- -32 | (() => f.write()) :: Nil // error since {f*} !<: {xs*} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:34:7 --------------------------------------- +34 | (() => f.write()) :: Nil // error since {f*} !<: {xs*} | ^^^^^^^^^^^^^^^^^^^^^^^ | Found: List[box () ->{f} Unit] | Required: box List[box () ->{xs*} Unit]^? @@ -15,34 +15,34 @@ | cannot be included in outer capture set {xs*} of value cur | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:35:6 ------------------------------------------------------------ -35 | var cur: List[Proc] = xs // error: Illegal type for var +-- Error: tests/neg-custom-args/captures/reaches.scala:37:6 ------------------------------------------------------------ +37 | var cur: List[Proc] = xs // error: Illegal type for var | ^ | Mutable variable cur cannot have type List[box () => Unit] since | the part box () => Unit of that type captures the root capability `cap`. --- Error: tests/neg-custom-args/captures/reaches.scala:42:15 ----------------------------------------------------------- -42 | val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref +-- Error: tests/neg-custom-args/captures/reaches.scala:44:15 ----------------------------------------------------------- +44 | val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref | ^^^^^^^^^^^^^^^ | Sealed type variable T cannot be instantiated to List[box () => Unit] since | the part box () => Unit of that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Ref | leaking as part of its result. 
--- Error: tests/neg-custom-args/captures/reaches.scala:52:31 ----------------------------------------------------------- -52 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error +-- Error: tests/neg-custom-args/captures/reaches.scala:54:31 ----------------------------------------------------------- +54 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error | ^^^^^^^^^^^^^^^^^^^^ | Sealed type variable A cannot be instantiated to box () => Unit since | that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Id | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:61:27 -------------------------------------- -61 | val f1: File^{id*} = id(f) // error, since now id(f): File^ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:63:27 -------------------------------------- +63 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ | Found: File^{id, f} | Required: File^{id*} | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:78:5 ------------------------------------------------------------ -78 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) +-- Error: tests/neg-custom-args/captures/reaches.scala:80:5 ------------------------------------------------------------ +80 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) | ^^^^^^ | Reach capability cap and universal capability cap cannot both | appear in the type [B](f: ((box A ->{ps*} A, box A ->{ps*} A)) => B): List[B] of this expression diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index 6a5ffd51c2c6..eadb76c69e5b 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) class File: def write(): Unit = ??? diff --git a/tests/neg-custom-args/captures/real-try.check b/tests/neg-custom-args/captures/real-try.check index 50dcc16f5f54..7f8ab50bc222 100644 --- a/tests/neg-custom-args/captures/real-try.check +++ b/tests/neg-custom-args/captures/real-try.check @@ -1,46 +1,46 @@ --- [E190] Potential Issue Warning: tests/neg-custom-args/captures/real-try.scala:36:4 ---------------------------------- -36 | b.x +-- [E190] Potential Issue Warning: tests/neg-custom-args/captures/real-try.scala:38:4 ---------------------------------- +38 | b.x | ^^^ | Discarded non-Unit value of type () -> Unit. You may want to use `()`. | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/real-try.scala:12:2 ----------------------------------------------------------- -12 | try // error +-- Error: tests/neg-custom-args/captures/real-try.scala:14:2 ----------------------------------------------------------- +14 | try // error | ^ | result of `try` cannot have type () => Unit since | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -13 | () => foo(1) -14 | catch -15 | case _: Ex1 => ??? -16 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:18:10 ---------------------------------------------------------- -18 | val x = try // error +15 | () => foo(1) +16 | catch +17 | case _: Ex1 => ??? 
+18 | case _: Ex2 => ??? +-- Error: tests/neg-custom-args/captures/real-try.scala:20:10 ---------------------------------------------------------- +20 | val x = try // error | ^ | result of `try` cannot have type () => Unit since | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -19 | () => foo(1) -20 | catch -21 | case _: Ex1 => ??? -22 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:24:10 ---------------------------------------------------------- -24 | val y = try // error +21 | () => foo(1) +22 | catch +23 | case _: Ex1 => ??? +24 | case _: Ex2 => ??? +-- Error: tests/neg-custom-args/captures/real-try.scala:26:10 ---------------------------------------------------------- +26 | val y = try // error | ^ | result of `try` cannot have type () => Cell[Unit]^? since | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -25 | () => Cell(foo(1)) -26 | catch -27 | case _: Ex1 => ??? -28 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:30:10 ---------------------------------------------------------- -30 | val b = try // error +27 | () => Cell(foo(1)) +28 | catch +29 | case _: Ex1 => ??? +30 | case _: Ex2 => ??? +-- Error: tests/neg-custom-args/captures/real-try.scala:32:10 ---------------------------------------------------------- +32 | val b = try // error | ^ | result of `try` cannot have type Cell[box () => Unit]^? since | the part box () => Unit of that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -31 | Cell(() => foo(1)) -32 | catch -33 | case _: Ex1 => ??? -34 | case _: Ex2 => ??? +33 | Cell(() => foo(1)) +34 | catch +35 | case _: Ex1 => ??? +36 | case _: Ex2 => ??? diff --git a/tests/neg-custom-args/captures/real-try.scala b/tests/neg-custom-args/captures/real-try.scala index 23961e884ea3..51f1a0fdea5a 100644 --- a/tests/neg-custom-args/captures/real-try.scala +++ b/tests/neg-custom-args/captures/real-try.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.saferExceptions class Ex1 extends Exception("Ex1") diff --git a/tests/neg-custom-args/captures/try.check b/tests/neg-custom-args/captures/try.check index 3b96927de738..77a5fc06e05a 100644 --- a/tests/neg-custom-args/captures/try.check +++ b/tests/neg-custom-args/captures/try.check @@ -1,10 +1,12 @@ --- Error: tests/neg-custom-args/captures/try.scala:23:16 --------------------------------------------------------------- -23 | val a = handle[Exception, CanThrow[Exception]] { // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable R cannot be instantiated to box CT[Exception]^ since - | that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of method handle - | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/try.scala:25:3 ---------------------------------------------------------------- +23 | val a = handle[Exception, CanThrow[Exception]] { +24 | (x: CanThrow[Exception]) => x +25 | }{ // error (but could be better) + | ^ + | The expression's type box CT[Exception]^ is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. +26 | (ex: Exception) => ??? 
+27 | } -- Error: tests/neg-custom-args/captures/try.scala:30:65 --------------------------------------------------------------- 30 | (x: CanThrow[Exception]) => () => raise(new Exception)(using x) // error | ^ diff --git a/tests/neg-custom-args/captures/try.scala b/tests/neg-custom-args/captures/try.scala index 3d25dff4cd2c..45a1b346a512 100644 --- a/tests/neg-custom-args/captures/try.scala +++ b/tests/neg-custom-args/captures/try.scala @@ -20,9 +20,9 @@ def handle[E <: Exception, R <: Top](op: CT[E]^ => R)(handler: E => R): R = catch case ex: E => handler(ex) def test = - val a = handle[Exception, CanThrow[Exception]] { // error + val a = handle[Exception, CanThrow[Exception]] { (x: CanThrow[Exception]) => x - }{ + }{ // error (but could be better) (ex: Exception) => ??? } diff --git a/tests/neg/unsound-reach-2.scala b/tests/neg-custom-args/captures/unsound-reach-2.scala similarity index 89% rename from tests/neg/unsound-reach-2.scala rename to tests/neg-custom-args/captures/unsound-reach-2.scala index 083cec6ee5b2..384af31ee1fc 100644 --- a/tests/neg/unsound-reach-2.scala +++ b/tests/neg-custom-args/captures/unsound-reach-2.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.captureChecking trait Consumer[-T]: def apply(x: T): Unit diff --git a/tests/neg/unsound-reach-3.scala b/tests/neg-custom-args/captures/unsound-reach-3.scala similarity index 89% rename from tests/neg/unsound-reach-3.scala rename to tests/neg-custom-args/captures/unsound-reach-3.scala index 71c27fe5007d..985beb7ae55d 100644 --- a/tests/neg/unsound-reach-3.scala +++ b/tests/neg-custom-args/captures/unsound-reach-3.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.captureChecking trait File: def close(): Unit diff --git a/tests/neg/unsound-reach-4.check b/tests/neg-custom-args/captures/unsound-reach-4.check similarity index 55% rename from tests/neg/unsound-reach-4.check rename to tests/neg-custom-args/captures/unsound-reach-4.check index 47256baf408a..9abf86c772d5 100644 --- a/tests/neg/unsound-reach-4.check +++ b/tests/neg-custom-args/captures/unsound-reach-4.check @@ -1,5 +1,5 @@ --- Error: tests/neg/unsound-reach-4.scala:20:19 ------------------------------------------------------------------------ -20 | escaped = boom.use(f) // error +-- Error: tests/neg-custom-args/captures/unsound-reach-4.scala:22:19 --------------------------------------------------- +22 | escaped = boom.use(f) // error | ^^^^^^^^ | Reach capability backdoor* and universal capability cap cannot both | appear in the type (x: F): box File^{backdoor*} of this expression diff --git a/tests/neg/unsound-reach-4.scala b/tests/neg-custom-args/captures/unsound-reach-4.scala similarity index 85% rename from tests/neg/unsound-reach-4.scala rename to tests/neg-custom-args/captures/unsound-reach-4.scala index fa395fa117ca..14050b4afff2 100644 --- a/tests/neg/unsound-reach-4.scala +++ b/tests/neg-custom-args/captures/unsound-reach-4.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.captureChecking trait File: def close(): Unit diff --git a/tests/neg-custom-args/captures/unsound-reach.check b/tests/neg-custom-args/captures/unsound-reach.check new file mode 100644 index 000000000000..22b00b74deb1 --- /dev/null +++ b/tests/neg-custom-args/captures/unsound-reach.check @@ -0,0 +1,12 @@ +-- Error: 
tests/neg-custom-args/captures/unsound-reach.scala:18:13 ----------------------------------------------------- +18 | boom.use(f): (f1: File^{backdoor*}) => // error + | ^^^^^^^^ + | Reach capability backdoor* and universal capability cap cannot both + | appear in the type (x: File^)(op: box File^{backdoor*} => Unit): Unit of this expression +-- [E164] Declaration Error: tests/neg-custom-args/captures/unsound-reach.scala:10:8 ----------------------------------- +10 | def use(x: File^)(op: File^ => Unit): Unit = op(x) // error, was OK using sealed checking + | ^ + | error overriding method use in trait Foo of type (x: File^)(op: box File^ => Unit): Unit; + | method use of type (x: File^)(op: File^ => Unit): Unit has incompatible type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/unsound-reach.scala b/tests/neg-custom-args/captures/unsound-reach.scala similarity index 83% rename from tests/neg/unsound-reach.scala rename to tests/neg-custom-args/captures/unsound-reach.scala index 48a74f86d311..c3c31a7f32ff 100644 --- a/tests/neg/unsound-reach.scala +++ b/tests/neg-custom-args/captures/unsound-reach.scala @@ -7,7 +7,7 @@ def withFile[R](path: String)(op: File^ => R): R = ??? trait Foo[+X]: def use(x: File^)(op: X => Unit): Unit class Bar extends Foo[File^]: - def use(x: File^)(op: File^ => Unit): Unit = op(x) + def use(x: File^)(op: File^ => Unit): Unit = op(x) // error, was OK using sealed checking def bad(): Unit = val backdoor: Foo[File^] = new Bar diff --git a/tests/neg-custom-args/captures/vars-simple.check b/tests/neg-custom-args/captures/vars-simple.check index 2bc014e9a4e7..2ef301b6ec1f 100644 --- a/tests/neg-custom-args/captures/vars-simple.check +++ b/tests/neg-custom-args/captures/vars-simple.check @@ -2,14 +2,17 @@ 15 | a = (g: String => String) // error | ^^^^^^^^^^^^^^^^^^^ | Found: String => String - | Required: String ->{cap1, cap2} String + | Required: box String ->{cap1, cap2} String + | + | Note that String => String cannot be box-converted to box String ->{cap1, cap2} String + | since at least one of their capture sets contains the root capability `cap` | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:16:8 ----------------------------------- 16 | a = g // error | ^ - | Found: (x: String) ->{cap3} String - | Required: (x: String) ->{cap1, cap2} String + | Found: box (x: String) ->{cap3} String + | Required: box (x: String) ->{cap1, cap2} String | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:17:12 ---------------------------------- diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index e2d817f2d8bd..e4b1e71a2000 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -1,12 +1,12 @@ --- Error: tests/neg-custom-args/captures/vars.scala:22:14 -------------------------------------------------------------- -22 | a = x => g(x) // error +-- Error: tests/neg-custom-args/captures/vars.scala:24:14 -------------------------------------------------------------- +24 | a = x => g(x) // error | ^^^^ | reference (cap3 : Cap) is not included in the allowed capture set {cap1} of variable a | | Note that reference (cap3 : Cap), defined in method scope | cannot be included in outer capture set {cap1} of variable a --- [E007] Type Mismatch Error: 
tests/neg-custom-args/captures/vars.scala:23:8 ------------------------------------------ -23 | a = g // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:25:8 ------------------------------------------ +25 | a = g // error | ^ | Found: (x: String) ->{cap3} String | Required: (x$0: String) ->{cap1} String @@ -15,14 +15,14 @@ | cannot be included in outer capture set {cap1} of variable a | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:25:12 ----------------------------------------- -25 | b = List(g) // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:27:12 ----------------------------------------- +27 | b = List(g) // error | ^^^^^^^ | Found: List[box (x$0: String) ->{cap3} String] | Required: List[box String ->{cap1, cap2} String] | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/vars.scala:34:2 --------------------------------------------------------------- -34 | local { cap3 => // error +-- Error: tests/neg-custom-args/captures/vars.scala:36:2 --------------------------------------------------------------- +36 | local { cap3 => // error | ^^^^^ | local reference cap3 leaks into outer capture set of type parameter T of method local diff --git a/tests/neg-custom-args/captures/vars.scala b/tests/neg-custom-args/captures/vars.scala index ab5a2f43acc7..5eb1e3fedda9 100644 --- a/tests/neg-custom-args/captures/vars.scala +++ b/tests/neg-custom-args/captures/vars.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) class CC type Cap = CC^ diff --git a/tests/neg/unsound-reach.check b/tests/neg/unsound-reach.check deleted file mode 100644 index 8cabbe1571a0..000000000000 --- a/tests/neg/unsound-reach.check +++ /dev/null @@ -1,5 +0,0 @@ --- Error: tests/neg/unsound-reach.scala:18:13 -------------------------------------------------------------------------- -18 | boom.use(f): (f1: File^{backdoor*}) => // error - | ^^^^^^^^ - | Reach capability backdoor* and universal capability cap cannot both - | appear in the type (x: File^)(op: box File^{backdoor*} => Unit): Unit of this expression diff --git a/tests/pos-custom-args/captures/casts.scala b/tests/pos-custom-args/captures/casts.scala new file mode 100644 index 000000000000..572b58d008f6 --- /dev/null +++ b/tests/pos-custom-args/captures/casts.scala @@ -0,0 +1,4 @@ +import language.experimental.captureChecking +def Test = + val x: Any = ??? + val y = x.asInstanceOf[Int => Int] diff --git a/tests/pos-custom-args/captures/filevar-expanded.scala b/tests/pos-custom-args/captures/filevar-expanded.scala index 13051994f346..a883471e8d2e 100644 --- a/tests/pos-custom-args/captures/filevar-expanded.scala +++ b/tests/pos-custom-args/captures/filevar-expanded.scala @@ -32,5 +32,6 @@ object test2: def test(io3: IO^) = withFile(io3): f => val o = Service(io3) - o.file = f + o.file = f // this is a bit dubious. It's legal since we treat class refinements + // as capture set variables that can be made to include refs coming from outside. 
o.log diff --git a/tests/pos-custom-args/captures/i15749.scala b/tests/pos-custom-args/captures/i15749.scala index 0a552ae1a3c5..58274c7cc817 100644 --- a/tests/pos-custom-args/captures/i15749.scala +++ b/tests/pos-custom-args/captures/i15749.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) class Unit object unit extends Unit @@ -12,4 +14,4 @@ type BoxedLazyVal[T] = Foo[LazyVal[T]] def force[A](v: BoxedLazyVal[A]): A = // Γ ⊢ v.x : □ {cap} Unit -> A - v.x(unit) // was error: (unbox v.x)(unit), where (unbox v.x) should be untypable, now ok \ No newline at end of file + v.x(unit) // should be error: (unbox v.x)(unit), where (unbox v.x) should be untypable, now ok \ No newline at end of file diff --git a/tests/pos-custom-args/captures/i15923-cases.scala b/tests/pos-custom-args/captures/i15923-cases.scala index 7c5635f7b3dd..4b5a36f208ec 100644 --- a/tests/pos-custom-args/captures/i15923-cases.scala +++ b/tests/pos-custom-args/captures/i15923-cases.scala @@ -2,10 +2,6 @@ trait Cap { def use(): Int } type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) -def foo(x: Id[Cap^]) = { - x(_.use()) // was error, now OK -} - def bar(io: Cap^, x: Id[Cap^{io}]) = { x(_.use()) } diff --git a/tests/pos-custom-args/captures/i15925.scala b/tests/pos-custom-args/captures/i15925.scala index 63b6962ff9f8..1c448c7377c2 100644 --- a/tests/pos-custom-args/captures/i15925.scala +++ b/tests/pos-custom-args/captures/i15925.scala @@ -1,4 +1,5 @@ import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures class Unit object u extends Unit @@ -6,8 +7,8 @@ object u extends Unit type Foo[X] = [T] -> (op: X => T) -> T type Lazy[X] = Unit => X -def force[X](fx: Foo[Lazy[X]]): X = +def force[X](fx: Foo[Lazy[X] @uncheckedCaptures]): X = fx[X](f => f(u)) -def force2[X](fx: Foo[Unit => X]): X = +def force2[X](fx: Foo[(Unit => X) @uncheckedCaptures]): X = fx[X](f => f(u)) diff --git a/tests/pos-custom-args/captures/levels.scala b/tests/pos-custom-args/captures/levels.scala new file mode 100644 index 000000000000..cabd537442a5 --- /dev/null +++ b/tests/pos-custom-args/captures/levels.scala @@ -0,0 +1,23 @@ +class CC + +def test1(cap1: CC^) = + + class Ref[T](init: T): + private var v: T = init + def setV(x: T): Unit = v = x + def getV: T = v + +def test2(cap1: CC^) = + + class Ref[T](init: T): + private var v: T = init + def setV(x: T): Unit = v = x + def getV: T = v + + val _ = Ref[String => String]((x: String) => x) // ok + val r = Ref((x: String) => x) + + def scope(cap3: CC^) = + def g(x: String): String = if cap3 == cap3 then "" else "a" + r.setV(g) // error + () diff --git a/tests/pos-custom-args/captures/unsafe-captures.scala b/tests/pos-custom-args/captures/unsafe-captures.scala new file mode 100644 index 000000000000..5e0144331344 --- /dev/null +++ b/tests/pos-custom-args/captures/unsafe-captures.scala @@ -0,0 +1,8 @@ +import annotation.unchecked.uncheckedCaptures +class LL[+A] private (private var lazyState: (() => LL.State[A]^) @uncheckedCaptures): + private val res = lazyState() // without unchecked captures we get a van't unbox cap error + + +object LL: + + private trait State[+A] diff --git a/tests/pos-custom-args/captures/untracked-captures.scala b/tests/pos-custom-args/captures/untracked-captures.scala new file mode 100644 index 000000000000..7a090a5dd24f --- /dev/null +++ b/tests/pos-custom-args/captures/untracked-captures.scala @@ -0,0 +1,34 @@ +import caps.untrackedCaptures +class LL[+A] 
private (@untrackedCaptures lazyState: () => LL.State[A]^): + private val res = lazyState() + + +object LL: + + private trait State[+A] + private object State: + object Empty extends State[Nothing] + + private def newLL[A](state: () => State[A]^): LL[A]^{state} = ??? + + private def sCons[A](hd: A, tl: LL[A]^): State[A]^{tl} = ??? + + def filterImpl[A](ll: LL[A]^, p: A => Boolean): LL[A]^{ll, p} = + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef: LL[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + + val cl = () => + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // Without untracked captures a type ascription would be needed here + // because the compiler tries to keep track of lazyState in refinements + // of LL and gets confused (c.f Setup.addCaptureRefinements) + + while !found do + found = p(elem) + rest = rest + restRef = rest + val res = if found then sCons(elem, filterImpl(rest, p)) else State.Empty + ??? : State[A]^{ll, p} + val nll = newLL(cl) + nll diff --git a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala index 20a6a33d3e02..5443758afa72 100644 --- a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala +++ b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala @@ -552,7 +552,7 @@ object CollectionStrawMan5 { } def flatMap[B](f: A => IterableOnce[B]^): Iterator[B]^{this, f} = new Iterator[B] { - private var myCurrent: Iterator[B]^{this} = Iterator.empty + private var myCurrent: Iterator[B]^{this, f} = Iterator.empty private def current = { while (!myCurrent.hasNext && self.hasNext) myCurrent = f(self.next()).iterator From 1008a0dce8be020d5ed0687744d31b86cc46bc02 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 10 Jun 2024 17:58:09 +0200 Subject: [PATCH 295/827] More robust scheme to re-check definitions once. The previous scheme relied on subtle and unstated assumptions between symbol updates and re-checking. If they were violated some definitions could not be rechecked at all. The new scheme is more robust. We always re-check except when the checker implementation returns true for `skipRecheck`. And that test is based on an explicitly maintained set of completed symbols. --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 8 +++++++- .../src/dotty/tools/dotc/transform/Recheck.scala | 14 ++++++++------ 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 6e4a10efe607..bf25d448f402 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1185,6 +1185,11 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => traverseChildren(t) + private val completed = new mutable.HashSet[Symbol] + + override def skipRecheck(sym: Symbol)(using Context): Boolean = + completed.contains(sym) + /** Check a ValDef or DefDef as an action performed in a completer. Since * these checks can appear out of order, we need to firsty create the correct * environment for checking the definition. 
@@ -1205,7 +1210,8 @@ class CheckCaptures extends Recheck, SymTransformer: case None => Env(sym, EnvKind.Regular, localSet, restoreEnvFor(sym.owner)) curEnv = restoreEnvFor(sym.owner) capt.println(i"Complete $sym in ${curEnv.outersIterator.toList.map(_.owner)}") - recheckDef(tree, sym) + try recheckDef(tree, sym) + finally completed += sym finally curEnv = saved diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index f025c9e9369f..3aec18dc2bd0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -454,12 +454,16 @@ abstract class Recheck extends Phase, SymTransformer: case _ => traverse(stats) + /** A hook to prevent rechecking a ValDef or DefDef. + * Typycally used when definitions are completed on first use. + */ + def skipRecheck(sym: Symbol)(using Context) = false + def recheckDef(tree: ValOrDefDef, sym: Symbol)(using Context): Type = - inContext(ctx.localContext(tree, sym)) { + inContext(ctx.localContext(tree, sym)): tree match case tree: ValDef => recheckValDef(tree, sym) case tree: DefDef => recheckDefDef(tree, sym) - } /** Recheck tree without adapting it, returning its new type. * @param tree the original tree @@ -476,10 +480,8 @@ abstract class Recheck extends Phase, SymTransformer: case tree: ValOrDefDef => if tree.isEmpty then NoType else - if sym.isUpdatedAfter(preRecheckPhase) then - sym.ensureCompleted() // in this case the symbol's completer should recheck the right hand side - else - recheckDef(tree, sym) + sym.ensureCompleted() + if !skipRecheck(sym) then recheckDef(tree, sym) sym.termRef case tree: TypeDef => // TODO: Should we allow for completers as for ValDefs or DefDefs? From a30cf473751723a44e4119158ad6b67cf68d18ad Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 10 Jun 2024 18:14:36 +0200 Subject: [PATCH 296/827] Show unsuccessful subCapture tests in TypeMismatch explanations --- .../dotty/tools/dotc/core/TypeComparer.scala | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index c5b3611463b0..174bbaeca21d 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3753,6 +3753,11 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa private val b = new StringBuilder private var lastForwardGoal: String | Null = null + private def appendFailure(x: String) = + if lastForwardGoal != null then // last was deepest goal that failed + b.append(s" = $x") + lastForwardGoal = null + override def traceIndented[T](str: String)(op: => T): T = val str1 = str.replace('\n', ' ') if short && str1 == lastForwardGoal then @@ -3764,12 +3769,13 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa b.append("\n").append(" " * indent).append("==> ").append(str1) val res = op if short then - if res == false then - if lastForwardGoal != null then // last was deepest goal that failed - b.append(" = false") - lastForwardGoal = null - else - b.length = curLength // don't show successful subtraces + res match + case false => + appendFailure("false") + case res: CaptureSet.CompareResult if res != CaptureSet.CompareResult.OK => + appendFailure(show(res)) + case _ => + b.length = curLength // don't show successful subtraces else b.append("\n").append(" " * indent).append("<== 
").append(str1).append(" = ").append(show(res)) indent -= 2 From 20b8630a03823356895b6259518e4b131e42091e Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 10 Jun 2024 18:33:35 +0200 Subject: [PATCH 297/827] Enable existential capabilities Enabled from 3.5. There are still a number of open questions - Clarify type inference with existentials propagating into capture sets. Right now, no pos or run test exercises this. - Also map arguments of function to existentials (at least double flip ones). - Adapt reach capabilities and drop previous restrictions. --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 17 +- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 14 +- .../dotty/tools/dotc/cc/CheckCaptures.scala | 95 ++++++---- .../src/dotty/tools/dotc/cc/Existential.scala | 176 +++++++----------- compiler/src/dotty/tools/dotc/cc/Setup.scala | 1 + .../src/dotty/tools/dotc/core/NameKinds.scala | 2 +- .../src/dotty/tools/dotc/typer/Namer.scala | 2 +- .../captures/refine-reach-shallow.scala | 2 +- tests/neg/cc-ex-conformance.scala | 4 +- .../pos-custom-args/captures/capt-test.scala | 1 + 10 files changed, 161 insertions(+), 153 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 7a8ed7b3651a..49dbc9773229 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -15,6 +15,7 @@ import StdNames.nme import config.Feature import collection.mutable import CCState.* +import reporting.Message private val Captures: Key[CaptureSet] = Key() @@ -26,7 +27,8 @@ object ccConfig: */ inline val allowUnsoundMaps = false - val useExistentials = false + def useExistentials(using Context) = + Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.5`) /** If true, use `sealed` as encapsulation mechanism instead of the * previous global retriction that `cap` can't be boxed or unboxed. @@ -69,6 +71,11 @@ class CCState: */ var levelError: Option[CaptureSet.CompareResult.LevelError] = None + /** Warnings relating to upper approximations of capture sets with + * existentially bound variables. + */ + val approxWarnings: mutable.ListBuffer[Message] = mutable.ListBuffer() + private var curLevel: Level = outermostLevel private val symLevel: mutable.Map[Symbol, Int] = mutable.Map() @@ -356,6 +363,7 @@ extension (tp: Type) ok = false case _ => traverseChildren(t) + end CheckContraCaps object narrowCaps extends TypeMap: /** Has the variance been flipped at this point? 
*/ @@ -368,12 +376,19 @@ extension (tp: Type) t.dealias match case t1 @ CapturingType(p, cs) if cs.isUniversal && !isFlipped => t1.derivedCapturingType(apply(p), ref.reach.singletonCaptureSet) + case t @ FunctionOrMethod(args, res @ Existential(_, _)) + if args.forall(_.isAlwaysPure) => + // Also map existentials in results to reach capabilities if all + // preceding arguments are known to be always pure + apply(t.derivedFunctionOrMethod(args, Existential.toCap(res))) case _ => t match case t @ CapturingType(p, cs) => t.derivedCapturingType(apply(p), cs) // don't map capture set variables case t => mapOver(t) finally isFlipped = saved + end narrowCaps + ref match case ref: CaptureRef if ref.isTrackableRef => val checker = new CheckContraCaps diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 9b0afbf3567e..4069b9ffb014 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -562,7 +562,14 @@ object CaptureSet: universal else computingApprox = true - try computeApprox(origin).ensuring(_.isConst) + try + val approx = computeApprox(origin).ensuring(_.isConst) + if approx.elems.exists(Existential.isExistentialVar(_)) then + ccState.approxWarnings += + em"""Capture set variable $this gets upper-approximated + |to existential variable from $approx, using {cap} instead.""" + universal + else approx finally computingApprox = false /** The intersection of all upper approximations of dependent sets */ @@ -757,9 +764,8 @@ object CaptureSet: CompareResult.OK else source.tryInclude(bimap.backward(elem), this) - .showing(i"propagating new elem $elem backward from $this to $source = $result", capt) - .andAlso: - addNewElem(elem) + .showing(i"propagating new elem $elem backward from $this to $source = $result", captDebug) + .andAlso(addNewElem(elem)) /** For a BiTypeMap, supertypes of the mapped type also constrain * the source via the inverse type mapping and vice versa. 
That is, if diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index bf25d448f402..ec37a46ef5af 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -223,6 +223,9 @@ class CheckCaptures extends Recheck, SymTransformer: if tpt.isInstanceOf[InferredTypeTree] then interpolator().traverse(tpt.knownType) .showing(i"solved vars in ${tpt.knownType}", capt) + for msg <- ccState.approxWarnings do + report.warning(msg, tpt.srcPos) + ccState.approxWarnings.clear() /** Assert subcapturing `cs1 <: cs2` */ def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) = @@ -492,7 +495,7 @@ class CheckCaptures extends Recheck, SymTransformer: tp.derivedCapturingType(forceBox(parent), refs) mapArgUsing(forceBox) else - super.recheckApply(tree, pt) match + Existential.toCap(super.recheckApply(tree, pt)) match case appType @ CapturingType(appType1, refs) => tree.fun match case Select(qual, _) @@ -505,7 +508,7 @@ class CheckCaptures extends Recheck, SymTransformer: val callCaptures = tree.args.foldLeft(qual.tpe.captureSet): (cs, arg) => cs ++ arg.tpe.captureSet appType.derivedCapturingType(appType1, callCaptures) - .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) + .showing(i"narrow $tree: $appType, refs = $refs, qual-cs = ${qual.tpe.captureSet} = $result", capt) case _ => appType case appType => appType end recheckApply @@ -591,7 +594,7 @@ class CheckCaptures extends Recheck, SymTransformer: i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) - super.recheckTypeApply(tree, pt) + Existential.toCap(super.recheckTypeApply(tree, pt)) override def recheckBlock(tree: Block, pt: Type)(using Context): Type = inNestedLevel(super.recheckBlock(tree, pt)) @@ -624,7 +627,12 @@ class CheckCaptures extends Recheck, SymTransformer: // Example is the line `a = x` in neg-custom-args/captures/vars.scala. // For all other closures, early constraints are preferred since they // give more localized error messages. - checkConformsExpr(res, pt, expr) + val res1 = Existential.toCapDeeply(res) + val pt1 = Existential.toCapDeeply(pt) + // We need to open existentials here in order not to get vars mixed up in them + // We do the proper check with existentials when we are finished with the closure block. 
+ capt.println(i"pre-check closure $expr of type $res1 against $pt1") + checkConformsExpr(res1, pt1, expr) recheckDef(mdef, mdef.symbol) res finally @@ -1009,35 +1017,50 @@ class CheckCaptures extends Recheck, SymTransformer: */ def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, covariant: Boolean, alwaysConst: Boolean, boxErrors: BoxErrors)(using Context): Type = - /** Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation - * @param boxed if true we adapt to a boxed expected type - */ - def adaptShape(actualShape: Type, boxed: Boolean): (Type, CaptureSet) = actualShape match - case FunctionOrMethod(aargs, ares) => - val saved = curEnv - curEnv = Env( - curEnv.owner, EnvKind.NestedInOwner, - CaptureSet.Var(curEnv.owner, level = currentLevel), - if boxed then null else curEnv) - try - val (eargs, eres) = expected.dealias.stripCapturing match - case FunctionOrMethod(eargs, eres) => (eargs, eres) - case _ => (aargs.map(_ => WildcardType), WildcardType) - val aargs1 = aargs.zipWithConserve(eargs): - adaptBoxed(_, _, pos, !covariant, alwaysConst, boxErrors) - val ares1 = adaptBoxed(ares, eres, pos, covariant, alwaysConst, boxErrors) - val resTp = - if (aargs1 eq aargs) && (ares1 eq ares) then actualShape // optimize to avoid redundant matches - else actualShape.derivedFunctionOrMethod(aargs1, ares1) - (resTp, CaptureSet(curEnv.captured.elems)) - finally curEnv = saved - case _ => - (actualShape, CaptureSet()) + def recur(actual: Type, expected: Type, covariant: Boolean): Type = + + /** Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation + * @param boxed if true we adapt to a boxed expected type + */ + def adaptShape(actualShape: Type, boxed: Boolean): (Type, CaptureSet) = actualShape match + case FunctionOrMethod(aargs, ares) => + val saved = curEnv + curEnv = Env( + curEnv.owner, EnvKind.NestedInOwner, + CaptureSet.Var(curEnv.owner, level = currentLevel), + if boxed then null else curEnv) + try + val (eargs, eres) = expected.dealias.stripCapturing match + case FunctionOrMethod(eargs, eres) => (eargs, eres) + case _ => (aargs.map(_ => WildcardType), WildcardType) + val aargs1 = aargs.zipWithConserve(eargs): + recur(_, _, !covariant) + val ares1 = recur(ares, eres, covariant) + val resTp = + if (aargs1 eq aargs) && (ares1 eq ares) then actualShape // optimize to avoid redundant matches + else actualShape.derivedFunctionOrMethod(aargs1, ares1) + (resTp, CaptureSet(curEnv.captured.elems)) + finally curEnv = saved + case _ => + (actualShape, CaptureSet()) + end adaptShape - def adaptStr = i"adapting $actual ${if covariant then "~~>" else "<~~"} $expected" + def adaptStr = i"adapting $actual ${if covariant then "~~>" else "<~~"} $expected" + + actual match + case actual @ Existential(_, actualUnpacked) => + return Existential.derivedExistentialType(actual): + recur(actualUnpacked, expected, covariant) + case _ => + expected match + case expected @ Existential(_, expectedUnpacked) => + return recur(actual, expectedUnpacked, covariant) + case _: WildcardType => + return actual + case _ => + + trace(adaptStr, capt, show = true) { - if expected.isInstanceOf[WildcardType] then actual - else trace(adaptStr, recheckr, show = true): // Decompose the actual type into the inner shape type, the capture set and the box status val actualShape = if actual.isFromJavaObject then actual else actual.stripCapturing val actualIsBoxed = actual.isBoxedCapturing @@ -1099,6 +1122,10 @@ class CheckCaptures extends Recheck, 
SymTransformer: adaptedType(!actualIsBoxed) else adaptedType(actualIsBoxed) + } + end recur + + recur(actual, expected, covariant) end adaptBoxed /** If actual derives from caps.Capability, yet is not a capturing type itself, @@ -1139,7 +1166,7 @@ class CheckCaptures extends Recheck, SymTransformer: widened.withReachCaptures(actual), expected, pos, covariant = true, alwaysConst = false, boxErrors) if adapted eq widened then normalized - else adapted.showing(i"adapt boxed $actual vs $expected ===> $adapted", capt) + else adapted.showing(i"adapt boxed $actual vs $expected = $adapted", capt) end adapt /** Check overrides again, taking capture sets into account. @@ -1154,13 +1181,13 @@ class CheckCaptures extends Recheck, SymTransformer: * @param sym symbol of the field definition that is being checked */ override def checkSubType(actual: Type, expected: Type)(using Context): Boolean = - val expected1 = alignDependentFunction(addOuterRefs(/*Existential.strip*/(expected), actual), actual.stripCapturing) + val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) val actual1 = val saved = curEnv try curEnv = Env(clazz, EnvKind.NestedInOwner, capturedVars(clazz), outer0 = curEnv) val adapted = - adaptBoxed(/*Existential.strip*/(actual), expected1, srcPos, covariant = true, alwaysConst = true, null) + adaptBoxed(actual, expected1, srcPos, covariant = true, alwaysConst = true, null) actual match case _: MethodType => // We remove the capture set resulted from box adaptation for method types, diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala index 0dba1a62e7ed..0c269a484092 100644 --- a/compiler/src/dotty/tools/dotc/cc/Existential.scala +++ b/compiler/src/dotty/tools/dotc/cc/Existential.scala @@ -9,7 +9,7 @@ import StdNames.nme import ast.tpd.* import Decorators.* import typer.ErrorReporting.errorType -import NameKinds.exSkolemName +import NameKinds.ExistentialBinderName import reporting.Message /** @@ -195,22 +195,61 @@ object Existential: type Carrier = RefinedType - def openExpected(pt: Type)(using Context): Type = pt.dealias match + def unapply(tp: Carrier)(using Context): Option[(TermParamRef, Type)] = + tp.refinedInfo match + case mt: MethodType + if isExistentialMethod(mt) && defn.isNonRefinedFunction(tp.parent) => + Some(mt.paramRefs.head, mt.resultType) + case _ => None + + /** Create method type in the refinement of an existential type */ + private def exMethodType(mk: TermParamRef => Type)(using Context): MethodType = + val boundName = ExistentialBinderName.fresh() + MethodType(boundName :: Nil)( + mt => defn.Caps_Exists.typeRef :: Nil, + mt => mk(mt.paramRefs.head)) + + /** Create existential */ + def apply(mk: TermParamRef => Type)(using Context): Type = + exMethodType(mk).toFunctionType(alwaysDependent = true) + + /** Create existential if bound variable appears in result of `mk` */ + def wrap(mk: TermParamRef => Type)(using Context): Type = + val mt = exMethodType(mk) + if mt.isResultDependent then mt.toFunctionType() else mt.resType + + extension (tp: Carrier) + def derivedExistentialType(core: Type)(using Context): Type = tp match + case Existential(boundVar, unpacked) => + if core eq unpacked then tp + else apply(bv => core.substParam(boundVar, bv)) + case _ => + core + + /** Map top-level existentials to `cap`. Do the same for existentials + * in function results if all preceding arguments are known to be always pure. 
+ */ + def toCap(tp: Type)(using Context): Type = tp.dealias match case Existential(boundVar, unpacked) => - val tm = new IdempotentCaptRefMap: - val cvar = CaptureSet.Var(ctx.owner) - def apply(t: Type) = mapOver(t) match - case t @ CapturingType(parent, refs) if refs.elems.contains(boundVar) => - assert(refs.isConst && refs.elems.size == 1, i"malformed existential $t") - t.derivedCapturingType(parent, cvar) - case t => - t - openExpected(tm(unpacked)) - case _ => pt - - def toCap(tp: Type)(using Context) = tp.dealias match + val transformed = unpacked.substParam(boundVar, defn.captureRoot.termRef) + transformed match + case FunctionOrMethod(args, res @ Existential(_, _)) + if args.forall(_.isAlwaysPure) => + transformed.derivedFunctionOrMethod(args, toCap(res)) + case _ => + transformed + case _ => tp + + /** Map existentials at the top-level and in all nested result types to `cap` + */ + def toCapDeeply(tp: Type)(using Context): Type = tp.dealias match case Existential(boundVar, unpacked) => - unpacked.substParam(boundVar, defn.captureRoot.termRef) + toCapDeeply(unpacked.substParam(boundVar, defn.captureRoot.termRef)) + case tp1 @ FunctionOrMethod(args, res) => + val tp2 = tp1.derivedFunctionOrMethod(args, toCapDeeply(res)) + if tp2 ne tp1 then tp2 else tp + case tp1 @ CapturingType(parent, refs) => + tp1.derivedCapturingType(toCapDeeply(parent), refs) case _ => tp /** Replace all occurrences of `cap` in parts of this type by an existentially bound @@ -229,8 +268,9 @@ object Existential: needsWrap = true boundVar else - val varianceStr = if variance < 0 then "contra" else "in" - fail(em"cap appears in ${varianceStr}variant position in $tp") + if variance == 0 then + fail(em"cap appears in invariant position in $tp") + // we accept variance < 0, and leave the cap as it is t1 case t1 @ FunctionOrMethod(_, _) => // These have been mapped before @@ -251,11 +291,16 @@ object Existential: end Wrap if ccConfig.useExistentials then - val wrapped = apply(Wrap(_)(tp)) - if needsWrap then wrapped else tp + tp match + case Existential(_, _) => tp + case _ => + val wrapped = apply(Wrap(_)(tp)) + if needsWrap then wrapped else tp else tp end mapCap + /** Map `cap` to existential in the results of functions or methods. + */ def mapCapInResult(tp: Type, fail: Message => Unit)(using Context): Type = def mapCapInFinalResult(tp: Type): Type = tp match case tp: MethodOrPoly => @@ -263,104 +308,17 @@ object Existential: case _ => mapCap(tp, fail) tp match - case tp: MethodOrPoly => - mapCapInFinalResult(tp) - case defn.FunctionNOf(args, res, contextual) => - tp.derivedFunctionOrMethod(args, mapCap(res, fail)) + case tp: MethodOrPoly => mapCapInFinalResult(tp) case _ => tp - def strip(tp: Type)(using Context) = tp match - case Existential(_, tpunpacked) => tpunpacked - case _ => tp - - def skolemize(tp: Type)(using Context) = tp.widenDealias match // TODO needed? 
- case Existential(boundVar, unpacked) => - val skolem = tp match - case tp: CaptureRef if tp.isTracked => tp - case _ => newSkolemSym(boundVar.underlying).termRef - val tm = new IdempotentCaptRefMap: - var deep = false - private inline def deepApply(t: Type): Type = - val saved = deep - deep = true - try apply(t) finally deep = saved - def apply(t: Type) = - if t eq boundVar then - if deep then skolem.reach else skolem - else t match - case defn.FunctionOf(args, res, contextual) => - val res1 = deepApply(res) - if res1 ne res then defn.FunctionOf(args, res1, contextual) - else t - case defn.RefinedFunctionOf(mt) => - mt.derivedLambdaType(resType = deepApply(mt.resType)) - case _ => - mapOver(t) - tm(unpacked) - case _ => tp - end skolemize - - def newSkolemSym(tp: Type)(using Context): TermSymbol = // TODO needed? - newSymbol(ctx.owner.enclosingMethodOrClass, exSkolemName.fresh(), Synthetic, tp) -/* - def fromDepFun(arg: Tree)(using Context): Type = arg.tpe match - case RefinedType(parent, nme.apply, info: MethodType) if defn.isNonRefinedFunction(parent) => - info match - case info @ MethodType(_ :: Nil) - if info.paramInfos.head.derivesFrom(defn.Caps_Capability) => - apply(ref => info.resultType.substParams(info, ref :: Nil)) - case _ => - errorType(em"Malformed existential: dependent function must have a singgle parameter of type caps.Capability", arg.srcPos) - case _ => - errorType(em"Malformed existential: dependent function type expected", arg.srcPos) -*/ - private class PackMap(sym: Symbol, rt: RecType)(using Context) extends DeepTypeMap, IdempotentCaptRefMap: - def apply(tp: Type): Type = tp match - case ref: TermRef if ref.symbol == sym => TermRef(rt.recThis, defn.captureRoot) - case _ => mapOver(tp) - - /** Unpack current type from an existential `rt` so that all references bound by `rt` - * are recplaced by `ref`. - */ - private class OpenMap(rt: RecType, ref: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap: - def apply(tp: Type): Type = - if isExBound(tp, rt) then ref else mapOver(tp) - - /** Is `tp` a reference to the bound variable of `rt`? */ - private def isExBound(tp: Type, rt: Type)(using Context) = tp match - case tp @ TermRef(RecThis(rt1), _) => (rt1 eq rt) && tp.symbol == defn.captureRoot - case _ => false - - /** Open existential, replacing the bund variable by `ref` */ - def open(rt: RecType, ref: Type)(using Context): Type = OpenMap(rt, ref)(rt.parent) - - /** Create an existential type `ex c.` so that all references to `sym` in `tp` - * become references to the existentially bound variable `c`. - */ - def fromSymbol(tp: Type, sym: Symbol)(using Context): RecType = - RecType(PackMap(sym, _)(tp)) - + /** Is `mt` a method represnting an existential type when used in a refinement? */ def isExistentialMethod(mt: TermLambda)(using Context): Boolean = mt.paramInfos match case (info: TypeRef) :: rest => info.symbol == defn.Caps_Exists && rest.isEmpty case _ => false + /** Is `ref` this an existentially bound variable? 
*/ def isExistentialVar(ref: CaptureRef)(using Context) = ref match case ref: TermParamRef => isExistentialMethod(ref.binder) case _ => false - def unapply(tp: Carrier)(using Context): Option[(TermParamRef, Type)] = - tp.refinedInfo match - case mt: MethodType - if isExistentialMethod(mt) && defn.isNonRefinedFunction(tp.parent) => - Some(mt.paramRefs.head, mt.resultType) - case _ => None - - def apply(mk: TermParamRef => Type)(using Context): MethodType = - MethodType(defn.Caps_Exists.typeRef :: Nil): mt => - mk(mt.paramRefs.head) - - /** Create existential if bound variable appear in result */ - def wrap(mk: TermParamRef => Type)(using Context): Type = - val mt = apply(mk) - if mt.isResultDependent then mt.toFunctionType() else mt.resType end Existential diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 35f22538f074..466948161acf 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -75,6 +75,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: && containsCovarRetains(symd.symbol.originDenotation.info) then symd.flags &~ Private else symd.flags + end newFlagsFor def isPreCC(sym: Symbol)(using Context): Boolean = sym.isTerm && sym.maybeOwner.isClass diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index a6348304c4d7..e9575c7d6c4a 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -325,6 +325,7 @@ object NameKinds { val TailLocalName: UniqueNameKind = new UniqueNameKind("$tailLocal") val TailTempName: UniqueNameKind = new UniqueNameKind("$tmp") val ExceptionBinderName: UniqueNameKind = new UniqueNameKind("ex") + val ExistentialBinderName: UniqueNameKind = new UniqueNameKind("ex$") val SkolemName: UniqueNameKind = new UniqueNameKind("?") val SuperArgName: UniqueNameKind = new UniqueNameKind("$superArg$") val DocArtifactName: UniqueNameKind = new UniqueNameKind("$doc") @@ -332,7 +333,6 @@ object NameKinds { val InlineScrutineeName: UniqueNameKind = new UniqueNameKind("$scrutinee") val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") - val exSkolemName: UniqueNameKind = new UniqueNameKind("$exSkolem") // TODO needed? 
val UniqueExtMethName: UniqueNameKind = new UniqueNameKindWithUnmangle("$extension") diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 83964417a6f1..32467de77264 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1613,7 +1613,7 @@ class Namer { typer: Typer => else if pclazz.isEffectivelySealed && pclazz.associatedFile != cls.associatedFile then if pclazz.is(Sealed) && !pclazz.is(JavaDefined) then report.error(UnableToExtendSealedClass(pclazz), cls.srcPos) - else if sourceVersion.isAtLeast(future) then + else if sourceVersion.isAtLeast(`3.6`) then checkFeature(nme.adhocExtensions, i"Unless $pclazz is declared 'open', its extension in a separate file", cls.topLevelClass, diff --git a/tests/neg-custom-args/captures/refine-reach-shallow.scala b/tests/neg-custom-args/captures/refine-reach-shallow.scala index 9f4b28ce52e3..525d33fdb7c5 100644 --- a/tests/neg-custom-args/captures/refine-reach-shallow.scala +++ b/tests/neg-custom-args/captures/refine-reach-shallow.scala @@ -14,5 +14,5 @@ def test4(): Unit = val ys: List[IO^{xs*}] = xs // ok def test5(): Unit = val f: [R] -> (IO^ -> R) -> IO^ = ??? - val g: [R] -> (IO^ -> R) -> IO^{f*} = f // ok + val g: [R] -> (IO^ -> R) -> IO^{f*} = f // error val h: [R] -> (IO^{f*} -> R) -> IO^ = f // error diff --git a/tests/neg/cc-ex-conformance.scala b/tests/neg/cc-ex-conformance.scala index 9cfdda43c764..a953466daa9a 100644 --- a/tests/neg/cc-ex-conformance.scala +++ b/tests/neg/cc-ex-conformance.scala @@ -7,9 +7,9 @@ type EX1 = () => (c: Exists) => (C^{c}, C^{c}) type EX2 = () => (c1: Exists) => (c2: Exists) => (C^{c1}, C^{c2}) -type EX3 = () => (c: Exists) => () => C^{c} +type EX3 = () => (c: Exists) => (x: Object^) => C^{c} -type EX4 = () => () => (c: Exists) => C^{c} +type EX4 = () => (x: Object^) => (c: Exists) => C^{c} def Test = val ex1: EX1 = ??? diff --git a/tests/pos-custom-args/captures/capt-test.scala b/tests/pos-custom-args/captures/capt-test.scala index e229c685d846..49f199f106f1 100644 --- a/tests/pos-custom-args/captures/capt-test.scala +++ b/tests/pos-custom-args/captures/capt-test.scala @@ -36,3 +36,4 @@ def test(c: Cap, d: Cap) = val a4 = zs.map(identity) val a4c: LIST[Cap ->{d, y} Unit] = a4 + val a5: LIST[Cap ->{d, y} Unit] = zs.map(identity) From ca715e8a94c8c530ebaf6c2064993040d3dc5252 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 13 Jun 2024 11:21:39 +0200 Subject: [PATCH 298/827] Tighten rules against escaping local references Fixes the problem in effect-swaps.scala --- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 6 ++++-- compiler/src/dotty/tools/dotc/cc/Setup.scala | 6 ++---- tests/neg-custom-args/captures/effect-swaps.check | 4 ++++ tests/neg-custom-args/captures/effect-swaps.scala | 2 +- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index ec37a46ef5af..6d3ea34f4c0a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1131,7 +1131,9 @@ class CheckCaptures extends Recheck, SymTransformer: /** If actual derives from caps.Capability, yet is not a capturing type itself, * make its capture set explicit. 
*/ - private def makeCaptureSetExplicit(actual: Type)(using Context): Type = actual match + private def makeCaptureSetExplicit(actual: Type)(using Context): Type = + if false then actual + else actual match case CapturingType(_, _) => actual case _ if actual.derivesFromCapability => val cap: CaptureRef = actual match @@ -1346,7 +1348,7 @@ class CheckCaptures extends Recheck, SymTransformer: case ref: TermParamRef if !allowed.contains(ref) && !seen.contains(ref) => seen += ref - if ref.underlying.isRef(defn.Caps_Capability) then + if ref.isMaxCapability then report.error(i"escaping local reference $ref", tree.srcPos) else val widened = ref.captureSetOfInfo diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 466948161acf..10796ca1bef1 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -612,10 +612,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: !refs.isEmpty case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol - if sym.isClass then - !sym.isPureClass - else - sym != defn.Caps_Capability && instanceCanBeImpure(tp.superType) + if sym.isClass then !sym.isPureClass + else instanceCanBeImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => instanceCanBeImpure(tp.underlying) case tp: AndType => diff --git a/tests/neg-custom-args/captures/effect-swaps.check b/tests/neg-custom-args/captures/effect-swaps.check index ef5a95d333bf..22941be36794 100644 --- a/tests/neg-custom-args/captures/effect-swaps.check +++ b/tests/neg-custom-args/captures/effect-swaps.check @@ -22,3 +22,7 @@ 73 | fr.await.ok | | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/effect-swaps.scala:66:15 ------------------------------------------------------ +66 | Result.make: // error + | ^^^^^^^^^^^ + | escaping local reference contextual$9.type diff --git a/tests/neg-custom-args/captures/effect-swaps.scala b/tests/neg-custom-args/captures/effect-swaps.scala index d4eed2bae2f2..0b362b80e3ce 100644 --- a/tests/neg-custom-args/captures/effect-swaps.scala +++ b/tests/neg-custom-args/captures/effect-swaps.scala @@ -63,7 +63,7 @@ def test[T, E](using Async) = fr.await.ok def fail4[T, E](fr: Future[Result[T, E]]^) = - Result.make: //lbl ?=> // should be error, escaping label from Result but infers Result[Any, Any] + Result.make: // error Future: fut ?=> fr.await.ok From b9c21097aa0ab3f55e0aea5255831f87d86b29d7 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 13 Jun 2024 14:36:54 +0200 Subject: [PATCH 299/827] Go back to expansion of capability class references at Setup This gives us the necessary levers to switch to existential capabilities. 
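As a rough sketch of what the expansion means for user-facing types (the
`Cap` class and `test` method below are made up for illustration, they are
not part of this patch):

  import language.experimental.captureChecking

  class Cap extends caps.Capability

  def test(c: Cap): () ->{c} Unit =
    // With expansion in Setup, the reference to the capability class `Cap`
    // in the parameter's type is turned into `Cap^` (that is, `Cap` with
    // the universal capture set) up front, so `c` is a tracked capability
    // from the start and error messages now print it as `(c : Cap^)`
    // rather than `(c : Cap)`, as the updated check files below show.
    () => println(c)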
# Conflicts: # compiler/src/dotty/tools/dotc/cc/CaptureOps.scala --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 5 +++++ compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 6 +++--- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- compiler/src/dotty/tools/dotc/cc/Setup.scala | 5 ++++- tests/neg-custom-args/captures/byname.check | 7 ++++++- tests/neg-custom-args/captures/byname.scala | 3 +++ tests/neg-custom-args/captures/cc-this5.check | 2 +- tests/neg-custom-args/captures/extending-cap-classes.check | 2 +- tests/neg-custom-args/captures/i16725.scala | 4 ++-- 9 files changed, 26 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 49dbc9773229..bc1641b6f414 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -27,6 +27,11 @@ object ccConfig: */ inline val allowUnsoundMaps = false + /** If true, expand capability classes in Setup instead of treating them + * in adapt. + */ + inline val expandCapabilityInSetup = true + def useExistentials(using Context) = Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.5`) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 4069b9ffb014..eb3718e9601f 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -163,7 +163,7 @@ sealed abstract class CaptureSet extends Showable: case y: TermRef => (y.prefix eq x) || y.info.match - case y1: CaptureRef => x.subsumes(y1) + case y1: SingletonCaptureRef => x.subsumes(y1) case _ => false case MaybeCapability(y1) => x.stripMaybe.subsumes(y1) case _ => false @@ -171,7 +171,7 @@ sealed abstract class CaptureSet extends Showable: case ReachCapability(x1) => x1.subsumes(y.stripReach) case x: TermRef => x.info match - case x1: CaptureRef => x1.subsumes(y) + case x1: SingletonCaptureRef => x1.subsumes(y) case _ => false case x: TermParamRef => canSubsumeExistentially(x, y) case _ => false @@ -1059,7 +1059,7 @@ object CaptureSet: case tp: TermParamRef => tp.captureSet case tp: TypeRef => - if tp.derivesFromCapability then universal // TODO: maybe return another value that indicates that the underltinf ref is maximal? + if !ccConfig.expandCapabilityInSetup && tp.derivesFromCapability then universal else empty case _: TypeParamRef => empty diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 6d3ea34f4c0a..b73184447c47 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1132,7 +1132,7 @@ class CheckCaptures extends Recheck, SymTransformer: * make its capture set explicit. 
*/ private def makeCaptureSetExplicit(actual: Type)(using Context): Type = - if false then actual + if ccConfig.expandCapabilityInSetup then actual else actual match case CapturingType(_, _) => actual case _ if actual.derivesFromCapability => diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 10796ca1bef1..992f851831ad 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -323,7 +323,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case t: TypeVar => this(t.underlying) case t => - recur(t) + // Map references to capability classes C to C^ + if ccConfig.expandCapabilityInSetup && t.derivesFromCapability + then CapturingType(t, defn.expandedUniversalSet, boxed = false) + else recur(t) end expandAliases val tp1 = expandAliases(tp) // TODO: Do we still need to follow aliases? diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index b9e5c81b721d..c9530f6aad50 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -1,8 +1,13 @@ -- Error: tests/neg-custom-args/captures/byname.scala:19:5 ------------------------------------------------------------- 19 | h(g()) // error | ^^^ - | reference (cap2 : Cap) is not included in the allowed capture set {cap1} + | reference (cap2 : Cap^) is not included in the allowed capture set {cap1} | of an enclosing function literal with expected type () ?->{cap1} I +-- Error: tests/neg-custom-args/captures/byname.scala:22:12 ------------------------------------------------------------ +22 | h2(() => g())() // error + | ^^^ + | reference (cap2 : Cap^) is not included in the allowed capture set {cap1} + | of an enclosing function literal with expected type () ->{cap1} I -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:4:2 ----------------------------------------- 4 | def f() = if cap1 == cap1 then g else g // error | ^ diff --git a/tests/neg-custom-args/captures/byname.scala b/tests/neg-custom-args/captures/byname.scala index 0ed3a09cb414..75ad527dbd2d 100644 --- a/tests/neg-custom-args/captures/byname.scala +++ b/tests/neg-custom-args/captures/byname.scala @@ -17,6 +17,9 @@ def test2(cap1: Cap, cap2: Cap): I^{cap1} = def h(x: ->{cap1} I) = x // ok h(f()) // OK h(g()) // error + def h2(x: () ->{cap1} I) = x // ok + h2(() => f()) // OK + h2(() => g())() // error diff --git a/tests/neg-custom-args/captures/cc-this5.check b/tests/neg-custom-args/captures/cc-this5.check index 1329734ce37d..8affe7005e2e 100644 --- a/tests/neg-custom-args/captures/cc-this5.check +++ b/tests/neg-custom-args/captures/cc-this5.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/cc-this5.scala:16:20 ---------------------------------------------------------- 16 | def f = println(c) // error | ^ - | (c : Cap) cannot be referenced here; it is not included in the allowed capture set {} + | (c : Cap^) cannot be referenced here; it is not included in the allowed capture set {} | of the enclosing class A -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/cc-this5.scala:21:15 ------------------------------------- 21 | val x: A = this // error diff --git a/tests/neg-custom-args/captures/extending-cap-classes.check b/tests/neg-custom-args/captures/extending-cap-classes.check index 3bdddfd9dd3c..0936f48576e5 100644 --- a/tests/neg-custom-args/captures/extending-cap-classes.check +++ 
b/tests/neg-custom-args/captures/extending-cap-classes.check @@ -15,7 +15,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/extending-cap-classes.scala:13:15 ------------------------ 13 | val z2: C1 = y2 // error | ^^ - | Found: (y2 : C2)^{y2} + | Found: (y2 : C2^) | Required: C1 | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i16725.scala b/tests/neg-custom-args/captures/i16725.scala index 733c2c562bbc..1accf197c626 100644 --- a/tests/neg-custom-args/captures/i16725.scala +++ b/tests/neg-custom-args/captures/i16725.scala @@ -7,8 +7,8 @@ type Wrapper[T] = [R] -> (f: T => R) -> R def mk[T](x: T): Wrapper[T] = [R] => f => f(x) def useWrappedIO(wrapper: Wrapper[IO]): () -> Unit = () => - wrapper: io => + wrapper: io => // error io.brewCoffee() def main(): Unit = - val escaped = usingIO(io => useWrappedIO(mk(io))) // error + val escaped = usingIO(io => useWrappedIO(mk(io))) escaped() // boom From 420f2cda4b7edd1a473950b13d0b5cd212835516 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 13 Jun 2024 15:23:23 +0200 Subject: [PATCH 300/827] Drop expandedUniversalSet An expandedUniversalSet was the same as `{cap}` but not reference-equal to CaptureSet.universal. This construct was previously needed to avoid multiple expansions, but this does not seem to be the case any longer so the construct can be dropped. --- compiler/src/dotty/tools/dotc/cc/CapturingType.scala | 3 --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 5 ++--- compiler/src/dotty/tools/dotc/core/Definitions.scala | 1 - 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala index ee0cad4d4d03..f859b0d110aa 100644 --- a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala +++ b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala @@ -28,7 +28,6 @@ object CapturingType: /** Smart constructor that * - drops empty capture sets - * - drops a capability class expansion if it is further refined with another capturing type * - fuses compatible capturing types. * An outer type capturing type A can be fused with an inner capturing type B if their * boxing status is the same or if A is boxed. 
@@ -36,8 +35,6 @@ object CapturingType: def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type = if refs.isAlwaysEmpty then parent else parent match - case parent @ CapturingType(parent1, refs1) if refs1 eq defn.expandedUniversalSet => - apply(parent1, refs, boxed) case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed => apply(parent1, refs ++ refs1, boxed) case _ => diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 992f851831ad..e7bf584f9d44 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -325,7 +325,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case t => // Map references to capability classes C to C^ if ccConfig.expandCapabilityInSetup && t.derivesFromCapability - then CapturingType(t, defn.expandedUniversalSet, boxed = false) + then CapturingType(t, CaptureSet.universal, boxed = false) else recur(t) end expandAliases @@ -749,8 +749,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if ref.captureSetOfInfo.elems.isEmpty then report.error(em"$ref cannot be tracked since its capture set is empty", pos) - if parent.captureSet ne defn.expandedUniversalSet then - check(parent.captureSet, parent) + check(parent.captureSet, parent) val others = for j <- 0 until retained.length if j != i yield retained(j).toCaptureRef diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 88de8e66054e..57402ffe27bf 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -999,7 +999,6 @@ class Definitions { @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") - @tu lazy val expandedUniversalSet: CaptureSet = CaptureSet(captureRoot.termRef) @tu lazy val PureClass: Symbol = requiredClass("scala.Pure") From e9474ff2ccea087dc334ddf4688dc5baf2af2cfe Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 13 Jun 2024 15:54:50 +0200 Subject: [PATCH 301/827] Adapt new capability class scheme to existentials --- compiler/src/dotty/tools/dotc/cc/Existential.scala | 10 ++++++++-- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala index 0c269a484092..218713f85e1f 100644 --- a/compiler/src/dotty/tools/dotc/cc/Existential.scala +++ b/compiler/src/dotty/tools/dotc/cc/Existential.scala @@ -229,7 +229,7 @@ object Existential: /** Map top-level existentials to `cap`. Do the same for existentials * in function results if all preceding arguments are known to be always pure. 
*/ - def toCap(tp: Type)(using Context): Type = tp.dealias match + def toCap(tp: Type)(using Context): Type = tp.dealiasKeepAnnots match case Existential(boundVar, unpacked) => val transformed = unpacked.substParam(boundVar, defn.captureRoot.termRef) transformed match @@ -238,11 +238,15 @@ object Existential: transformed.derivedFunctionOrMethod(args, toCap(res)) case _ => transformed + case tp1 @ CapturingType(parent, refs) => + tp1.derivedCapturingType(toCap(parent), refs) + case tp1 @ AnnotatedType(parent, ann) => + tp1.derivedAnnotatedType(toCap(parent), ann) case _ => tp /** Map existentials at the top-level and in all nested result types to `cap` */ - def toCapDeeply(tp: Type)(using Context): Type = tp.dealias match + def toCapDeeply(tp: Type)(using Context): Type = tp.dealiasKeepAnnots match case Existential(boundVar, unpacked) => toCapDeeply(unpacked.substParam(boundVar, defn.captureRoot.termRef)) case tp1 @ FunctionOrMethod(args, res) => @@ -250,6 +254,8 @@ object Existential: if tp2 ne tp1 then tp2 else tp case tp1 @ CapturingType(parent, refs) => tp1.derivedCapturingType(toCapDeeply(parent), refs) + case tp1 @ AnnotatedType(parent, ann) => + tp1.derivedAnnotatedType(toCapDeeply(parent), ann) case _ => tp /** Replace all occurrences of `cap` in parts of this type by an existentially bound diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index e7bf584f9d44..7e9f4e6e9c4b 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -324,7 +324,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: this(t.underlying) case t => // Map references to capability classes C to C^ - if ccConfig.expandCapabilityInSetup && t.derivesFromCapability + if ccConfig.expandCapabilityInSetup && t.derivesFromCapability && t.typeSymbol != defn.Caps_Exists then CapturingType(t, CaptureSet.universal, boxed = false) else recur(t) end expandAliases From 0161bb06916954ebbd001c6417f97507f3899836 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 13 Jun 2024 17:43:26 +0200 Subject: [PATCH 302/827] Map capability classes to existentials # Conflicts: # compiler/src/dotty/tools/dotc/cc/Setup.scala --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 13 ++++++------- .../src/dotty/tools/dotc/core/TypeComparer.scala | 5 ++++- tests/pos/infer-exists.scala | 12 ++++++++++++ 3 files changed, 22 insertions(+), 8 deletions(-) create mode 100644 tests/pos/infer-exists.scala diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 7e9f4e6e9c4b..c8e7a8d89a89 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -300,9 +300,6 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: CapturingType(fntpe, cs, boxed = false) else fntpe - private def recur(t: Type): Type = - Existential.mapCapInResult(normalizeCaptures(mapOver(t)), fail) - def apply(t: Type) = t match case t @ CapturingType(parent, refs) => @@ -323,10 +320,12 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case t: TypeVar => this(t.underlying) case t => - // Map references to capability classes C to C^ - if ccConfig.expandCapabilityInSetup && t.derivesFromCapability && t.typeSymbol != defn.Caps_Exists - then CapturingType(t, CaptureSet.universal, boxed = false) - else recur(t) + Existential.mapCapInResult( + // Map references to capability classes C to C^ + if ccConfig.expandCapabilityInSetup && t.derivesFromCapability && 
t.typeSymbol != defn.Caps_Exists + then CapturingType(t, CaptureSet.universal, boxed = false) + else normalizeCaptures(mapOver(t)), + fail) end expandAliases val tp1 = expandAliases(tp) // TODO: Do we still need to follow aliases? diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 174bbaeca21d..abbb4387a125 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2824,7 +2824,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => false protected def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = - refs1.subCaptures(refs2, frozen) + try refs1.subCaptures(refs2, frozen) + catch case ex: AssertionError => + println(i"fail while subCaptures $refs1 <:< $refs2") + throw ex /** Is the boxing status of tp1 and tp2 the same, or alternatively, is * the capture sets `refs1` of `tp1` a subcapture of the empty set? diff --git a/tests/pos/infer-exists.scala b/tests/pos/infer-exists.scala new file mode 100644 index 000000000000..6d5225f75128 --- /dev/null +++ b/tests/pos/infer-exists.scala @@ -0,0 +1,12 @@ +import language.experimental.captureChecking + +class C extends caps.Capability +class D + +def test1 = + val a: (x: C) -> C = ??? + val b = a + +def test2 = + val a: (x: D^) -> D^ = ??? + val b = a From 7296b40597238fda2c059185a2ee8e6101e7df4d Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 14 Jun 2024 20:50:30 +0200 Subject: [PATCH 303/827] Change encoding of impure dependent function types The encoding of (x: T) => U in capture checked code has changed. Previously: T => U' { def apply(x: T): U } Now: (T -> U' { def apply(x: T): U })^{cap} We often handle dependent functions by transforming the apply method and then mapping back to a function type using `.toFunctionType`. But that would always generate a pure function, so the impurity info could get lost. 
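As an illustrative sketch (not code from this patch; `C` and `Op` are
made-up names), instantiating the schema above with T = C and U = C^{c}:

  import language.experimental.captureChecking

  class C extends caps.Capability

  // An impure dependent function type: the result's capture set mentions
  // the parameter `c`, and the `=>` arrow lets the function itself capture
  // capabilities.
  type Op = (c: C) => C^{c}

  // Previous encoding (roughly):  C => C { def apply(c: C): C^{c} }
  // New encoding (roughly):       (C -> C { def apply(c: C): C^{c} })^{cap}
  // Keeping the `^{cap}` on the outside preserves the impurity even after
  // the refined apply method is mapped back with `.toFunctionType`.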
--- compiler/src/dotty/tools/dotc/typer/Typer.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index c518de7dbbfe..cbf79577c2a3 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1683,10 +1683,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else val resTpt = TypeTree(mt.nonDependentResultApprox).withSpan(body.span) val paramTpts = appDef.termParamss.head.map(p => TypeTree(p.tpt.tpe).withSpan(p.tpt.span)) - val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) + val funSym = defn.FunctionSymbol(numArgs, isContextual) val tycon = TypeTree(funSym.typeRef) AppliedTypeTree(tycon, paramTpts :+ resTpt) - RefinedTypeTree(core, List(appDef), ctx.owner.asClass) + val res = RefinedTypeTree(core, List(appDef), ctx.owner.asClass) + if isImpure then + typed(untpd.makeRetaining(untpd.TypedSplice(res), Nil, tpnme.retainsCap), pt) + else + res end typedDependent args match { From 4add745c1c8222351554c04e2d1ab26c86d8c964 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 15 Jun 2024 11:45:47 +0200 Subject: [PATCH 304/827] Fix mapping of cap to existentials Still missing: Mapping parameters --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 3 +- .../src/dotty/tools/dotc/cc/Existential.scala | 103 +++++++++++------- compiler/src/dotty/tools/dotc/cc/Setup.scala | 39 +++---- .../dotty/tools/dotc/core/Definitions.scala | 2 + .../src/dotty/tools/dotc/core/Types.scala | 9 ++ tests/neg-custom-args/captures/lazylist.check | 8 +- tests/neg/existential-mapping.check | 88 +++++++++++++++ tests/neg/existential-mapping.scala | 47 ++++++++ .../captures/curried-closures.scala | 5 +- 9 files changed, 233 insertions(+), 71 deletions(-) create mode 100644 tests/neg/existential-mapping.check create mode 100644 tests/neg/existential-mapping.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index bc1641b6f414..a76e6a1315ac 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -28,7 +28,7 @@ object ccConfig: inline val allowUnsoundMaps = false /** If true, expand capability classes in Setup instead of treating them - * in adapt. + * in adapt. */ inline val expandCapabilityInSetup = true @@ -568,6 +568,7 @@ trait ConservativeFollowAliasMap(using Context) extends TypeMap: end ConservativeFollowAliasMap /** An extractor for all kinds of function types as well as method and poly types. + * It includes aliases of function types such as `=>`. TODO: Can we do without? * @return 1st half: The argument types or empty if this is a type function * 2nd half: The result type */ diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala index 218713f85e1f..94aab59443ba 100644 --- a/compiler/src/dotty/tools/dotc/cc/Existential.scala +++ b/compiler/src/dotty/tools/dotc/cc/Existential.scala @@ -10,6 +10,7 @@ import ast.tpd.* import Decorators.* import typer.ErrorReporting.errorType import NameKinds.ExistentialBinderName +import NameOps.isImpureFunction import reporting.Message /** @@ -258,6 +259,13 @@ object Existential: tp1.derivedAnnotatedType(toCapDeeply(parent), ann) case _ => tp + /** Knowing that `tp` is a function type, is an alias to a function other + * than `=>`? 
+ */ + private def isAliasFun(tp: Type)(using Context) = tp match + case AppliedType(tycon, _) => !defn.isFunctionSymbol(tycon.typeSymbol) + case _ => false + /** Replace all occurrences of `cap` in parts of this type by an existentially bound * variable. If there are such occurrences, or there might be in the future due to embedded * capture set variables, create an existential with the variable wrapping the type. @@ -266,56 +274,69 @@ object Existential: def mapCap(tp: Type, fail: Message => Unit)(using Context): Type = var needsWrap = false - class Wrap(boundVar: TermParamRef) extends BiTypeMap, ConservativeFollowAliasMap: - def apply(t: Type) = // go deep first, so that we map parts of alias types before dealiasing - mapOver(t) match - case t1: TermRef if t1.isRootCapability => - if variance > 0 then - needsWrap = true - boundVar - else - if variance == 0 then - fail(em"cap appears in invariant position in $tp") - // we accept variance < 0, and leave the cap as it is - t1 - case t1 @ FunctionOrMethod(_, _) => - // These have been mapped before - t1 - case t1 @ CapturingType(_, _: CaptureSet.Var) => - if variance > 0 then needsWrap = true // the set might get a cap later. - t1 - case t1 => - applyToAlias(t, t1) - - lazy val inverse = new BiTypeMap with ConservativeFollowAliasMap: - def apply(t: Type) = mapOver(t) match - case t1: TermParamRef if t1 eq boundVar => defn.captureRoot.termRef - case t1 @ FunctionOrMethod(_, _) => t1 - case t1 => applyToAlias(t, t1) + abstract class CapMap extends BiTypeMap: + override def mapOver(t: Type): Type = t match + case t @ FunctionOrMethod(args, res) if variance > 0 && !isAliasFun(t) => + t // `t` should be mapped in this case by a different call to `mapCap`. + case Existential(_, _) => + t + case t: (LazyRef | TypeVar) => + mapConserveSuper(t) + case _ => + super.mapOver(t) + + class Wrap(boundVar: TermParamRef) extends CapMap: + def apply(t: Type) = t match + case t: TermRef if t.isRootCapability => + if variance > 0 then + needsWrap = true + boundVar + else + if variance == 0 then + fail(em"""$tp captures the root capability `cap` in invariant position""") + // we accept variance < 0, and leave the cap as it is + super.mapOver(t) + case t @ CapturingType(parent, refs: CaptureSet.Var) => + if variance > 0 then needsWrap = true + super.mapOver(t) + case _ => + mapOver(t) + //.showing(i"mapcap $t = $result") + + lazy val inverse = new BiTypeMap: + def apply(t: Type) = t match + case t: TermParamRef if t eq boundVar => defn.captureRoot.termRef + case _ => mapOver(t) def inverse = Wrap.this override def toString = "Wrap.inverse" end Wrap if ccConfig.useExistentials then - tp match - case Existential(_, _) => tp - case _ => - val wrapped = apply(Wrap(_)(tp)) - if needsWrap then wrapped else tp + val wrapped = apply(Wrap(_)(tp)) + if needsWrap then wrapped else tp else tp end mapCap - /** Map `cap` to existential in the results of functions or methods. 
- */ - def mapCapInResult(tp: Type, fail: Message => Unit)(using Context): Type = - def mapCapInFinalResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = mapCapInFinalResult(tp.resultType)) + def mapCapInResults(fail: Message => Unit)(using Context): TypeMap = new: + + def mapFunOrMethod(tp: Type, args: List[Type], res: Type): Type = + val args1 = atVariance(-variance)(args.map(this)) + val res1 = res match + case res: MethodType => mapFunOrMethod(res, res.paramInfos, res.resType) + case res: PolyType => mapFunOrMethod(res, Nil, res.resType) // TODO: Also map bounds of PolyTypes + case _ => mapCap(apply(res), fail) + tp.derivedFunctionOrMethod(args1, res1) + + def apply(t: Type): Type = t match + case FunctionOrMethod(args, res) if variance > 0 && !isAliasFun(t) => + mapFunOrMethod(t, args, res) + case CapturingType(parent, refs) => + t.derivedCapturingType(this(parent), refs) + case t: (LazyRef | TypeVar) => + mapConserveSuper(t) case _ => - mapCap(tp, fail) - tp match - case tp: MethodOrPoly => mapCapInFinalResult(tp) - case _ => tp + mapOver(t) + end mapCapInResults /** Is `mt` a method represnting an existential type when used in a refinement? */ def isExistentialMethod(mt: TermLambda)(using Context): Boolean = mt.paramInfos match diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index c8e7a8d89a89..23d05168e1f2 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -269,12 +269,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: end transformInferredType private def transformExplicitType(tp: Type, tptToCheck: Option[Tree] = None)(using Context): Type = - val expandAliases = new DeepTypeMap: + val toCapturing = new DeepTypeMap: override def toString = "expand aliases" - def fail(msg: Message) = - for tree <- tptToCheck do report.error(msg, tree.srcPos) - /** Expand $throws aliases. This is hard-coded here since $throws aliases in stdlib * are defined with `?=>` rather than `?->`. * We also have to add a capture set to the last expanded throws alias. I.e. @@ -314,23 +311,20 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: t.derivedAnnotatedType(parent1, ann) case throwsAlias(res, exc) => this(expandThrowsAlias(res, exc, Nil)) - case t: LazyRef => - val t1 = this(t.ref) - if t1 ne t.ref then t1 else t - case t: TypeVar => - this(t.underlying) case t => - Existential.mapCapInResult( - // Map references to capability classes C to C^ - if ccConfig.expandCapabilityInSetup && t.derivesFromCapability && t.typeSymbol != defn.Caps_Exists - then CapturingType(t, CaptureSet.universal, boxed = false) - else normalizeCaptures(mapOver(t)), - fail) - end expandAliases - - val tp1 = expandAliases(tp) // TODO: Do we still need to follow aliases? 
- if tp1 ne tp then capt.println(i"expanded in ${ctx.owner}: $tp --> $tp1") - tp1 + // Map references to capability classes C to C^ + if ccConfig.expandCapabilityInSetup && t.derivesFromCapability && t.typeSymbol != defn.Caps_Exists + then CapturingType(t, CaptureSet.universal, boxed = false) + else normalizeCaptures(mapOver(t)) + end toCapturing + + def fail(msg: Message) = + for tree <- tptToCheck do report.error(msg, tree.srcPos) + + val tp1 = toCapturing(tp) + val tp2 = Existential.mapCapInResults(fail)(tp1) + if tp2 ne tp then capt.println(i"expanded in ${ctx.owner}: $tp --> $tp1 --> $tp2") + tp2 end transformExplicitType /** Transform type of type tree, and remember the transformed type as the type the tree */ @@ -538,9 +532,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if sym.exists && signatureChanges then val newInfo = - Existential.mapCapInResult( - integrateRT(sym.info, sym.paramSymss, localReturnType, Nil, Nil), - report.error(_, tree.srcPos)) + Existential.mapCapInResults(report.error(_, tree.srcPos)): + integrateRT(sym.info, sym.paramSymss, localReturnType, Nil, Nil) .showing(i"update info $sym: ${sym.info} = $result", capt) if newInfo ne sym.info then val updatedInfo = diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 57402ffe27bf..ad80d0565f63 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1161,6 +1161,8 @@ class Definitions { if mt.hasErasedParams then RefinedType(PolyFunctionClass.typeRef, nme.apply, mt) else FunctionNOf(args, resultType, isContextual) + // Unlike PolyFunctionOf and RefinedFunctionOf this extractor follows aliases. + // Can we do without? Same for FunctionNOf and isFunctionNType. def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean)] = { ft match case PolyFunctionOf(mt: MethodType) => diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 9aca8a9b4b60..5e12e4d6b84a 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -6271,6 +6271,15 @@ object Types extends TypeUtils { try derivedCapturingType(tp, this(parent), refs.map(this)) finally variance = saved + /** Utility method. Maps the supertype of a type proxy. Returns the + * type proxy itself if the mapping leaves the supertype unchanged. + * This avoids needless changes in mapped types. 
+ */ + protected def mapConserveSuper(t: TypeProxy): Type = + val t1 = t.superType + val t2 = apply(t1) + if t2 ne t1 then t2 else t + /** Map this function over given type */ def mapOver(tp: Type): Type = { record(s"TypeMap mapOver ${getClass}") diff --git a/tests/neg-custom-args/captures/lazylist.check b/tests/neg-custom-args/captures/lazylist.check index 09352ec648ce..643ef78841f0 100644 --- a/tests/neg-custom-args/captures/lazylist.check +++ b/tests/neg-custom-args/captures/lazylist.check @@ -8,8 +8,8 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:35:29 ------------------------------------- 35 | val ref1c: LazyList[Int] = ref1 // error | ^^^^ - | Found: (ref1 : lazylists.LazyCons[Int]{val xs: () ->{cap1} lazylists.LazyList[Int]^?}^{cap1}) - | Required: lazylists.LazyList[Int] + | Found: lazylists.LazyCons[Int]{val xs: () ->{cap1} lazylists.LazyList[Int]^?}^{ref1} + | Required: lazylists.LazyList[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:37:36 ------------------------------------- @@ -29,8 +29,8 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:41:48 ------------------------------------- 41 | val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error | ^^^^ - | Found: (ref4 : lazylists.LazyList[Int]^{cap3, cap2, ref1, cap1}) - | Required: lazylists.LazyList[Int]^{cap1, ref3, cap3} + | Found: (ref4 : lazylists.LazyList[Int]^{cap3, cap2, ref1}) + | Required: lazylists.LazyList[Int]^{cap1, ref3, cap3} | | longer explanation available when compiling with `-explain` -- [E164] Declaration Error: tests/neg-custom-args/captures/lazylist.scala:22:6 ---------------------------------------- diff --git a/tests/neg/existential-mapping.check b/tests/neg/existential-mapping.check new file mode 100644 index 000000000000..bab04868b123 --- /dev/null +++ b/tests/neg/existential-mapping.check @@ -0,0 +1,88 @@ +-- Error: tests/neg/existential-mapping.scala:44:13 -------------------------------------------------------------------- +44 | val z1: A^ => Array[C^] = ??? 
// error + | ^^^^^^^^^^^^^^^ + | Array[box C^] captures the root capability `cap` in invariant position +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:9:25 ------------------------------------------------ +9 | val _: (x: C^) -> C = x1 // error + | ^^ + | Found: (x1 : (x: C^) -> (ex$3: caps.Exists) -> C^{ex$3}) + | Required: (x: C^) -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:12:20 ----------------------------------------------- +12 | val _: C^ -> C = x2 // error + | ^^ + | Found: (x2 : C^ -> (ex$9: caps.Exists) -> C^{ex$9}) + | Required: C^ -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:15:30 ----------------------------------------------- +15 | val _: A^ -> (x: C^) -> C = x3 // error + | ^^ + | Found: (x3 : A^ -> (x: C^) -> (ex$15: caps.Exists) -> C^{ex$15}) + | Required: A^ -> (x: C^) -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:18:25 ----------------------------------------------- +18 | val _: A^ -> C^ -> C = x4 // error + | ^^ + | Found: (x4 : A^ -> C^ -> (ex$25: caps.Exists) -> C^{ex$25}) + | Required: A^ -> C^ -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:21:30 ----------------------------------------------- +21 | val _: A^ -> (x: C^) -> C = x5 // error + | ^^ + | Found: (x5 : A^ -> (ex$35: caps.Exists) -> Fun[C^{ex$35}]) + | Required: A^ -> (x: C^) -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:24:30 ----------------------------------------------- +24 | val _: A^ -> (x: C^) => C = x6 // error + | ^^ + | Found: (x6 : A^ -> (ex$43: caps.Exists) -> IFun[C^{ex$43}]) + | Required: A^ -> (ex$48: caps.Exists) -> (x: C^) ->{ex$48} C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:27:25 ----------------------------------------------- +27 | val _: (x: C^) => C = y1 // error + | ^^ + | Found: (y1 : (x: C^) => (ex$54: caps.Exists) -> C^{ex$54}) + | Required: (x: C^) => C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:30:20 ----------------------------------------------- +30 | val _: C^ => C = y2 // error + | ^^ + | Found: (y2 : C^ => (ex$60: caps.Exists) -> C^{ex$60}) + | Required: C^ => C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:33:30 ----------------------------------------------- +33 | val _: A^ => (x: C^) => C = y3 // error + | ^^ + | Found: (y3 : A^ => (ex$67: caps.Exists) -> (x: C^) ->{ex$67} (ex$66: caps.Exists) -> C^{ex$66}) + | Required: A^ => (ex$78: caps.Exists) -> (x: C^) ->{ex$78} C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:36:25 ----------------------------------------------- +36 | val _: A^ => C^ => C = y4 // error + | ^^ + | Found: (y4 : A^ => C^ => (ex$84: caps.Exists) -> C^{ex$84}) + | Required: A^ => C^ => C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: 
tests/neg/existential-mapping.scala:39:30 ----------------------------------------------- +39 | val _: A^ => (x: C^) -> C = y5 // error + | ^^ + | Found: (y5 : A^ => (ex$94: caps.Exists) -> Fun[C^{ex$94}]) + | Required: A^ => (x: C^) -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:42:30 ----------------------------------------------- +42 | val _: A^ => (x: C^) => C = y6 // error + | ^^ + | Found: (y6 : A^ => (ex$102: caps.Exists) -> IFun[C^{ex$102}]) + | Required: A^ => (ex$107: caps.Exists) -> (x: C^) ->{ex$107} C + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/existential-mapping.scala b/tests/neg/existential-mapping.scala new file mode 100644 index 000000000000..96a36d8a7b9b --- /dev/null +++ b/tests/neg/existential-mapping.scala @@ -0,0 +1,47 @@ +import language.experimental.captureChecking + +class A +class C +type Fun[X] = (x: C^) -> X +type IFun[X] = (x: C^) => X +def Test = + val x1: (x: C^) -> C^ = ??? + val _: (x: C^) -> C = x1 // error + + val x2: C^ -> C^ = ??? + val _: C^ -> C = x2 // error + + val x3: A^ -> (x: C^) -> C^ = ??? + val _: A^ -> (x: C^) -> C = x3 // error + + val x4: A^ -> C^ -> C^ = ??? + val _: A^ -> C^ -> C = x4 // error + + val x5: A^ -> Fun[C^] = ??? + val _: A^ -> (x: C^) -> C = x5 // error + + val x6: A^ -> IFun[C^] = ??? + val _: A^ -> (x: C^) => C = x6 // error + + val y1: (x: C^) => C^ = ??? + val _: (x: C^) => C = y1 // error + + val y2: C^ => C^ = ??? + val _: C^ => C = y2 // error + + val y3: A^ => (x: C^) => C^ = ??? + val _: A^ => (x: C^) => C = y3 // error + + val y4: A^ => C^ => C^ = ??? + val _: A^ => C^ => C = y4 // error + + val y5: A^ => Fun[C^] = ??? + val _: A^ => (x: C^) -> C = y5 // error + + val y6: A^ => IFun[C^] = ??? + val _: A^ => (x: C^) => C = y6 // error + + val z1: A^ => Array[C^] = ??? 
// error + + + diff --git a/tests/pos-custom-args/captures/curried-closures.scala b/tests/pos-custom-args/captures/curried-closures.scala index 0ad729375b3c..262dd4b66b92 100644 --- a/tests/pos-custom-args/captures/curried-closures.scala +++ b/tests/pos-custom-args/captures/curried-closures.scala @@ -1,6 +1,7 @@ -//> using options -experimental +import annotation.experimental +import language.experimental.captureChecking -object Test: +@experimental object Test: def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) val f1 = map2 val fc1: List[Int] -> (Int => Int) -> List[Int] = f1 From d5c15dd15b693d11936c29fdba586f0647f02376 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 15 Jun 2024 13:11:02 +0200 Subject: [PATCH 305/827] Let only value types derive from Capabilities Fixes crash with opaque types reported by @natsukagami --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 2 +- tests/pos-custom-args/captures/opaque-cap.scala | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 tests/pos-custom-args/captures/opaque-cap.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index a76e6a1315ac..1516b769c7ee 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -272,7 +272,7 @@ extension (tp: Type) val sym = tp.typeSymbol if sym.isClass then sym.derivesFrom(defn.Caps_Capability) else tp.superType.derivesFromCapability - case tp: TypeProxy => + case tp: (TypeProxy & ValueType) => tp.superType.derivesFromCapability case tp: AndType => tp.tp1.derivesFromCapability || tp.tp2.derivesFromCapability diff --git a/tests/pos-custom-args/captures/opaque-cap.scala b/tests/pos-custom-args/captures/opaque-cap.scala new file mode 100644 index 000000000000..dc3d48a2d311 --- /dev/null +++ b/tests/pos-custom-args/captures/opaque-cap.scala @@ -0,0 +1,6 @@ +import language.experimental.captureChecking + +trait A extends caps.Capability + +object O: + opaque type B = A \ No newline at end of file From d242e0117235592d3a0434efe4f6629001b1ac97 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 17 Jun 2024 09:38:30 +0200 Subject: [PATCH 306/827] Type inference for existentials - Add existentials to inferred types - Map existentials in one compared type to existentials in the other - Also: Don't re-analyze existentials in mapCap. 
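A sketch of the first two points (illustrative only; it follows the shape
of the existential-mapping test added in this series):

  import language.experimental.captureChecking

  class C

  def test =
    val x1: (x: C^) -> C^ = ???
    // The type inferred for `b` now binds the `cap` in the result
    // existentially, roughly
    //   (x: C^) -> (ex: caps.Exists) -> C^{ex}
    // and when two such types are compared, the existentials of one side
    // are mapped to the matching existentials of the other side instead of
    // being re-analyzed.
    val b = x1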
--- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 29 ++--- .../src/dotty/tools/dotc/cc/Existential.scala | 16 ++- compiler/src/dotty/tools/dotc/cc/Setup.scala | 8 +- .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../dotty/tools/dotc/core/TypeComparer.scala | 111 +++++++++++++++--- .../captures/heal-tparam-cs.scala | 4 +- tests/neg-custom-args/captures/reaches2.check | 4 +- tests/neg/existential-mapping.check | 28 ++--- 8 files changed, 148 insertions(+), 53 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index eb3718e9601f..8f161810f6f9 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -14,7 +14,7 @@ import printing.{Showable, Printer} import printing.Texts.* import util.{SimpleIdentitySet, Property} import typer.ErrorReporting.Addenda -import TypeComparer.canSubsumeExistentially +import TypeComparer.subsumesExistentially import util.common.alwaysTrue import scala.collection.mutable import CCState.* @@ -173,7 +173,7 @@ sealed abstract class CaptureSet extends Showable: x.info match case x1: SingletonCaptureRef => x1.subsumes(y) case _ => false - case x: TermParamRef => canSubsumeExistentially(x, y) + case x: TermParamRef => subsumesExistentially(x, y) case _ => false /** {x} <:< this where <:< is subcapturing, but treating all variables @@ -498,10 +498,13 @@ object CaptureSet: deps = state.deps(this) final def addThisElem(elem: CaptureRef)(using Context, VarState): CompareResult = - if isConst || !recordElemsState() then - CompareResult.Fail(this :: Nil) // fail if variable is solved or given VarState is frozen + if isConst // Fail if variable is solved, + || !recordElemsState() // or given VarState is frozen, + || Existential.isBadExistential(elem) // or `elem` is an out-of-scope existential, + then + CompareResult.Fail(this :: Nil) else if !levelOK(elem) then - CompareResult.LevelError(this, elem) + CompareResult.LevelError(this, elem) // or `elem` is not visible at the level of the set. 
else //if id == 34 then assert(!elem.isUniversalRootCapability) assert(elem.isTrackableRef, elem) @@ -694,19 +697,10 @@ object CaptureSet: if cond then propagate else CompareResult.OK val mapped = extrapolateCaptureRef(elem, tm, variance) + def isFixpoint = mapped.isConst && mapped.elems.size == 1 && mapped.elems.contains(elem) - def addMapped = - val added = mapped.elems.filter(!accountsFor(_)) - addNewElems(added) - .andAlso: - if mapped.isConst then CompareResult.OK - else if mapped.asVar.recordDepsState() then { addAsDependentTo(mapped); CompareResult.OK } - else CompareResult.Fail(this :: Nil) - .andAlso: - propagateIf(!added.isEmpty) - def failNoFixpoint = val reason = if variance <= 0 then i"the set's variance is $variance" @@ -716,11 +710,14 @@ object CaptureSet: CompareResult.Fail(this :: Nil) if origin eq source then // elements have to be mapped - addMapped + val added = mapped.elems.filter(!accountsFor(_)) + addNewElems(added) .andAlso: if mapped.isConst then CompareResult.OK else if mapped.asVar.recordDepsState() then { addAsDependentTo(mapped); CompareResult.OK } else CompareResult.Fail(this :: Nil) + .andAlso: + propagateIf(!added.isEmpty) else if accountsFor(elem) then CompareResult.OK else if variance > 0 then diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala index 94aab59443ba..2bdc82bef53f 100644 --- a/compiler/src/dotty/tools/dotc/cc/Existential.scala +++ b/compiler/src/dotty/tools/dotc/cc/Existential.scala @@ -9,6 +9,7 @@ import StdNames.nme import ast.tpd.* import Decorators.* import typer.ErrorReporting.errorType +import Names.TermName import NameKinds.ExistentialBinderName import NameOps.isImpureFunction import reporting.Message @@ -204,8 +205,10 @@ object Existential: case _ => None /** Create method type in the refinement of an existential type */ - private def exMethodType(mk: TermParamRef => Type)(using Context): MethodType = - val boundName = ExistentialBinderName.fresh() + private def exMethodType(using Context)( + mk: TermParamRef => Type, + boundName: TermName = ExistentialBinderName.fresh() + ): MethodType = MethodType(boundName :: Nil)( mt => defn.Caps_Exists.typeRef :: Nil, mt => mk(mt.paramRefs.head)) @@ -332,6 +335,8 @@ object Existential: mapFunOrMethod(t, args, res) case CapturingType(parent, refs) => t.derivedCapturingType(this(parent), refs) + case Existential(_, _) => + t case t: (LazyRef | TypeVar) => mapConserveSuper(t) case _ => @@ -348,4 +353,11 @@ object Existential: case ref: TermParamRef => isExistentialMethod(ref.binder) case _ => false + def isBadExistential(ref: CaptureRef) = ref match + case ref: TermParamRef => ref.paramName == nme.OOS_EXISTENTIAL + case _ => false + + def badExistential(using Context): TermParamRef = + exMethodType(identity, nme.OOS_EXISTENTIAL).paramRefs.head + end Existential diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 23d05168e1f2..7fc78599c377 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -262,7 +262,11 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: end apply end mapInferred - try mapInferred(refine = true)(tp) + try + val tp1 = mapInferred(refine = true)(tp) + val tp2 = Existential.mapCapInResults(_ => assert(false))(tp1) + if tp2 ne tp then capt.println(i"expanded implicit in ${ctx.owner}: $tp --> $tp1 --> $tp2") + tp2 catch case ex: AssertionError => println(i"error while mapping inferred $tp") throw 
ex @@ -323,7 +327,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: val tp1 = toCapturing(tp) val tp2 = Existential.mapCapInResults(fail)(tp1) - if tp2 ne tp then capt.println(i"expanded in ${ctx.owner}: $tp --> $tp1 --> $tp2") + if tp2 ne tp then capt.println(i"expanded explicit in ${ctx.owner}: $tp --> $tp1 --> $tp2") tp2 end transformExplicitType diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 3753d1688399..6548b46186bb 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -294,6 +294,7 @@ object StdNames { val EVT2U: N = "evt2u$" val EQEQ_LOCAL_VAR: N = "eqEqTemp$" val LAZY_FIELD_OFFSET: N = "OFFSET$" + val OOS_EXISTENTIAL: N = "" val OUTER: N = "$outer" val REFINE_CLASS: N = "" val ROOTPKG: N = "_root_" diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index abbb4387a125..e532324e95a7 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -11,7 +11,7 @@ import collection.mutable import util.{Stats, NoSourcePosition, EqHashMap} import config.Config import config.Feature.{betterMatchTypeExtractorsEnabled, migrateTo3, sourceVersion} -import config.Printers.{subtyping, gadts, matchTypes, noPrinter} +import config.Printers.{subtyping, gadts, matchTypes, capt, noPrinter} import config.SourceVersion import TypeErasure.{erasedLub, erasedGlb} import TypeApplications.* @@ -48,7 +48,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling GADTused = false opaquesUsed = false openedExistentials = Nil - assocExistentials = Map.empty + assocExistentials = Nil recCount = 0 needsGc = false if Config.checkTypeComparerReset then checkReset() @@ -77,7 +77,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * Each existential gets mapped to the opened existentials to which it * may resolve at this point. */ - private var assocExistentials: Map[TermParamRef, List[TermParamRef]] = Map.empty + private var assocExistentials: ExAssoc = Nil private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance @@ -359,7 +359,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling isMatchedByProto(tp2, tp1) case tp2: BoundType => tp2 == tp1 - || existentialVarsConform(tp1, tp2) || secondTry case tp2: TypeVar => recur(tp1, typeVarInstance(tp2)) @@ -2789,6 +2788,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false } + // ----------- Capture checking ----------------------------------------------- + + /** A type associating instantiatable existentials on the right of a comparison + * with the existentials they can be instantiated with. 
+ */ + type ExAssoc = List[(TermParamRef, List[TermParamRef])] + private def compareExistentialLeft(boundVar: TermParamRef, tp1unpacked: Type, tp2: Type)(using Context): Boolean = val saved = openedExistentials try @@ -2800,16 +2806,32 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private def compareExistentialRight(tp1: Type, boundVar: TermParamRef, tp2unpacked: Type)(using Context): Boolean = val saved = assocExistentials try - assocExistentials = assocExistentials.updated(boundVar, openedExistentials) + assocExistentials = (boundVar, openedExistentials) :: assocExistentials recur(tp1, tp2unpacked) finally assocExistentials = saved - def canSubsumeExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context): Boolean = - Existential.isExistentialVar(tp1) - && assocExistentials.get(tp1).match - case Some(xs) => !Existential.isExistentialVar(tp2) || xs.contains(tp2) - case None => false + /** Is `tp1` an existential var that subsumes `tp2`? This is the case if `tp1` is + * instantiatable (i.e. it's a key in `assocExistentials`) and one of the + * following is true: + * - `tp2` is not an existential var, + * - `tp1` is associated via `assocExistentials` with `tp2`, + * - `tp2` appears as key in `assocExistentials` further out than `tp1`. + * The third condition allows to instantiate c2 to c1 in + * EX c1: A -> Ex c2. B + */ + def subsumesExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context): Boolean = + def canInstantiateWith(assoc: ExAssoc): Boolean = assoc match + case (bv, bvs) :: assoc1 => + if bv == tp1 then + !Existential.isExistentialVar(tp2) + || bvs.contains(tp2) + || assoc1.exists(_._1 == tp2) + else + canInstantiateWith(assoc1) + case Nil => + false + Existential.isExistentialVar(tp1) && canInstantiateWith(assocExistentials) /** Are tp1, tp2 termRefs that can be linked? This should never be called * normally, since exietential variables appear only in capture sets @@ -2819,16 +2841,70 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private def existentialVarsConform(tp1: Type, tp2: Type) = tp2 match case tp2: TermParamRef => tp1 match - case tp1: CaptureRef => canSubsumeExistentially(tp2, tp1) + case tp1: CaptureRef => subsumesExistentially(tp2, tp1) case _ => false case _ => false + /** bi-map taking existentials to the left of a comparison to matching + * existentials on the right. This is not a bijection. However + * we have `forwards(backwards(bv)) == bv` for an existentially bound `bv`. + * That's enough to qualify as a BiTypeMap. + */ + private class MapExistentials(assoc: ExAssoc)(using Context) extends BiTypeMap: + + private def bad(t: Type) = + Existential.badExistential + .showing(i"existential match not found for $t in $assoc", capt) + + def apply(t: Type) = t match + case t: TermParamRef if Existential.isExistentialVar(t) => + // Find outermost existential on the right that can be instantiated to `t`, + // or `badExistential` if none exists. + def findMapped(assoc: ExAssoc): CaptureRef = assoc match + case (bv, assocBvs) :: assoc1 => + val outer = findMapped(assoc1) + if !Existential.isBadExistential(outer) then outer + else if assocBvs.contains(t) then bv + else bad(t) + case Nil => + bad(t) + findMapped(assoc) + case _ => + mapOver(t) + + /** The inverse takes existentials on the right to the innermost existential + * on the left to which they can be instantiated. 
+ */ + lazy val inverse = new BiTypeMap: + def apply(t: Type) = t match + case t: TermParamRef if Existential.isExistentialVar(t) => + assoc.find(_._1 == t) match + case Some((_, bvs)) if bvs.nonEmpty => bvs.head + case _ => bad(t) + case _ => + mapOver(t) + + def inverse = MapExistentials.this + override def toString = "MapExistentials.inverse" + end inverse + end MapExistentials + protected def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = - try refs1.subCaptures(refs2, frozen) + try + if assocExistentials.isEmpty then + refs1.subCaptures(refs2, frozen) + else + val mapped = refs1.map(MapExistentials(assocExistentials)) + if mapped.elems.exists(Existential.isBadExistential) + then CaptureSet.CompareResult.Fail(refs2 :: Nil) + else subCapturesMapped(mapped, refs2, frozen) catch case ex: AssertionError => println(i"fail while subCaptures $refs1 <:< $refs2") throw ex + protected def subCapturesMapped(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = + refs1.subCaptures(refs2, frozen) + /** Is the boxing status of tp1 and tp2 the same, or alternatively, is * the capture sets `refs1` of `tp1` a subcapture of the empty set? * In the latter case, boxing status does not matter. @@ -3293,9 +3369,6 @@ object TypeComparer { def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true)(using Context): Type = comparing(_.lub(tp1, tp2, canConstrain = canConstrain, isSoft = isSoft)) - def canSubsumeExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context) = - comparing(_.canSubsumeExistentially(tp1, tp2)) - /** The least upper bound of a list of types */ final def lub(tps: List[Type])(using Context): Type = tps.foldLeft(defn.NothingType: Type)(lub(_,_)) @@ -3368,6 +3441,9 @@ object TypeComparer { def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = comparing(_.subCaptures(refs1, refs2, frozen)) + + def subsumesExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context) = + comparing(_.subsumesExistentially(tp1, tp2)) } object MatchReducer: @@ -3831,5 +3907,10 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa super.subCaptures(refs1, refs2, frozen) } + override def subCapturesMapped(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = + traceIndented(i"subcaptures mapped $refs1 <:< $refs2 ${if frozen then "frozen" else ""}") { + super.subCapturesMapped(refs1, refs2, frozen) + } + def lastTrace(header: String): String = header + { try b.toString finally b.clear() } } diff --git a/tests/neg-custom-args/captures/heal-tparam-cs.scala b/tests/neg-custom-args/captures/heal-tparam-cs.scala index 498292166297..fde4b93e196c 100644 --- a/tests/neg-custom-args/captures/heal-tparam-cs.scala +++ b/tests/neg-custom-args/captures/heal-tparam-cs.scala @@ -11,12 +11,12 @@ def main(io: Capp^, net: Capp^): Unit = { } val test2: (c: Capp^) -> () => Unit = - localCap { c => // should work + localCap { c => // error (c1: Capp^) => () => { c1.use() } } val test3: (c: Capp^{io}) -> () ->{io} Unit = - localCap { c => // should work + localCap { c => // error (c1: Capp^{io}) => () => { c1.use() } } diff --git a/tests/neg-custom-args/captures/reaches2.check b/tests/neg-custom-args/captures/reaches2.check index 504955b220ad..f646a9736395 100644 --- a/tests/neg-custom-args/captures/reaches2.check +++ b/tests/neg-custom-args/captures/reaches2.check @@ 
-2,9 +2,9 @@ 8 | ps.map((x, y) => compose1(x, y)) // error // error | ^ |reference (ps : List[(box A => A, box A => A)]) @reachCapability is not included in the allowed capture set {} - |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? A^? + |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? (ex$15: caps.Exists) -> A^? -- Error: tests/neg-custom-args/captures/reaches2.scala:8:13 ----------------------------------------------------------- 8 | ps.map((x, y) => compose1(x, y)) // error // error | ^ |reference (ps : List[(box A => A, box A => A)]) @reachCapability is not included in the allowed capture set {} - |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? A^? + |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? (ex$15: caps.Exists) -> A^? diff --git a/tests/neg/existential-mapping.check b/tests/neg/existential-mapping.check index bab04868b123..7c1de8b31529 100644 --- a/tests/neg/existential-mapping.check +++ b/tests/neg/existential-mapping.check @@ -12,77 +12,77 @@ -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:12:20 ----------------------------------------------- 12 | val _: C^ -> C = x2 // error | ^^ - | Found: (x2 : C^ -> (ex$9: caps.Exists) -> C^{ex$9}) + | Found: (x2 : C^ -> (ex$7: caps.Exists) -> C^{ex$7}) | Required: C^ -> C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:15:30 ----------------------------------------------- 15 | val _: A^ -> (x: C^) -> C = x3 // error | ^^ - | Found: (x3 : A^ -> (x: C^) -> (ex$15: caps.Exists) -> C^{ex$15}) + | Found: (x3 : A^ -> (x: C^) -> (ex$11: caps.Exists) -> C^{ex$11}) | Required: A^ -> (x: C^) -> C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:18:25 ----------------------------------------------- 18 | val _: A^ -> C^ -> C = x4 // error | ^^ - | Found: (x4 : A^ -> C^ -> (ex$25: caps.Exists) -> C^{ex$25}) + | Found: (x4 : A^ -> C^ -> (ex$19: caps.Exists) -> C^{ex$19}) | Required: A^ -> C^ -> C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:21:30 ----------------------------------------------- 21 | val _: A^ -> (x: C^) -> C = x5 // error | ^^ - | Found: (x5 : A^ -> (ex$35: caps.Exists) -> Fun[C^{ex$35}]) + | Found: (x5 : A^ -> (ex$27: caps.Exists) -> Fun[C^{ex$27}]) | Required: A^ -> (x: C^) -> C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:24:30 ----------------------------------------------- 24 | val _: A^ -> (x: C^) => C = x6 // error | ^^ - | Found: (x6 : A^ -> (ex$43: caps.Exists) -> IFun[C^{ex$43}]) - | Required: A^ -> (ex$48: caps.Exists) -> (x: C^) ->{ex$48} C + | Found: (x6 : A^ -> (ex$33: caps.Exists) -> IFun[C^{ex$33}]) + | Required: A^ -> (ex$36: caps.Exists) -> (x: C^) ->{ex$36} C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:27:25 ----------------------------------------------- 27 | val _: (x: C^) => C = y1 // error | ^^ - | Found: (y1 : (x: C^) => (ex$54: caps.Exists) -> C^{ex$54}) + | Found: (y1 : (x: C^) => (ex$38: caps.Exists) -> C^{ex$38}) | 
Required: (x: C^) => C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:30:20 ----------------------------------------------- 30 | val _: C^ => C = y2 // error | ^^ - | Found: (y2 : C^ => (ex$60: caps.Exists) -> C^{ex$60}) + | Found: (y2 : C^ => (ex$42: caps.Exists) -> C^{ex$42}) | Required: C^ => C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:33:30 ----------------------------------------------- 33 | val _: A^ => (x: C^) => C = y3 // error | ^^ - | Found: (y3 : A^ => (ex$67: caps.Exists) -> (x: C^) ->{ex$67} (ex$66: caps.Exists) -> C^{ex$66}) - | Required: A^ => (ex$78: caps.Exists) -> (x: C^) ->{ex$78} C + | Found: (y3 : A^ => (ex$47: caps.Exists) -> (x: C^) ->{ex$47} (ex$46: caps.Exists) -> C^{ex$46}) + | Required: A^ => (ex$50: caps.Exists) -> (x: C^) ->{ex$50} C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:36:25 ----------------------------------------------- 36 | val _: A^ => C^ => C = y4 // error | ^^ - | Found: (y4 : A^ => C^ => (ex$84: caps.Exists) -> C^{ex$84}) + | Found: (y4 : A^ => C^ => (ex$52: caps.Exists) -> C^{ex$52}) | Required: A^ => C^ => C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:39:30 ----------------------------------------------- 39 | val _: A^ => (x: C^) -> C = y5 // error | ^^ - | Found: (y5 : A^ => (ex$94: caps.Exists) -> Fun[C^{ex$94}]) + | Found: (y5 : A^ => (ex$60: caps.Exists) -> Fun[C^{ex$60}]) | Required: A^ => (x: C^) -> C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:42:30 ----------------------------------------------- 42 | val _: A^ => (x: C^) => C = y6 // error | ^^ - | Found: (y6 : A^ => (ex$102: caps.Exists) -> IFun[C^{ex$102}]) - | Required: A^ => (ex$107: caps.Exists) -> (x: C^) ->{ex$107} C + | Found: (y6 : A^ => (ex$66: caps.Exists) -> IFun[C^{ex$66}]) + | Required: A^ => (ex$69: caps.Exists) -> (x: C^) ->{ex$69} C | | longer explanation available when compiling with `-explain` From 687516e74455b5f4d951c8b4d35f24d9a1a48c13 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 17 Jun 2024 18:08:45 +0200 Subject: [PATCH 307/827] Drop checkReachCapsIsolated restriction --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 5 +++-- tests/neg-custom-args/captures/unsound-reach.check | 5 ----- tests/neg-custom-args/captures/unsound-reach.scala | 2 +- tests/neg-custom-args/captures/widen-reach.check | 14 ++++++++++++++ tests/neg-custom-args/captures/widen-reach.scala | 14 ++++++++++++++ tests/neg/i20503.scala | 4 ++-- 6 files changed, 34 insertions(+), 10 deletions(-) create mode 100644 tests/neg-custom-args/captures/widen-reach.check create mode 100644 tests/neg-custom-args/captures/widen-reach.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index b73184447c47..48824078c337 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -259,7 +259,7 @@ class CheckCaptures extends Recheck, SymTransformer: ctx.printer.toTextCaptureRef(ref).show // Uses 4-space indent as a trial - def checkReachCapsIsolated(tpe: Type, pos: SrcPos)(using Context): Unit = + private def checkReachCapsIsolated(tpe: 
Type, pos: SrcPos)(using Context): Unit = object checker extends TypeTraverser: var refVariances: Map[Boolean, Int] = Map.empty @@ -854,7 +854,8 @@ class CheckCaptures extends Recheck, SymTransformer: tree.tpe finally curEnv = saved if tree.isTerm then - checkReachCapsIsolated(res.widen, tree.srcPos) + if !ccConfig.useExistentials then + checkReachCapsIsolated(res.widen, tree.srcPos) if !pt.isBoxedCapturing then markFree(res.boxedCaptureSet, tree.srcPos) res diff --git a/tests/neg-custom-args/captures/unsound-reach.check b/tests/neg-custom-args/captures/unsound-reach.check index 22b00b74deb1..f0e4c4deeb41 100644 --- a/tests/neg-custom-args/captures/unsound-reach.check +++ b/tests/neg-custom-args/captures/unsound-reach.check @@ -1,8 +1,3 @@ --- Error: tests/neg-custom-args/captures/unsound-reach.scala:18:13 ----------------------------------------------------- -18 | boom.use(f): (f1: File^{backdoor*}) => // error - | ^^^^^^^^ - | Reach capability backdoor* and universal capability cap cannot both - | appear in the type (x: File^)(op: box File^{backdoor*} => Unit): Unit of this expression -- [E164] Declaration Error: tests/neg-custom-args/captures/unsound-reach.scala:10:8 ----------------------------------- 10 | def use(x: File^)(op: File^ => Unit): Unit = op(x) // error, was OK using sealed checking | ^ diff --git a/tests/neg-custom-args/captures/unsound-reach.scala b/tests/neg-custom-args/captures/unsound-reach.scala index c3c31a7f32ff..22ed4614b71b 100644 --- a/tests/neg-custom-args/captures/unsound-reach.scala +++ b/tests/neg-custom-args/captures/unsound-reach.scala @@ -15,6 +15,6 @@ def bad(): Unit = var escaped: File^{backdoor*} = null withFile("hello.txt"): f => - boom.use(f): (f1: File^{backdoor*}) => // error + boom.use(f): (f1: File^{backdoor*}) => // was error before existentials escaped = f1 diff --git a/tests/neg-custom-args/captures/widen-reach.check b/tests/neg-custom-args/captures/widen-reach.check new file mode 100644 index 000000000000..a4ea91981702 --- /dev/null +++ b/tests/neg-custom-args/captures/widen-reach.check @@ -0,0 +1,14 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/widen-reach.scala:13:26 ---------------------------------- +13 | val y2: IO^ -> IO^ = y1.foo // error + | ^^^^^^ + | Found: IO^ ->{x*} IO^{x*} + | Required: IO^ -> (ex$6: caps.Exists) -> IO^{ex$6} + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/widen-reach.scala:14:30 ---------------------------------- +14 | val y3: IO^ -> IO^{x*} = y1.foo // error + | ^^^^^^ + | Found: IO^ ->{x*} IO^{x*} + | Required: IO^ -> IO^{x*} + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/widen-reach.scala b/tests/neg-custom-args/captures/widen-reach.scala new file mode 100644 index 000000000000..5b9cd667d901 --- /dev/null +++ b/tests/neg-custom-args/captures/widen-reach.scala @@ -0,0 +1,14 @@ +import language.experimental.captureChecking + +trait IO + +trait Foo[+T]: + val foo: IO^ -> T + +trait Bar extends Foo[IO^]: + val foo: IO^ -> IO^ = x => x + +def test(x: Foo[IO^]): Unit = + val y1: Foo[IO^{x*}] = x + val y2: IO^ -> IO^ = y1.foo // error + val y3: IO^ -> IO^{x*} = y1.foo // error \ No newline at end of file diff --git a/tests/neg/i20503.scala b/tests/neg/i20503.scala index 7a1bffcff529..e8770b934ad1 100644 --- a/tests/neg/i20503.scala +++ b/tests/neg/i20503.scala @@ -9,8 +9,8 @@ class List[+A]: def runOps(ops: List[() => Unit]): Unit = // See i20156, due 
to limitation in expressiveness of current system, - // we cannot map over the list of impure elements. - ops.foreach(op => op()) // error + // we could map over the list of impure elements. OK with existentials. + ops.foreach(op => op()) def main(): Unit = val f: List[() => Unit] -> Unit = runOps // error From 9b27b69d78ec19348429217b954179bafa113f81 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 17 Jun 2024 18:30:15 +0200 Subject: [PATCH 308/827] Drop no universal in deep capture set test everywhere from source 3.5 --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- tests/neg-custom-args/captures/filevar.scala | 2 +- .../neg-custom-args/captures/outer-var.check | 26 +++---------------- .../neg-custom-args/captures/outer-var.scala | 4 +-- 4 files changed, 7 insertions(+), 27 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 48824078c337..54bbb54997a5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -170,7 +170,7 @@ object CheckCaptures: traverse(parent) case t => traverseChildren(t) - check.traverse(tp) + if ccConfig.allowUniversalInBoxed then check.traverse(tp) end disallowRootCapabilitiesIn /** Attachment key for bodies of closures, provided they are values */ diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index 2859f4c5e826..e54f161ef124 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -5,7 +5,7 @@ class File: def write(x: String): Unit = ??? class Service: - var file: File^ = uninitialized // error + var file: File^ = uninitialized // OK, was error under sealed def log = file.write("log") // error, was OK under sealed def withFile[T](op: (l: caps.Capability) ?-> (f: File^{l}) => T): T = diff --git a/tests/neg-custom-args/captures/outer-var.check b/tests/neg-custom-args/captures/outer-var.check index ee32c3ce03f2..32351a179eab 100644 --- a/tests/neg-custom-args/captures/outer-var.check +++ b/tests/neg-custom-args/captures/outer-var.check @@ -22,29 +22,9 @@ 13 | y = (q: Proc) // error | ^^^^^^^ | Found: Proc - | Required: box () ->{p} Unit + | Required: box () => Unit | - | Note that the universal capability `cap` - | cannot be included in capture set {p} of variable y - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:14:8 ------------------------------------- -14 | y = q // error - | ^ - | Found: box () ->{q} Unit - | Required: box () ->{p} Unit - | - | Note that reference (q : Proc), defined in method inner - | cannot be included in outer capture set {p} of variable y - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:16:65 ------------------------------------ -16 | var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Found: scala.collection.mutable.ListBuffer[box () => Unit] - | Required: box scala.collection.mutable.ListBuffer[box () ->? Unit]^? - | - | Note that the universal capability `cap` - | cannot be included in capture set ? 
of variable finalizeActions + | Note that () => Unit cannot be box-converted to box () => Unit + | since at least one of their capture sets contains the root capability `cap` | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/outer-var.scala b/tests/neg-custom-args/captures/outer-var.scala index 39c3a6da4ca3..e26cd631602a 100644 --- a/tests/neg-custom-args/captures/outer-var.scala +++ b/tests/neg-custom-args/captures/outer-var.scala @@ -11,8 +11,8 @@ def test(p: Proc, q: () => Unit) = x = q // error x = (q: Proc) // error y = (q: Proc) // error - y = q // error + y = q // OK, was error under sealed - var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // error + var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // OK, was error under sealed From 1d3ae88ee62e3e1567862d4ef7ef953fd049ac2a Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 17 Jun 2024 18:40:46 +0200 Subject: [PATCH 309/827] Disallow to box or unbox existentials --- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 8 +++-- .../src/dotty/tools/dotc/cc/Existential.scala | 34 ++++++++++++++----- .../captures/widen-reach.check | 6 ++++ .../captures/widen-reach.scala | 2 +- 4 files changed, 38 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 8f161810f6f9..e83da64a920a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -85,6 +85,9 @@ sealed abstract class CaptureSet extends Showable: final def isUniversal(using Context) = elems.exists(_.isRootCapability) + final def isUnboxable(using Context) = + elems.exists(elem => elem.isRootCapability || Existential.isExistentialVar(elem)) + /** Try to include an element in this capture set. * @param elem The element to be added * @param origin The set that originated the request, or `empty` if the request came from outside. @@ -331,7 +334,7 @@ sealed abstract class CaptureSet extends Showable: /** Invoke handler if this set has (or later aquires) the root capability `cap` */ def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = - if isUniversal then handler() + if isUnboxable then handler() this /** Invoke handler on the elements to ensure wellformedness of the capture set. @@ -521,7 +524,8 @@ object CaptureSet: res.addToTrace(this) private def levelOK(elem: CaptureRef)(using Context): Boolean = - if elem.isRootCapability then !noUniversal + if elem.isRootCapability || Existential.isExistentialVar(elem) then + !noUniversal else elem match case elem: TermRef if level.isDefined => elem.symbol.ccLevel <= level diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala index 2bdc82bef53f..9809d97a4504 100644 --- a/compiler/src/dotty/tools/dotc/cc/Existential.scala +++ b/compiler/src/dotty/tools/dotc/cc/Existential.scala @@ -33,9 +33,9 @@ In Setup: - Conversion is done with a BiTypeMap in `Existential.mapCap`. -In adapt: +In reckeckApply and recheckTypeApply: - - If an EX is toplevel in actual type, replace its bound variable + - If an EX is toplevel in the result type, replace its bound variable occurrences with `cap`. Level checking and avoidance: @@ -54,6 +54,7 @@ Level checking and avoidance: don't, the others do. 
- Capture set variables do not accept elements of level higher than the variable's level + - We use avoidance to heal such cases: If the level-incorrect ref appears + covariantly: widen to underlying capture set, reject if that is cap and the variable does not allow it + contravariantly: narrow to {} @@ -65,10 +66,9 @@ In cv-computation (markFree): the owning method. They have to be widened to dcs(x), or, where this is not possible, it's an error. -In well-formedness checking of explicitly written type T: +In box adaptation: - - If T is not the type of a parameter, check that no cap occurrence or EX-bound variable appears - under a box. + - Check that existential variables are not boxed or unboxed. Subtype rules @@ -129,6 +129,18 @@ Subtype checking algorithm, steps to add for tp1 <:< tp2: assocExistentials(tp2).isDefined && (assocExistentials(tp2).contains(tp1) || tp1 is not existentially bound) +Subtype checking algorithm, comparing two capture sets CS1 <:< CS2: + + We need to map the (possibly to-be-added) existentials in CS1 to existentials + in CS2 so that we can compare them. We use `assocExistentals` for that: + To map an EX-variable V1 in CS1, pick the last (i.e. outermost, leading to the smallest + type) EX-variable in `assocExistentials` that has V1 in its possible instances. + To go the other way (and therby produce a BiTypeMap), map an EX-variable + V2 in CS2 to the first (i.e. innermost) EX-variable it can be instantiated to. + If either direction is not defined, we choose a special "bad-existetal" value + that represents and out-of-scope existential. This leads to failure + of the comparison. + Existential source syntax: Existential types are ususally not written in source, since we still allow the `^` @@ -142,7 +154,8 @@ Existential source syntax: Existential types can only at the top level of the result type of a function or method. -Restrictions on Existential Types: +Restrictions on Existential Types: (to be implemented if we want to +keep the source syntax for users). - An existential capture ref must be the only member of its set. This is intended to model the idea that existential variables effectibely range @@ -353,11 +366,14 @@ object Existential: case ref: TermParamRef => isExistentialMethod(ref.binder) case _ => false + /** An value signalling an out-of-scope existential that should + * lead to a compare failure. 
+ */ + def badExistential(using Context): TermParamRef = + exMethodType(identity, nme.OOS_EXISTENTIAL).paramRefs.head + def isBadExistential(ref: CaptureRef) = ref match case ref: TermParamRef => ref.paramName == nme.OOS_EXISTENTIAL case _ => false - def badExistential(using Context): TermParamRef = - exMethodType(identity, nme.OOS_EXISTENTIAL).paramRefs.head - end Existential diff --git a/tests/neg-custom-args/captures/widen-reach.check b/tests/neg-custom-args/captures/widen-reach.check index a4ea91981702..6f496b87fd16 100644 --- a/tests/neg-custom-args/captures/widen-reach.check +++ b/tests/neg-custom-args/captures/widen-reach.check @@ -12,3 +12,9 @@ | Required: IO^ -> IO^{x*} | | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/widen-reach.scala:8:18 -------------------------------------------------------- +8 |trait Bar extends Foo[IO^]: // error + | ^ + | IO^{ex$3} cannot be box-converted to box IO^ + | since at least one of their capture sets contains the root capability `cap` +9 | val foo: IO^ -> IO^ = x => x diff --git a/tests/neg-custom-args/captures/widen-reach.scala b/tests/neg-custom-args/captures/widen-reach.scala index 5b9cd667d901..9a9305640473 100644 --- a/tests/neg-custom-args/captures/widen-reach.scala +++ b/tests/neg-custom-args/captures/widen-reach.scala @@ -5,7 +5,7 @@ trait IO trait Foo[+T]: val foo: IO^ -> T -trait Bar extends Foo[IO^]: +trait Bar extends Foo[IO^]: // error val foo: IO^ -> IO^ = x => x def test(x: Foo[IO^]): Unit = From 4ad9e3cfc8f628d72bddf84c5796dde0a30a7570 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 17 Jun 2024 19:45:22 +0200 Subject: [PATCH 310/827] Fix existential mapping of non-dependent impure function aliases => There was a forgotten case. --- compiler/src/dotty/tools/dotc/cc/Existential.scala | 7 +++++++ tests/neg/existential-mapping.check | 10 +++++----- tests/neg/existential-mapping.scala | 1 - 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala index 9809d97a4504..732510789e28 100644 --- a/compiler/src/dotty/tools/dotc/cc/Existential.scala +++ b/compiler/src/dotty/tools/dotc/cc/Existential.scala @@ -315,6 +315,12 @@ object Existential: case t @ CapturingType(parent, refs: CaptureSet.Var) => if variance > 0 then needsWrap = true super.mapOver(t) + case defn.FunctionNOf(args, res, contextual) if t.typeSymbol.name.isImpureFunction => + if variance > 0 then + needsWrap = true + super.mapOver: + defn.FunctionNOf(args, res, contextual).capturing(boundVar.singletonCaptureSet) + else mapOver(t) case _ => mapOver(t) //.showing(i"mapcap $t = $result") @@ -341,6 +347,7 @@ object Existential: case res: MethodType => mapFunOrMethod(res, res.paramInfos, res.resType) case res: PolyType => mapFunOrMethod(res, Nil, res.resType) // TODO: Also map bounds of PolyTypes case _ => mapCap(apply(res), fail) + //.showing(i"map cap res $res / ${apply(res)} of $tp = $result") tp.derivedFunctionOrMethod(args1, res1) def apply(t: Type): Type = t match diff --git a/tests/neg/existential-mapping.check b/tests/neg/existential-mapping.check index 7c1de8b31529..edfce67f6eef 100644 --- a/tests/neg/existential-mapping.check +++ b/tests/neg/existential-mapping.check @@ -68,21 +68,21 @@ -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:36:25 ----------------------------------------------- 36 | val _: A^ => C^ => C = y4 // error | ^^ - | Found: (y4 : A^ => C^ => (ex$52: caps.Exists) -> 
C^{ex$52}) - | Required: A^ => C^ => C + | Found: (y4 : A^ => (ex$53: caps.Exists) -> C^ ->{ex$53} (ex$52: caps.Exists) -> C^{ex$52}) + | Required: A^ => (ex$56: caps.Exists) -> C^ ->{ex$56} C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:39:30 ----------------------------------------------- 39 | val _: A^ => (x: C^) -> C = y5 // error | ^^ - | Found: (y5 : A^ => (ex$60: caps.Exists) -> Fun[C^{ex$60}]) + | Found: (y5 : A^ => (ex$58: caps.Exists) -> Fun[C^{ex$58}]) | Required: A^ => (x: C^) -> C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:42:30 ----------------------------------------------- 42 | val _: A^ => (x: C^) => C = y6 // error | ^^ - | Found: (y6 : A^ => (ex$66: caps.Exists) -> IFun[C^{ex$66}]) - | Required: A^ => (ex$69: caps.Exists) -> (x: C^) ->{ex$69} C + | Found: (y6 : A^ => (ex$64: caps.Exists) -> IFun[C^{ex$64}]) + | Required: A^ => (ex$67: caps.Exists) -> (x: C^) ->{ex$67} C | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/existential-mapping.scala b/tests/neg/existential-mapping.scala index 96a36d8a7b9b..290f7dc767a6 100644 --- a/tests/neg/existential-mapping.scala +++ b/tests/neg/existential-mapping.scala @@ -44,4 +44,3 @@ def Test = val z1: A^ => Array[C^] = ??? // error - From b950c7da50f8012d10769907a043eb5b2295cce5 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 17 Jun 2024 20:29:21 +0200 Subject: [PATCH 311/827] Drop restrictions in widenReachCaptures These should be no longer necessary with existentials. --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 8 +++++--- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- tests/neg-custom-args/captures/widen-reach.check | 13 +++++++------ tests/neg-custom-args/captures/widen-reach.scala | 4 ++-- 4 files changed, 15 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 1516b769c7ee..7e5c647753c2 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -381,11 +381,13 @@ extension (tp: Type) t.dealias match case t1 @ CapturingType(p, cs) if cs.isUniversal && !isFlipped => t1.derivedCapturingType(apply(p), ref.reach.singletonCaptureSet) - case t @ FunctionOrMethod(args, res @ Existential(_, _)) + case t1 @ FunctionOrMethod(args, res @ Existential(_, _)) if args.forall(_.isAlwaysPure) => // Also map existentials in results to reach capabilities if all // preceding arguments are known to be always pure - apply(t.derivedFunctionOrMethod(args, Existential.toCap(res))) + apply(t1.derivedFunctionOrMethod(args, Existential.toCap(res))) + case Existential(_, _) => + t case _ => t match case t @ CapturingType(p, cs) => t.derivedCapturingType(apply(p), cs) // don't map capture set variables @@ -397,7 +399,7 @@ extension (tp: Type) ref match case ref: CaptureRef if ref.isTrackableRef => val checker = new CheckContraCaps - checker.traverse(tp) + if !ccConfig.useExistentials then checker.traverse(tp) if checker.ok then val tp1 = narrowCaps(tp) if tp1 ne tp then capt.println(i"narrow $tp of $ref to $tp1") diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 54bbb54997a5..78aad96a4ff8 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ 
b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1103,7 +1103,7 @@ class CheckCaptures extends Recheck, SymTransformer: ccConfig.allowUniversalInBoxed || expected.hasAnnotation(defn.UncheckedCapturesAnnot) || actual.widen.hasAnnotation(defn.UncheckedCapturesAnnot) - if criticalSet.isUniversal && expected.isValueType && !allowUniversalInBoxed then + if criticalSet.isUnboxable && expected.isValueType && !allowUniversalInBoxed then // We can't box/unbox the universal capability. Leave `actual` as it is // so we get an error in checkConforms. Add the error message generated // from boxing as an addendum. This tends to give better error diff --git a/tests/neg-custom-args/captures/widen-reach.check b/tests/neg-custom-args/captures/widen-reach.check index 6f496b87fd16..dbe811ab99ec 100644 --- a/tests/neg-custom-args/captures/widen-reach.check +++ b/tests/neg-custom-args/captures/widen-reach.check @@ -12,9 +12,10 @@ | Required: IO^ -> IO^{x*} | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/widen-reach.scala:8:18 -------------------------------------------------------- -8 |trait Bar extends Foo[IO^]: // error - | ^ - | IO^{ex$3} cannot be box-converted to box IO^ - | since at least one of their capture sets contains the root capability `cap` -9 | val foo: IO^ -> IO^ = x => x +-- [E164] Declaration Error: tests/neg-custom-args/captures/widen-reach.scala:9:6 -------------------------------------- +9 | val foo: IO^ -> IO^ = x => x // error + | ^ + | error overriding value foo in trait Foo of type IO^ -> box IO^; + | value foo of type IO^ -> (ex$3: caps.Exists) -> IO^{ex$3} has incompatible type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/widen-reach.scala b/tests/neg-custom-args/captures/widen-reach.scala index 9a9305640473..fa5eee1232df 100644 --- a/tests/neg-custom-args/captures/widen-reach.scala +++ b/tests/neg-custom-args/captures/widen-reach.scala @@ -5,8 +5,8 @@ trait IO trait Foo[+T]: val foo: IO^ -> T -trait Bar extends Foo[IO^]: // error - val foo: IO^ -> IO^ = x => x +trait Bar extends Foo[IO^]: + val foo: IO^ -> IO^ = x => x // error def test(x: Foo[IO^]): Unit = val y1: Foo[IO^{x*}] = x From adfb70020c02dcb8a1775e0314a87cee40b2f476 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 18 Jun 2024 09:33:23 +0200 Subject: [PATCH 312/827] Cleanup ccConfig settings --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 16 +++++----- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 5 ++-- .../dotty/tools/dotc/cc/CheckCaptures.scala | 29 +++++-------------- compiler/src/dotty/tools/dotc/cc/Setup.scala | 4 +-- 4 files changed, 18 insertions(+), 36 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 7e5c647753c2..633aaae57a5d 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -27,21 +27,19 @@ object ccConfig: */ inline val allowUnsoundMaps = false - /** If true, expand capability classes in Setup instead of treating them - * in adapt. - */ - inline val expandCapabilityInSetup = true - + /** If true, use existential capture set variables */ def useExistentials(using Context) = Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.5`) - /** If true, use `sealed` as encapsulation mechanism instead of the - * previous global retriction that `cap` can't be boxed or unboxed. 
+ /** If true, use "sealed" as encapsulation mechanism, meaning that we + * check that type variable instantiations don't have `cap` in any of + * their capture sets. This is an alternative of the original restriction + * that `cap` can't be boxed or unboxed. It is used in 3.3 and 3.4 but + * dropped again in 3.5. */ - def allowUniversalInBoxed(using Context) = + def useSealed(using Context) = Feature.sourceVersion.stable == SourceVersion.`3.3` || Feature.sourceVersion.stable == SourceVersion.`3.4` - //|| Feature.sourceVersion.stable == SourceVersion.`3.5` // drop `//` if you want to test with the sealed type params strategy end ccConfig diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index e83da64a920a..80e563e108a0 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -1059,9 +1059,8 @@ object CaptureSet: tp.captureSet case tp: TermParamRef => tp.captureSet - case tp: TypeRef => - if !ccConfig.expandCapabilityInSetup && tp.derivesFromCapability then universal - else empty + case _: TypeRef => + empty case _: TypeParamRef => empty case CapturingType(parent, refs) => diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 78aad96a4ff8..6d6222374944 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -170,7 +170,7 @@ object CheckCaptures: traverse(parent) case t => traverseChildren(t) - if ccConfig.allowUniversalInBoxed then check.traverse(tp) + if ccConfig.useSealed then check.traverse(tp) end disallowRootCapabilitiesIn /** Attachment key for bodies of closures, provided they are values */ @@ -581,7 +581,7 @@ class CheckCaptures extends Recheck, SymTransformer: end instantiate override def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = - if ccConfig.allowUniversalInBoxed then + if ccConfig.useSealed then val TypeApply(fn, args) = tree val polyType = atPhase(thisPhase.prev): fn.tpe.widen.asInstanceOf[TypeLambda] @@ -806,7 +806,7 @@ class CheckCaptures extends Recheck, SymTransformer: override def recheckTry(tree: Try, pt: Type)(using Context): Type = val tp = super.recheckTry(tree, pt) - if ccConfig.allowUniversalInBoxed && Feature.enabled(Feature.saferExceptions) then + if ccConfig.useSealed && Feature.enabled(Feature.saferExceptions) then disallowRootCapabilitiesIn(tp, ctx.owner, "result of `try`", "have type", "This is often caused by a locally generated exception capability leaking as part of its result.", @@ -875,7 +875,7 @@ class CheckCaptures extends Recheck, SymTransformer: } checkNotUniversal(parent) case _ => - if !ccConfig.allowUniversalInBoxed + if !ccConfig.useSealed && !tpe.hasAnnotation(defn.UncheckedCapturesAnnot) && needsUniversalCheck then @@ -1100,7 +1100,7 @@ class CheckCaptures extends Recheck, SymTransformer: def msg = em"""$actual cannot be box-converted to $expected |since at least one of their capture sets contains the root capability `cap`""" def allowUniversalInBoxed = - ccConfig.allowUniversalInBoxed + ccConfig.useSealed || expected.hasAnnotation(defn.UncheckedCapturesAnnot) || actual.widen.hasAnnotation(defn.UncheckedCapturesAnnot) if criticalSet.isUnboxable && expected.isValueType && !allowUniversalInBoxed then @@ -1129,20 +1129,6 @@ class CheckCaptures extends Recheck, SymTransformer: recur(actual, expected, covariant) end adaptBoxed - /** If actual derives from 
caps.Capability, yet is not a capturing type itself, - * make its capture set explicit. - */ - private def makeCaptureSetExplicit(actual: Type)(using Context): Type = - if ccConfig.expandCapabilityInSetup then actual - else actual match - case CapturingType(_, _) => actual - case _ if actual.derivesFromCapability => - val cap: CaptureRef = actual match - case ref: CaptureRef if ref.isTracked => ref - case _ => defn.captureRoot.termRef // TODO: skolemize? - CapturingType(actual, cap.singletonCaptureSet) - case _ => actual - /** If actual is a tracked CaptureRef `a` and widened is a capturing type T^C, * improve `T^C` to `T^{a}`, following the VAR rule of CC. */ @@ -1163,12 +1149,11 @@ class CheckCaptures extends Recheck, SymTransformer: if expected == LhsProto || expected.isSingleton && actual.isSingleton then actual else - val normalized = makeCaptureSetExplicit(actual) - val widened = improveCaptures(normalized.widen.dealiasKeepAnnots, actual) + val widened = improveCaptures(actual.widen.dealiasKeepAnnots, actual) val adapted = adaptBoxed( widened.withReachCaptures(actual), expected, pos, covariant = true, alwaysConst = false, boxErrors) - if adapted eq widened then normalized + if adapted eq widened then actual else adapted.showing(i"adapt boxed $actual vs $expected = $adapted", capt) end adapt diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 7fc78599c377..376a0453fac8 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -317,7 +317,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: this(expandThrowsAlias(res, exc, Nil)) case t => // Map references to capability classes C to C^ - if ccConfig.expandCapabilityInSetup && t.derivesFromCapability && t.typeSymbol != defn.Caps_Exists + if t.derivesFromCapability && t.typeSymbol != defn.Caps_Exists then CapturingType(t, CaptureSet.universal, boxed = false) else normalizeCaptures(mapOver(t)) end toCapturing @@ -397,7 +397,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: transformTT(tpt, boxed = sym.is(Mutable, butNot = Method) - && !ccConfig.allowUniversalInBoxed + && !ccConfig.useSealed && !sym.hasAnnotation(defn.UncheckedCapturesAnnot), // types of mutable variables are boxed in pre 3.3 code exact = sym.allOverriddenSymbols.hasNext, From 7bf8be72c54699a30804687514549ed25aa31bff Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 18 Jun 2024 18:21:50 +0200 Subject: [PATCH 313/827] Don't add ^ to singleton capabilities during Setup --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- tests/pos-custom-args/captures/cap-problem.scala | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 tests/pos-custom-args/captures/cap-problem.scala diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 376a0453fac8..ee2cf4c2858d 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -317,7 +317,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: this(expandThrowsAlias(res, exc, Nil)) case t => // Map references to capability classes C to C^ - if t.derivesFromCapability && t.typeSymbol != defn.Caps_Exists + if t.derivesFromCapability && !t.isSingleton && t.typeSymbol != defn.Caps_Exists then CapturingType(t, CaptureSet.universal, boxed = false) else normalizeCaptures(mapOver(t)) end toCapturing diff --git a/tests/pos-custom-args/captures/cap-problem.scala 
b/tests/pos-custom-args/captures/cap-problem.scala new file mode 100644 index 000000000000..483b4e938b1b --- /dev/null +++ b/tests/pos-custom-args/captures/cap-problem.scala @@ -0,0 +1,13 @@ +import language.experimental.captureChecking + +trait Suspend: + type Suspension + + def resume(s: Suspension): Unit + +import caps.Capability + +trait Async(val support: Suspend) extends Capability + +class CancelSuspension(ac: Async, suspension: ac.support.Suspension): + ac.support.resume(suspension) From cd78a9e84354c0ce5dcd485c458c881e8d38bd64 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 21 Jun 2024 19:36:52 +0200 Subject: [PATCH 314/827] Refine parameter accessors that have nonempty deep capture sets A parameter accessor with a nonempty deep capture set needs to be tracked in refinements even if it is pure, as long as it might contain captures that can be referenced using a reach capability. --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 3 +++ compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 2 +- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- compiler/src/dotty/tools/dotc/core/Symbols.scala | 3 ++- tests/pos/reach-problem.scala | 9 +++++++++ 6 files changed, 17 insertions(+), 4 deletions(-) create mode 100644 tests/pos/reach-problem.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 633aaae57a5d..4dda8f1803e0 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -501,6 +501,9 @@ extension (sym: Symbol) && !param.hasAnnotation(defn.UntrackedCapturesAnnot) } + def hasTrackedParts(using Context): Boolean = + !CaptureSet.deepCaptureSet(sym.info).isAlwaysEmpty + extension (tp: AnnotatedType) /** Is this a boxed capturing type? 
*/ def isBoxed(using Context): Boolean = tp.annot match diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 80e563e108a0..6e9343629388 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -1090,7 +1090,7 @@ object CaptureSet: recur(tp) //.showing(i"capture set of $tp = $result", captDebug) - private def deepCaptureSet(tp: Type)(using Context): CaptureSet = + def deepCaptureSet(tp: Type)(using Context): CaptureSet = val collect = new TypeAccumulator[CaptureSet]: def apply(cs: CaptureSet, t: Type) = t.dealias match case t @ CapturingType(p, cs1) => diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 6d6222374944..3981dcbb34a2 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -554,7 +554,7 @@ class CheckCaptures extends Recheck, SymTransformer: if core.derivesFromCapability then CaptureSet.universal else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.isRefiningParamAccessor).symbol - if !getter.is(Private) && getter.termRef.isTracked then + if !getter.is(Private) && getter.hasTrackedParts then refined = RefinedType(refined, getterName, argType) allCaptures ++= argType.captureSet (refined, allCaptures) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index ee2cf4c2858d..f588094fbdf3 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -186,7 +186,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case cls: ClassSymbol if !defn.isFunctionClass(cls) && cls.is(CaptureChecked) => cls.paramGetters.foldLeft(tp) { (core, getter) => - if atPhase(thisPhase.next)(getter.termRef.isTracked) + if atPhase(thisPhase.next)(getter.hasTrackedParts) && getter.isRefiningParamAccessor && !getter.is(Tracked) then diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index da0ecac47b7d..1c5d1941c524 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -846,7 +846,8 @@ object Symbols extends SymUtils { /** Map given symbols, subjecting their attributes to the mappings * defined in the given TreeTypeMap `ttmap`. * Cross symbol references are brought over from originals to copies. - * Do not copy any symbols if all attributes of all symbols stay the same. + * Do not copy any symbols if all attributes of all symbols stay the same + * and mapAlways is false. */ def mapSymbols(originals: List[Symbol], ttmap: TreeTypeMap, mapAlways: Boolean = false)(using Context): List[Symbol] = if (originals.forall(sym => diff --git a/tests/pos/reach-problem.scala b/tests/pos/reach-problem.scala new file mode 100644 index 000000000000..60dd1d4667a7 --- /dev/null +++ b/tests/pos/reach-problem.scala @@ -0,0 +1,9 @@ +import language.experimental.captureChecking + +class Box[T](items: Seq[T^]): + def getOne: T^{items*} = ??? 
+ +object Box: + def getOne[T](items: Seq[T^]): T^{items*} = + val bx = Box(items) + bx.getOne \ No newline at end of file From 28f4bb53c720ec9a3e69fac717f47c0cd66ff66c Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 23 Jun 2024 11:33:27 +0200 Subject: [PATCH 315/827] More precise capture set unions When we take `{elem} <: B ++ C` where `elem` is not yet included in `B ++ C`, B is a constant and C is a variable, propagate with `{elem} <: C`. Likewise if C is a constant and B is a variable. This tries to minimize the slack between a union and its operands. Note: Propagation does not happen very often in our test suite so far: Once in pos tests and 15 times in scala2-library-cc. --- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 6e9343629388..98dc5db878e0 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -249,8 +249,7 @@ sealed abstract class CaptureSet extends Showable: if this.subCaptures(that, frozen = true).isOK then that else if that.subCaptures(this, frozen = true).isOK then this else if this.isConst && that.isConst then Const(this.elems ++ that.elems) - else Var(initialElems = this.elems ++ that.elems) - .addAsDependentTo(this).addAsDependentTo(that) + else Union(this, that) /** The smallest superset (via <:<) of this capture set that also contains `ref`. */ @@ -263,7 +262,7 @@ sealed abstract class CaptureSet extends Showable: if this.subCaptures(that, frozen = true).isOK then this else if that.subCaptures(this, frozen = true).isOK then that else if this.isConst && that.isConst then Const(elemIntersection(this, that)) - else Intersected(this, that) + else Intersection(this, that) /** The largest subset (via <:<) of this capture set that does not account for * any of the elements in the constant capture set `that` @@ -816,7 +815,29 @@ object CaptureSet: class Diff(source: Var, other: Const)(using Context) extends Filtered(source, !other.accountsFor(_)) - class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using Context) + class Union(cs1: CaptureSet, cs2: CaptureSet)(using Context) + extends Var(initialElems = cs1.elems ++ cs2.elems): + addAsDependentTo(cs1) + addAsDependentTo(cs2) + + override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + if accountsFor(elem) then CompareResult.OK + else + val res = super.tryInclude(elem, origin) + // If this is the union of a constant and a variable, + // propagate `elem` to the variable part to avoid slack + // between the operands and the union. 
+ if res.isOK && (origin ne cs1) && (origin ne cs2) then + if cs1.isConst then cs2.tryInclude(elem, origin) + else if cs2.isConst then cs1.tryInclude(elem, origin) + else res + else res + + override def propagateSolved()(using Context) = + if cs1.isConst && cs2.isConst && !isConst then markSolved() + end Union + + class Intersection(cs1: CaptureSet, cs2: CaptureSet)(using Context) extends Var(initialElems = elemIntersection(cs1, cs2)): addAsDependentTo(cs1) addAsDependentTo(cs2) @@ -841,7 +862,7 @@ object CaptureSet: override def propagateSolved()(using Context) = if cs1.isConst && cs2.isConst && !isConst then markSolved() - end Intersected + end Intersection def elemIntersection(cs1: CaptureSet, cs2: CaptureSet)(using Context): Refs = cs1.elems.filter(cs2.mightAccountFor) ++ cs2.elems.filter(cs1.mightAccountFor) From 04aa63def9e3d3c9a132482ebee34a318e0700f0 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 26 Jun 2024 23:21:06 +0200 Subject: [PATCH 316/827] First implementation of capture polymorphism --- compiler/src/dotty/tools/dotc/ast/untpd.scala | 11 ++++-- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 12 ++++++- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 4 ++- .../dotty/tools/dotc/cc/CheckCaptures.scala | 26 +++++++++----- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- .../dotty/tools/dotc/core/Definitions.scala | 4 ++- .../src/dotty/tools/dotc/core/StdNames.scala | 3 +- .../dotty/tools/dotc/core/TypeComparer.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 16 +++++++-- .../dotty/tools/dotc/parsing/Parsers.scala | 17 ++++++--- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- library/src/scala/caps.scala | 8 ++++- tests/pos/cc-poly-1.scala | 23 ++++++++++++ tests/pos/cc-poly-source.scala | 36 +++++++++++++++++++ 14 files changed, 139 insertions(+), 27 deletions(-) create mode 100644 tests/pos/cc-poly-1.scala create mode 100644 tests/pos/cc-poly-source.scala diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index c42e8f71246d..b7ad12369b88 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -521,12 +521,17 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def captureRoot(using Context): Select = Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) - def captureRootIn(using Context): Select = - Select(scalaDot(nme.caps), nme.capIn) - def makeRetaining(parent: Tree, refs: List[Tree], annotName: TypeName)(using Context): Annotated = Annotated(parent, New(scalaAnnotationDot(annotName), List(refs))) + def makeCapsOf(id: Ident)(using Context): Tree = + TypeApply(Select(scalaDot(nme.caps), nme.capsOf), id :: Nil) + + def makeCapsBound()(using Context): Tree = + makeRetaining( + Select(scalaDot(nme.caps), tpnme.CapSet), + Nil, tpnme.retainsCap) + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 4dda8f1803e0..09a56fb1b359 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -131,6 +131,8 @@ extension (tree: Tree) def toCaptureRef(using Context): CaptureRef = tree match case ReachCapabilityApply(arg) => arg.toCaptureRef.reach + case CapsOfApply(arg) => + arg.toCaptureRef case _ => tree.tpe match case ref: CaptureRef 
if ref.isTrackableRef => ref @@ -145,7 +147,7 @@ extension (tree: Tree) case Some(refs) => refs case None => val refs = CaptureSet(tree.retainedElems.map(_.toCaptureRef)*) - .showing(i"toCaptureSet $tree --> $result", capt) + //.showing(i"toCaptureSet $tree --> $result", capt) tree.putAttachment(Captures, refs) refs @@ -526,6 +528,14 @@ object ReachCapabilityApply: case Apply(reach, arg :: Nil) if reach.symbol == defn.Caps_reachCapability => Some(arg) case _ => None +/** An extractor for `caps.capsOf[X]`, which is used to express a generic capture set + * as a tree in a @retains annotation. + */ +object CapsOfApply: + def unapply(tree: TypeApply)(using Context): Option[Tree] = tree match + case TypeApply(capsOf, arg :: Nil) if capsOf.symbol == defn.Caps_capsOf => Some(arg) + case _ => None + class AnnotatedCapability(annot: Context ?=> ClassSymbol): def apply(tp: Type)(using Context) = AnnotatedType(tp, Annotation(annot, util.Spans.NoSpan)) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 98dc5db878e0..0b19f75f14d0 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -879,7 +879,9 @@ object CaptureSet: val r1 = tm(r) val upper = r1.captureSet def isExact = - upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) + upper.isAlwaysEmpty + || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) + || r.derivesFrom(defn.Caps_CapSet) if variance > 0 || isExact then upper else if variance < 0 then CaptureSet.empty else upper.maybe diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 3981dcbb34a2..8eb2f2420369 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -123,16 +123,24 @@ object CheckCaptures: case _: SingletonType => report.error(em"Singleton type $parent cannot have capture set", parent.srcPos) case _ => + def check(elem: Tree, pos: SrcPos): Unit = elem.tpe match + case ref: CaptureRef => + if !ref.isTrackableRef then + report.error(em"$elem cannot be tracked since it is not a parameter or local value", pos) + case tpe => + report.error(em"$elem: $tpe is not a legal element of a capture set", pos) for elem <- ann.retainedElems do - val elem1 = elem match - case ReachCapabilityApply(arg) => arg - case _ => elem - elem1.tpe match - case ref: CaptureRef => - if !ref.isTrackableRef then - report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) - case tpe => - report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) + elem match + case CapsOfApply(arg) => + def isLegalCapsOfArg = + arg.symbol.isAbstractOrParamType && arg.symbol.info.derivesFrom(defn.Caps_CapSet) + if !isLegalCapsOfArg then + report.error( + em"""$arg is not a legal prefix for `^` here, + |is must be a type parameter or abstract type with a caps.CapSet upper bound.""", + elem.srcPos) + case ReachCapabilityApply(arg) => check(arg, elem.srcPos) + case _ => check(elem, elem.srcPos) /** Report an error if some part of `tp` contains the root capability in its capture set * or if it refers to an unsealed type parameter that could possibly be instantiated with diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index f588094fbdf3..6927983ad196 100644 --- 
a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -384,7 +384,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: sym.updateInfo(thisPhase, info, newFlagsFor(sym)) toBeUpdated -= sym sym.namedType match - case ref: CaptureRef => ref.invalidateCaches() // TODO: needed? + case ref: CaptureRef if ref.isTrackableRef => ref.invalidateCaches() // TODO: needed? case _ => extension (sym: Symbol) def nextInfo(using Context): Type = diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index ad80d0565f63..e8a853469f6f 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -991,8 +991,10 @@ class Definitions { @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") - @tu lazy val Caps_Capability: ClassSymbol = requiredClass("scala.caps.Capability") + @tu lazy val Caps_Capability: TypeSymbol = CapsModule.requiredType("Capability") + @tu lazy val Caps_CapSet = requiredClass("scala.caps.CapSet") @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") + @tu lazy val Caps_capsOf: TermSymbol = CapsModule.requiredMethod("capsOf") @tu lazy val Caps_Exists = requiredClass("scala.caps.Exists") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 6548b46186bb..d3e198a7e7a7 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -358,6 +358,7 @@ object StdNames { val AppliedTypeTree: N = "AppliedTypeTree" val ArrayAnnotArg: N = "ArrayAnnotArg" val CAP: N = "CAP" + val CapSet: N = "CapSet" val Constant: N = "Constant" val ConstantType: N = "ConstantType" val Eql: N = "Eql" @@ -441,8 +442,8 @@ object StdNames { val bytes: N = "bytes" val canEqual_ : N = "canEqual" val canEqualAny : N = "canEqualAny" - val capIn: N = "capIn" val caps: N = "caps" + val capsOf: N = "capsOf" val captureChecking: N = "captureChecking" val checkInitialized: N = "checkInitialized" val classOf: N = "classOf" diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index e532324e95a7..4b8e251ea337 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2841,7 +2841,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private def existentialVarsConform(tp1: Type, tp2: Type) = tp2 match case tp2: TermParamRef => tp1 match - case tp1: CaptureRef => subsumesExistentially(tp2, tp1) + case tp1: CaptureRef if tp1.isTrackableRef => subsumesExistentially(tp2, tp1) case _ => false case _ => false diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 5e12e4d6b84a..7d773d1ac4ec 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2312,7 +2312,11 @@ object Types extends TypeUtils { override def captureSet(using Context): CaptureSet = val cs = captureSetOfInfo - if isTrackableRef && !cs.isAlwaysEmpty then singletonCaptureSet else cs + if 
isTrackableRef then + if cs.isAlwaysEmpty then cs else singletonCaptureSet + else dealias match + case _: (TypeRef | TypeParamRef) => CaptureSet.empty + case _ => cs end CaptureRef @@ -3031,7 +3035,7 @@ object Types extends TypeUtils { abstract case class TypeRef(override val prefix: Type, private var myDesignator: Designator) - extends NamedType { + extends NamedType, CaptureRef { type ThisType = TypeRef type ThisName = TypeName @@ -3080,6 +3084,9 @@ object Types extends TypeUtils { /** Hook that can be called from creation methods in TermRef and TypeRef */ def validated(using Context): this.type = this + + override def isTrackableRef(using Context) = + symbol.isAbstractOrParamType && derivesFrom(defn.Caps_CapSet) } final class CachedTermRef(prefix: Type, designator: Designator, hc: Int) extends TermRef(prefix, designator) { @@ -4836,7 +4843,8 @@ object Types extends TypeUtils { /** Only created in `binder.paramRefs`. Use `binder.paramRefs(paramNum)` to * refer to `TypeParamRef(binder, paramNum)`. */ - abstract case class TypeParamRef(binder: TypeLambda, paramNum: Int) extends ParamRef { + abstract case class TypeParamRef(binder: TypeLambda, paramNum: Int) + extends ParamRef, CaptureRef { type BT = TypeLambda def kindString: String = "Type" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) @@ -4856,6 +4864,8 @@ object Types extends TypeUtils { case bound: OrType => occursIn(bound.tp1, fromBelow) || occursIn(bound.tp2, fromBelow) case _ => false } + + override def isTrackableRef(using Context) = derivesFrom(defn.Caps_CapSet) } private final class TypeParamRefImpl(binder: TypeLambda, paramNum: Int) extends TypeParamRef(binder, paramNum) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index ae8e16ac9ea4..5d36bd230b7e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1541,7 +1541,7 @@ object Parsers { case _ => None } - /** CaptureRef ::= ident | `this` | `cap` [`[` ident `]`] + /** CaptureRef ::= ident [`*` | `^`] | `this` */ def captureRef(): Tree = if in.token == THIS then simpleRef() @@ -1551,6 +1551,10 @@ object Parsers { in.nextToken() atSpan(startOffset(id)): PostfixOp(id, Ident(nme.CC_REACH)) + else if isIdent(nme.UPARROW) then + in.nextToken() + atSpan(startOffset(id)): + makeCapsOf(cpy.Ident(id)(id.name.toTypeName)) else id /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking @@ -1968,7 +1972,7 @@ object Parsers { } /** SimpleType ::= SimpleLiteral - * | ‘?’ SubtypeBounds + * | ‘?’ TypeBounds * | SimpleType1 * | SimpleType ‘(’ Singletons ‘)’ -- under language.experimental.dependent, checked in Typer * Singletons ::= Singleton {‘,’ Singleton} @@ -2188,9 +2192,15 @@ object Parsers { inBraces(refineStatSeq()) /** TypeBounds ::= [`>:' Type] [`<:' Type] + * | `^` -- under captureChecking */ def typeBounds(): TypeBoundsTree = - atSpan(in.offset) { TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) } + atSpan(in.offset): + if in.isIdent(nme.UPARROW) && Feature.ccEnabled then + in.nextToken() + TypeBoundsTree(EmptyTree, makeCapsBound()) + else + TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) private def bound(tok: Int): Tree = if (in.token == tok) { in.nextToken(); toplevelTyp() } @@ -3384,7 +3394,6 @@ object Parsers { * * DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ * DefTypeParam ::= {Annotation} - * [`sealed`] -- under captureChecking * id [HkTypeParamClause] TypeParamBounds * * 
TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index cbf79577c2a3..5113a6380a78 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2328,7 +2328,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val res = Throw(expr1).withSpan(tree.span) if Feature.ccEnabled && !cap.isEmpty && !ctx.isAfterTyper then // Record access to the CanThrow capabulity recovered in `cap` by wrapping - // the type of the `throw` (i.e. Nothing) in a `@requiresCapability` annotatoon. + // the type of the `throw` (i.e. Nothing) in a `@requiresCapability` annotation. Typed(res, TypeTree( AnnotatedType(res.tpe, diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 5ae5b860f501..967246041082 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -1,6 +1,6 @@ package scala -import annotation.experimental +import annotation.{experimental, compileTimeOnly} @experimental object caps: @@ -16,6 +16,12 @@ import annotation.experimental @deprecated("Use `Capability` instead") type Cap = Capability + /** Carrier trait for capture set type parameters */ + trait CapSet extends Any + + @compileTimeOnly("Should be used only internally by the Scala compiler") + def capsOf[CS]: Any = ??? + /** Reach capabilities x* which appear as terms in @retains annotations are encoded * as `caps.reachCapability(x)`. When converted to CaptureRef types in capture sets * they are represented as `x.type @annotation.internal.reachCapability`. diff --git a/tests/pos/cc-poly-1.scala b/tests/pos/cc-poly-1.scala new file mode 100644 index 000000000000..69b7557b8466 --- /dev/null +++ b/tests/pos/cc-poly-1.scala @@ -0,0 +1,23 @@ +import language.experimental.captureChecking +import annotation.experimental +import caps.{CapSet, Capability} + +@experimental object Test: + + class C extends Capability + class D + + def f[X^](x: D^{X^}): D^{X^} = x + def g[X^](x: D^{X^}, y: D^{X^}): D^{X^} = x + + def test(c1: C, c2: C) = + val d: D^{c1, c2} = D() + val x = f[CapSet^{c1, c2}](d) + val _: D^{c1, c2} = x + val d1: D^{c1} = D() + val d2: D^{c2} = D() + val y = g(d1, d2) + val _: D^{d1, d2} = y + val _: D^{c1, c2} = y + + diff --git a/tests/pos/cc-poly-source.scala b/tests/pos/cc-poly-source.scala new file mode 100644 index 000000000000..939f1f682dc8 --- /dev/null +++ b/tests/pos/cc-poly-source.scala @@ -0,0 +1,36 @@ +import language.experimental.captureChecking +import annotation.experimental +import caps.{CapSet, Capability} + +@experimental object Test: + + class Label //extends Capability + + class Listener + + class Source[X^]: + private var listeners: Set[Listener^{X^}] = Set.empty + def register(x: Listener^{X^}): Unit = + listeners += x + + def allListeners: Set[Listener^{X^}] = listeners + + def test1(lbl1: Label^, lbl2: Label^) = + val src = Source[CapSet^{lbl1, lbl2}] + def l1: Listener^{lbl1} = ??? + val l2: Listener^{lbl2} = ??? + src.register{l1} + src.register{l2} + val ls = src.allListeners + val _: Set[Listener^{lbl1, lbl2}] = ls + + def test2(lbls: List[Label^]) = + def makeListener(lbl: Label^): Listener^{lbl} = ??? 
+ val listeners = lbls.map(makeListener) + val src = Source[CapSet^{lbls*}] + for l <- listeners do + src.register(l) + val ls = src.allListeners + val _: Set[Listener^{lbls*}] = ls + + From 9436f11f2c9c2df9341ea0bdfb7d9e861e4196e2 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 27 Jun 2024 13:40:20 +0200 Subject: [PATCH 317/827] Reclassify maximal capabilities Don't treat user-defined capabilities deriving from caps.Capability as maximal. That was a vestige from when we treated capability classes natively. It caused code that should compile to fail because if `x extends Capability` then `x` could not be widened to `x*`. As a consequence we have one missed error in effect-swaps again, which re-establishes the original (faulty) situation. --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 6 ++-- .../captures/effect-swaps.check | 4 --- .../captures/effect-swaps.scala | 2 +- tests/pos/cc-poly-source-capability.scala | 36 +++++++++++++++++++ tests/pos/reach-capability.scala | 17 +++++++++ 6 files changed, 57 insertions(+), 10 deletions(-) create mode 100644 tests/pos/cc-poly-source-capability.scala create mode 100644 tests/pos/reach-capability.scala diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 6927983ad196..6d1eb2a7bd38 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -743,7 +743,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if others.accountsFor(ref) then report.warning(em"redundant capture: $dom already accounts for $ref", pos) - if ref.captureSetOfInfo.elems.isEmpty then + if ref.captureSetOfInfo.elems.isEmpty && !ref.derivesFrom(defn.Caps_Capability) then report.error(em"$ref cannot be tracked since its capture set is empty", pos) check(parent.captureSet, parent) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 7d773d1ac4ec..fabe7c782280 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3026,8 +3026,7 @@ object Types extends TypeUtils { name == nme.CAPTURE_ROOT && symbol == defn.captureRoot override def isMaxCapability(using Context): Boolean = - import cc.* - this.derivesFromCapability && symbol.isStableMember + symbol == defn.captureRoot || info.derivesFrom(defn.Caps_Exists) override def normalizedRef(using Context): CaptureRef = if isTrackableRef then symbol.termRef else this @@ -4834,8 +4833,7 @@ object Types extends TypeUtils { def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) override def isTrackableRef(using Context) = true override def isMaxCapability(using Context) = - import cc.* - this.derivesFromCapability + underlying.derivesFrom(defn.Caps_Exists) } private final class TermParamRefImpl(binder: TermLambda, paramNum: Int) extends TermParamRef(binder, paramNum) diff --git a/tests/neg-custom-args/captures/effect-swaps.check b/tests/neg-custom-args/captures/effect-swaps.check index 22941be36794..ef5a95d333bf 100644 --- a/tests/neg-custom-args/captures/effect-swaps.check +++ b/tests/neg-custom-args/captures/effect-swaps.check @@ -22,7 +22,3 @@ 73 | fr.await.ok | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/effect-swaps.scala:66:15 ------------------------------------------------------ -66 | Result.make: // error - | ^^^^^^^^^^^ - | escaping local reference contextual$9.type diff --git 
a/tests/neg-custom-args/captures/effect-swaps.scala b/tests/neg-custom-args/captures/effect-swaps.scala index 0b362b80e3ce..4bafd6421af3 100644 --- a/tests/neg-custom-args/captures/effect-swaps.scala +++ b/tests/neg-custom-args/captures/effect-swaps.scala @@ -63,7 +63,7 @@ def test[T, E](using Async) = fr.await.ok def fail4[T, E](fr: Future[Result[T, E]]^) = - Result.make: // error + Result.make: // should be errorm but inders Result[Any, Any] Future: fut ?=> fr.await.ok diff --git a/tests/pos/cc-poly-source-capability.scala b/tests/pos/cc-poly-source-capability.scala new file mode 100644 index 000000000000..48b2d13599fd --- /dev/null +++ b/tests/pos/cc-poly-source-capability.scala @@ -0,0 +1,36 @@ +import language.experimental.captureChecking +import annotation.experimental +import caps.{CapSet, Capability} + +@experimental object Test: + + class Label extends Capability + + class Listener + + class Source[X^]: + private var listeners: Set[Listener^{X^}] = Set.empty + def register(x: Listener^{X^}): Unit = + listeners += x + + def allListeners: Set[Listener^{X^}] = listeners + + def test1(lbl1: Label, lbl2: Label) = + val src = Source[CapSet^{lbl1, lbl2}] + def l1: Listener^{lbl1} = ??? + val l2: Listener^{lbl2} = ??? + src.register{l1} + src.register{l2} + val ls = src.allListeners + val _: Set[Listener^{lbl1, lbl2}] = ls + + def test2(lbls: List[Label]) = + def makeListener(lbl: Label): Listener^{lbl} = ??? + val listeners = lbls.map(makeListener) + val src = Source[CapSet^{lbls*}] + for l <- listeners do + src.register(l) + val ls = src.allListeners + val _: Set[Listener^{lbls*}] = ls + + diff --git a/tests/pos/reach-capability.scala b/tests/pos/reach-capability.scala new file mode 100644 index 000000000000..d551113eb05b --- /dev/null +++ b/tests/pos/reach-capability.scala @@ -0,0 +1,17 @@ +import language.experimental.captureChecking +import annotation.experimental +import caps.{Capability} + +@experimental object Test2: + + class List[+A]: + def map[B](f: A => B): List[B] = ??? + + class Label extends Capability + + class Listener + + def test2(lbls: List[Label]) = + def makeListener(lbl: Label): Listener^{lbl} = ??? + val listeners = lbls.map(makeListener) // should work + From 7ac94c950b4f00ad4dbd0174de0b0d6bb21532ba Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 27 Jun 2024 13:59:57 +0200 Subject: [PATCH 318/827] Bring back RefiningVar We might want to treat it specially since a RefiningVar should ideally be closed for further additions when the constructor has been analyzed. --- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 7 +++++++ compiler/src/dotty/tools/dotc/cc/Setup.scala | 5 +---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 0b19f75f14d0..a2233f862e53 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -634,6 +634,13 @@ object CaptureSet: override def toString = s"Var$id$elems" end Var + /** Variables that represent refinements of class parameters can have the universal + * capture set, since they represent only what is the result of the constructor. + * Test case: Without that tweak, logger.scala would not compile. + */ + class RefiningVar(owner: Symbol)(using Context) extends Var(owner): + override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context) = this + /** A variable that is derived from some other variable via a map or filter. 
*/ abstract class DerivedVar(owner: Symbol, initialElems: Refs)(using @constructorOnly ctx: Context) extends Var(owner, initialElems): diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 6d1eb2a7bd38..7d2f6c24ce2d 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -193,10 +193,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: val getterType = mapInferred(refine = false)(tp.memberInfo(getter)).strippedDealias RefinedType(core, getter.name, - CapturingType(getterType, - new CaptureSet.Var(ctx.owner): - override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context) = this - )) + CapturingType(getterType, new CaptureSet.RefiningVar(ctx.owner))) .showing(i"add capture refinement $tp --> $result", capt) else core From aad33957457abef40ba75cc2f1fc181fdf34103f Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 27 Jun 2024 18:43:40 +0200 Subject: [PATCH 319/827] Allow for embedded CapSet^{refs} entries in capture sets --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 22 ++++++---- compiler/src/dotty/tools/dotc/cc/Setup.scala | 41 ++++++++++--------- tests/pos/cc-poly-source-capability.scala | 30 ++++++-------- 3 files changed, 49 insertions(+), 44 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 09a56fb1b359..3b25f0d58035 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -64,7 +64,7 @@ def depFun(args: List[Type], resultType: Type, isContextual: Boolean, paramNames mt.toFunctionType(alwaysDependent = true) /** An exception thrown if a @retains argument is not syntactically a CaptureRef */ -class IllegalCaptureRef(tpe: Type) extends Exception(tpe.toString) +class IllegalCaptureRef(tpe: Type)(using Context) extends Exception(tpe.show) /** Capture checking state, which is known to other capture checking components */ class CCState: @@ -127,15 +127,21 @@ class NoCommonRoot(rs: Symbol*)(using Context) extends Exception( extension (tree: Tree) - /** Map tree with CaptureRef type to its type, throw IllegalCaptureRef otherwise */ - def toCaptureRef(using Context): CaptureRef = tree match + /** Map tree with CaptureRef type to its type, + * map CapSet^{refs} to the `refs` references, + * throw IllegalCaptureRef otherwise + */ + def toCaptureRefs(using Context): List[CaptureRef] = tree match case ReachCapabilityApply(arg) => - arg.toCaptureRef.reach + arg.toCaptureRefs.map(_.reach) case CapsOfApply(arg) => - arg.toCaptureRef - case _ => tree.tpe match + arg.toCaptureRefs + case _ => tree.tpe.dealiasKeepAnnots match case ref: CaptureRef if ref.isTrackableRef => - ref + ref :: Nil + case AnnotatedType(parent, ann) + if ann.symbol.isRetains && parent.derivesFrom(defn.Caps_CapSet) => + ann.tree.toCaptureSet.elems.toList case tpe => throw IllegalCaptureRef(tpe) // if this was compiled from cc syntax, problem should have been reported at Typer @@ -146,7 +152,7 @@ extension (tree: Tree) tree.getAttachment(Captures) match case Some(refs) => refs case None => - val refs = CaptureSet(tree.retainedElems.map(_.toCaptureRef)*) + val refs = CaptureSet(tree.retainedElems.flatMap(_.toCaptureRefs)*) //.showing(i"toCaptureSet $tree --> $result", capt) tree.putAttachment(Captures, refs) refs diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 
7d2f6c24ce2d..cb74e2c71e73 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -729,25 +729,28 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: var retained = ann.retainedElems.toArray for i <- 0 until retained.length do val refTree = retained(i) - val ref = refTree.toCaptureRef - - def pos = - if refTree.span.exists then refTree.srcPos - else if ann.span.exists then ann.srcPos - else tpt.srcPos - - def check(others: CaptureSet, dom: Type | CaptureSet): Unit = - if others.accountsFor(ref) then - report.warning(em"redundant capture: $dom already accounts for $ref", pos) - - if ref.captureSetOfInfo.elems.isEmpty && !ref.derivesFrom(defn.Caps_Capability) then - report.error(em"$ref cannot be tracked since its capture set is empty", pos) - check(parent.captureSet, parent) - - val others = - for j <- 0 until retained.length if j != i yield retained(j).toCaptureRef - val remaining = CaptureSet(others*) - check(remaining, remaining) + for ref <- refTree.toCaptureRefs do + def pos = + if refTree.span.exists then refTree.srcPos + else if ann.span.exists then ann.srcPos + else tpt.srcPos + + def check(others: CaptureSet, dom: Type | CaptureSet): Unit = + if others.accountsFor(ref) then + report.warning(em"redundant capture: $dom already accounts for $ref", pos) + + if ref.captureSetOfInfo.elems.isEmpty && !ref.derivesFrom(defn.Caps_Capability) then + report.error(em"$ref cannot be tracked since its capture set is empty", pos) + check(parent.captureSet, parent) + + val others = + for + j <- 0 until retained.length if j != i + r <- retained(j).toCaptureRefs + yield r + val remaining = CaptureSet(others*) + check(remaining, remaining) + end for end for end checkWellformedPost diff --git a/tests/pos/cc-poly-source-capability.scala b/tests/pos/cc-poly-source-capability.scala index 48b2d13599fd..9a21b2d5b802 100644 --- a/tests/pos/cc-poly-source-capability.scala +++ b/tests/pos/cc-poly-source-capability.scala @@ -4,7 +4,9 @@ import caps.{CapSet, Capability} @experimental object Test: - class Label extends Capability + class Async extends Capability + + def listener(async: Async): Listener^{async} = ??? class Listener @@ -15,22 +17,16 @@ import caps.{CapSet, Capability} def allListeners: Set[Listener^{X^}] = listeners - def test1(lbl1: Label, lbl2: Label) = - val src = Source[CapSet^{lbl1, lbl2}] - def l1: Listener^{lbl1} = ??? - val l2: Listener^{lbl2} = ??? - src.register{l1} - src.register{l2} - val ls = src.allListeners - val _: Set[Listener^{lbl1, lbl2}] = ls - - def test2(lbls: List[Label]) = - def makeListener(lbl: Label): Listener^{lbl} = ??? 
- val listeners = lbls.map(makeListener) - val src = Source[CapSet^{lbls*}] - for l <- listeners do - src.register(l) + def test1(async1: Async, others: List[Async]) = + val src = Source[CapSet^{async1, others*}] + val lst1 = listener(async1) + val lsts = others.map(listener) + val _: List[Listener^{others*}] = lsts + src.register{lst1} + src.register(listener(async1)) + lsts.foreach(src.register) + others.map(listener).foreach(src.register) val ls = src.allListeners - val _: Set[Listener^{lbls*}] = ls + val _: Set[Listener^{async1, others*}] = ls From 1c110713733fc9119a1d34a00e8106e984cbc9c3 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 28 Jun 2024 18:01:51 +0200 Subject: [PATCH 320/827] Improve printing of capture sets before cc Use the syntactic sugar instead of expanding with capsOf --- compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala | 3 +++ tests/pos/cc-poly-1.scala | 3 +++ 2 files changed, 6 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 71ebb7054000..aca5972d4516 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -165,6 +165,8 @@ class PlainPrinter(_ctx: Context) extends Printer { private def toTextRetainedElem[T <: Untyped](ref: Tree[T]): Text = ref match case ref: RefTree[?] if ref.typeOpt.exists => toTextCaptureRef(ref.typeOpt) + case TypeApply(fn, arg :: Nil) if fn.symbol == defn.Caps_capsOf => + toTextRetainedElem(arg) case _ => toText(ref) @@ -416,6 +418,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp: SingletonType => toTextRef(tp) case ReachCapability(tp1) => toTextRef(tp1) ~ "*" case MaybeCapability(tp1) => toTextRef(tp1) ~ "?" + case tp: (TypeRef | TypeParamRef) => toText(tp) ~ "^" case _ => toText(tp) protected def isOmittablePrefix(sym: Symbol): Boolean = diff --git a/tests/pos/cc-poly-1.scala b/tests/pos/cc-poly-1.scala index 69b7557b8466..ed32d94f7a99 100644 --- a/tests/pos/cc-poly-1.scala +++ b/tests/pos/cc-poly-1.scala @@ -9,6 +9,7 @@ import caps.{CapSet, Capability} def f[X^](x: D^{X^}): D^{X^} = x def g[X^](x: D^{X^}, y: D^{X^}): D^{X^} = x + def h[X^](): D^{X^} = ??? 
def test(c1: C, c2: C) = val d: D^{c1, c2} = D() @@ -19,5 +20,7 @@ import caps.{CapSet, Capability} val y = g(d1, d2) val _: D^{d1, d2} = y val _: D^{c1, c2} = y + val z = h() + From 72462a723271a9d4358c7165d9f7980f5a46f043 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 28 Jun 2024 18:49:07 +0200 Subject: [PATCH 321/827] Add some neg tests --- tests/neg/cc-poly-1.check | 12 ++++++++++++ tests/neg/cc-poly-1.scala | 13 +++++++++++++ tests/neg/cc-poly-2.check | 21 +++++++++++++++++++++ tests/neg/cc-poly-2.scala | 16 ++++++++++++++++ 4 files changed, 62 insertions(+) create mode 100644 tests/neg/cc-poly-1.check create mode 100644 tests/neg/cc-poly-1.scala create mode 100644 tests/neg/cc-poly-2.check create mode 100644 tests/neg/cc-poly-2.scala diff --git a/tests/neg/cc-poly-1.check b/tests/neg/cc-poly-1.check new file mode 100644 index 000000000000..abb507078bf4 --- /dev/null +++ b/tests/neg/cc-poly-1.check @@ -0,0 +1,12 @@ +-- [E057] Type Mismatch Error: tests/neg/cc-poly-1.scala:12:6 ---------------------------------------------------------- +12 | f[Any](D()) // error + | ^ + | Type argument Any does not conform to upper bound caps.CapSet^ + | + | longer explanation available when compiling with `-explain` +-- [E057] Type Mismatch Error: tests/neg/cc-poly-1.scala:13:6 ---------------------------------------------------------- +13 | f[String](D()) // error + | ^ + | Type argument String does not conform to upper bound caps.CapSet^ + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/cc-poly-1.scala b/tests/neg/cc-poly-1.scala new file mode 100644 index 000000000000..580b124bc8f3 --- /dev/null +++ b/tests/neg/cc-poly-1.scala @@ -0,0 +1,13 @@ +import language.experimental.captureChecking +import caps.{CapSet, Capability} + +object Test: + + class C extends Capability + class D + + def f[X^](x: D^{X^}): D^{X^} = x + + def test(c1: C, c2: C) = + f[Any](D()) // error + f[String](D()) // error diff --git a/tests/neg/cc-poly-2.check b/tests/neg/cc-poly-2.check new file mode 100644 index 000000000000..0615ce19b5ea --- /dev/null +++ b/tests/neg/cc-poly-2.check @@ -0,0 +1,21 @@ +-- [E007] Type Mismatch Error: tests/neg/cc-poly-2.scala:13:15 --------------------------------------------------------- +13 | f[Nothing](d) // error + | ^ + | Found: (d : Test.D^) + | Required: Test.D + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/cc-poly-2.scala:14:19 --------------------------------------------------------- +14 | f[CapSet^{c1}](d) // error + | ^ + | Found: (d : Test.D^) + | Required: Test.D^{c1} + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/cc-poly-2.scala:16:20 --------------------------------------------------------- +16 | val _: D^{c1} = x // error + | ^ + | Found: (x : Test.D^{d}) + | Required: Test.D^{c1} + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/cc-poly-2.scala b/tests/neg/cc-poly-2.scala new file mode 100644 index 000000000000..c5e5df6540da --- /dev/null +++ b/tests/neg/cc-poly-2.scala @@ -0,0 +1,16 @@ +import language.experimental.captureChecking +import caps.{CapSet, Capability} + +object Test: + + class C extends Capability + class D + + def f[X^](x: D^{X^}): D^{X^} = x + + def test(c1: C, c2: C) = + val d: D^ = D() + f[Nothing](d) // error + f[CapSet^{c1}](d) // error + val x = f(d) + val _: D^{c1} = x // error From 0a1854d724ce1677e49c7cac2688d47e7c60f99b Mon Sep 17 00:00:00 2001 
From: odersky Date: Sat, 29 Jun 2024 20:05:13 +0200 Subject: [PATCH 322/827] Update doc page --- docs/_docs/reference/experimental/cc.md | 141 ++++++++++++++++++++++-- 1 file changed, 132 insertions(+), 9 deletions(-) diff --git a/docs/_docs/reference/experimental/cc.md b/docs/_docs/reference/experimental/cc.md index 5bdf91f628ec..aea14abc7e22 100644 --- a/docs/_docs/reference/experimental/cc.md +++ b/docs/_docs/reference/experimental/cc.md @@ -216,13 +216,13 @@ This widening is called _avoidance_; it is not specific to capture checking but ## Capability Classes -Classes like `CanThrow` or `FileSystem` have the property that their values are always intended to be capabilities. We can make this intention explicit and save boilerplate by declaring these classes with a `@capability` annotation. +Classes like `CanThrow` or `FileSystem` have the property that their values are always intended to be capabilities. We can make this intention explicit and save boilerplate by letting these classes extend the `Capability` class defined in object `cap`. -The capture set of a capability class type is always `{cap}`. This means we could equivalently express the `FileSystem` and `Logger` classes as follows: +The capture set of a `Capability` subclass type is always `{cap}`. This means we could equivalently express the `FileSystem` and `Logger` classes as follows: ```scala -import annotation.capability +import caps.Capability -@capability class FileSystem +class FileSystem extends Capability class Logger(using FileSystem): def log(s: String): Unit = ??? @@ -290,7 +290,7 @@ The captured references of a class include _local capabilities_ and _argument ca the local capabilities of a superclass are also local capabilities of its subclasses. Example: ```scala -@capability class Cap +class Cap extends caps.Capability def test(a: Cap, b: Cap, c: Cap) = class Super(y: Cap): @@ -317,7 +317,7 @@ The inference observes the following constraints: For instance, in ```scala -@capability class Cap +class Cap extends caps.Capability def test(c: Cap) = class A: val x: A = this @@ -502,7 +502,7 @@ Under the language import `language.experimental.captureChecking`, the code is i ``` To integrate exception and capture checking, only two changes are needed: - - `CanThrow` is declared as a `@capability` class, so all references to `CanThrow` instances are tracked. + - `CanThrow` is declared as a class extending `Capability`, so all references to `CanThrow` instances are tracked. - Escape checking is extended to `try` expressions. The result type of a `try` is not allowed to capture the universal capability. @@ -635,9 +635,132 @@ To summarize, there are two "sweet spots" of data structure design: strict lists side-effecting or resource-aware code and lazy lists in purely functional code. Both are already correctly capture-typed without requiring any explicit annotations. Capture annotations only come into play where the semantics gets more complicated because we deal with delayed effects such as in impure lazy lists or side-effecting iterators over strict lists. This property is probably one of the greatest plus points of our approach to capture checking compared to previous techniques which tend to be more noisy. -## Function Type Shorthands +## Existential Capabilities -TBD +In fact, what is written as the top type `cap` can mean different capabilities, depending on scope. For instance, consider the function type +`() -> Iterator[T]^`. This is taken to mean +```scala + () -> Exists x. 
Iterator[T]^x +``` +In other words, it means an unknown type `x` bound by an "existential" in the scope of the function result. A `cap` in a function result is therefore different from a `cap` at the top-level or in a function parameter. + +Internally, an existential type is represented as a kind of dependent function type. The type above would be modelled as +```scala + () -> (x: Exists) -> Iterator[T]^x +``` +Here, `Exists` is a sealed trait in the `caps` object that serves to mark +dependent functions as representations of existentials. It should be noted +that this is strictly an internal representation. It is explained here because it can show up in error messages. It is generally not recommended to use this syntax in source code. Instead one should rely on the automatic expansion of `^` and `cap` to existentials, which can be +influenced by introducing the right alias types. The rules for this expansion are as follows: + + - If a function result type contains covariant occurrences of `cap`, + we replace these occurrences with a fresh existential variable which + is bound by a quantifier scoping over the result type. + - We might want to do the same expansion in function arguments, but right now this is not done. + - Occurrences of `cap` elsewhere are not translated. They can be seen as representing an existential at the top-level scope. + +**Examples:** + + - `A => B` is an alias type that expands to `(A -> B)^`, therefore + `() -> A => B` expands to `() -> Exists c. A ->{c} B`. + + - `() -> Iterator[A => B]` expands to `() -> Exists c. Iterator[A ->{c} B]` + + - `A -> B^` expands to `A -> Exists c.B^{c}`. + + - If we define `type Fun[T] = A -> T`, then `() -> Fun[B^]` expands to `() -> Exists c.Fun[B^{c}]`, which dealiases to `() -> Exists c.A -> B^{c}`. This demonstrates how aliases can be used to force existential binders to be in some specific outer scope. + + - If we define + ```scala + type F = A -> Fun[B^] + ``` + then the type alias expands to + ```scala + type F = A -> Exists c.A -> B^{c} + ``` + +**Typing Rules:** + + - When we typecheck the body of a function or method, any covariant occurrences of `cap` in the result type are bound with a fresh existential. + - Conversely, when we typecheck the application of a function or method, + with an existential result type `Exists ex.T`, the result of the application is `T` where every occurrence of the existentially bound + variable `ex` is replaced by `cap`. + +## Reach Capabilities + +Say you have a method `f` that takes an impure function argument which gets stored in a `var`: +```scala +def f(op: A => B) + var x: A ->{op} B = op + ... +``` +This is legal even though `var`s cannot have types with `cap` or existential capabilities. The trick is that the type of the variable `x` +is not `A => B` (this would be rejected), but is the "narrowed" type +`A ->{op} B`. In other words, all capabilities retained by values of `x` +are also referred to by `op`, which justifies the replacement of `cap` by `op`. + +A more complicated situation arises if we want to store successive values +held in a list. Example: +```scala +def f(ops: List[A => B]) + var xs = ops + var x: ??? = xs.head + while xs.nonEmpty do + xs = xs.tail + x = xs.head + ... +``` +Here, `x` cannot be given a type with an `ops` capability. In fact, `ops` is pure, i.e. its capture set is empty, so it cannot be used as the name of a capability. What we would like to express is that `x` refers to +any operation "reachable" through `ops`. 
This can be expressed using a +_reach capability_ `ops*`. +```scala +def f(ops: List[A => B]) + var xs = ops + var x: A ->{ops*} B = xs.head + ... +``` +Reach capabilities take the form `x*` where `x` is syntactically a regular capability. If `x: T` then `x*` stands for any capability that appears covariantly in `T` and that is accessed through `x`. The least supertype of this capability is the set of all capabilities appearing covariantly in `T`. + +## Capability Polymorphism + +It is sometimes convenient to write operations that are parameterized with a capture set of capabilities. For instance consider a type of event sources +`Source` on which `Listener`s can be registered. Listeners can hold certain capabilities, which show up as a parameter to `Source`: +```scala + class Source[X^]: + private var listeners: Set[Listener^{X^}] = Set.empty + def register(x: Listener^{X^}): Unit = + listeners += x + + def allListeners: Set[Listener^{X^}] = listeners +``` +The type variable `X^` can be instantiated with a set of capabilities. It can occur in capture sets in its scope. For instance, in the example above +we see a variable `listeners` that has as type a `Set` of `Listeners` capturing `X^`. The `register` method takes a listener of this type +and assigns it to the variable. + +Capture set variables `X^` are represented as regular type variables with a +special upper bound `CapSet`. For instance, `Source` could be equivalently +defined as follows: +```scala + class Source[X <: CapSet^]: + ... +``` +`CapSet` is a sealed trait in the `caps` object. It cannot be instantiated or inherited, so its only purpose is to identify capture set type variables and types. Capture set variables can be inferred like regular type variables. When they should be instantiated explicitly one uses a capturing +type `CapSet`. For instance: +```scala + class Async extends caps.Capability + + def listener(async: Async): Listener^{async} = ??? + + def test1(async1: Async, others: List[Async]) = + val src = Source[CapSet^{async1, others*}] + ... +``` +Here, `src` is created as a `Source` on which listeners can be registered that refer to the `async` capability or to any of the capabilities in list `others`. So we can continue the example code above as follows: +```scala + src.register(listener(async1)) + others.map(listener).foreach(src.register) + val ls: Set[Listener^{async, others*}] = src.allListeners +``` ## Compilation Options From c965322ebd576a0869dafba93baf00a6a895565d Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 6 Jul 2024 19:12:38 +0200 Subject: [PATCH 323/827] Fix glb logic involving capturing types --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 8 +++++++ .../dotty/tools/dotc/core/TypeComparer.scala | 13 +++++----- tests/pos/gears-probem-1.scala | 24 +++++++++++++++++++ tests/pos/gears-probem.scala | 18 ++++++++++++++ 4 files changed, 57 insertions(+), 6 deletions(-) create mode 100644 tests/pos/gears-probem-1.scala create mode 100644 tests/pos/gears-probem.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 3b25f0d58035..8edc861ace45 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -227,6 +227,14 @@ extension (tp: Type) case tp: OrType => tp.tp1.isBoxedCapturing || tp.tp2.isBoxedCapturing case _ => false + /** Is the box status of `tp` and `tp2` compatible? I.ee they are + * box boxed, or both unboxed, or one of them has an empty capture set. 
+ */ + def isBoxCompatibleWith(tp2: Type)(using Context): Boolean = + isBoxedCapturing == tp2.isBoxedCapturing + || tp.captureSet.isAlwaysEmpty + || tp2.captureSet.isAlwaysEmpty + /** If this type is a capturing type, the version with boxed statues as given by `boxed`. * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing * type that captures the TermRef. diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 4b8e251ea337..d6868e569a05 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2403,7 +2403,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else def mergedGlb(tp1: Type, tp2: Type): Type = val tp1a = dropIfSuper(tp1, tp2) - if tp1a ne tp1 then glb(tp1a, tp2) + if tp1a ne tp1 then + glb(tp1a, tp2) else val tp2a = dropIfSuper(tp2, tp1) if tp2a ne tp2 then glb(tp1, tp2a) @@ -2721,11 +2722,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp1: TypeVar if tp1.isInstantiated => tp1.underlying & tp2 case CapturingType(parent1, refs1) => - val refs2 = tp2.captureSet - if subCaptures(refs2, refs1, frozen = true).isOK - && tp1.isBoxedCapturing == tp2.isBoxedCapturing - then (parent1 & tp2).capturing(refs2) - else tp1.derivedCapturingType(parent1 & tp2, refs1) + val jointRefs = refs1 ** tp2.captureSet + if jointRefs.isAlwaysEmpty then parent1 & tp2 + else if tp1.isBoxCompatibleWith(tp2) then + tp1.derivedCapturingType(parent1 & tp2, jointRefs) + else NoType case tp1: AnnotatedType if !tp1.isRefining => tp1.underlying & tp2 case _ => diff --git a/tests/pos/gears-probem-1.scala b/tests/pos/gears-probem-1.scala new file mode 100644 index 000000000000..f5c7fdfd0a3c --- /dev/null +++ b/tests/pos/gears-probem-1.scala @@ -0,0 +1,24 @@ +import language.experimental.captureChecking + +trait Future[+T]: + def await: T + +trait Channel[+T]: + def read(): Ok[T] + +class Collector[T](val futures: Seq[Future[T]^]): + val results: Channel[Future[T]^{futures*}] = ??? +end Collector + +class Result[+T, +E]: + def get: T = ??? + +case class Err[+E](e: E) extends Result[Nothing, E] +case class Ok[+T](x: T) extends Result[T, Nothing] + +extension [T](fs: Seq[Future[T]^]) + def awaitAll = + val collector//: Collector[T]{val futures: Seq[Future[T]^{fs*}]} + = Collector(fs) + // val ch = collector.results // also errors + val fut: Future[T]^{fs*} = collector.results.read().get // found ...^{caps.cap} \ No newline at end of file diff --git a/tests/pos/gears-probem.scala b/tests/pos/gears-probem.scala new file mode 100644 index 000000000000..2e445c985de2 --- /dev/null +++ b/tests/pos/gears-probem.scala @@ -0,0 +1,18 @@ +import language.experimental.captureChecking + +trait Future[+T]: + def await: T + +trait Channel[T]: + def read(): Either[Nothing, T] + +class Collector[T](val futures: Seq[Future[T]^]): + val results: Channel[Future[T]^{futures*}] = ??? 
+end Collector + +extension [T](fs: Seq[Future[T]^]) + def awaitAll = + val collector: Collector[T]{val futures: Seq[Future[T]^{fs*}]} + = Collector(fs) + // val ch = collector.results // also errors + val fut: Future[T]^{fs*} = collector.results.read().right.get // found ...^{caps.cap} \ No newline at end of file From 2cd685a26c14ecc12be8dfa61287f883ce406dcf Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 8 Jul 2024 10:31:29 +0200 Subject: [PATCH 324/827] Add option to avoid intersections for class refinements Add an option to avoid the type intersection when we do a select of a parameter accessor that is mentioned in a class refinement type. It seems to give us a little bit if performance, but nothing significant. So the option is off by default. --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 6 ++++ .../src/dotty/tools/dotc/core/Types.scala | 29 +++++++++++-------- 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 8edc861ace45..053795429bc9 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -27,6 +27,12 @@ object ccConfig: */ inline val allowUnsoundMaps = false + /** If true, when computing the memberinfo of a refined type created + * by addCaptureRefinements take the refineInfo directly without intersecting + * with the parent info. + */ + inline val optimizedRefinements = false + /** If true, use existential capture set variables */ def useExistentials(using Context) = Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.5`) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index fabe7c782280..71de9ef0e0f9 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -43,6 +43,7 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe +import dotty.tools.dotc.cc.ccConfig object Types extends TypeUtils { @@ -865,19 +866,23 @@ object Types extends TypeUtils { } else val isRefinedMethod = rinfo.isInstanceOf[MethodOrPoly] - val joint = pdenot.meet( - new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId), pre, isRefinedMethod), - pre, - safeIntersection = ctx.base.pendingMemberSearches.contains(name)) - joint match - case joint: SingleDenotation - if isRefinedMethod - && (rinfo <:< joint.info - || name == nme.apply && defn.isFunctionType(tp.parent)) => - // use `rinfo` to keep the right parameter names for named args. See i8516.scala. - joint.derivedSingleDenotation(joint.symbol, rinfo, pre, isRefinedMethod) + rinfo match + case CapturingType(_, refs: CaptureSet.RefiningVar) if ccConfig.optimizedRefinements => + pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, rinfo) case _ => - joint + val joint = pdenot.meet( + new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId), pre, isRefinedMethod), + pre, + safeIntersection = ctx.base.pendingMemberSearches.contains(name)) + joint match + case joint: SingleDenotation + if isRefinedMethod + && (rinfo <:< joint.info + || name == nme.apply && defn.isFunctionType(tp.parent)) => + // use `rinfo` to keep the right parameter names for named args. See i8516.scala. 
+ joint.derivedSingleDenotation(joint.symbol, rinfo, pre, isRefinedMethod) + case _ => + joint } def goApplied(tp: AppliedType, tycon: HKTypeLambda) = From 8b03405b4bd5ef1cf25d8dae6f8c3a293202a0b3 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 8 Jul 2024 10:31:47 +0200 Subject: [PATCH 325/827] Fix printing of reach capabilities --- compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala | 6 +++--- compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala | 2 ++ tests/neg-custom-args/captures/reaches2.check | 4 ++-- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index aca5972d4516..b5ed3bdb4fa7 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -416,10 +416,10 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp) match case tp: TermRef if tp.symbol == defn.captureRoot => Str("cap") case tp: SingletonType => toTextRef(tp) - case ReachCapability(tp1) => toTextRef(tp1) ~ "*" - case MaybeCapability(tp1) => toTextRef(tp1) ~ "?" case tp: (TypeRef | TypeParamRef) => toText(tp) ~ "^" - case _ => toText(tp) + case ReachCapability(tp1) => toTextCaptureRef(tp1) ~ "*" + case MaybeCapability(tp1) => toTextCaptureRef(tp1) ~ "?" + case tp => toText(tp) protected def isOmittablePrefix(sym: Symbol): Boolean = defn.unqualifiedOwnerTypes.exists(_.symbol == sym) || isEmptyPrefix(sym) diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 9852dfc1170d..18a1647572ef 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -330,6 +330,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { "?" ~ (("(ignored: " ~ toText(ignored) ~ ")") provided printDebug) case tp @ PolyProto(targs, resType) => "[applied to [" ~ toTextGlobal(targs, ", ") ~ "] returning " ~ toText(resType) + case tp: AnnotatedType if tp.isReach || tp.isMaybe => + toTextCaptureRef(tp) case _ => super.toText(tp) } diff --git a/tests/neg-custom-args/captures/reaches2.check b/tests/neg-custom-args/captures/reaches2.check index f646a9736395..03860ee4a01b 100644 --- a/tests/neg-custom-args/captures/reaches2.check +++ b/tests/neg-custom-args/captures/reaches2.check @@ -1,10 +1,10 @@ -- Error: tests/neg-custom-args/captures/reaches2.scala:8:10 ----------------------------------------------------------- 8 | ps.map((x, y) => compose1(x, y)) // error // error | ^ - |reference (ps : List[(box A => A, box A => A)]) @reachCapability is not included in the allowed capture set {} + |reference ps* is not included in the allowed capture set {} |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? (ex$15: caps.Exists) -> A^? -- Error: tests/neg-custom-args/captures/reaches2.scala:8:13 ----------------------------------------------------------- 8 | ps.map((x, y) => compose1(x, y)) // error // error | ^ - |reference (ps : List[(box A => A, box A => A)]) @reachCapability is not included in the allowed capture set {} + |reference ps* is not included in the allowed capture set {} |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? (ex$15: caps.Exists) -> A^? 
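The patches above introduce capture-set polymorphism (`CapSet`, `caps.capsOf`, the `X^` parameter syntax) and reach capabilities (`xs*`), and the updated `cc-poly-source-capability.scala` test together with the new `cc.md` section shows how they combine. The following standalone sketch restates that pattern outside of any patch in this series; it assumes `language.experimental.captureChecking` and reuses the `Async`, `Listener`, and `Source` names from the test, while the object name is made up for illustration.

```scala
import language.experimental.captureChecking
import annotation.experimental
import caps.{CapSet, Capability}

@experimental object PolySourceSketch:

  class Async extends Capability   // a user-defined capability class
  class Listener

  def listener(async: Async): Listener^{async} = ???

  class Source[X^]:   // X^ ranges over capture sets (upper bound caps.CapSet)
    private var listeners: Set[Listener^{X^}] = Set.empty
    def register(x: Listener^{X^}): Unit =
      listeners += x
    def allListeners: Set[Listener^{X^}] = listeners

  def test(async1: Async, others: List[Async]) =
    // X is instantiated explicitly to the capabilities of async1 plus
    // everything reachable through `others` (the reach capability others*).
    val src = Source[CapSet^{async1, others*}]
    src.register(listener(async1))
    others.map(listener).foreach(src.register)
    val ls: Set[Listener^{async1, others*}] = src.allListeners
```

Without the explicit `CapSet^{...}` type argument the capture-set parameter would simply be inferred, as in the `g(d1, d2)` call of `cc-poly-1.scala`.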
From 520100beee5a10728bde7d9919cfc1eca71c27c3 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 9 Jul 2024 11:06:18 +0200 Subject: [PATCH 326/827] Introduced @unboxed parameters --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 1 + library/src/scala/caps.scala | 2 ++ 2 files changed, 3 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index e8a853469f6f..b7aa74ea2a92 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1050,6 +1050,7 @@ class Definitions { @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") + @tu lazy val UnboxedAnnot: ClassSymbol = requiredClass("scala.caps.unboxed") @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 967246041082..5e675f7d4341 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -39,6 +39,8 @@ import annotation.{experimental, compileTimeOnly} */ final class untrackedCaptures extends annotation.StaticAnnotation + final class unboxed extends annotation.StaticAnnotation + object unsafe: extension [T](x: T) From 77082767faf1f56ce5db16a11df48bfff29459f8 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 9 Jul 2024 15:10:13 +0200 Subject: [PATCH 327/827] Fix accidentally enabled warning about open classes --- compiler/src/dotty/tools/dotc/typer/Namer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 32467de77264..83964417a6f1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1613,7 +1613,7 @@ class Namer { typer: Typer => else if pclazz.isEffectivelySealed && pclazz.associatedFile != cls.associatedFile then if pclazz.is(Sealed) && !pclazz.is(JavaDefined) then report.error(UnableToExtendSealedClass(pclazz), cls.srcPos) - else if sourceVersion.isAtLeast(`3.6`) then + else if sourceVersion.isAtLeast(future) then checkFeature(nme.adhocExtensions, i"Unless $pclazz is declared 'open', its extension in a separate file", cls.topLevelClass, From 2b04423d4ee2d561411dde1ea8708ac64f690997 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 9 Jul 2024 15:25:18 +0200 Subject: [PATCH 328/827] Disable special treatment of eta expansions in recheckClosure --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 7 +++++++ compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- .../neg-custom-args/captures/effect-swaps-explicit.check | 4 ++-- tests/neg-custom-args/captures/levels.check | 7 ++----- tests/neg-custom-args/captures/vars-simple.check | 8 +++----- tests/neg-custom-args/captures/vars.check | 7 ++----- 6 files changed, 17 insertions(+), 18 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 053795429bc9..df185c589d9e 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ 
b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -33,6 +33,13 @@ object ccConfig: */ inline val optimizedRefinements = false + /** If enabled, use a special path in recheckClosure for closures + * that are eta expansions. This can improve some error messages but + * currently leads to unsoundess for handlng reach capabilities. + * TODO: The unsoundness needs followin up. + */ + inline val handleEtaExpansionsSpecially = false + /** If true, use existential capture set variables */ def useExistentials(using Context) = Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.5`) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 8eb2f2420369..ce969047524d 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -628,7 +628,7 @@ class CheckCaptures extends Recheck, SymTransformer: // Constrain closure's parameters and result from the expected type before // rechecking the body. val res = recheckClosure(expr, pt, forceDependent = true) - if !isEtaExpansion(mdef) then + if !(isEtaExpansion(mdef) && ccConfig.handleEtaExpansionsSpecially) then // If closure is an eta expanded method reference it's better to not constrain // its internals early since that would give error messages in generated code // which are less intelligible. diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.check b/tests/neg-custom-args/captures/effect-swaps-explicit.check index 47559ab97568..264dfa663d39 100644 --- a/tests/neg-custom-args/captures/effect-swaps-explicit.check +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.check @@ -25,5 +25,5 @@ -- Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:68:15 --------------------------------------------- 68 | Result.make: //lbl ?=> // error, escaping label from Result | ^^^^^^^^^^^ - |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): - | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result + |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9, contextual$9}, box E^?]]^): + | box Future[box T^?]^{fr, contextual$9, contextual$9} leaks into outer capture set of type parameter T of method make in object Result diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index 2dae3ec3bbc6..ddfa7c051211 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -5,13 +5,10 @@ | that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Ref | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/levels.scala:24:11 --------------------------------------- +-- Error: tests/neg-custom-args/captures/levels.scala:24:11 ------------------------------------------------------------ 24 | r.setV(g) // error | ^ - | Found: box (x: String) ->{cap3} String - | Required: box (x$0: String) ->? String + | reference (cap3 : CC^) is not included in the allowed capture set ? of value r | | Note that reference (cap3 : CC^), defined in method scope | cannot be included in outer capture set ? 
of value r - | - | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/vars-simple.check b/tests/neg-custom-args/captures/vars-simple.check index 2ef301b6ec1f..e9671f775c22 100644 --- a/tests/neg-custom-args/captures/vars-simple.check +++ b/tests/neg-custom-args/captures/vars-simple.check @@ -8,13 +8,11 @@ | since at least one of their capture sets contains the root capability `cap` | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:16:8 ----------------------------------- +-- Error: tests/neg-custom-args/captures/vars-simple.scala:16:8 -------------------------------------------------------- 16 | a = g // error | ^ - | Found: box (x: String) ->{cap3} String - | Required: box (x: String) ->{cap1, cap2} String - | - | longer explanation available when compiling with `-explain` + | reference (cap3 : Cap) is not included in the allowed capture set {cap1, cap2} + | of an enclosing function literal with expected type box String ->{cap1, cap2} String -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:17:12 ---------------------------------- 17 | b = List(g) // error | ^^^^^^^ diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index e4b1e71a2000..0d3c2e0f2e11 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -5,16 +5,13 @@ | | Note that reference (cap3 : Cap), defined in method scope | cannot be included in outer capture set {cap1} of variable a --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:25:8 ------------------------------------------ +-- Error: tests/neg-custom-args/captures/vars.scala:25:8 --------------------------------------------------------------- 25 | a = g // error | ^ - | Found: (x: String) ->{cap3} String - | Required: (x$0: String) ->{cap1} String + | reference (cap3 : Cap) is not included in the allowed capture set {cap1} of variable a | | Note that reference (cap3 : Cap), defined in method scope | cannot be included in outer capture set {cap1} of variable a - | - | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:27:12 ----------------------------------------- 27 | b = List(g) // error | ^^^^^^^ From 074be8202ffda69d93ea58c43a8c3f3e77a671d6 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 9 Jul 2024 16:55:48 +0200 Subject: [PATCH 329/827] Implement @unboxed annotation exemption for reach capabilities of parameters --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 2 + .../dotty/tools/dotc/cc/CheckCaptures.scala | 77 ++++++++++++++++--- .../dotty/tools/dotc/transform/Recheck.scala | 5 +- tests/neg/leak-problem.scala | 31 ++++++++ 4 files changed, 104 insertions(+), 11 deletions(-) create mode 100644 tests/neg/leak-problem.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index df185c589d9e..b52d53d1ac99 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -40,6 +40,8 @@ object ccConfig: */ inline val handleEtaExpansionsSpecially = false + val useUnboxedParams = true + /** If true, use existential capture set variables */ def useExistentials(using Context) = Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.5`) diff --git 
a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index ce969047524d..80da90e1b62f 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -62,6 +62,9 @@ object CheckCaptures: val res = cur cur = cur.outer res + + def ownerString(using Context): String = + if owner.isAnonymousFunction then "enclosing function" else owner.show end Env /** Similar normal substParams, but this is an approximating type map that @@ -386,21 +389,68 @@ class CheckCaptures extends Recheck, SymTransformer: val included = cs.filter: c => c.stripReach match case ref: TermRef => - val isVisible = isVisibleFromEnv(ref.symbol.owner) - if !isVisible && c.isReach then + //if c.isReach then println(i"REACH $c in ${env.owner}") + //assert(!env.owner.isAnonymousFunction) + val refSym = ref.symbol + val refOwner = refSym.owner + val isVisible = isVisibleFromEnv(refOwner) + if !isVisible && c.isReach && refSym.is(Param) && refOwner == env.owner then + if refSym.hasAnnotation(defn.UnboxedAnnot) then + capt.println(i"exempt: $ref in $refOwner") + else // Reach capabilities that go out of scope have to be approximated - // by their underlyiong capture set. See i20503.scala. - checkSubset(CaptureSet.ofInfo(c), env.captured, pos, provenance(env)) + // by their underlying capture set, which cannot be universal. + // Reach capabilities of @unboxed parameters are exempted. + val cs = CaptureSet.ofInfo(c) + if ccConfig.useUnboxedParams then + cs.disallowRootCapability: () => + report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) + checkSubset(cs, env.captured, pos, provenance(env)) isVisible case ref: ThisType => isVisibleFromEnv(ref.cls) case _ => false - capt.println(i"Include call or box capture $included from $cs in ${env.owner}") checkSubset(included, env.captured, pos, provenance(env)) + capt.println(i"Include call or box capture $included from $cs in ${env.owner} --> ${env.captured}") + end markFree /** Include references captured by the called method in the current environment stack */ def includeCallCaptures(sym: Symbol, pos: SrcPos)(using Context): Unit = if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos) + private val prefixCalls = util.EqHashSet[GenericApply]() + private val unboxedArgs = util.EqHashSet[Tree]() + + def handleCall(meth: Symbol, call: GenericApply, eval: () => Type)(using Context): Type = + if prefixCalls.remove(call) then return eval() + + val unboxedParamNames = + meth.rawParamss.flatMap: params => + params.collect: + case param if param.hasAnnotation(defn.UnboxedAnnot) => + param.name + .toSet + + def markUnboxedArgs(call: GenericApply): Unit = call.fun.tpe.widen match + case MethodType(pnames) => + for (pname, arg) <- pnames.lazyZip(call.args) do + if unboxedParamNames.contains(pname) then + unboxedArgs.add(arg) + case _ => + + def markPrefixCalls(tree: Tree): Unit = tree match + case tree: GenericApply => + prefixCalls.add(tree) + markUnboxedArgs(tree) + markPrefixCalls(tree.fun) + case _ => + + markUnboxedArgs(call) + markPrefixCalls(call.fun) + val res = eval() + includeCallCaptures(meth, call.srcPos) + res + end handleCall + override def recheckIdent(tree: Ident, pt: Type)(using Context): Type = if tree.symbol.is(Method) then if tree.symbol.info.isParameterless then @@ -470,7 +520,6 @@ class CheckCaptures extends Recheck, SymTransformer: */ override def recheckApply(tree: Apply, pt: Type)(using Context): Type = 
val meth = tree.fun.symbol - includeCallCaptures(meth, tree.srcPos) // Unsafe box/unbox handlng, only for versions < 3.3 def mapArgUsing(f: Type => Type) = @@ -503,7 +552,7 @@ class CheckCaptures extends Recheck, SymTransformer: tp.derivedCapturingType(forceBox(parent), refs) mapArgUsing(forceBox) else - Existential.toCap(super.recheckApply(tree, pt)) match + handleCall(meth, tree, () => Existential.toCap(super.recheckApply(tree, pt))) match case appType @ CapturingType(appType1, refs) => tree.fun match case Select(qual, _) @@ -521,6 +570,13 @@ class CheckCaptures extends Recheck, SymTransformer: case appType => appType end recheckApply + override def recheckArg(arg: Tree, formal: Type)(using Context): Type = + val argType = recheck(arg, formal) + if unboxedArgs.remove(arg) && ccConfig.useUnboxedParams then + capt.println(i"charging deep capture set of $arg: ${argType} = ${CaptureSet.deepCaptureSet(argType)}") + markFree(CaptureSet.deepCaptureSet(argType), arg.srcPos) + argType + private def isDistinct(xs: List[Type]): Boolean = xs match case x :: xs1 => xs1.isEmpty || !xs1.contains(x) && isDistinct(xs1) case Nil => true @@ -589,6 +645,7 @@ class CheckCaptures extends Recheck, SymTransformer: end instantiate override def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = + val meth = tree.symbol if ccConfig.useSealed then val TypeApply(fn, args) = tree val polyType = atPhase(thisPhase.prev): @@ -596,13 +653,13 @@ class CheckCaptures extends Recheck, SymTransformer: def isExempt(sym: Symbol) = sym.isTypeTestOrCast || sym == defn.Compiletime_erasedValue for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do - if !isExempt(tree.symbol) then - def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" + if !isExempt(meth) then + def where = if meth.exists then i" in an argument of $meth" else "" disallowRootCapabilitiesIn(arg.knownType, NoSymbol, i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) - Existential.toCap(super.recheckTypeApply(tree, pt)) + handleCall(meth, tree, () => Existential.toCap(super.recheckTypeApply(tree, pt))) override def recheckBlock(tree: Block, pt: Type)(using Context): Type = inNestedLevel(super.recheckBlock(tree, pt)) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 3aec18dc2bd0..93d41f06063d 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -289,6 +289,9 @@ abstract class Recheck extends Phase, SymTransformer: /** A hook to massage the type of an applied method; currently not overridden */ protected def prepareFunction(funtpe: MethodType, meth: Symbol)(using Context): MethodType = funtpe + protected def recheckArg(arg: Tree, formal: Type)(using Context): Type = + recheck(arg, formal) + def recheckApply(tree: Apply, pt: Type)(using Context): Type = val funtpe0 = recheck(tree.fun) // reuse the tree's type on signature polymorphic methods, instead of using the (wrong) rechecked one @@ -303,7 +306,7 @@ abstract class Recheck extends Phase, SymTransformer: else fntpe.paramInfos def recheckArgs(args: List[Tree], formals: List[Type], prefs: List[ParamRef]): List[Type] = args match case arg :: args1 => - val argType = recheck(arg, normalizeByName(formals.head)) + val argType = recheckArg(arg, normalizeByName(formals.head)) val 
formals1 = if fntpe.isParamDependent then formals.tail.map(_.substParam(prefs.head, argType)) diff --git a/tests/neg/leak-problem.scala b/tests/neg/leak-problem.scala new file mode 100644 index 000000000000..354d54d86707 --- /dev/null +++ b/tests/neg/leak-problem.scala @@ -0,0 +1,31 @@ +import language.experimental.captureChecking + +// Some capabilities that should be used locally +trait Async: + // some method + def read(): Unit +def usingAsync[X](op: Async^ => X): X = ??? + +case class Box[+T](get: T) + +def useBoxedAsync(x: Box[Async^]): Unit = + val t0 = x + val t1 = t0.get // error + t1.read() + +def useBoxedAsync1(x: Box[Async^]): Unit = x.get.read() // error + +def test(): Unit = + val useBoxedAsync2 = (x: Box[Async^]) => + val t0 = x + val t1 = x.get // error + t1.read() + + val f: Box[Async^] => Unit = (x: Box[Async^]) => useBoxedAsync(x) + + def boom(x: Async^): () ->{f} Unit = + () => f(Box(x)) + + val leaked = usingAsync[() ->{f} Unit](boom) + + leaked() // scope violation \ No newline at end of file From 1640340872f16b526337e80cfbbda06d4810029a Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 9 Jul 2024 17:02:46 +0200 Subject: [PATCH 330/827] Add @unboxed to failing tests and standard library file Buffer.scala --- .../src/scala/collection/mutable/Buffer.scala | 4 +-- tests/neg-custom-args/captures/i15749a.scala | 4 ++- tests/neg-custom-args/captures/reaches.check | 36 +++++++++++-------- tests/neg-custom-args/captures/reaches.scala | 7 ++-- .../captures/widen-reach.check | 14 +++----- tests/neg/i20503.scala | 6 ++-- tests/neg/leak-problem-unboxed.scala | 32 +++++++++++++++++ .../pos-custom-args/captures/dep-reach.scala | 5 +-- tests/pos-custom-args/captures/reaches.scala | 4 ++- tests/pos/cc-poly-source-capability.scala | 3 +- tests/pos/cc-poly-source.scala | 3 +- tests/pos/gears-probem-1.scala | 3 +- tests/pos/i18699.scala | 6 ++-- tests/pos/reach-capability.scala | 5 +-- tests/pos/reach-problem.scala | 17 +++++++-- 15 files changed, 105 insertions(+), 44 deletions(-) create mode 100644 tests/neg/leak-problem-unboxed.scala diff --git a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala index f9aa9cf28c72..3ff614bfc556 100644 --- a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala +++ b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala @@ -15,7 +15,7 @@ package mutable import scala.annotation.nowarn import language.experimental.captureChecking - +import caps.unboxed /** A `Buffer` is a growable and shrinkable `Seq`. */ trait Buffer[A] @@ -180,7 +180,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] override def iterableFactory: SeqFactory[IndexedBuffer] = IndexedBuffer - def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + def flatMapInPlace(@unboxed f: A => IterableOnce[A]^): this.type = { // There's scope for a better implementation which copies elements in place. 
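+    // `f` is @unboxed so that its reach capability `f*` can be used below;
+    // callers are instead charged with the deep capture set of the actual
+    // argument (see `recheckArg` in CheckCaptures).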
var i = 0 val s = size diff --git a/tests/neg-custom-args/captures/i15749a.scala b/tests/neg-custom-args/captures/i15749a.scala index 0158928f4e39..109a73b2b130 100644 --- a/tests/neg-custom-args/captures/i15749a.scala +++ b/tests/neg-custom-args/captures/i15749a.scala @@ -1,4 +1,6 @@ import caps.cap +import caps.unboxed + class Unit object u extends Unit @@ -16,7 +18,7 @@ def test = def force[A](thunk: Unit ->{cap} A): A = thunk(u) - def forceWrapper[A](mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = + def forceWrapper[A](@unboxed mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = // Γ ⊢ mx: Wrapper[□ {cap} Unit => A] // `force` should be typed as ∀(□ {cap} Unit -> A) A, but it can not strictMap[Unit ->{mx*} A, A](mx)(t => force[A](t)) // error // should work diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index f20dbdf311ad..6fdbdefea206 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -1,12 +1,12 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:23:11 -------------------------------------- -23 | cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:24:11 -------------------------------------- +24 | cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} | ^^^^^^^^^^^^^^^^^^^^^^^ | Found: List[box () ->{f} Unit] | Required: List[box () ->{xs*} Unit] | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:34:7 --------------------------------------- -34 | (() => f.write()) :: Nil // error since {f*} !<: {xs*} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:35:7 --------------------------------------- +35 | (() => f.write()) :: Nil // error since {f*} !<: {xs*} | ^^^^^^^^^^^^^^^^^^^^^^^ | Found: List[box () ->{f} Unit] | Required: box List[box () ->{xs*} Unit]^? @@ -15,34 +15,42 @@ | cannot be included in outer capture set {xs*} of value cur | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:37:6 ------------------------------------------------------------ -37 | var cur: List[Proc] = xs // error: Illegal type for var +-- Error: tests/neg-custom-args/captures/reaches.scala:38:6 ------------------------------------------------------------ +38 | var cur: List[Proc] = xs // error: Illegal type for var | ^ | Mutable variable cur cannot have type List[box () => Unit] since | the part box () => Unit of that type captures the root capability `cap`. --- Error: tests/neg-custom-args/captures/reaches.scala:44:15 ----------------------------------------------------------- -44 | val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref +-- Error: tests/neg-custom-args/captures/reaches.scala:45:15 ----------------------------------------------------------- +45 | val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref | ^^^^^^^^^^^^^^^ | Sealed type variable T cannot be instantiated to List[box () => Unit] since | the part box () => Unit of that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Ref | leaking as part of its result. 
--- Error: tests/neg-custom-args/captures/reaches.scala:54:31 ----------------------------------------------------------- -54 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error +-- Error: tests/neg-custom-args/captures/reaches.scala:55:31 ----------------------------------------------------------- +55 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error | ^^^^^^^^^^^^^^^^^^^^ | Sealed type variable A cannot be instantiated to box () => Unit since | that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Id | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:63:27 -------------------------------------- -63 | val f1: File^{id*} = id(f) // error, since now id(f): File^ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:64:27 -------------------------------------- +64 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ | Found: File^{id, f} | Required: File^{id*} | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:80:5 ------------------------------------------------------------ -80 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) +-- Error: tests/neg-custom-args/captures/reaches.scala:81:5 ------------------------------------------------------------ +81 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) // error // error | ^^^^^^ | Reach capability cap and universal capability cap cannot both | appear in the type [B](f: ((box A ->{ps*} A, box A ->{ps*} A)) => B): List[B] of this expression +-- Error: tests/neg-custom-args/captures/reaches.scala:81:10 ----------------------------------------------------------- +81 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) // error // error + | ^ + | Local reach capability ps* leaks into capture scope of method mapCompose +-- Error: tests/neg-custom-args/captures/reaches.scala:81:13 ----------------------------------------------------------- +81 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) // error // error + | ^ + | Local reach capability ps* leaks into capture scope of method mapCompose diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index eadb76c69e5b..f3b4e532e1a2 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -1,5 +1,6 @@ //> using options -source 3.4 // (to make sure we use the sealed policy) +import caps.unboxed class File: def write(): Unit = ??? 
@@ -12,7 +13,7 @@ class Ref[T](init: T): def get: T = x def set(y: T) = { x = y } -def runAll0(xs: List[Proc]): Unit = +def runAll0(@unboxed xs: List[Proc]): Unit = var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR while cur.nonEmpty do val next: () ->{xs*} Unit = cur.head @@ -22,7 +23,7 @@ def runAll0(xs: List[Proc]): Unit = usingFile: f => cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} -def runAll1(xs: List[Proc]): Unit = +def runAll1(@unboxed xs: List[Proc]): Unit = val cur = Ref[List[() ->{xs*} Unit]](xs) // OK, by revised VAR while cur.get.nonEmpty do val next: () ->{xs*} Unit = cur.get.head @@ -77,4 +78,4 @@ def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = z => g(f(z)) def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = - ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) + ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) // error // error diff --git a/tests/neg-custom-args/captures/widen-reach.check b/tests/neg-custom-args/captures/widen-reach.check index dbe811ab99ec..06d21ff445d8 100644 --- a/tests/neg-custom-args/captures/widen-reach.check +++ b/tests/neg-custom-args/captures/widen-reach.check @@ -1,17 +1,11 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/widen-reach.scala:13:26 ---------------------------------- +-- Error: tests/neg-custom-args/captures/widen-reach.scala:13:26 ------------------------------------------------------- 13 | val y2: IO^ -> IO^ = y1.foo // error | ^^^^^^ - | Found: IO^ ->{x*} IO^{x*} - | Required: IO^ -> (ex$6: caps.Exists) -> IO^{ex$6} - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/widen-reach.scala:14:30 ---------------------------------- + | Local reach capability x* leaks into capture scope of method test +-- Error: tests/neg-custom-args/captures/widen-reach.scala:14:30 ------------------------------------------------------- 14 | val y3: IO^ -> IO^{x*} = y1.foo // error | ^^^^^^ - | Found: IO^ ->{x*} IO^{x*} - | Required: IO^ -> IO^{x*} - | - | longer explanation available when compiling with `-explain` + | Local reach capability x* leaks into capture scope of method test -- [E164] Declaration Error: tests/neg-custom-args/captures/widen-reach.scala:9:6 -------------------------------------- 9 | val foo: IO^ -> IO^ = x => x // error | ^ diff --git a/tests/neg/i20503.scala b/tests/neg/i20503.scala index e8770b934ad1..463e4e3f9686 100644 --- a/tests/neg/i20503.scala +++ b/tests/neg/i20503.scala @@ -1,4 +1,5 @@ import language.experimental.captureChecking +import caps.unboxed class List[+A]: def head: A = ??? @@ -7,10 +8,11 @@ class List[+A]: def foreach[U](f: A => U): Unit = ??? def nonEmpty: Boolean = ??? -def runOps(ops: List[() => Unit]): Unit = +def runOps(@unboxed ops: List[() => Unit]): Unit = // See i20156, due to limitation in expressiveness of current system, // we could map over the list of impure elements. OK with existentials. 
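+  // With `ops` @unboxed, the reach capability `ops*` used by `foreach` is charged
+  // to runOps's callers instead, which is why the pure function types in `main`
+  // below are errors.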
ops.foreach(op => op()) def main(): Unit = - val f: List[() => Unit] -> Unit = runOps // error + val f: List[() => Unit] -> Unit = (ops: List[() => Unit]) => runOps(ops) // error + val _: List[() => Unit] -> Unit = runOps // error diff --git a/tests/neg/leak-problem-unboxed.scala b/tests/neg/leak-problem-unboxed.scala new file mode 100644 index 000000000000..8591145583e2 --- /dev/null +++ b/tests/neg/leak-problem-unboxed.scala @@ -0,0 +1,32 @@ +import language.experimental.captureChecking +import caps.unboxed + +// Some capabilities that should be used locally +trait Async: + // some method + def read(): Unit +def usingAsync[X](op: Async^ => X): X = ??? + +case class Box[+T](get: T) + +def useBoxedAsync(@unboxed x: Box[Async^]): Unit = + val t0 = x + val t1 = t0.get // ok + t1.read() + +def useBoxedAsync1(@unboxed x: Box[Async^]): Unit = x.get.read() // ok + +def test(): Unit = + + val f: Box[Async^] => Unit = (x: Box[Async^]) => useBoxedAsync(x) // error + val _: Box[Async^] => Unit = useBoxedAsync(_) // error + val _: Box[Async^] => Unit = useBoxedAsync // error + val _ = useBoxedAsync(_) // error + val _ = useBoxedAsync // error + + def boom(x: Async^): () ->{f} Unit = + () => f(Box(x)) + + val leaked = usingAsync[() ->{f} Unit](boom) + + leaked() // scope violation \ No newline at end of file diff --git a/tests/pos-custom-args/captures/dep-reach.scala b/tests/pos-custom-args/captures/dep-reach.scala index 56343fbf8e53..177422565736 100644 --- a/tests/pos-custom-args/captures/dep-reach.scala +++ b/tests/pos-custom-args/captures/dep-reach.scala @@ -1,9 +1,10 @@ +import caps.unboxed object Test: class C type Proc = () => Unit def f(c: C^, d: C^): () ->{c, d} Unit = - def foo(xs: Proc*): () ->{xs*} Unit = + def foo(@unboxed xs: Proc*): () ->{xs*} Unit = xs.head val a: () ->{c} Unit = () => () val b: () ->{d} Unit = () => () @@ -12,7 +13,7 @@ object Test: def g(c: C^, d: C^): () ->{c, d} Unit = - def foo(xs: Seq[() => Unit]): () ->{xs*} Unit = + def foo(@unboxed xs: Seq[() => Unit]): () ->{xs*} Unit = xs.head val a: () ->{c} Unit = () => () diff --git a/tests/pos-custom-args/captures/reaches.scala b/tests/pos-custom-args/captures/reaches.scala index b1045e3c999a..976fadc4b649 100644 --- a/tests/pos-custom-args/captures/reaches.scala +++ b/tests/pos-custom-args/captures/reaches.scala @@ -1,3 +1,5 @@ +import caps.unboxed + class C def f(xs: List[C^]) = val y = xs @@ -20,7 +22,7 @@ extension [A](x: A) def :: (xs: List[A]): List[A] = ??? 
object Nil extends List[Nothing] -def runAll(xs: List[Proc]): Unit = +def runAll(@unboxed xs: List[Proc]): Unit = var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR while cur.nonEmpty do val next: () ->{xs*} Unit = cur.head diff --git a/tests/pos/cc-poly-source-capability.scala b/tests/pos/cc-poly-source-capability.scala index 9a21b2d5b802..0f61e98e5068 100644 --- a/tests/pos/cc-poly-source-capability.scala +++ b/tests/pos/cc-poly-source-capability.scala @@ -1,6 +1,7 @@ import language.experimental.captureChecking import annotation.experimental import caps.{CapSet, Capability} +import caps.unboxed @experimental object Test: @@ -17,7 +18,7 @@ import caps.{CapSet, Capability} def allListeners: Set[Listener^{X^}] = listeners - def test1(async1: Async, others: List[Async]) = + def test1(async1: Async, @unboxed others: List[Async]) = val src = Source[CapSet^{async1, others*}] val lst1 = listener(async1) val lsts = others.map(listener) diff --git a/tests/pos/cc-poly-source.scala b/tests/pos/cc-poly-source.scala index 939f1f682dc8..09b4a3024e3c 100644 --- a/tests/pos/cc-poly-source.scala +++ b/tests/pos/cc-poly-source.scala @@ -1,6 +1,7 @@ import language.experimental.captureChecking import annotation.experimental import caps.{CapSet, Capability} +import caps.unboxed @experimental object Test: @@ -24,7 +25,7 @@ import caps.{CapSet, Capability} val ls = src.allListeners val _: Set[Listener^{lbl1, lbl2}] = ls - def test2(lbls: List[Label^]) = + def test2(@unboxed lbls: List[Label^]) = def makeListener(lbl: Label^): Listener^{lbl} = ??? val listeners = lbls.map(makeListener) val src = Source[CapSet^{lbls*}] diff --git a/tests/pos/gears-probem-1.scala b/tests/pos/gears-probem-1.scala index f5c7fdfd0a3c..c683db9ce01d 100644 --- a/tests/pos/gears-probem-1.scala +++ b/tests/pos/gears-probem-1.scala @@ -1,4 +1,5 @@ import language.experimental.captureChecking +import caps.unboxed trait Future[+T]: def await: T @@ -16,7 +17,7 @@ class Result[+T, +E]: case class Err[+E](e: E) extends Result[Nothing, E] case class Ok[+T](x: T) extends Result[T, Nothing] -extension [T](fs: Seq[Future[T]^]) +extension [T](@unboxed fs: Seq[Future[T]^]) def awaitAll = val collector//: Collector[T]{val futures: Seq[Future[T]^{fs*}]} = Collector(fs) diff --git a/tests/pos/i18699.scala b/tests/pos/i18699.scala index 4bd3fbaad890..54390f6bdd71 100644 --- a/tests/pos/i18699.scala +++ b/tests/pos/i18699.scala @@ -1,7 +1,9 @@ import language.experimental.captureChecking +import caps.unboxed + trait Cap: def use: Int = 42 -def test2(cs: List[Cap^]): Unit = +def test2(@unboxed cs: List[Cap^]): Unit = val t0: Cap^{cs*} = cs.head // error - var t1: Cap^{cs*} = cs.head // error \ No newline at end of file + var t1: Cap^{cs*} = cs.head // error diff --git a/tests/pos/reach-capability.scala b/tests/pos/reach-capability.scala index d551113eb05b..5ad7534061b1 100644 --- a/tests/pos/reach-capability.scala +++ b/tests/pos/reach-capability.scala @@ -1,6 +1,7 @@ import language.experimental.captureChecking import annotation.experimental -import caps.{Capability} +import caps.Capability +import caps.unboxed @experimental object Test2: @@ -11,7 +12,7 @@ import caps.{Capability} class Listener - def test2(lbls: List[Label]) = + def test2(@unboxed lbls: List[Label]) = def makeListener(lbl: Label): Listener^{lbl} = ??? 
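+    // `lbls` is @unboxed so that its reach capability `lbls*` may be used when
+    // mapping over the list (markFree exempts reach capabilities of @unboxed parameters).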
val listeners = lbls.map(makeListener) // should work diff --git a/tests/pos/reach-problem.scala b/tests/pos/reach-problem.scala index 60dd1d4667a7..29d46687a219 100644 --- a/tests/pos/reach-problem.scala +++ b/tests/pos/reach-problem.scala @@ -1,9 +1,22 @@ import language.experimental.captureChecking +import caps.unboxed class Box[T](items: Seq[T^]): def getOne: T^{items*} = ??? object Box: - def getOne[T](items: Seq[T^]): T^{items*} = + def getOne[T](@unboxed items: Seq[T^]): T^{items*} = val bx = Box(items) - bx.getOne \ No newline at end of file + bx.getOne +/* + def head[T](items: Seq[T^]): Unit = + val is = items + val x = is.head + () + + def head2[X^, T](items: Seq[T^{X^}]): T^{X^} = + items.head + + def head3[T](items: Seq[T^]): Unit = + head2[caps.CapSet^{items*}, T](items) +*/ \ No newline at end of file From 4333d3ed1cb9c8e2f56c75a972f01bdd789dcf25 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 10 Jul 2024 09:54:30 +0200 Subject: [PATCH 331/827] Fix isAlwaysPure The condition on capturing types did not make sense. In a type T^{} with an empty capture set `T` can still be a type variable that's instantiated to a type with a capture set. Instead, T^cs is always pure if T is always pure. For instance `List[T]^{p}` is always pure. That's important in the context of the standard library, where such a type usually results from an instantiation of a type variable such as `C[T]^{p}`. --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index b52d53d1ac99..7fe027b6623d 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -282,8 +282,6 @@ extension (tp: Type) val sym = tp.typeSymbol if sym.isClass then sym.isPureClass else tp.superType.isAlwaysPure - case CapturingType(parent, refs) => - parent.isAlwaysPure || refs.isAlwaysEmpty case tp: TypeProxy => tp.superType.isAlwaysPure case tp: AndType => From 90749ea2d6c47b05ff2932ecd216f2cc5cb50103 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 10 Jul 2024 11:08:24 +0200 Subject: [PATCH 332/827] Fix special capture set handling in recheckApply, Step 1 Step1: refactor The logic was querying the original types of trees, but we want the rechecked types instead. --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 4 +- .../dotty/tools/dotc/cc/CheckCaptures.scala | 65 ++++++++++--------- .../dotty/tools/dotc/transform/Recheck.scala | 25 +++++-- .../captures/caseclass/Test_2.scala | 2 +- tests/neg-custom-args/captures/reaches.check | 10 +++ tests/neg-custom-args/captures/reaches.scala | 2 +- .../captures/unsound-reach-2.scala | 2 +- .../captures/unsound-reach-3.scala | 4 +- .../captures/unsound-reach-4.check | 11 ++-- .../captures/unsound-reach-4.scala | 4 +- .../pos-custom-args/captures/caseclass.scala | 2 +- .../dotc/transform/Recheck.scala | 32 +++++---- 12 files changed, 101 insertions(+), 62 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 7fe027b6623d..34dc5fc395d6 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -274,8 +274,8 @@ extension (tp: Type) case _ => tp - /** Is type known to be always pure by its class structure, - * so that adding a capture set to it would not make sense? + /** Is type known to be always pure by its class structure? 
+ * In that case, adding a capture set to it would not make sense. */ def isAlwaysPure(using Context): Boolean = tp.dealias match case tp: (TypeRef | AppliedType) => diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 80da90e1b62f..c32028a22471 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -463,8 +463,8 @@ class CheckCaptures extends Recheck, SymTransformer: /** A specialized implementation of the selection rule. * - * E |- f: f{ m: Cr R }^Cf - * ----------------------- + * E |- f: T{ m: R^Cr }^{f} + * ------------------------ * E |- f.m: R^C * * The implementation picks as `C` one of `{f}` or `Cr`, depending on the @@ -507,17 +507,6 @@ class CheckCaptures extends Recheck, SymTransformer: selType }//.showing(i"recheck sel $tree, $qualType = $result") - /** A specialized implementation of the apply rule. - * - * E |- f: Ra ->Cf Rr^Cr - * E |- a: Ra^Ca - * --------------------- - * E |- f a: Rr^C - * - * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the - * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr - * and Cr otherwise. - */ override def recheckApply(tree: Apply, pt: Type)(using Context): Type = val meth = tree.fun.symbol @@ -552,31 +541,47 @@ class CheckCaptures extends Recheck, SymTransformer: tp.derivedCapturingType(forceBox(parent), refs) mapArgUsing(forceBox) else - handleCall(meth, tree, () => Existential.toCap(super.recheckApply(tree, pt))) match - case appType @ CapturingType(appType1, refs) => - tree.fun match - case Select(qual, _) - if !tree.fun.symbol.isConstructor - && !qual.tpe.isBoxedCapturing - && !tree.args.exists(_.tpe.isBoxedCapturing) - && qual.tpe.captureSet.mightSubcapture(refs) - && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) - => - val callCaptures = tree.args.foldLeft(qual.tpe.captureSet): (cs, arg) => - cs ++ arg.tpe.captureSet - appType.derivedCapturingType(appType1, callCaptures) - .showing(i"narrow $tree: $appType, refs = $refs, qual-cs = ${qual.tpe.captureSet} = $result", capt) - case _ => appType - case appType => appType + handleCall(meth, tree, () => super.recheckApply(tree, pt)) end recheckApply - override def recheckArg(arg: Tree, formal: Type)(using Context): Type = + protected override + def recheckArg(arg: Tree, formal: Type)(using Context): Type = val argType = recheck(arg, formal) if unboxedArgs.remove(arg) && ccConfig.useUnboxedParams then capt.println(i"charging deep capture set of $arg: ${argType} = ${CaptureSet.deepCaptureSet(argType)}") markFree(CaptureSet.deepCaptureSet(argType), arg.srcPos) argType + /** A specialized implementation of the apply rule. + * + * E |- f: Ra ->Cf Rr^Cr + * E |- a: Ra^Ca + * --------------------- + * E |- f a: Rr^C + * + * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the + * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr + * and Cr otherwise. 
+ */ + protected override + def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = + Existential.toCap(super.recheckApplication(tree, qualType, funType, argTypes)) match + case appType @ CapturingType(appType1, refs) + if qualType.exists + && !tree.fun.symbol.isConstructor + && !qualType.isBoxedCapturing // TODO: This is not strng enough, we also have + // to exclude existentials in function results + && !argTypes.exists(_.isBoxedCapturing) + && qualType.captureSet.mightSubcapture(refs) + && argTypes.forall(_.captureSet.mightSubcapture(refs)) + => + val callCaptures = tree.args.foldLeft(qualType.captureSet): (cs, arg) => + cs ++ arg.tpe.captureSet + appType.derivedCapturingType(appType1, callCaptures) + .showing(i"narrow $tree: $appType, refs = $refs, qual-cs = ${qualType.captureSet} = $result", capt) + case appType => + appType + private def isDistinct(xs: List[Type]): Boolean = xs match case x :: xs1 => xs1.isEmpty || !xs1.contains(x) && isDistinct(xs1) case Nil => true diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 93d41f06063d..4b8a8f072774 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -202,11 +202,13 @@ abstract class Recheck extends Phase, SymTransformer: tree.tpe def recheckSelect(tree: Select, pt: Type)(using Context): Type = - val Select(qual, name) = tree + recheckSelection(tree, recheckSelectQualifier(tree), tree.name, pt) + + def recheckSelectQualifier(tree: Select)(using Context): Type = val proto = if tree.symbol == defn.Any_asInstanceOf then WildcardType else AnySelectionProto - recheckSelection(tree, recheck(qual, proto).widenIfUnstable, name, pt) + recheck(tree.qualifier, proto).widenIfUnstable def recheckSelection(tree: Select, qualType: Type, name: Name, sharpen: Denotation => Denotation)(using Context): Type = @@ -292,8 +294,23 @@ abstract class Recheck extends Phase, SymTransformer: protected def recheckArg(arg: Tree, formal: Type)(using Context): Type = recheck(arg, formal) + /** A hook to check all the parts of an application: + * @param tree the application `fn(args)` + * @param qualType if the `fn` is a select `q.m`, the type of the qualifier `q`, + * otherwise NoType + * @param funType the method type of `fn` + * @param argTypes the types of the arguments + */ + protected def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = + constFold(tree, instantiate(funType, argTypes, tree.fun.symbol)) + def recheckApply(tree: Apply, pt: Type)(using Context): Type = - val funtpe0 = recheck(tree.fun) + val (funtpe0, qualType) = tree.fun match + case fun: Select => + val qualType = recheckSelectQualifier(fun) + (recheckSelection(fun, qualType, fun.name, WildcardType), qualType) + case _ => + (recheck(tree.fun), NoType) // reuse the tree's type on signature polymorphic methods, instead of using the (wrong) rechecked one val funtpe1 = if tree.fun.symbol.originalSignaturePolymorphic.exists then tree.fun.tpe else funtpe0 funtpe1.widen match @@ -316,7 +333,7 @@ abstract class Recheck extends Phase, SymTransformer: assert(formals.isEmpty) Nil val argTypes = recheckArgs(tree.args, formals, fntpe.paramRefs) - constFold(tree, instantiate(fntpe, argTypes, tree.fun.symbol)) + recheckApplication(tree, qualType, fntpe1, argTypes) //.showing(i"typed app $tree : $fntpe with ${tree.args}%, % : $argTypes%, 
% = $result") case tp => assert(false, i"unexpected type of ${tree.fun}: $tp") diff --git a/tests/neg-custom-args/captures/caseclass/Test_2.scala b/tests/neg-custom-args/captures/caseclass/Test_2.scala index 9d97d5537c72..e54ab1774202 100644 --- a/tests/neg-custom-args/captures/caseclass/Test_2.scala +++ b/tests/neg-custom-args/captures/caseclass/Test_2.scala @@ -22,4 +22,4 @@ def test(c: C) = val y4 = y3 match case Ref(xx) => xx - val y4c: () ->{x3} Unit = y4 + val y4c: () ->{y3} Unit = y4 diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index 6fdbdefea206..615e3901a437 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -34,6 +34,16 @@ | that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Id | leaking as part of its result. +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:57:6 --------------------------------------- +57 | id(() => f.write()) // error + | ^^^^^^^^^^^^^^^^^^^ + | Found: () => Unit + | Required: () ->? Unit + | + | Note that the universal capability `cap` + | cannot be included in capture set ? + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:64:27 -------------------------------------- 64 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index f3b4e532e1a2..e602d4b34493 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -54,7 +54,7 @@ class Id[-A, +B >: A](): def test = val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error usingFile: f => - id(() => f.write()) // escape, if it was not for the error above + id(() => f.write()) // error def attack2 = val id: File^ -> File^ = x => x diff --git a/tests/neg-custom-args/captures/unsound-reach-2.scala b/tests/neg-custom-args/captures/unsound-reach-2.scala index 384af31ee1fc..5bea18bdccba 100644 --- a/tests/neg-custom-args/captures/unsound-reach-2.scala +++ b/tests/neg-custom-args/captures/unsound-reach-2.scala @@ -20,7 +20,7 @@ def bad(): Unit = var escaped: File^{backdoor*} = null withFile("hello.txt"): f => - boom.use(f): // error + boom.use(f): new Consumer[File^{backdoor*}]: // error def apply(f1: File^{backdoor*}) = escaped = f1 diff --git a/tests/neg-custom-args/captures/unsound-reach-3.scala b/tests/neg-custom-args/captures/unsound-reach-3.scala index 985beb7ae55d..0063216e957e 100644 --- a/tests/neg-custom-args/captures/unsound-reach-3.scala +++ b/tests/neg-custom-args/captures/unsound-reach-3.scala @@ -16,8 +16,8 @@ def bad(): Unit = val boom: Foo[File^{backdoor*}] = backdoor var escaped: File^{backdoor*} = null - withFile("hello.txt"): f => - escaped = boom.use(f) // error + withFile("hello.txt"): f => // error + escaped = boom.use(f) // boom.use: (x: File^) -> File^{backdoor*}, it is a selection so reach capabilities are allowed // f: File^, so there is no reach capabilities diff --git a/tests/neg-custom-args/captures/unsound-reach-4.check b/tests/neg-custom-args/captures/unsound-reach-4.check index 9abf86c772d5..d359b298555e 100644 --- a/tests/neg-custom-args/captures/unsound-reach-4.check +++ b/tests/neg-custom-args/captures/unsound-reach-4.check @@ -1,5 +1,6 @@ --- Error: 
tests/neg-custom-args/captures/unsound-reach-4.scala:22:19 --------------------------------------------------- -22 | escaped = boom.use(f) // error - | ^^^^^^^^ - | Reach capability backdoor* and universal capability cap cannot both - | appear in the type (x: F): box File^{backdoor*} of this expression +-- Error: tests/neg-custom-args/captures/unsound-reach-4.scala:21:25 --------------------------------------------------- +21 | withFile("hello.txt"): f => // error + | ^ + | Reach capability backdoor* and universal capability cap cannot both + | appear in the type (f: File^) ->{backdoor*} Unit of this expression +22 | escaped = boom.use(f) diff --git a/tests/neg-custom-args/captures/unsound-reach-4.scala b/tests/neg-custom-args/captures/unsound-reach-4.scala index 14050b4afff2..bc66085614f2 100644 --- a/tests/neg-custom-args/captures/unsound-reach-4.scala +++ b/tests/neg-custom-args/captures/unsound-reach-4.scala @@ -18,5 +18,5 @@ def bad(): Unit = val boom: Foo[File^{backdoor*}] = backdoor var escaped: File^{backdoor*} = null - withFile("hello.txt"): f => - escaped = boom.use(f) // error + withFile("hello.txt"): f => // error + escaped = boom.use(f) diff --git a/tests/pos-custom-args/captures/caseclass.scala b/tests/pos-custom-args/captures/caseclass.scala index 0aa656eaf9cb..fe7e02b1b6c2 100644 --- a/tests/pos-custom-args/captures/caseclass.scala +++ b/tests/pos-custom-args/captures/caseclass.scala @@ -31,4 +31,4 @@ object test2: val y4 = y3 match case Ref(xx) => xx - val y4c: () ->{x3} Unit = y4 + val y4c: () ->{y3} Unit = y4 diff --git a/tests/pos-with-compiler-cc/dotc/transform/Recheck.scala b/tests/pos-with-compiler-cc/dotc/transform/Recheck.scala index c524bbb7702f..e566a6d7482a 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/Recheck.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/Recheck.scala @@ -170,23 +170,15 @@ abstract class Recheck extends Phase, SymTransformer: def recheckIdent(tree: Ident)(using Context): Type = tree.tpe - def recheckSelect(tree: Select, pt: Type)(using Context): Type = + def recheckSelectQualifier(tree: Select)(using Conext): Type = val Select(qual, name) = tree val proto = if tree.symbol == defn.Any_asInstanceOf then WildcardType else AnySelectionProto - recheckSelection(tree, recheck(qual, proto).widenIfUnstable, name, pt) + recheck(qual, proto).widenIfUnstable - /** When we select the `apply` of a function with type such as `(=> A) => B`, - * we need to convert the parameter type `=> A` to `() ?=> A`. See doc comment - * of `mapExprType`. - */ - def normalizeByName(mbr: SingleDenotation)(using Context): SingleDenotation = mbr.info match - case mt: MethodType if mt.paramInfos.exists(_.isInstanceOf[ExprType]) => - mbr.derivedSingleDenotation(mbr.symbol, - mt.derivedLambdaType(paramInfos = mt.paramInfos.map(_.mapExprType))) - case _ => - mbr + def recheckSelect(tree: Select, pt: Type)(using Context): Type = + recheckSelection(tree, recheckSelectQualifier(tree), name, pt) def recheckSelection(tree: Select, qualType: Type, name: Name, sharpen: Denotation => Denotation)(using Context): Type = @@ -210,11 +202,21 @@ abstract class Recheck extends Phase, SymTransformer: constFold(tree, newType) //.showing(i"recheck select $qualType . 
$name : ${mbr.info} = $result") - /** Keep the symbol of the `select` but re-infer its type */ def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context): Type = recheckSelection(tree, qualType, name, sharpen = identity[Denotation]) + /** When we select the `apply` of a function with type such as `(=> A) => B`, + * we need to convert the parameter type `=> A` to `() ?=> A`. See doc comment + * of `mapExprType`. + */ + def normalizeByName(mbr: SingleDenotation)(using Context): SingleDenotation = mbr.info match + case mt: MethodType if mt.paramInfos.exists(_.isInstanceOf[ExprType]) => + mbr.derivedSingleDenotation(mbr.symbol, + mt.derivedLambdaType(paramInfos = mt.paramInfos.map(_.mapExprType))) + case _ => + mbr + def recheckBind(tree: Bind, pt: Type)(using Context): Type = tree match case Bind(name, body) => recheck(body, pt) @@ -260,7 +262,11 @@ abstract class Recheck extends Phase, SymTransformer: protected def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = mt.instantiate(argTypes) + def recheckApplication(tree: Tree, qualType: Type, argTypes: List[Type])(using Context): Type = + constFold(tree, instantiate(fntpe, argTypes, tree.fun.symbol)) + def recheckApply(tree: Apply, pt: Type)(using Context): Type = + fun val funTp = recheck(tree.fun) // reuse the tree's type on signature polymorphic methods, instead of using the (wrong) rechecked one val funtpe = if tree.fun.symbol.originalSignaturePolymorphic.exists then tree.fun.tpe else funTp From bc818a911374f4f0e278880714b3cc0d501e2d8c Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 10 Jul 2024 11:58:38 +0200 Subject: [PATCH 333/827] Fix special capture set handling in recheckApply, Step 2 Step 2: Change the logic. The previous one was unsound. The new logic is a bot too conservative. I left comments in tests where it could be improved. --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 22 ++++++++----------- .../src/scala/collection/Iterator.scala | 5 +++-- .../immutable/LazyListIterable.scala | 4 +++- .../src/scala/collection/mutable/Buffer.scala | 6 ++++- tests/neg-custom-args/captures/reaches.check | 2 +- .../captures/nested-classes-2.scala | 2 +- .../colltest5/CollectionStrawManCC5_1.scala | 12 +++++++++- 7 files changed, 33 insertions(+), 20 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index c32028a22471..283df3254ab5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -554,14 +554,15 @@ class CheckCaptures extends Recheck, SymTransformer: /** A specialized implementation of the apply rule. * - * E |- f: Ra ->Cf Rr^Cr - * E |- a: Ra^Ca + * E |- q: Tq^Cq + * E |- q.f: Ta ->Cf Tr^Cr + * E |- a: Ta * --------------------- - * E |- f a: Rr^C + * E |- f(a): Tr^C * - * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the - * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr - * and Cr otherwise. + * The implementation picks `C` as `Cq` instead of `Cr`, if + * 1. The argument(s) Ta are always pure + * 2. `Cq` might subcapture `Cr`. 
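+   *
+   *  If either condition fails, `C` stays `Cr`. For instance, `id(f)` in
+   *  tests/neg-custom-args/captures/reaches.scala is typed `File^` rather than
+   *  the previous `File^{id, f}`, since the argument `f: File^` is not always pure.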
*/ protected override def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = @@ -569,15 +570,10 @@ class CheckCaptures extends Recheck, SymTransformer: case appType @ CapturingType(appType1, refs) if qualType.exists && !tree.fun.symbol.isConstructor - && !qualType.isBoxedCapturing // TODO: This is not strng enough, we also have - // to exclude existentials in function results - && !argTypes.exists(_.isBoxedCapturing) + && argTypes.forall(_.isAlwaysPure) && qualType.captureSet.mightSubcapture(refs) - && argTypes.forall(_.captureSet.mightSubcapture(refs)) => - val callCaptures = tree.args.foldLeft(qualType.captureSet): (cs, arg) => - cs ++ arg.tpe.captureSet - appType.derivedCapturingType(appType1, callCaptures) + appType.derivedCapturingType(appType1, qualType.captureSet) .showing(i"narrow $tree: $appType, refs = $refs, qual-cs = ${qualType.captureSet} = $result", capt) case appType => appType diff --git a/scala2-library-cc/src/scala/collection/Iterator.scala b/scala2-library-cc/src/scala/collection/Iterator.scala index 4d1b0ed4ff95..d75f136e0253 100644 --- a/scala2-library-cc/src/scala/collection/Iterator.scala +++ b/scala2-library-cc/src/scala/collection/Iterator.scala @@ -17,7 +17,7 @@ import scala.annotation.tailrec import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics import language.experimental.captureChecking - +import caps.unsafe.unsafeAssumePure /** Iterators are data structures that allow to iterate over a sequence * of elements. They have a `hasNext` method for checking @@ -595,7 +595,8 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite private[this] def nextCur(): Unit = { cur = null - cur = f(self.next()).iterator + cur = f(self.next()).iterator.unsafeAssumePure + // !!! see explanation in colltest5.CollectionStrawManCC5_1.flatMap why the unsafeAssumePure is needed _hasNext = -1 } diff --git a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala index 2f7b017a6729..baf89ef54aab 100644 --- a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala +++ b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala @@ -25,6 +25,7 @@ import scala.runtime.Statics import language.experimental.captureChecking import annotation.unchecked.uncheckedCaptures import caps.untrackedCaptures +import caps.unsafe.unsafeAssumePure /** This class implements an immutable linked list. We call it "lazy" * because it computes its elements only when they are needed. @@ -1041,7 +1042,8 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { var itHasNext = false var rest = restRef // var rest = restRef.elem while (!itHasNext && !rest.isEmpty) { - it = f(rest.head).iterator + it = f(rest.head).iterator.unsafeAssumePure + // !!! 
see explanation in colltest5.CollectionStrawManCC5_1.flatMap why the unsafeAssumePure is needed itHasNext = it.hasNext if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw rest = rest.tail diff --git a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala index 3ff614bfc556..eebc2fbc1417 100644 --- a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala +++ b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala @@ -16,6 +16,7 @@ package mutable import scala.annotation.nowarn import language.experimental.captureChecking import caps.unboxed +import caps.unsafe.unsafeAssumePure /** A `Buffer` is a growable and shrinkable `Seq`. */ trait Buffer[A] @@ -185,7 +186,10 @@ trait IndexedBuffer[A] extends IndexedSeq[A] var i = 0 val s = size val newElems = new Array[(IterableOnce[A]^{f*})](s) - while (i < s) { newElems(i) = f(this(i)); i += 1 } + while i < s do + newElems(i) = f(this(i)).unsafeAssumePure + // !!! see explanation in colltest5.CollectionStrawManCC5_1.flatMap why the unsafeAssumePure is needed + i += 1 clear() i = 0 while (i < s) { ++=(newElems(i)); i += 1 } diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index 615e3901a437..96859290075a 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -47,7 +47,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:64:27 -------------------------------------- 64 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ - | Found: File^{id, f} + | Found: File^ | Required: File^{id*} | | longer explanation available when compiling with `-explain` diff --git a/tests/pos-custom-args/captures/nested-classes-2.scala b/tests/pos-custom-args/captures/nested-classes-2.scala index 744635ee949b..17ee3b8f3cb0 100644 --- a/tests/pos-custom-args/captures/nested-classes-2.scala +++ b/tests/pos-custom-args/captures/nested-classes-2.scala @@ -20,5 +20,5 @@ def test2(x1: (() => Unit), x2: (() => Unit) => Unit) = def test3(y1: (() => Unit), y2: (() => Unit) => Unit) = val cc1: C1^{y1, y2} = C1(y1, y2) val cc2 = cc1.c2(x1, x2) - val cc3: cc1.C2^{cc1, x1, x2} = cc2 + val cc3: cc1.C2^{cc2} = cc2 diff --git a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala index 5443758afa72..da15fee9849b 100644 --- a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala +++ b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala @@ -5,6 +5,7 @@ import Predef.{augmentString as _, wrapString as _, *} import scala.reflect.ClassTag import annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import annotation.tailrec +import caps.unsafe.unsafeAssumePure /** A strawman architecture for new collections. It contains some * example collection classes and methods with the intent to expose @@ -555,7 +556,16 @@ object CollectionStrawMan5 { private var myCurrent: Iterator[B]^{this, f} = Iterator.empty private def current = { while (!myCurrent.hasNext && self.hasNext) - myCurrent = f(self.next()).iterator + myCurrent = f(self.next()).iterator.unsafeAssumePure + // !!! This is unsafe since the iterator's result could return a capability + // depending on the argument self.next() of type A. To exclude that we'd have + // to define f to be of type EX c. A ->{c} IterableOnce[B]^{c}, i.e. `c` may + // not depend on A. 
But to get there we have to + // - improve the way we express existentials using `^` + // - rework the recheckApplication code to cater for this. Right now + // we cannot do anything since `A` is not always pure. But if we took + // the existential scope of the result into account, this could provide + // a solution. myCurrent } def hasNext = current.hasNext From bb94805775ca9e3d7b1b2df24d78b2751640cd17 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 28 Jun 2024 19:08:42 +0200 Subject: [PATCH 334/827] Refactor CaptureRef operations Make all operations final methods on Type or CaptureRef --- .../dotty/tools/dotc/core/TypeComparer.scala | 12 -- .../src/dotty/tools/dotc/core/Types.scala | 145 +++++++++--------- 2 files changed, 69 insertions(+), 88 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index d6868e569a05..e63aab484605 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2834,18 +2834,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false Existential.isExistentialVar(tp1) && canInstantiateWith(assocExistentials) - /** Are tp1, tp2 termRefs that can be linked? This should never be called - * normally, since exietential variables appear only in capture sets - * which are in annotations that are ignored during normal typing. The real - * work is done in CaptureSet#subsumes which calls linkOK directly. - */ - private def existentialVarsConform(tp1: Type, tp2: Type) = - tp2 match - case tp2: TermParamRef => tp1 match - case tp1: CaptureRef if tp1.isTrackableRef => subsumesExistentially(tp2, tp1) - case _ => false - case _ => false - /** bi-map taking existentials to the left of a comparison to matching * existentials on the right. This is not a bijection. However * we have `forwards(backwards(bv)) == bv` for an existentially bound `bv`. diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 71de9ef0e0f9..9ed9fc079975 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -518,9 +518,43 @@ object Types extends TypeUtils { def isDeclaredVarianceLambda: Boolean = false /** Is this type a CaptureRef that can be tracked? - * This is true for all ThisTypes or ParamRefs but only for some NamedTypes. + * This is true for + * - all ThisTypes and all TermParamRef, + * - stable TermRefs with NoPrefix or ThisTypes as prefixes, + * - the root capability `caps.cap` + * - abstract or parameter TypeRefs that derive from caps.CapSet + * - annotated types that represent reach or maybe capabilities + */ + final def isTrackableRef(using Context): Boolean = this match + case _: (ThisType | TermParamRef) => + true + case tp: TermRef => + ((tp.prefix eq NoPrefix) + || tp.symbol.is(ParamAccessor) && tp.prefix.isThisTypeOf(tp.symbol.owner) + || tp.isRootCapability + ) && !tp.symbol.isOneOf(UnstableValueFlags) + case tp: TypeRef => + tp.symbol.isAbstractOrParamType && tp.derivesFrom(defn.Caps_CapSet) + case tp: TypeParamRef => + tp.derivesFrom(defn.Caps_CapSet) + case AnnotatedType(parent, annot) => + annot.symbol == defn.ReachCapabilityAnnot + || annot.symbol == defn.MaybeCapabilityAnnot + case _ => + false + + /** The capture set of a type. 
This is: + * - For trackable capture references: The singleton capture set consisting of + * just the reference, provided the underlying capture set of their info is not empty. + * - For other capture references: The capture set of their info + * - For all other types: The result of CaptureSet.ofType */ - def isTrackableRef(using Context): Boolean = false + final def captureSet(using Context): CaptureSet = this match + case tp: CaptureRef if tp.isTrackableRef => + val cs = tp.captureSetOfInfo + if cs.isAlwaysEmpty then cs else tp.singletonCaptureSet + case tp: SingletonCaptureRef => tp.captureSetOfInfo + case _ => CaptureSet.ofType(this, followResult = false) /** Does this type contain wildcard types? */ final def containsWildcardTypes(using Context) = @@ -1657,9 +1691,6 @@ object Types extends TypeUtils { case _ => if (isRepeatedParam) this.argTypesHi.head else this } - /** The capture set of this type. Overridden and cached in CaptureRef */ - def captureSet(using Context): CaptureSet = CaptureSet.ofType(this, followResult = false) - // ----- Normalizing typerefs over refined types ---------------------------- /** If this normalizes* to a refinement type that has a refinement for `name` (which might be followed @@ -2275,31 +2306,54 @@ object Types extends TypeUtils { isTrackableRef && (isMaxCapability || !captureSetOfInfo.isAlwaysEmpty) /** Is this a reach reference of the form `x*`? */ - def isReach(using Context): Boolean = false // overridden in AnnotatedType + final def isReach(using Context): Boolean = this match + case AnnotatedType(_, annot) => annot.symbol == defn.ReachCapabilityAnnot + case _ => false /** Is this a maybe reference of the form `x?`? */ - def isMaybe(using Context): Boolean = false // overridden in AnnotatedType + final def isMaybe(using Context): Boolean = this match + case AnnotatedType(_, annot) => annot.symbol == defn.MaybeCapabilityAnnot + case _ => false - def stripReach(using Context): CaptureRef = this // overridden in AnnotatedType - def stripMaybe(using Context): CaptureRef = this // overridden in AnnotatedType + final def stripReach(using Context): CaptureRef = + if isReach then + val AnnotatedType(parent: CaptureRef, _) = this: @unchecked + parent + else this + + final def stripMaybe(using Context): CaptureRef = + if isMaybe then + val AnnotatedType(parent: CaptureRef, _) = this: @unchecked + parent + else this /** Is this reference the generic root capability `cap` ? */ - def isRootCapability(using Context): Boolean = false + final def isRootCapability(using Context): Boolean = this match + case tp: TermRef => tp.name == nme.CAPTURE_ROOT && tp.symbol == defn.captureRoot + case _ => false /** Is this reference capability that does not derive from another capability ? 
*/ - def isMaxCapability(using Context): Boolean = false + final def isMaxCapability(using Context): Boolean = this match + case tp: TermRef => tp.isRootCapability || tp.info.derivesFrom(defn.Caps_Exists) + case tp: TermParamRef => tp.underlying.derivesFrom(defn.Caps_Exists) + case _ => false /** Normalize reference so that it can be compared with `eq` for equality */ - def normalizedRef(using Context): CaptureRef = this + final def normalizedRef(using Context): CaptureRef = this match + case tp @ AnnotatedType(parent: CaptureRef, annot) if isTrackableRef => + tp.derivedAnnotatedType(parent.normalizedRef, annot) + case tp: TermRef if isTrackableRef => + tp.symbol.termRef + case _ => this /** The capture set consisting of exactly this reference */ - def singletonCaptureSet(using Context): CaptureSet.Const = + final def singletonCaptureSet(using Context): CaptureSet.Const = if mySingletonCaptureSet == null then mySingletonCaptureSet = CaptureSet(this.normalizedRef) mySingletonCaptureSet.uncheckedNN /** The capture set of the type underlying this reference */ - def captureSetOfInfo(using Context): CaptureSet = + final def captureSetOfInfo(using Context): CaptureSet = if ctx.runId == myCaptureSetRunId then myCaptureSet.nn else if myCaptureSet.asInstanceOf[AnyRef] eq CaptureSet.Pending then CaptureSet.empty else @@ -2312,17 +2366,9 @@ object Types extends TypeUtils { myCaptureSetRunId = ctx.runId computed - def invalidateCaches() = + final def invalidateCaches() = myCaptureSetRunId = NoRunId - override def captureSet(using Context): CaptureSet = - val cs = captureSetOfInfo - if isTrackableRef then - if cs.isAlwaysEmpty then cs else singletonCaptureSet - else dealias match - case _: (TypeRef | TypeParamRef) => CaptureSet.empty - case _ => cs - end CaptureRef trait SingletonCaptureRef extends SingletonType, CaptureRef @@ -3015,26 +3061,6 @@ object Types extends TypeUtils { def implicitName(using Context): TermName = name def underlyingRef: TermRef = this - /** A term reference can be tracked if it is a local term ref to a value - * or a method term parameter. References to term parameters of classes - * cannot be tracked individually. - * They are subsumed in the capture sets of the enclosing class. - * TODO: ^^^ What about call-by-name? 
- */ - override def isTrackableRef(using Context) = - ((prefix eq NoPrefix) - || symbol.is(ParamAccessor) && prefix.isThisTypeOf(symbol.owner) - || isRootCapability - ) && !symbol.isOneOf(UnstableValueFlags) - - override def isRootCapability(using Context): Boolean = - name == nme.CAPTURE_ROOT && symbol == defn.captureRoot - - override def isMaxCapability(using Context): Boolean = - symbol == defn.captureRoot || info.derivesFrom(defn.Caps_Exists) - - override def normalizedRef(using Context): CaptureRef = - if isTrackableRef then symbol.termRef else this } abstract case class TypeRef(override val prefix: Type, @@ -3089,8 +3115,6 @@ object Types extends TypeUtils { def validated(using Context): this.type = this - override def isTrackableRef(using Context) = - symbol.isAbstractOrParamType && derivesFrom(defn.Caps_CapSet) } final class CachedTermRef(prefix: Type, designator: Designator, hc: Int) extends TermRef(prefix, designator) { @@ -3192,8 +3216,6 @@ object Types extends TypeUtils { // can happen in IDE if `cls` is stale } - override def isTrackableRef(using Context) = true - override def computeHash(bs: Binders): Int = doHash(bs, tref) override def eql(that: Type): Boolean = that match { @@ -4836,9 +4858,6 @@ object Types extends TypeUtils { type BT = TermLambda def kindString: String = "Term" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) - override def isTrackableRef(using Context) = true - override def isMaxCapability(using Context) = - underlying.derivesFrom(defn.Caps_Exists) } private final class TermParamRefImpl(binder: TermLambda, paramNum: Int) extends TermParamRef(binder, paramNum) @@ -4867,8 +4886,6 @@ object Types extends TypeUtils { case bound: OrType => occursIn(bound.tp1, fromBelow) || occursIn(bound.tp2, fromBelow) case _ => false } - - override def isTrackableRef(using Context) = derivesFrom(defn.Caps_CapSet) } private final class TypeParamRefImpl(binder: TypeLambda, paramNum: Int) extends TypeParamRef(binder, paramNum) @@ -5834,30 +5851,6 @@ object Types extends TypeUtils { isRefiningCache } - override def isTrackableRef(using Context) = - (isReach || isMaybe) && parent.isTrackableRef - - /** Is this a reach reference of the form `x*`? */ - override def isReach(using Context): Boolean = - annot.symbol == defn.ReachCapabilityAnnot - - /** Is this a reach reference of the form `x*`? 
*/ - override def isMaybe(using Context): Boolean = - annot.symbol == defn.MaybeCapabilityAnnot - - override def stripReach(using Context): CaptureRef = - if isReach then parent.asInstanceOf[CaptureRef] else this - - override def stripMaybe(using Context): CaptureRef = - if isMaybe then parent.asInstanceOf[CaptureRef] else this - - override def normalizedRef(using Context): CaptureRef = - if isReach then AnnotatedType(stripReach.normalizedRef, annot) else this - - override def captureSet(using Context): CaptureSet = - if isReach then super.captureSet - else CaptureSet.ofType(this, followResult = false) - // equals comes from case class; no matching override is needed override def computeHash(bs: Binders): Int = From cc95088671374772d0923dc93621e0b1b074af32 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 28 Jun 2024 21:34:09 +0200 Subject: [PATCH 335/827] Break out CaptureRef into a separate file Move extension methods on CaptureRef into CaptureRef itself or into CaptureOps --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 54 +++++++ .../src/dotty/tools/dotc/cc/CaptureRef.scala | 124 ++++++++++++++++ .../src/dotty/tools/dotc/cc/CaptureSet.scala | 27 ---- .../src/dotty/tools/dotc/core/TypeOps.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 136 +----------------- 5 files changed, 181 insertions(+), 162 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/cc/CaptureRef.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 34dc5fc395d6..889cb41e481b 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -183,6 +183,60 @@ extension (tree: Tree) extension (tp: Type) + /** Is this type a CaptureRef that can be tracked? + * This is true for + * - all ThisTypes and all TermParamRef, + * - stable TermRefs with NoPrefix or ThisTypes as prefixes, + * - the root capability `caps.cap` + * - abstract or parameter TypeRefs that derive from caps.CapSet + * - annotated types that represent reach or maybe capabilities + */ + final def isTrackableRef(using Context): Boolean = tp match + case _: (ThisType | TermParamRef) => + true + case tp: TermRef => + ((tp.prefix eq NoPrefix) + || tp.symbol.is(ParamAccessor) && tp.prefix.isThisTypeOf(tp.symbol.owner) + || tp.isRootCapability + ) && !tp.symbol.isOneOf(UnstableValueFlags) + case tp: TypeRef => + tp.symbol.isAbstractOrParamType && tp.derivesFrom(defn.Caps_CapSet) + case tp: TypeParamRef => + tp.derivesFrom(defn.Caps_CapSet) + case AnnotatedType(parent, annot) => + annot.symbol == defn.ReachCapabilityAnnot + || annot.symbol == defn.MaybeCapabilityAnnot + case _ => + false + + /** The capture set of a type. This is: + * - For trackable capture references: The singleton capture set consisting of + * just the reference, provided the underlying capture set of their info is not empty. 
+ * - For other capture references: The capture set of their info + * - For all other types: The result of CaptureSet.ofType + */ + final def captureSet(using Context): CaptureSet = tp match + case tp: CaptureRef if tp.isTrackableRef => + val cs = tp.captureSetOfInfo + if cs.isAlwaysEmpty then cs else tp.singletonCaptureSet + case tp: SingletonCaptureRef => tp.captureSetOfInfo + case _ => CaptureSet.ofType(tp, followResult = false) + + /** A type capturing `ref` */ + def capturing(ref: CaptureRef)(using Context): Type = + if tp.captureSet.accountsFor(ref) then tp + else CapturingType(tp, ref.singletonCaptureSet) + + /** A type capturing the capture set `cs`. If this type is already a capturing type + * the two capture sets are combined. + */ + def capturing(cs: CaptureSet)(using Context): Type = + if cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(tp.captureSet, frozen = true).isOK + then tp + else tp match + case CapturingType(parent, cs1) => parent.capturing(cs1 ++ cs) + case _ => CapturingType(tp, cs) + /** @pre `tp` is a CapturingType */ def derivedCapturingType(parent: Type, refs: CaptureSet)(using Context): Type = tp match case tp @ CapturingType(p, r) => diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala new file mode 100644 index 000000000000..6578da89bbf8 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -0,0 +1,124 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Decorators.* +import util.{SimpleIdentitySet, Property} +import typer.ErrorReporting.Addenda +import TypeComparer.subsumesExistentially +import util.common.alwaysTrue +import scala.collection.mutable +import CCState.* +import Periods.NoRunId +import compiletime.uninitialized +import StdNames.nme + +/** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs, + * as well as two kinds of AnnotatedTypes representing reach and maybe capabilities. + */ +trait CaptureRef extends TypeProxy, ValueType: + private var myCaptureSet: CaptureSet | Null = uninitialized + private var myCaptureSetRunId: Int = NoRunId + private var mySingletonCaptureSet: CaptureSet.Const | Null = null + + /** Is the reference tracked? This is true if it can be tracked and the capture + * set of the underlying type is not always empty. + */ + final def isTracked(using Context): Boolean = + this.isTrackableRef && (isMaxCapability || !captureSetOfInfo.isAlwaysEmpty) + + /** Is this a reach reference of the form `x*`? */ + final def isReach(using Context): Boolean = this match + case AnnotatedType(_, annot) => annot.symbol == defn.ReachCapabilityAnnot + case _ => false + + /** Is this a maybe reference of the form `x?`? */ + final def isMaybe(using Context): Boolean = this match + case AnnotatedType(_, annot) => annot.symbol == defn.MaybeCapabilityAnnot + case _ => false + + final def stripReach(using Context): CaptureRef = + if isReach then + val AnnotatedType(parent: CaptureRef, _) = this: @unchecked + parent + else this + + final def stripMaybe(using Context): CaptureRef = + if isMaybe then + val AnnotatedType(parent: CaptureRef, _) = this: @unchecked + parent + else this + + /** Is this reference the generic root capability `cap` ? */ + final def isRootCapability(using Context): Boolean = this match + case tp: TermRef => tp.name == nme.CAPTURE_ROOT && tp.symbol == defn.captureRoot + case _ => false + + /** Is this reference capability that does not derive from another capability ? 
*/ + final def isMaxCapability(using Context): Boolean = this match + case tp: TermRef => tp.isRootCapability || tp.info.derivesFrom(defn.Caps_Exists) + case tp: TermParamRef => tp.underlying.derivesFrom(defn.Caps_Exists) + case _ => false + + /** Normalize reference so that it can be compared with `eq` for equality */ + final def normalizedRef(using Context): CaptureRef = this match + case tp @ AnnotatedType(parent: CaptureRef, annot) if tp.isTrackableRef => + tp.derivedAnnotatedType(parent.normalizedRef, annot) + case tp: TermRef if tp.isTrackableRef => + tp.symbol.termRef + case _ => this + + /** The capture set consisting of exactly this reference */ + final def singletonCaptureSet(using Context): CaptureSet.Const = + if mySingletonCaptureSet == null then + mySingletonCaptureSet = CaptureSet(this.normalizedRef) + mySingletonCaptureSet.uncheckedNN + + /** The capture set of the type underlying this reference */ + final def captureSetOfInfo(using Context): CaptureSet = + if ctx.runId == myCaptureSetRunId then myCaptureSet.nn + else if myCaptureSet.asInstanceOf[AnyRef] eq CaptureSet.Pending then CaptureSet.empty + else + myCaptureSet = CaptureSet.Pending + val computed = CaptureSet.ofInfo(this) + if !isCaptureChecking || underlying.isProvisional then + myCaptureSet = null + else + myCaptureSet = computed + myCaptureSetRunId = ctx.runId + computed + + final def invalidateCaches() = + myCaptureSetRunId = NoRunId + + /** x subsumes x + * this subsumes this.f + * x subsumes y ==> x* subsumes y, x subsumes y? + * x subsumes y ==> x* subsumes y*, x? subsumes y? + * x: x1.type /\ x1 subsumes y ==> x subsumes y + */ + final def subsumes(y: CaptureRef)(using Context): Boolean = + (this eq y) + || this.isRootCapability + || y.match + case y: TermRef => + (y.prefix eq this) + || y.info.match + case y1: SingletonCaptureRef => this.subsumes(y1) + case _ => false + case MaybeCapability(y1) => this.stripMaybe.subsumes(y1) + case _ => false + || this.match + case ReachCapability(x1) => x1.subsumes(y.stripReach) + case x: TermRef => + x.info match + case x1: SingletonCaptureRef => x1.subsumes(y) + case _ => false + case x: TermParamRef => subsumesExistentially(x, y) + case _ => false + +end CaptureRef + +trait SingletonCaptureRef extends SingletonType, CaptureRef + diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index a2233f862e53..aa65db2375e8 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -152,33 +152,6 @@ sealed abstract class CaptureSet extends Showable: cs.addDependent(this)(using ctx, UnrecordedState) this - /** x subsumes x - * this subsumes this.f - * x subsumes y ==> x* subsumes y, x subsumes y? - * x subsumes y ==> x* subsumes y*, x? subsumes y? - * x: x1.type /\ x1 subsumes y ==> x subsumes y - */ - extension (x: CaptureRef) - private def subsumes(y: CaptureRef)(using Context): Boolean = - (x eq y) - || x.isRootCapability - || y.match - case y: TermRef => - (y.prefix eq x) - || y.info.match - case y1: SingletonCaptureRef => x.subsumes(y1) - case _ => false - case MaybeCapability(y1) => x.stripMaybe.subsumes(y1) - case _ => false - || x.match - case ReachCapability(x1) => x1.subsumes(y.stripReach) - case x: TermRef => - x.info match - case x1: SingletonCaptureRef => x1.subsumes(y) - case _ => false - case x: TermParamRef => subsumesExistentially(x, y) - case _ => false - /** {x} <:< this where <:< is subcapturing, but treating all variables * as frozen. 
*/ diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 3bc7a7223abb..8089735bdb0f 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -18,7 +18,7 @@ import typer.ForceDegree import typer.Inferencing.* import typer.IfBottom import reporting.TestingReporter -import cc.{CapturingType, derivedCapturingType, CaptureSet, isBoxed, isBoxedCapturing} +import cc.{CapturingType, derivedCapturingType, CaptureSet, captureSet, isBoxed, isBoxedCapturing} import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 9ed9fc079975..036795a3a57a 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -38,7 +38,8 @@ import config.Printers.{core, typr, matchTypes} import reporting.{trace, Message} import java.lang.ref.WeakReference import compiletime.uninitialized -import cc.{CapturingType, CaptureSet, derivedCapturingType, isBoxedCapturing, isCaptureChecking, isRetains, isRetainsLike} +import cc.{CapturingType, CaptureRef, CaptureSet, SingletonCaptureRef, isTrackableRef, + derivedCapturingType, isBoxedCapturing, isCaptureChecking, isRetains, isRetainsLike} import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable @@ -517,45 +518,6 @@ object Types extends TypeUtils { */ def isDeclaredVarianceLambda: Boolean = false - /** Is this type a CaptureRef that can be tracked? - * This is true for - * - all ThisTypes and all TermParamRef, - * - stable TermRefs with NoPrefix or ThisTypes as prefixes, - * - the root capability `caps.cap` - * - abstract or parameter TypeRefs that derive from caps.CapSet - * - annotated types that represent reach or maybe capabilities - */ - final def isTrackableRef(using Context): Boolean = this match - case _: (ThisType | TermParamRef) => - true - case tp: TermRef => - ((tp.prefix eq NoPrefix) - || tp.symbol.is(ParamAccessor) && tp.prefix.isThisTypeOf(tp.symbol.owner) - || tp.isRootCapability - ) && !tp.symbol.isOneOf(UnstableValueFlags) - case tp: TypeRef => - tp.symbol.isAbstractOrParamType && tp.derivesFrom(defn.Caps_CapSet) - case tp: TypeParamRef => - tp.derivesFrom(defn.Caps_CapSet) - case AnnotatedType(parent, annot) => - annot.symbol == defn.ReachCapabilityAnnot - || annot.symbol == defn.MaybeCapabilityAnnot - case _ => - false - - /** The capture set of a type. This is: - * - For trackable capture references: The singleton capture set consisting of - * just the reference, provided the underlying capture set of their info is not empty. - * - For other capture references: The capture set of their info - * - For all other types: The result of CaptureSet.ofType - */ - final def captureSet(using Context): CaptureSet = this match - case tp: CaptureRef if tp.isTrackableRef => - val cs = tp.captureSetOfInfo - if cs.isAlwaysEmpty then cs else tp.singletonCaptureSet - case tp: SingletonCaptureRef => tp.captureSetOfInfo - case _ => CaptureSet.ofType(this, followResult = false) - /** Does this type contain wildcard types? 
*/ final def containsWildcardTypes(using Context) = existsPart(_.isInstanceOf[WildcardType], StopAt.Static, forceLazy = false) @@ -2081,20 +2043,6 @@ object Types extends TypeUtils { case _ => this - /** A type capturing `ref` */ - def capturing(ref: CaptureRef)(using Context): Type = - if captureSet.accountsFor(ref) then this - else CapturingType(this, ref.singletonCaptureSet) - - /** A type capturing the capture set `cs`. If this type is already a capturing type - * the two capture sets are combined. - */ - def capturing(cs: CaptureSet)(using Context): Type = - if cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(captureSet, frozen = true).isOK then this - else this match - case CapturingType(parent, cs1) => parent.capturing(cs1 ++ cs) - case _ => CapturingType(this, cs) - /** The set of distinct symbols referred to by this type, after all aliases are expanded */ def coveringSet(using Context): Set[Symbol] = (new CoveringSetAccumulator).apply(Set.empty[Symbol], this) @@ -2293,86 +2241,6 @@ object Types extends TypeUtils { def isOverloaded(using Context): Boolean = false } - /** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs */ - trait CaptureRef extends TypeProxy, ValueType: - private var myCaptureSet: CaptureSet | Null = uninitialized - private var myCaptureSetRunId: Int = NoRunId - private var mySingletonCaptureSet: CaptureSet.Const | Null = null - - /** Is the reference tracked? This is true if it can be tracked and the capture - * set of the underlying type is not always empty. - */ - final def isTracked(using Context): Boolean = - isTrackableRef && (isMaxCapability || !captureSetOfInfo.isAlwaysEmpty) - - /** Is this a reach reference of the form `x*`? */ - final def isReach(using Context): Boolean = this match - case AnnotatedType(_, annot) => annot.symbol == defn.ReachCapabilityAnnot - case _ => false - - /** Is this a maybe reference of the form `x?`? */ - final def isMaybe(using Context): Boolean = this match - case AnnotatedType(_, annot) => annot.symbol == defn.MaybeCapabilityAnnot - case _ => false - - final def stripReach(using Context): CaptureRef = - if isReach then - val AnnotatedType(parent: CaptureRef, _) = this: @unchecked - parent - else this - - final def stripMaybe(using Context): CaptureRef = - if isMaybe then - val AnnotatedType(parent: CaptureRef, _) = this: @unchecked - parent - else this - - /** Is this reference the generic root capability `cap` ? */ - final def isRootCapability(using Context): Boolean = this match - case tp: TermRef => tp.name == nme.CAPTURE_ROOT && tp.symbol == defn.captureRoot - case _ => false - - /** Is this reference capability that does not derive from another capability ? 
*/ - final def isMaxCapability(using Context): Boolean = this match - case tp: TermRef => tp.isRootCapability || tp.info.derivesFrom(defn.Caps_Exists) - case tp: TermParamRef => tp.underlying.derivesFrom(defn.Caps_Exists) - case _ => false - - /** Normalize reference so that it can be compared with `eq` for equality */ - final def normalizedRef(using Context): CaptureRef = this match - case tp @ AnnotatedType(parent: CaptureRef, annot) if isTrackableRef => - tp.derivedAnnotatedType(parent.normalizedRef, annot) - case tp: TermRef if isTrackableRef => - tp.symbol.termRef - case _ => this - - /** The capture set consisting of exactly this reference */ - final def singletonCaptureSet(using Context): CaptureSet.Const = - if mySingletonCaptureSet == null then - mySingletonCaptureSet = CaptureSet(this.normalizedRef) - mySingletonCaptureSet.uncheckedNN - - /** The capture set of the type underlying this reference */ - final def captureSetOfInfo(using Context): CaptureSet = - if ctx.runId == myCaptureSetRunId then myCaptureSet.nn - else if myCaptureSet.asInstanceOf[AnyRef] eq CaptureSet.Pending then CaptureSet.empty - else - myCaptureSet = CaptureSet.Pending - val computed = CaptureSet.ofInfo(this) - if !isCaptureChecking || underlying.isProvisional then - myCaptureSet = null - else - myCaptureSet = computed - myCaptureSetRunId = ctx.runId - computed - - final def invalidateCaches() = - myCaptureSetRunId = NoRunId - - end CaptureRef - - trait SingletonCaptureRef extends SingletonType, CaptureRef - /** A trait for types that bind other types that refer to them. * Instances are: LambdaType, RecType. */ From 3667ab84cae22269ad0ae757d1e4986338f6d583 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 10 Jul 2024 18:34:25 +0200 Subject: [PATCH 336/827] Don't box arguments of any form of type cast or test Previously, only asInstanceOf was excluded. --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 1 + compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 11 +++++++++++ compiler/src/dotty/tools/dotc/cc/Setup.scala | 9 +++------ compiler/src/dotty/tools/dotc/core/Definitions.scala | 6 ++++++ .../captures/opaque-inline-problem.scala | 12 ++++++++++++ 5 files changed, 33 insertions(+), 6 deletions(-) create mode 100644 tests/pos-custom-args/captures/opaque-inline-problem.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 889cb41e481b..af17cdc6b11a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -573,6 +573,7 @@ extension (sym: Symbol) && sym != defn.Caps_unsafeBox && sym != defn.Caps_unsafeUnbox && !defn.isPolymorphicAfterErasure(sym) + && !defn.isTypeTestOrCast(sym) def isRefiningParamAccessor(using Context): Boolean = sym.is(ParamAccessor) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 283df3254ab5..f330421a2647 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -563,6 +563,17 @@ class CheckCaptures extends Recheck, SymTransformer: * The implementation picks `C` as `Cq` instead of `Cr`, if * 1. The argument(s) Ta are always pure * 2. `Cq` might subcapture `Cr`. + * TODO: We could generalize this as follows: + * + * If the function `f` does not have an `@unboxed` parameter, then + * any unboxing it does would be charged to the environment of the function + * so they have to appear in Cq. 
So another approximation of the + * result capability set is `Cq u Ca` where `Ca` is the capture set of the + * argument. + * If the function `f` does have an `@unboxed` parameter, then it could in addition + * unbox reach capabilities over its formal parameter. Therefore, the approximation + * would be `Cq u dcs(Ca)` instead. + * If the approximation is known to subcapture the declared result Cr, we pick it. */ protected override def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index cb74e2c71e73..26817cb838c6 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -439,12 +439,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tree @ TypeApply(fn, args) => traverse(fn) - fn match - case Select(qual, nme.asInstanceOf_) => - // No need to box type arguments of an asInstanceOf call. See #20224. - case _ => - for case arg: TypeTree <- args do - transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + if !defn.isTypeTestOrCast(fn.symbol) then + for case arg: TypeTree <- args do + transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed case tree: TypeDef if tree.symbol.isClass => val sym = tree.symbol diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index b7aa74ea2a92..8c2f448a4a5e 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1768,6 +1768,12 @@ class Definitions { def isPolymorphicAfterErasure(sym: Symbol): Boolean = (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq Object_synchronized) + def isTypeTestOrCast(sym: Symbol): Boolean = + (sym eq Any_isInstanceOf) + || (sym eq Any_asInstanceOf) + || (sym eq Any_typeTest) + || (sym eq Any_typeCast) + /** Is this type a `TupleN` type? * * @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]` diff --git a/tests/pos-custom-args/captures/opaque-inline-problem.scala b/tests/pos-custom-args/captures/opaque-inline-problem.scala new file mode 100644 index 000000000000..de9a4437becd --- /dev/null +++ b/tests/pos-custom-args/captures/opaque-inline-problem.scala @@ -0,0 +1,12 @@ +trait Async extends caps.Capability: + def group: Int + +object Async: + inline def current(using async: Async): async.type = async + opaque type Spawn <: Async = Async + def blocking[T](f: Spawn => T): T = ??? + +def main() = + Async.blocking: spawn => + val c = Async.current(using spawn) + val a = c.group From d8ace30d171f4d9b2e1864611e86d97c5e343942 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 11 Jul 2024 10:20:57 +0200 Subject: [PATCH 337/827] Revert "Fix special capture set handling in recheckApply, Step 2" This reverts commit f1f5a05d3d9461fd8437d0469583fe860d0116ba. 
# Conflicts: # compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 33 ++++++++----------- .../src/scala/collection/Iterator.scala | 5 ++- .../immutable/LazyListIterable.scala | 4 +-- .../src/scala/collection/mutable/Buffer.scala | 6 +--- tests/neg-custom-args/captures/reaches.check | 2 +- .../captures/nested-classes-2.scala | 2 +- .../colltest5/CollectionStrawManCC5_1.scala | 12 +------ 7 files changed, 20 insertions(+), 44 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index f330421a2647..c32028a22471 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -554,26 +554,14 @@ class CheckCaptures extends Recheck, SymTransformer: /** A specialized implementation of the apply rule. * - * E |- q: Tq^Cq - * E |- q.f: Ta ->Cf Tr^Cr - * E |- a: Ta + * E |- f: Ra ->Cf Rr^Cr + * E |- a: Ra^Ca * --------------------- - * E |- f(a): Tr^C + * E |- f a: Rr^C * - * The implementation picks `C` as `Cq` instead of `Cr`, if - * 1. The argument(s) Ta are always pure - * 2. `Cq` might subcapture `Cr`. - * TODO: We could generalize this as follows: - * - * If the function `f` does not have an `@unboxed` parameter, then - * any unboxing it does would be charged to the environment of the function - * so they have to appear in Cq. So another approximation of the - * result capability set is `Cq u Ca` where `Ca` is the capture set of the - * argument. - * If the function `f` does have an `@unboxed` parameter, then it could in addition - * unbox reach capabilities over its formal parameter. Therefore, the approximation - * would be `Cq u dcs(Ca)` instead. - * If the approximation is known to subcapture the declared result Cr, we pick it. + * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the + * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr + * and Cr otherwise. 
*/ protected override def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = @@ -581,10 +569,15 @@ class CheckCaptures extends Recheck, SymTransformer: case appType @ CapturingType(appType1, refs) if qualType.exists && !tree.fun.symbol.isConstructor - && argTypes.forall(_.isAlwaysPure) + && !qualType.isBoxedCapturing // TODO: This is not strng enough, we also have + // to exclude existentials in function results + && !argTypes.exists(_.isBoxedCapturing) && qualType.captureSet.mightSubcapture(refs) + && argTypes.forall(_.captureSet.mightSubcapture(refs)) => - appType.derivedCapturingType(appType1, qualType.captureSet) + val callCaptures = tree.args.foldLeft(qualType.captureSet): (cs, arg) => + cs ++ arg.tpe.captureSet + appType.derivedCapturingType(appType1, callCaptures) .showing(i"narrow $tree: $appType, refs = $refs, qual-cs = ${qualType.captureSet} = $result", capt) case appType => appType diff --git a/scala2-library-cc/src/scala/collection/Iterator.scala b/scala2-library-cc/src/scala/collection/Iterator.scala index d75f136e0253..4d1b0ed4ff95 100644 --- a/scala2-library-cc/src/scala/collection/Iterator.scala +++ b/scala2-library-cc/src/scala/collection/Iterator.scala @@ -17,7 +17,7 @@ import scala.annotation.tailrec import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure + /** Iterators are data structures that allow to iterate over a sequence * of elements. They have a `hasNext` method for checking @@ -595,8 +595,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite private[this] def nextCur(): Unit = { cur = null - cur = f(self.next()).iterator.unsafeAssumePure - // !!! see explanation in colltest5.CollectionStrawManCC5_1.flatMap why the unsafeAssumePure is needed + cur = f(self.next()).iterator _hasNext = -1 } diff --git a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala index baf89ef54aab..2f7b017a6729 100644 --- a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala +++ b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala @@ -25,7 +25,6 @@ import scala.runtime.Statics import language.experimental.captureChecking import annotation.unchecked.uncheckedCaptures import caps.untrackedCaptures -import caps.unsafe.unsafeAssumePure /** This class implements an immutable linked list. We call it "lazy" * because it computes its elements only when they are needed. @@ -1042,8 +1041,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { var itHasNext = false var rest = restRef // var rest = restRef.elem while (!itHasNext && !rest.isEmpty) { - it = f(rest.head).iterator.unsafeAssumePure - // !!! 
see explanation in colltest5.CollectionStrawManCC5_1.flatMap why the unsafeAssumePure is needed + it = f(rest.head).iterator itHasNext = it.hasNext if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw rest = rest.tail diff --git a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala index eebc2fbc1417..3ff614bfc556 100644 --- a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala +++ b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala @@ -16,7 +16,6 @@ package mutable import scala.annotation.nowarn import language.experimental.captureChecking import caps.unboxed -import caps.unsafe.unsafeAssumePure /** A `Buffer` is a growable and shrinkable `Seq`. */ trait Buffer[A] @@ -186,10 +185,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] var i = 0 val s = size val newElems = new Array[(IterableOnce[A]^{f*})](s) - while i < s do - newElems(i) = f(this(i)).unsafeAssumePure - // !!! see explanation in colltest5.CollectionStrawManCC5_1.flatMap why the unsafeAssumePure is needed - i += 1 + while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 while (i < s) { ++=(newElems(i)); i += 1 } diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index 96859290075a..615e3901a437 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -47,7 +47,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:64:27 -------------------------------------- 64 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ - | Found: File^ + | Found: File^{id, f} | Required: File^{id*} | | longer explanation available when compiling with `-explain` diff --git a/tests/pos-custom-args/captures/nested-classes-2.scala b/tests/pos-custom-args/captures/nested-classes-2.scala index 17ee3b8f3cb0..744635ee949b 100644 --- a/tests/pos-custom-args/captures/nested-classes-2.scala +++ b/tests/pos-custom-args/captures/nested-classes-2.scala @@ -20,5 +20,5 @@ def test2(x1: (() => Unit), x2: (() => Unit) => Unit) = def test3(y1: (() => Unit), y2: (() => Unit) => Unit) = val cc1: C1^{y1, y2} = C1(y1, y2) val cc2 = cc1.c2(x1, x2) - val cc3: cc1.C2^{cc2} = cc2 + val cc3: cc1.C2^{cc1, x1, x2} = cc2 diff --git a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala index da15fee9849b..5443758afa72 100644 --- a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala +++ b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala @@ -5,7 +5,6 @@ import Predef.{augmentString as _, wrapString as _, *} import scala.reflect.ClassTag import annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import annotation.tailrec -import caps.unsafe.unsafeAssumePure /** A strawman architecture for new collections. It contains some * example collection classes and methods with the intent to expose @@ -556,16 +555,7 @@ object CollectionStrawMan5 { private var myCurrent: Iterator[B]^{this, f} = Iterator.empty private def current = { while (!myCurrent.hasNext && self.hasNext) - myCurrent = f(self.next()).iterator.unsafeAssumePure - // !!! This is unsafe since the iterator's result could return a capability - // depending on the argument self.next() of type A. To exclude that we'd have - // to define f to be of type EX c. A ->{c} IterableOnce[B]^{c}, i.e. `c` may - // not depend on A. 
But to get there we have to - // - improve the way we express existentials using `^` - // - rework the recheckApplication code to cater for this. Right now - // we cannot do anything since `A` is not always pure. But if we took - // the existential scope of the result into account, this could provide - // a solution. + myCurrent = f(self.next()).iterator myCurrent } def hasNext = current.hasNext From 7db1d43518f6bbb23bbb2d00d7e9a910658d86d3 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 11 Jul 2024 10:31:02 +0200 Subject: [PATCH 338/827] Fix special capture set handling in recheckApply, Step 2 revised Step 2: Change the logic. The previous one was unsound. The new logic is makes use of the distinction between regular and unboxed parameters. --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 43 ++++++----- tests/neg-custom-args/captures/reaches.check | 72 +++++++------------ tests/neg-custom-args/captures/reaches.scala | 23 +++--- tests/pos-custom-args/captures/Buffer.scala | 22 ++++++ .../captures/opaque-inline-problem.scala | 7 +- 5 files changed, 91 insertions(+), 76 deletions(-) create mode 100644 tests/pos-custom-args/captures/Buffer.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index c32028a22471..8df958d7a977 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -547,36 +547,47 @@ class CheckCaptures extends Recheck, SymTransformer: protected override def recheckArg(arg: Tree, formal: Type)(using Context): Type = val argType = recheck(arg, formal) - if unboxedArgs.remove(arg) && ccConfig.useUnboxedParams then + if unboxedArgs.contains(arg) && ccConfig.useUnboxedParams then capt.println(i"charging deep capture set of $arg: ${argType} = ${CaptureSet.deepCaptureSet(argType)}") markFree(CaptureSet.deepCaptureSet(argType), arg.srcPos) argType /** A specialized implementation of the apply rule. * - * E |- f: Ra ->Cf Rr^Cr - * E |- a: Ra^Ca + * E |- q: Tq^Cq + * E |- q.f: Ta^Ca ->Cf Tr^Cr + * E |- a: Ta * --------------------- - * E |- f a: Rr^C + * E |- f(a): Tr^C * - * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the - * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr - * and Cr otherwise. + * If the function `f` does not have an `@unboxed` parameter, then + * any unboxing it does would be charged to the environment of the function + * so they have to appear in Cq. Since any capabilities of the result of the + * application must already be present in the application, an upper + * approximation of the result capture set is Cq \union Ca, where `Ca` + * is the capture set of the argument. + * If the function `f` does have an `@unboxed` parameter, then it could in addition + * unbox reach capabilities over its formal parameter. Therefore, the approximation + * would be `Cq \union dcs(Ca)` instead. + * If the approximation is known to subcapture the declared result Cr, we pick it for C + * otherwise we pick Cr. 
*/ protected override def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = - Existential.toCap(super.recheckApplication(tree, qualType, funType, argTypes)) match + val appType = Existential.toCap(super.recheckApplication(tree, qualType, funType, argTypes)) + val qualCaptures = qualType.captureSet + val argCaptures = + for (arg, argType) <- tree.args.lazyZip(argTypes) yield + if unboxedArgs.remove(arg) // need to ensure the remove happens, that's why argCaptures is computed even if not needed. + then CaptureSet.deepCaptureSet(argType) + else argType.captureSet + appType match case appType @ CapturingType(appType1, refs) if qualType.exists && !tree.fun.symbol.isConstructor - && !qualType.isBoxedCapturing // TODO: This is not strng enough, we also have - // to exclude existentials in function results - && !argTypes.exists(_.isBoxedCapturing) - && qualType.captureSet.mightSubcapture(refs) - && argTypes.forall(_.captureSet.mightSubcapture(refs)) - => - val callCaptures = tree.args.foldLeft(qualType.captureSet): (cs, arg) => - cs ++ arg.tpe.captureSet + && qualCaptures.mightSubcapture(refs) + && argCaptures.forall(_.mightSubcapture(refs)) => + val callCaptures = argCaptures.foldLeft(qualCaptures)(_ ++ _) appType.derivedCapturingType(appType1, callCaptures) .showing(i"narrow $tree: $appType, refs = $refs, qual-cs = ${qualType.captureSet} = $result", capt) case appType => diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index 615e3901a437..a0797a6ba0a9 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -1,12 +1,12 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:24:11 -------------------------------------- -24 | cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:22:11 -------------------------------------- +22 | cur = (() => f.write()) :: Nil // error | ^^^^^^^^^^^^^^^^^^^^^^^ | Found: List[box () ->{f} Unit] | Required: List[box () ->{xs*} Unit] | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:35:7 --------------------------------------- -35 | (() => f.write()) :: Nil // error since {f*} !<: {xs*} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:33:7 --------------------------------------- +33 | (() => f.write()) :: Nil // error | ^^^^^^^^^^^^^^^^^^^^^^^ | Found: List[box () ->{f} Unit] | Required: box List[box () ->{xs*} Unit]^? @@ -15,52 +15,34 @@ | cannot be included in outer capture set {xs*} of value cur | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:38:6 ------------------------------------------------------------ -38 | var cur: List[Proc] = xs // error: Illegal type for var - | ^ - | Mutable variable cur cannot have type List[box () => Unit] since - | the part box () => Unit of that type captures the root capability `cap`. --- Error: tests/neg-custom-args/captures/reaches.scala:45:15 ----------------------------------------------------------- -45 | val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref - | ^^^^^^^^^^^^^^^ - | Sealed type variable T cannot be instantiated to List[box () => Unit] since - | the part box () => Unit of that type captures the root capability `cap`. 
- | This is often caused by a local capability in an argument of constructor Ref - | leaking as part of its result. --- Error: tests/neg-custom-args/captures/reaches.scala:55:31 ----------------------------------------------------------- -55 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error - | ^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable A cannot be instantiated to box () => Unit since - | that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of constructor Id - | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:57:6 --------------------------------------- -57 | id(() => f.write()) // error - | ^^^^^^^^^^^^^^^^^^^ - | Found: () => Unit - | Required: () ->? Unit - | - | Note that the universal capability `cap` - | cannot be included in capture set ? - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:64:27 -------------------------------------- -64 | val f1: File^{id*} = id(f) // error, since now id(f): File^ +-- Error: tests/neg-custom-args/captures/reaches.scala:38:31 ----------------------------------------------------------- +38 | val next: () => Unit = cur.head // error + | ^^^^^^^^ + | The expression's type box () => Unit is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. +-- Error: tests/neg-custom-args/captures/reaches.scala:45:35 ----------------------------------------------------------- +45 | val next: () => Unit = cur.get.head // error + | ^^^^^^^^^^^^ + | The expression's type box () => Unit is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. 
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:62:27 -------------------------------------- +62 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ - | Found: File^{id, f} + | Found: File^{f} | Required: File^{id*} | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:81:5 ------------------------------------------------------------ -81 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) // error // error - | ^^^^^^ - | Reach capability cap and universal capability cap cannot both - | appear in the type [B](f: ((box A ->{ps*} A, box A ->{ps*} A)) => B): List[B] of this expression --- Error: tests/neg-custom-args/captures/reaches.scala:81:10 ----------------------------------------------------------- -81 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) // error // error +-- Error: tests/neg-custom-args/captures/reaches.scala:79:10 ----------------------------------------------------------- +79 | ps.map((x, y) => compose1(x, y)) // error // error | ^ | Local reach capability ps* leaks into capture scope of method mapCompose --- Error: tests/neg-custom-args/captures/reaches.scala:81:13 ----------------------------------------------------------- -81 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) // error // error +-- Error: tests/neg-custom-args/captures/reaches.scala:79:13 ----------------------------------------------------------- +79 | ps.map((x, y) => compose1(x, y)) // error // error | ^ | Local reach capability ps* leaks into capture scope of method mapCompose +-- [E057] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:53:51 -------------------------------------- +53 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error + | ^ + | Type argument () -> Unit does not conform to lower bound () => Unit + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index e602d4b34493..08b2feed1bfe 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -1,5 +1,3 @@ -//> using options -source 3.4 -// (to make sure we use the sealed policy) import caps.unboxed class File: def write(): Unit = ??? 
@@ -14,14 +12,14 @@ class Ref[T](init: T): def set(y: T) = { x = y } def runAll0(@unboxed xs: List[Proc]): Unit = - var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR + var cur: List[() ->{xs*} Unit] = xs while cur.nonEmpty do val next: () ->{xs*} Unit = cur.head next() cur = cur.tail: List[() ->{xs*} Unit] usingFile: f => - cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} + cur = (() => f.write()) :: Nil // error def runAll1(@unboxed xs: List[Proc]): Unit = val cur = Ref[List[() ->{xs*} Unit]](xs) // OK, by revised VAR @@ -32,19 +30,19 @@ def runAll1(@unboxed xs: List[Proc]): Unit = usingFile: f => cur.set: - (() => f.write()) :: Nil // error since {f*} !<: {xs*} + (() => f.write()) :: Nil // error def runAll2(xs: List[Proc]): Unit = - var cur: List[Proc] = xs // error: Illegal type for var + var cur: List[Proc] = xs while cur.nonEmpty do - val next: () => Unit = cur.head + val next: () => Unit = cur.head // error next() cur = cur.tail def runAll3(xs: List[Proc]): Unit = - val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref + val cur = Ref[List[Proc]](xs) while cur.get.nonEmpty do - val next: () => Unit = cur.get.head + val next: () => Unit = cur.get.head // error next() cur.set(cur.get.tail: List[Proc]) @@ -54,7 +52,7 @@ class Id[-A, +B >: A](): def test = val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error usingFile: f => - id(() => f.write()) // error + id(() => f.write()) def attack2 = val id: File^ -> File^ = x => x @@ -78,4 +76,7 @@ def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = z => g(f(z)) def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = - ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * (should work now) // error // error + ps.map((x, y) => compose1(x, y)) // error // error + +def mapCompose2[A](@unboxed ps: List[(A => A, A => A)]): List[A ->{ps*} A] = + ps.map((x, y) => compose1(x, y)) diff --git a/tests/pos-custom-args/captures/Buffer.scala b/tests/pos-custom-args/captures/Buffer.scala new file mode 100644 index 000000000000..17164dccc370 --- /dev/null +++ b/tests/pos-custom-args/captures/Buffer.scala @@ -0,0 +1,22 @@ +import language.experimental.captureChecking + +// Extract of the problem in collection.mutable.Buffers +trait Buffer[A]: + + def apply(i: Int): A = ??? + + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + val g = f + val s = 10 + // capture checking: we need the copy since we box/unbox on g* on the next line + // TODO: This looks fishy, need to investigate + // Alternative would be to mark `f` as @unboxed. It's not inferred + // since `^ appears in a function result, not under a box. + val newElems = new Array[(IterableOnce[A]^{f})](s) + var i = 0 + while i < s do + val x = g(this(i)) + newElems(i) = x + i += 1 + this + } \ No newline at end of file diff --git a/tests/pos-custom-args/captures/opaque-inline-problem.scala b/tests/pos-custom-args/captures/opaque-inline-problem.scala index de9a4437becd..ed482e3fc164 100644 --- a/tests/pos-custom-args/captures/opaque-inline-problem.scala +++ b/tests/pos-custom-args/captures/opaque-inline-problem.scala @@ -4,9 +4,8 @@ trait Async extends caps.Capability: object Async: inline def current(using async: Async): async.type = async opaque type Spawn <: Async = Async - def blocking[T](f: Spawn => T): T = ??? + def blocking[T](f: Spawn ?=> T): T = ??? 
def main() = - Async.blocking: spawn => - val c = Async.current(using spawn) - val a = c.group + Async.blocking: + val a = Async.current.group \ No newline at end of file From c1036bf469f91dfabd8247c08c82bdf0dc6b075f Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 11 Jul 2024 16:17:58 +0200 Subject: [PATCH 339/827] Fix captureSet computations for false reach capabilities Fix the capture set computation of a type T @reachCapability where T is not a singleton captureRef. Such types can be the results of typemaps. The capture set in this case should be the deep capture set of T. --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 3 ++- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 10 ++++++++-- .../tools/dotc/printing/RefinedPrinter.scala | 4 ++-- .../captures/leak-problem-2.check | 7 +++++++ .../captures/leak-problem-2.scala | 9 +++++++++ tests/neg-custom-args/captures/reaches.check | 18 ++++++++++++------ 6 files changed, 40 insertions(+), 11 deletions(-) create mode 100644 tests/neg-custom-args/captures/leak-problem-2.check create mode 100644 tests/neg-custom-args/captures/leak-problem-2.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index af17cdc6b11a..dad063f43c87 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -204,8 +204,9 @@ extension (tp: Type) case tp: TypeParamRef => tp.derivesFrom(defn.Caps_CapSet) case AnnotatedType(parent, annot) => - annot.symbol == defn.ReachCapabilityAnnot + (annot.symbol == defn.ReachCapabilityAnnot || annot.symbol == defn.MaybeCapabilityAnnot + ) && parent.isTrackableRef case _ => false diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index aa65db2375e8..0aab33d31fdb 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -1057,7 +1057,7 @@ object CaptureSet: /** Capture set of a type */ def ofType(tp: Type, followResult: Boolean)(using Context): CaptureSet = def recur(tp: Type): CaptureSet = trace(i"ofType $tp, ${tp.getClass} $followResult", show = true): - tp.dealias match + tp.dealiasKeepAnnots match case tp: TermRef => tp.captureSet case tp: TermParamRef => @@ -1068,6 +1068,12 @@ object CaptureSet: empty case CapturingType(parent, refs) => recur(parent) ++ refs + case tp @ AnnotatedType(parent, ann) if ann.hasSymbol(defn.ReachCapabilityAnnot) => + parent match + case parent: SingletonCaptureRef if parent.isTrackableRef => + tp.singletonCaptureSet + case _ => + CaptureSet.deepCaptureSet(parent) case tpd @ defn.RefinedFunctionOf(rinfo: MethodType) if followResult => ofType(tpd.parent, followResult = false) // pick up capture set from parent type ++ (recur(rinfo.resType) // add capture set of result @@ -1083,7 +1089,7 @@ object CaptureSet: case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args) case _ => cs case tp: TypeProxy => - recur(tp.underlying) + recur(tp.superType) case AndType(tp1, tp2) => recur(tp1) ** recur(tp2) case OrType(tp1, tp2) => diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 18a1647572ef..3bdc67cb91ff 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -27,7 +27,7 @@ import config.{Config, Feature} import dotty.tools.dotc.util.SourcePosition import 
dotty.tools.dotc.ast.untpd.{MemberDef, Modifiers, PackageDef, RefTree, Template, TypeDef, ValOrDefDef} -import cc.{CaptureSet, CapturingType, toCaptureSet, IllegalCaptureRef, isRetains} +import cc.{CaptureSet, CapturingType, toCaptureSet, IllegalCaptureRef, isRetains, ReachCapability, MaybeCapability} import dotty.tools.dotc.parsing.JavaParsers class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { @@ -330,7 +330,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { "?" ~ (("(ignored: " ~ toText(ignored) ~ ")") provided printDebug) case tp @ PolyProto(targs, resType) => "[applied to [" ~ toTextGlobal(targs, ", ") ~ "] returning " ~ toText(resType) - case tp: AnnotatedType if tp.isReach || tp.isMaybe => + case ReachCapability(_) | MaybeCapability(_) => toTextCaptureRef(tp) case _ => super.toText(tp) diff --git a/tests/neg-custom-args/captures/leak-problem-2.check b/tests/neg-custom-args/captures/leak-problem-2.check new file mode 100644 index 000000000000..42282ff7f9f4 --- /dev/null +++ b/tests/neg-custom-args/captures/leak-problem-2.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/leak-problem-2.scala:8:8 --------------------------------- +8 | = race(Seq(src1, src2)) // error + | ^^^^^^^^^^^^^^^^^^^^^ + | Found: Source[box T^?]^{src1, src2} + | Required: Source[T] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/leak-problem-2.scala b/tests/neg-custom-args/captures/leak-problem-2.scala new file mode 100644 index 000000000000..82465924b852 --- /dev/null +++ b/tests/neg-custom-args/captures/leak-problem-2.scala @@ -0,0 +1,9 @@ +import language.experimental.captureChecking + +trait Source[+T] + +def race[T](@caps.unboxed sources: Seq[Source[T]^]): Source[T]^{sources*} = ??? + +def raceTwo[T](src1: Source[T]^, src2: Source[T]^): Source[T]^{} + = race(Seq(src1, src2)) // error + // this compiled and returned a Source that does not capture src1 and src2. \ No newline at end of file diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index a0797a6ba0a9..aa45c738dcc5 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -25,6 +25,18 @@ | ^^^^^^^^^^^^ | The expression's type box () => Unit is not allowed to capture the root capability `cap`. | This usually means that a capability persists longer than its allowed lifetime. 
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:53:2 --------------------------------------- +53 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error + | ^ + | Found: box () => Unit + | Required: () => Unit + | + | Note that box () => Unit cannot be box-converted to () => Unit + | since at least one of their capture sets contains the root capability `cap` +54 | usingFile: f => +55 | id(() => f.write()) + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:62:27 -------------------------------------- 62 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ @@ -40,9 +52,3 @@ 79 | ps.map((x, y) => compose1(x, y)) // error // error | ^ | Local reach capability ps* leaks into capture scope of method mapCompose --- [E057] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:53:51 -------------------------------------- -53 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error - | ^ - | Type argument () -> Unit does not conform to lower bound () => Unit - | - | longer explanation available when compiling with `-explain` From 5a351600b854306ac7038b50e8e66aa21d8270b1 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 11 Jul 2024 16:54:06 +0200 Subject: [PATCH 340/827] Tweak deep capture set computations --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 16 +++++++++++++++- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 9 +++++++-- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 6 +++--- 3 files changed, 25 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index dad063f43c87..46c346011c49 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -223,6 +223,20 @@ extension (tp: Type) case tp: SingletonCaptureRef => tp.captureSetOfInfo case _ => CaptureSet.ofType(tp, followResult = false) + /** The deep capture set of a type. + * For singleton capabilities `x` and reach capabilities `x*`, this is `{x*}`, provided + * the underlying capture set resulting from traversing the type is non-empty. + * For other types this is the union of all covariant capture sets embedded + * in the type, as computed by `CaptureSet.ofTypeDeeply`. + */ + def deepCaptureSet(using Context): CaptureSet = + val dcs = CaptureSet.ofTypeDeeply(tp) + if dcs.isAlwaysEmpty then dcs + else tp match + case tp @ ReachCapability(_) => tp.singletonCaptureSet + case tp: SingletonCaptureRef => tp.reach.singletonCaptureSet + case _ => dcs + /** A type capturing `ref` */ def capturing(ref: CaptureRef)(using Context): Type = if tp.captureSet.accountsFor(ref) then tp @@ -587,7 +601,7 @@ extension (sym: Symbol) } def hasTrackedParts(using Context): Boolean = - !CaptureSet.deepCaptureSet(sym.info).isAlwaysEmpty + !CaptureSet.ofTypeDeeply(sym.info).isAlwaysEmpty extension (tp: AnnotatedType) /** Is this a boxed capturing type? 
*/ diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 0aab33d31fdb..87638b078040 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -1073,7 +1073,7 @@ object CaptureSet: case parent: SingletonCaptureRef if parent.isTrackableRef => tp.singletonCaptureSet case _ => - CaptureSet.deepCaptureSet(parent) + CaptureSet.ofTypeDeeply(parent) case tpd @ defn.RefinedFunctionOf(rinfo: MethodType) if followResult => ofType(tpd.parent, followResult = false) // pick up capture set from parent type ++ (recur(rinfo.resType) // add capture set of result @@ -1099,12 +1099,17 @@ object CaptureSet: recur(tp) //.showing(i"capture set of $tp = $result", captDebug) - def deepCaptureSet(tp: Type)(using Context): CaptureSet = + /** The deep capture set of a type is the union of all covariant occurrences of + * capture sets. Nested existential sets are approximated with `cap`. + */ + def ofTypeDeeply(tp: Type)(using Context): CaptureSet = val collect = new TypeAccumulator[CaptureSet]: def apply(cs: CaptureSet, t: Type) = t.dealias match case t @ CapturingType(p, cs1) => val cs2 = apply(cs, p) if variance > 0 then cs2 ++ cs1 else cs2 + case t @ Existential(_, _) => + apply(cs, Existential.toCap(t)) case _ => foldOver(cs, t) collect(CaptureSet.empty, tp) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 8df958d7a977..10668a6fbee9 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -548,8 +548,8 @@ class CheckCaptures extends Recheck, SymTransformer: def recheckArg(arg: Tree, formal: Type)(using Context): Type = val argType = recheck(arg, formal) if unboxedArgs.contains(arg) && ccConfig.useUnboxedParams then - capt.println(i"charging deep capture set of $arg: ${argType} = ${CaptureSet.deepCaptureSet(argType)}") - markFree(CaptureSet.deepCaptureSet(argType), arg.srcPos) + capt.println(i"charging deep capture set of $arg: ${argType} = ${argType.deepCaptureSet}") + markFree(argType.deepCaptureSet, arg.srcPos) argType /** A specialized implementation of the apply rule. @@ -579,7 +579,7 @@ class CheckCaptures extends Recheck, SymTransformer: val argCaptures = for (arg, argType) <- tree.args.lazyZip(argTypes) yield if unboxedArgs.remove(arg) // need to ensure the remove happens, that's why argCaptures is computed even if not needed. - then CaptureSet.deepCaptureSet(argType) + then argType.deepCaptureSet else argType.captureSet appType match case appType @ CapturingType(appType1, refs) From 082214e3a5473c2966d6f478afa6abfb73bc40ed Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 11 Jul 2024 17:02:10 +0200 Subject: [PATCH 341/827] Make the unboxed parameter scheme standard Drop the config option that enables it. 
--- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 2 -- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 7 +++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 46c346011c49..116b35e34aea 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -40,8 +40,6 @@ object ccConfig: */ inline val handleEtaExpansionsSpecially = false - val useUnboxedParams = true - /** If true, use existential capture set variables */ def useExistentials(using Context) = Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.5`) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 10668a6fbee9..c53f53fa43ba 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -402,9 +402,8 @@ class CheckCaptures extends Recheck, SymTransformer: // by their underlying capture set, which cannot be universal. // Reach capabilities of @unboxed parameters are exempted. val cs = CaptureSet.ofInfo(c) - if ccConfig.useUnboxedParams then - cs.disallowRootCapability: () => - report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) + cs.disallowRootCapability: () => + report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) checkSubset(cs, env.captured, pos, provenance(env)) isVisible case ref: ThisType => isVisibleFromEnv(ref.cls) @@ -547,7 +546,7 @@ class CheckCaptures extends Recheck, SymTransformer: protected override def recheckArg(arg: Tree, formal: Type)(using Context): Type = val argType = recheck(arg, formal) - if unboxedArgs.contains(arg) && ccConfig.useUnboxedParams then + if unboxedArgs.contains(arg) then capt.println(i"charging deep capture set of $arg: ${argType} = ${argType.deepCaptureSet}") markFree(argType.deepCaptureSet, arg.srcPos) argType From afcbdcbba3e259b0a6101772155a933c01b46f91 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 11 Jul 2024 17:07:44 +0200 Subject: [PATCH 342/827] Rename @unboxed --> @unbox --- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 4 ++-- compiler/src/dotty/tools/dotc/core/Definitions.scala | 2 +- library/src/scala/caps.scala | 5 ++++- .../src/scala/collection/mutable/Buffer.scala | 5 ++--- tests/neg-custom-args/captures/i15749a.scala | 4 ++-- tests/neg-custom-args/captures/leak-problem-2.scala | 2 +- tests/neg-custom-args/captures/reaches.scala | 8 ++++---- tests/neg/i20503.scala | 4 ++-- tests/neg/leak-problem-unboxed.scala | 6 +++--- tests/pos-custom-args/captures/Buffer.scala | 2 +- tests/pos-custom-args/captures/dep-reach.scala | 6 +++--- tests/pos-custom-args/captures/reaches.scala | 4 ++-- tests/pos/cc-poly-source-capability.scala | 4 ++-- tests/pos/cc-poly-source.scala | 4 ++-- tests/pos/gears-probem-1.scala | 4 ++-- tests/pos/i18699.scala | 4 ++-- tests/pos/reach-capability.scala | 4 ++-- tests/pos/reach-problem.scala | 4 ++-- 18 files changed, 39 insertions(+), 37 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index c53f53fa43ba..a83c32eb1284 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -395,7 +395,7 @@ class CheckCaptures extends Recheck, SymTransformer: val refOwner = refSym.owner val 
isVisible = isVisibleFromEnv(refOwner) if !isVisible && c.isReach && refSym.is(Param) && refOwner == env.owner then - if refSym.hasAnnotation(defn.UnboxedAnnot) then + if refSym.hasAnnotation(defn.UnboxAnnot) then capt.println(i"exempt: $ref in $refOwner") else // Reach capabilities that go out of scope have to be approximated @@ -425,7 +425,7 @@ class CheckCaptures extends Recheck, SymTransformer: val unboxedParamNames = meth.rawParamss.flatMap: params => params.collect: - case param if param.hasAnnotation(defn.UnboxedAnnot) => + case param if param.hasAnnotation(defn.UnboxAnnot) => param.name .toSet diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 8c2f448a4a5e..78a9ea360279 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1050,7 +1050,7 @@ class Definitions { @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") - @tu lazy val UnboxedAnnot: ClassSymbol = requiredClass("scala.caps.unboxed") + @tu lazy val UnboxAnnot: ClassSymbol = requiredClass("scala.caps.unbox") @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 5e675f7d4341..1416a7b35f83 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -39,7 +39,10 @@ import annotation.{experimental, compileTimeOnly} */ final class untrackedCaptures extends annotation.StaticAnnotation - final class unboxed extends annotation.StaticAnnotation + /** This should go into annotations. For now it is here, so that we + * can experiment with it quickly between minor releases + */ + final class unbox extends annotation.StaticAnnotation object unsafe: diff --git a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala index 3ff614bfc556..27e5a8997d48 100644 --- a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala +++ b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala @@ -15,7 +15,6 @@ package mutable import scala.annotation.nowarn import language.experimental.captureChecking -import caps.unboxed /** A `Buffer` is a growable and shrinkable `Seq`. */ trait Buffer[A] @@ -180,11 +179,11 @@ trait IndexedBuffer[A] extends IndexedSeq[A] override def iterableFactory: SeqFactory[IndexedBuffer] = IndexedBuffer - def flatMapInPlace(@unboxed f: A => IterableOnce[A]^): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { // There's scope for a better implementation which copies elements in place. 
var i = 0 val s = size - val newElems = new Array[(IterableOnce[A]^{f*})](s) + val newElems = new Array[(IterableOnce[A]^{f})](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 diff --git a/tests/neg-custom-args/captures/i15749a.scala b/tests/neg-custom-args/captures/i15749a.scala index 109a73b2b130..57fca27fae66 100644 --- a/tests/neg-custom-args/captures/i15749a.scala +++ b/tests/neg-custom-args/captures/i15749a.scala @@ -1,5 +1,5 @@ import caps.cap -import caps.unboxed +import caps.unbox class Unit object u extends Unit @@ -18,7 +18,7 @@ def test = def force[A](thunk: Unit ->{cap} A): A = thunk(u) - def forceWrapper[A](@unboxed mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = + def forceWrapper[A](@unbox mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = // Γ ⊢ mx: Wrapper[□ {cap} Unit => A] // `force` should be typed as ∀(□ {cap} Unit -> A) A, but it can not strictMap[Unit ->{mx*} A, A](mx)(t => force[A](t)) // error // should work diff --git a/tests/neg-custom-args/captures/leak-problem-2.scala b/tests/neg-custom-args/captures/leak-problem-2.scala index 82465924b852..08a3a6c2d9ca 100644 --- a/tests/neg-custom-args/captures/leak-problem-2.scala +++ b/tests/neg-custom-args/captures/leak-problem-2.scala @@ -2,7 +2,7 @@ import language.experimental.captureChecking trait Source[+T] -def race[T](@caps.unboxed sources: Seq[Source[T]^]): Source[T]^{sources*} = ??? +def race[T](@caps.unbox sources: Seq[Source[T]^]): Source[T]^{sources*} = ??? def raceTwo[T](src1: Source[T]^, src2: Source[T]^): Source[T]^{} = race(Seq(src1, src2)) // error diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index 08b2feed1bfe..c2d8001e2a7c 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -1,4 +1,4 @@ -import caps.unboxed +import caps.unbox class File: def write(): Unit = ??? @@ -11,7 +11,7 @@ class Ref[T](init: T): def get: T = x def set(y: T) = { x = y } -def runAll0(@unboxed xs: List[Proc]): Unit = +def runAll0(@unbox xs: List[Proc]): Unit = var cur: List[() ->{xs*} Unit] = xs while cur.nonEmpty do val next: () ->{xs*} Unit = cur.head @@ -21,7 +21,7 @@ def runAll0(@unboxed xs: List[Proc]): Unit = usingFile: f => cur = (() => f.write()) :: Nil // error -def runAll1(@unboxed xs: List[Proc]): Unit = +def runAll1(@unbox xs: List[Proc]): Unit = val cur = Ref[List[() ->{xs*} Unit]](xs) // OK, by revised VAR while cur.get.nonEmpty do val next: () ->{xs*} Unit = cur.get.head @@ -78,5 +78,5 @@ def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = ps.map((x, y) => compose1(x, y)) // error // error -def mapCompose2[A](@unboxed ps: List[(A => A, A => A)]): List[A ->{ps*} A] = +def mapCompose2[A](@unbox ps: List[(A => A, A => A)]): List[A ->{ps*} A] = ps.map((x, y) => compose1(x, y)) diff --git a/tests/neg/i20503.scala b/tests/neg/i20503.scala index 463e4e3f9686..3fb0573f6c2f 100644 --- a/tests/neg/i20503.scala +++ b/tests/neg/i20503.scala @@ -1,5 +1,5 @@ import language.experimental.captureChecking -import caps.unboxed +import caps.unbox class List[+A]: def head: A = ??? @@ -8,7 +8,7 @@ class List[+A]: def foreach[U](f: A => U): Unit = ??? def nonEmpty: Boolean = ??? -def runOps(@unboxed ops: List[() => Unit]): Unit = +def runOps(@unbox ops: List[() => Unit]): Unit = // See i20156, due to limitation in expressiveness of current system, // we could map over the list of impure elements. OK with existentials. 
ops.foreach(op => op()) diff --git a/tests/neg/leak-problem-unboxed.scala b/tests/neg/leak-problem-unboxed.scala index 8591145583e2..7de3d84bfcca 100644 --- a/tests/neg/leak-problem-unboxed.scala +++ b/tests/neg/leak-problem-unboxed.scala @@ -1,5 +1,5 @@ import language.experimental.captureChecking -import caps.unboxed +import caps.unbox // Some capabilities that should be used locally trait Async: @@ -9,12 +9,12 @@ def usingAsync[X](op: Async^ => X): X = ??? case class Box[+T](get: T) -def useBoxedAsync(@unboxed x: Box[Async^]): Unit = +def useBoxedAsync(@unbox x: Box[Async^]): Unit = val t0 = x val t1 = t0.get // ok t1.read() -def useBoxedAsync1(@unboxed x: Box[Async^]): Unit = x.get.read() // ok +def useBoxedAsync1(@unbox x: Box[Async^]): Unit = x.get.read() // ok def test(): Unit = diff --git a/tests/pos-custom-args/captures/Buffer.scala b/tests/pos-custom-args/captures/Buffer.scala index 17164dccc370..2412e5b388ca 100644 --- a/tests/pos-custom-args/captures/Buffer.scala +++ b/tests/pos-custom-args/captures/Buffer.scala @@ -10,7 +10,7 @@ trait Buffer[A]: val s = 10 // capture checking: we need the copy since we box/unbox on g* on the next line // TODO: This looks fishy, need to investigate - // Alternative would be to mark `f` as @unboxed. It's not inferred + // Alternative would be to mark `f` as @unbox. It's not inferred // since `^ appears in a function result, not under a box. val newElems = new Array[(IterableOnce[A]^{f})](s) var i = 0 diff --git a/tests/pos-custom-args/captures/dep-reach.scala b/tests/pos-custom-args/captures/dep-reach.scala index 177422565736..c81197aa738d 100644 --- a/tests/pos-custom-args/captures/dep-reach.scala +++ b/tests/pos-custom-args/captures/dep-reach.scala @@ -1,10 +1,10 @@ -import caps.unboxed +import caps.unbox object Test: class C type Proc = () => Unit def f(c: C^, d: C^): () ->{c, d} Unit = - def foo(@unboxed xs: Proc*): () ->{xs*} Unit = + def foo(@unbox xs: Proc*): () ->{xs*} Unit = xs.head val a: () ->{c} Unit = () => () val b: () ->{d} Unit = () => () @@ -13,7 +13,7 @@ object Test: def g(c: C^, d: C^): () ->{c, d} Unit = - def foo(@unboxed xs: Seq[() => Unit]): () ->{xs*} Unit = + def foo(@unbox xs: Seq[() => Unit]): () ->{xs*} Unit = xs.head val a: () ->{c} Unit = () => () diff --git a/tests/pos-custom-args/captures/reaches.scala b/tests/pos-custom-args/captures/reaches.scala index 976fadc4b649..ab0da9b67d18 100644 --- a/tests/pos-custom-args/captures/reaches.scala +++ b/tests/pos-custom-args/captures/reaches.scala @@ -1,4 +1,4 @@ -import caps.unboxed +import caps.unbox class C def f(xs: List[C^]) = @@ -22,7 +22,7 @@ extension [A](x: A) def :: (xs: List[A]): List[A] = ??? 
object Nil extends List[Nothing] -def runAll(@unboxed xs: List[Proc]): Unit = +def runAll(@unbox xs: List[Proc]): Unit = var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR while cur.nonEmpty do val next: () ->{xs*} Unit = cur.head diff --git a/tests/pos/cc-poly-source-capability.scala b/tests/pos/cc-poly-source-capability.scala index 0f61e98e5068..3b6c0bde1398 100644 --- a/tests/pos/cc-poly-source-capability.scala +++ b/tests/pos/cc-poly-source-capability.scala @@ -1,7 +1,7 @@ import language.experimental.captureChecking import annotation.experimental import caps.{CapSet, Capability} -import caps.unboxed +import caps.unbox @experimental object Test: @@ -18,7 +18,7 @@ import caps.unboxed def allListeners: Set[Listener^{X^}] = listeners - def test1(async1: Async, @unboxed others: List[Async]) = + def test1(async1: Async, @unbox others: List[Async]) = val src = Source[CapSet^{async1, others*}] val lst1 = listener(async1) val lsts = others.map(listener) diff --git a/tests/pos/cc-poly-source.scala b/tests/pos/cc-poly-source.scala index 09b4a3024e3c..4cfbbaa06936 100644 --- a/tests/pos/cc-poly-source.scala +++ b/tests/pos/cc-poly-source.scala @@ -1,7 +1,7 @@ import language.experimental.captureChecking import annotation.experimental import caps.{CapSet, Capability} -import caps.unboxed +import caps.unbox @experimental object Test: @@ -25,7 +25,7 @@ import caps.unboxed val ls = src.allListeners val _: Set[Listener^{lbl1, lbl2}] = ls - def test2(@unboxed lbls: List[Label^]) = + def test2(@unbox lbls: List[Label^]) = def makeListener(lbl: Label^): Listener^{lbl} = ??? val listeners = lbls.map(makeListener) val src = Source[CapSet^{lbls*}] diff --git a/tests/pos/gears-probem-1.scala b/tests/pos/gears-probem-1.scala index c683db9ce01d..ab71616b72fc 100644 --- a/tests/pos/gears-probem-1.scala +++ b/tests/pos/gears-probem-1.scala @@ -1,5 +1,5 @@ import language.experimental.captureChecking -import caps.unboxed +import caps.unbox trait Future[+T]: def await: T @@ -17,7 +17,7 @@ class Result[+T, +E]: case class Err[+E](e: E) extends Result[Nothing, E] case class Ok[+T](x: T) extends Result[T, Nothing] -extension [T](@unboxed fs: Seq[Future[T]^]) +extension [T](@unbox fs: Seq[Future[T]^]) def awaitAll = val collector//: Collector[T]{val futures: Seq[Future[T]^{fs*}]} = Collector(fs) diff --git a/tests/pos/i18699.scala b/tests/pos/i18699.scala index 54390f6bdd71..1937d7dca8c5 100644 --- a/tests/pos/i18699.scala +++ b/tests/pos/i18699.scala @@ -1,9 +1,9 @@ import language.experimental.captureChecking -import caps.unboxed +import caps.unbox trait Cap: def use: Int = 42 -def test2(@unboxed cs: List[Cap^]): Unit = +def test2(@unbox cs: List[Cap^]): Unit = val t0: Cap^{cs*} = cs.head // error var t1: Cap^{cs*} = cs.head // error diff --git a/tests/pos/reach-capability.scala b/tests/pos/reach-capability.scala index 5ad7534061b1..50ea479ec3c1 100644 --- a/tests/pos/reach-capability.scala +++ b/tests/pos/reach-capability.scala @@ -1,7 +1,7 @@ import language.experimental.captureChecking import annotation.experimental import caps.Capability -import caps.unboxed +import caps.unbox @experimental object Test2: @@ -12,7 +12,7 @@ import caps.unboxed class Listener - def test2(@unboxed lbls: List[Label]) = + def test2(@unbox lbls: List[Label]) = def makeListener(lbl: Label): Listener^{lbl} = ??? 
val listeners = lbls.map(makeListener) // should work diff --git a/tests/pos/reach-problem.scala b/tests/pos/reach-problem.scala index 29d46687a219..d6b7b79011a6 100644 --- a/tests/pos/reach-problem.scala +++ b/tests/pos/reach-problem.scala @@ -1,11 +1,11 @@ import language.experimental.captureChecking -import caps.unboxed +import caps.unbox class Box[T](items: Seq[T^]): def getOne: T^{items*} = ??? object Box: - def getOne[T](@unboxed items: Seq[T^]): T^{items*} = + def getOne[T](@unbox items: Seq[T^]): T^{items*} = val bx = Box(items) bx.getOne /* From 0b22948b91aa6b8e0651aa576cf7e40af6a02483 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 11 Jul 2024 17:15:38 +0200 Subject: [PATCH 343/827] Another test --- tests/pos/Buffer.scala | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 tests/pos/Buffer.scala diff --git a/tests/pos/Buffer.scala b/tests/pos/Buffer.scala new file mode 100644 index 000000000000..2412e5b388ca --- /dev/null +++ b/tests/pos/Buffer.scala @@ -0,0 +1,22 @@ +import language.experimental.captureChecking + +// Extract of the problem in collection.mutable.Buffers +trait Buffer[A]: + + def apply(i: Int): A = ??? + + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + val g = f + val s = 10 + // capture checking: we need the copy since we box/unbox on g* on the next line + // TODO: This looks fishy, need to investigate + // Alternative would be to mark `f` as @unbox. It's not inferred + // since `^ appears in a function result, not under a box. + val newElems = new Array[(IterableOnce[A]^{f})](s) + var i = 0 + while i < s do + val x = g(this(i)) + newElems(i) = x + i += 1 + this + } \ No newline at end of file From 91cbca8375fa3a9b09088862535fd9ae10f2d529 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 12 Jul 2024 12:01:55 +0200 Subject: [PATCH 344/827] Check that overrides don't change the @unbox status of their parameters --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 17 ++++++++++++++- .../dotty/tools/dotc/typer/RefChecks.scala | 3 +++ .../captures/unbox-overrides.check | 21 +++++++++++++++++++ .../captures/unbox-overrides.scala | 15 +++++++++++++ 4 files changed, 55 insertions(+), 1 deletion(-) create mode 100644 tests/neg-custom-args/captures/unbox-overrides.check create mode 100644 tests/neg-custom-args/captures/unbox-overrides.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index a83c32eb1284..30dfe8f8881c 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -22,7 +22,7 @@ import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult import CCState.* import StdNames.nme import NameKinds.{DefaultGetterName, WildcardParamName, UniqueNameKind} -import reporting.{trace, Message} +import reporting.{trace, Message, OverrideError} /** The capture checker */ object CheckCaptures: @@ -1271,6 +1271,21 @@ class CheckCaptures extends Recheck, SymTransformer: !setup.isPreCC(overriding) && !setup.isPreCC(overridden) override def checkInheritedTraitParameters: Boolean = false + + /** Check that overrides don't change the @unbox status of their parameters */ + override def additionalChecks(member: Symbol, other: Symbol)(using Context): Unit = + for + (params1, params2) <- member.rawParamss.lazyZip(other.rawParamss) + (param1, param2) <- params1.lazyZip(params2) + do + if param1.hasAnnotation(defn.UnboxAnnot) != param2.hasAnnotation(defn.UnboxAnnot) then + report.error( 
+ OverrideError( + i"has a parameter ${param1.name} with different @unbox status than the corresponding parameter in the overridden definition", + self, member, other, self.memberInfo(member), self.memberInfo(other) + ), + if member.owner == clazz then member.srcPos else clazz.srcPos + ) end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index cb1aea27c444..2601bfb42074 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -250,12 +250,15 @@ object RefChecks { */ def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = true + protected def additionalChecks(overriding: Symbol, overridden: Symbol)(using Context): Unit = () + private val subtypeChecker: (Type, Type) => Context ?=> Boolean = this.checkSubType def checkAll(checkOverride: ((Type, Type) => Context ?=> Boolean, Symbol, Symbol) => Unit) = while hasNext do if needsCheck(overriding, overridden) then checkOverride(subtypeChecker, overriding, overridden) + additionalChecks(overriding, overridden) next() // The OverridingPairs cursor does assume that concrete overrides abstract diff --git a/tests/neg-custom-args/captures/unbox-overrides.check b/tests/neg-custom-args/captures/unbox-overrides.check new file mode 100644 index 000000000000..b9a3be7bffbc --- /dev/null +++ b/tests/neg-custom-args/captures/unbox-overrides.check @@ -0,0 +1,21 @@ +-- [E164] Declaration Error: tests/neg-custom-args/captures/unbox-overrides.scala:8:6 ---------------------------------- +8 | def foo(x: C): C // error + | ^ + |error overriding method foo in trait A of type (x: C): C; + | method foo of type (x: C): C has a parameter x with different @unbox status than the corresponding parameter in the overridden definition + | + | longer explanation available when compiling with `-explain` +-- [E164] Declaration Error: tests/neg-custom-args/captures/unbox-overrides.scala:9:6 ---------------------------------- +9 | def bar(@unbox x: C): C // error + | ^ + |error overriding method bar in trait A of type (x: C): C; + | method bar of type (x: C): C has a parameter x with different @unbox status than the corresponding parameter in the overridden definition + | + | longer explanation available when compiling with `-explain` +-- [E164] Declaration Error: tests/neg-custom-args/captures/unbox-overrides.scala:15:15 -------------------------------- +15 |abstract class C extends A[C], B2 // error + | ^ + |error overriding method foo in trait A of type (x: C): C; + | method foo in trait B2 of type (x: C): C has a parameter x with different @unbox status than the corresponding parameter in the overridden definition + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/unbox-overrides.scala b/tests/neg-custom-args/captures/unbox-overrides.scala new file mode 100644 index 000000000000..5abb5013bfbe --- /dev/null +++ b/tests/neg-custom-args/captures/unbox-overrides.scala @@ -0,0 +1,15 @@ +import caps.unbox + +trait A[X]: + def foo(@unbox x: X): X + def bar(x: X): X + +trait B extends A[C]: + def foo(x: C): C // error + def bar(@unbox x: C): C // error + +trait B2: + def foo(x: C): C + def bar(@unbox x: C): C + +abstract class C extends A[C], B2 // error From 54828c7a2d8b7ffda1a12155a89e3bb6be9f26be Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 13 Jul 2024 23:01:54 +0200 Subject: [PATCH 345/827] 
Fix lubs over capturing types Also, fix Seq rechecking so that elements are always box adapted --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 3 +++ .../dotty/tools/dotc/core/TypeComparer.scala | 5 +++++ .../dotty/tools/dotc/transform/Recheck.scala | 13 ++++++++---- tests/neg-custom-args/captures/lazylist.check | 10 ++++----- tests/neg-custom-args/captures/lazylist.scala | 2 +- tests/neg-custom-args/captures/lubs.check | 21 +++++++++++++++++++ tests/neg-custom-args/captures/lubs.scala | 20 ++++++++++++++++++ .../captures/spread-problem.check | 14 +++++++++++++ .../captures/spread-problem.scala | 11 ++++++++++ 9 files changed, 89 insertions(+), 10 deletions(-) create mode 100644 tests/neg-custom-args/captures/lubs.check create mode 100644 tests/neg-custom-args/captures/lubs.scala create mode 100644 tests/neg-custom-args/captures/spread-problem.check create mode 100644 tests/neg-custom-args/captures/spread-problem.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 30dfe8f8881c..667d91a10330 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -719,6 +719,9 @@ class CheckCaptures extends Recheck, SymTransformer: openClosures = openClosures.tail end recheckClosureBlock + override def seqLiteralElemProto(tree: SeqLiteral, pt: Type, declared: Type)(using Context) = + super.seqLiteralElemProto(tree, pt, declared).boxed + /** Maps mutable variables to the symbols that capture them (in the * CheckCaptures sense, i.e. symbol is referred to from a different method * than the one it is defined in). diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index e63aab484605..c8e00686e62b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2751,6 +2751,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } case tp1: TypeVar if tp1.isInstantiated => lub(tp1.underlying, tp2, isSoft = isSoft) + case CapturingType(parent1, refs1) => + if tp1.isBoxCompatibleWith(tp2) then + tp1.derivedCapturingType(lub(parent1, tp2, isSoft = isSoft), refs1) + else // TODO: Analyze cases where they are not box compatible + NoType case tp1: AnnotatedType if !tp1.isRefining => lub(tp1.underlying, tp2, isSoft = isSoft) case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 4b8a8f072774..03f0001110d3 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -23,6 +23,7 @@ import reporting.trace import annotation.constructorOnly import cc.CaptureSet.IdempotentCaptRefMap import annotation.tailrec +import dotty.tools.dotc.cc.boxed object Recheck: import tpd.* @@ -438,12 +439,16 @@ abstract class Recheck extends Phase, SymTransformer: val finalizerType = recheck(tree.finalizer, defn.UnitType) TypeComparer.lub(bodyType :: casesTypes) + def seqLiteralElemProto(tree: SeqLiteral, pt: Type, declared: Type)(using Context): Type = + declared.orElse: + pt.stripNull().elemType match + case NoType => WildcardType + case bounds: TypeBounds => WildcardType(bounds) + case elemtp => elemtp + def recheckSeqLiteral(tree: SeqLiteral, pt: Type)(using Context): Type = - val elemProto = pt.stripNull().elemType match - case NoType => WildcardType - case bounds: 
TypeBounds => WildcardType(bounds) - case elemtp => elemtp val declaredElemType = recheck(tree.elemtpt) + val elemProto = seqLiteralElemProto(tree, pt, declaredElemType) val elemTypes = tree.elems.map(recheck(_, elemProto)) seqLitType(tree, TypeComparer.lub(declaredElemType :: elemTypes)) diff --git a/tests/neg-custom-args/captures/lazylist.check b/tests/neg-custom-args/captures/lazylist.check index 643ef78841f0..f0fbd1a025b5 100644 --- a/tests/neg-custom-args/captures/lazylist.check +++ b/tests/neg-custom-args/captures/lazylist.check @@ -26,11 +26,11 @@ | Required: lazylists.LazyList[Int]^{cap2} | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:41:48 ------------------------------------- -41 | val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error - | ^^^^ - | Found: (ref4 : lazylists.LazyList[Int]^{cap3, cap2, ref1}) - | Required: lazylists.LazyList[Int]^{cap1, ref3, cap3} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:41:42 ------------------------------------- +41 | val ref4c: LazyList[Int]^{cap1, ref3} = ref4 // error + | ^^^^ + | Found: (ref4 : lazylists.LazyList[Int]^{cap3, ref2, ref1}) + | Required: lazylists.LazyList[Int]^{cap1, ref3} | | longer explanation available when compiling with `-explain` -- [E164] Declaration Error: tests/neg-custom-args/captures/lazylist.scala:22:6 ---------------------------------------- diff --git a/tests/neg-custom-args/captures/lazylist.scala b/tests/neg-custom-args/captures/lazylist.scala index e6e4d003f7ae..f3cd0fd31e7a 100644 --- a/tests/neg-custom-args/captures/lazylist.scala +++ b/tests/neg-custom-args/captures/lazylist.scala @@ -38,4 +38,4 @@ def test(cap1: Cap, cap2: Cap, cap3: Cap) = val ref3 = ref1.map(g) val ref3c: LazyList[Int]^{cap2} = ref3 // error val ref4 = (if cap1 == cap2 then ref1 else ref2).map(h) - val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error + val ref4c: LazyList[Int]^{cap1, ref3} = ref4 // error diff --git a/tests/neg-custom-args/captures/lubs.check b/tests/neg-custom-args/captures/lubs.check new file mode 100644 index 000000000000..b2eaf6ae6f4e --- /dev/null +++ b/tests/neg-custom-args/captures/lubs.check @@ -0,0 +1,21 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lubs.scala:17:13 ----------------------------------------- +17 | val _: D = x1 // error + | ^^ + | Found: (x1 : D^{d1}) + | Required: D + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lubs.scala:18:13 ----------------------------------------- +18 | val _: D = x2 // error + | ^^ + | Found: (x2 : D^{d1}) + | Required: D + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lubs.scala:19:13 ----------------------------------------- +19 | val _: D = x3 // error + | ^^ + | Found: (x3 : D^{d1, d2}) + | Required: D + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/lubs.scala b/tests/neg-custom-args/captures/lubs.scala new file mode 100644 index 000000000000..3a2eb59b48b5 --- /dev/null +++ b/tests/neg-custom-args/captures/lubs.scala @@ -0,0 +1,20 @@ +import java.sql.Date + +class C extends caps.Capability +class D + +def Test(c1: C, c2: C) = + val d: D = ??? + val d1: D^{c1} = ??? + val d2: D^{c2} = ??? + val x1 = if ??? then d else d1 + val _: D^{c1} = x1 + val x2 = if ??? 
then d1 else d + val _: D^{c1} = x2 + val x3 = if ??? then d1 else d2 + val _: D^{c1, c2} = x3 + + val _: D = x1 // error + val _: D = x2 // error + val _: D = x3 // error + diff --git a/tests/neg-custom-args/captures/spread-problem.check b/tests/neg-custom-args/captures/spread-problem.check new file mode 100644 index 000000000000..31cf38a51727 --- /dev/null +++ b/tests/neg-custom-args/captures/spread-problem.check @@ -0,0 +1,14 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/spread-problem.scala:8:6 --------------------------------- +8 | race(Seq(src1, src2)*) // error + | ^^^^^^^^^^^^^^^^^^^^^^ + | Found: Source[box T^?]^{src1, src2} + | Required: Source[T] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/spread-problem.scala:11:6 -------------------------------- +11 | race(src1, src2) // error + | ^^^^^^^^^^^^^^^^ + | Found: Source[box T^?]^{src1, src2} + | Required: Source[T] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/spread-problem.scala b/tests/neg-custom-args/captures/spread-problem.scala new file mode 100644 index 000000000000..579c7817b9c1 --- /dev/null +++ b/tests/neg-custom-args/captures/spread-problem.scala @@ -0,0 +1,11 @@ +import language.experimental.captureChecking + +trait Source[+T] + +def race[T](@caps.unbox sources: (Source[T]^)*): Source[T]^{sources*} = ??? + +def raceTwo[T](src1: Source[T]^, src2: Source[T]^): Source[T]^{} = + race(Seq(src1, src2)*) // error + +def raceThree[T](src1: Source[T]^, src2: Source[T]^): Source[T]^{} = + race(src1, src2) // error \ No newline at end of file From c8d418a1dab011b001dde8d30aff75df52215286 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 14 Jul 2024 11:35:50 +0200 Subject: [PATCH 346/827] Don't do post checks in inlined code Capability references in inlined code might end up not being tracked or being redundant. Don't flag this as an error. --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- tests/pos-custom-args/captures/inline-problem.scala | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 tests/pos-custom-args/captures/inline-problem.scala diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 26817cb838c6..c048edfb2102 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -753,7 +753,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: /** Check well formed at post check time */ private def checkWellformedLater(parent: Type, ann: Tree, tpt: Tree)(using Context): Unit = - if !tpt.span.isZeroExtent then + if !tpt.span.isZeroExtent && enclosingInlineds.isEmpty then todoAtPostCheck += (ctx1 => checkWellformedPost(parent, ann, tpt)(using ctx1.withOwner(ctx.owner))) diff --git a/tests/pos-custom-args/captures/inline-problem.scala b/tests/pos-custom-args/captures/inline-problem.scala new file mode 100644 index 000000000000..78034c20050a --- /dev/null +++ b/tests/pos-custom-args/captures/inline-problem.scala @@ -0,0 +1,5 @@ +trait Listener[+T] + +inline def consume[T](f: T => Unit): Listener[T]^{f} = ??? 
+ +val consumePure = consume(_ => ()) \ No newline at end of file From a2bec5e6bf55d756f279f19112cf6fe3e1a68202 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 10 Jul 2024 12:59:24 +0200 Subject: [PATCH 347/827] add child to parent in completion context This ensures that the positions of forced Child annotations always happens in the source file of the parent class, and not in another file that forced the completion to happen --- .../src/dotty/tools/dotc/typer/Namer.scala | 10 +- tests/pos/i21154/A.scala | 6 + tests/pos/i21154/Z.scala | 9 + tests/pos/i21154/Z.tastycheck | 1340 +++++++++++++++++ 4 files changed, 1361 insertions(+), 4 deletions(-) create mode 100644 tests/pos/i21154/A.scala create mode 100644 tests/pos/i21154/Z.scala create mode 100644 tests/pos/i21154/Z.tastycheck diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 83964417a6f1..4653a1d098af 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -848,7 +848,7 @@ class Namer { typer: Typer => else try completeInCreationContext(denot) - if (denot.isCompleted) registerIfChild(denot) + if (denot.isCompleted) registerIfChildInCreationContext(denot) catch case ex: CompilationUnit.SuspendException => val completer = SuspendCompleter() @@ -937,10 +937,12 @@ class Namer { typer: Typer => denot.markAbsent() end invalidateIfClashingSynthetic - /** If completed symbol is an enum value or a named class, register it as a child + /** Intentionally left without `using Context` parameter. + * This action should be performed in the context of where the completer was created. + * If completed symbol is an enum value or a named class, register it as a child * in all direct parent classes which are sealed. */ - def registerIfChild(denot: SymDenotation)(using Context): Unit = { + def registerIfChildInCreationContext(denot: SymDenotation): Unit = { val sym = denot.symbol def register(child: Symbol, parentCls: ClassSymbol) = { @@ -964,7 +966,7 @@ class Namer { typer: Typer => end if } - /** Intentionally left without `implicit ctx` parameter. We need + /** Intentionally left without `using Context` parameter. We need * to pick up the context at the point where the completer was created. */ def completeInCreationContext(denot: SymDenotation): Unit = { diff --git a/tests/pos/i21154/A.scala b/tests/pos/i21154/A.scala new file mode 100644 index 000000000000..f5fbbc09e272 --- /dev/null +++ b/tests/pos/i21154/A.scala @@ -0,0 +1,6 @@ +import Z.* + +object A: + val a: Option[AOptions] = ??? + val b: Option[BOptions] = ??? + val c: Option[COptions] = ??? diff --git a/tests/pos/i21154/Z.scala b/tests/pos/i21154/Z.scala new file mode 100644 index 000000000000..e631d159f8c5 --- /dev/null +++ b/tests/pos/i21154/Z.scala @@ -0,0 +1,9 @@ +//> using options -Ytest-pickler-check + +// in the original issue https://github.com/scala/scala3/issues/21154, the non-deterministic tasty +// depends on the order of compilation of files, the use-site (A.scala) has to come first, +// and the file defining the enum has to come second (Z.scala), A.scala in namer will force Z to complete. 
+enum Z: + case AOptions() + case BOptions() + case COptions() diff --git a/tests/pos/i21154/Z.tastycheck b/tests/pos/i21154/Z.tastycheck new file mode 100644 index 000000000000..ac45ca6c4a53 --- /dev/null +++ b/tests/pos/i21154/Z.tastycheck @@ -0,0 +1,1340 @@ +Header: + version: + tooling: + UUID: + +Names (936 bytes, starting from ): + 0: ASTs + 1: + 2: Z + 3: + 4: java + 5: lang + 6: java[Qualified . lang] + 7: Object + 8: java[Qualified . lang][Qualified . Object] + 9: [Signed Signature(List(),java.lang.Object) @] + 10: Enum + 11: scala + 12: reflect + 13: scala[Qualified . reflect] + 14: Unit + 15: AOptions + 16: BOptions + 17: COptions + 18: SourceFile + 19: annotation + 20: scala[Qualified . annotation] + 21: internal + 22: scala[Qualified . annotation][Qualified . internal] + 23: scala[Qualified . annotation][Qualified . internal][Qualified . SourceFile] + 24: String + 25: java[Qualified . lang][Qualified . String] + 26: [Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @] + 27: + 28: Child + 29: scala[Qualified . annotation][Qualified . internal][Qualified . Child] + 30: [Signed Signature(List(1),scala.annotation.internal.Child) @] + 31: Z[ModuleClass] + 32: [Signed Signature(List(),Z$) @] + 33: AnyRef + 34: Sum + 35: Mirror + 36: Mirror[ModuleClass] + 37: deriving + 38: scala[Qualified . deriving] + 39: _ + 40: writeReplace + 41: runtime + 42: scala[Qualified . runtime] + 43: ModuleSerializationProxy + 44: scala[Qualified . runtime][Qualified . ModuleSerializationProxy] + 45: Class + 46: java[Qualified . lang][Qualified . Class] + 47: [Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @] + 48: [Signed Signature(List(),Z) @] + 49: hashCode + 50: Int + 51: _hashCode + 52: scala[Qualified . Int] + 53: Product + 54: scala[Qualified . Product] + 55: _hashCode[Signed Signature(List(scala.Product),scala.Int) @_hashCode] + 56: ScalaRunTime + 57: ScalaRunTime[ModuleClass] + 58: equals + 59: x$0 + 60: Any + 61: Boolean + 62: || + 63: scala[Qualified . Boolean] + 64: ||[Signed Signature(List(scala.Boolean),scala.Boolean) @||] + 65: eq + 66: eq[Signed Signature(List(java.lang.Object),scala.Boolean) @eq] + 67: $asInstanceOf$ + 68: $asInstanceOf$[Signed Signature(List(1),java.lang.Object) @$asInstanceOf$] + 69: unchecked + 70: scala[Qualified . unchecked] + 71: [Signed Signature(List(),scala.unchecked) @] + 72: toString + 73: _toString + 74: _toString[Signed Signature(List(scala.Product),java.lang.String) @_toString] + 75: canEqual + 76: that + 77: isInstanceOf + 78: isInstanceOf[Signed Signature(List(1),scala.Boolean) @isInstanceOf] + 79: productArity + 80: productPrefix + 81: Predef + 82: productElement + 83: n + 84: IndexOutOfBoundsException + 85: java[Qualified . lang][Qualified . IndexOutOfBoundsException] + 86: [Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @] + 87: toString[Signed Signature(List(),java.lang.String) @toString] + 88: productElementName + 89: copy + 90: Z[ModuleClass][Qualified . AOptions] + 91: [Signed Signature(List(),Z$.AOptions) @] + 92: ordinal + 93: AOptions[ModuleClass] + 94: Z[ModuleClass][Qualified . AOptions][ModuleClass] + 95: [Signed Signature(List(),Z$.AOptions$) @] + 96: apply + 97: unapply + 98: x$1 + 99: MirroredMonoType + 100: fromProduct + 101: Z[ModuleClass][Qualified . BOptions] + 102: [Signed Signature(List(),Z$.BOptions) @] + 103: BOptions[ModuleClass] + 104: Z[ModuleClass][Qualified . 
BOptions][ModuleClass] + 105: [Signed Signature(List(),Z$.BOptions$) @] + 106: Z[ModuleClass][Qualified . COptions] + 107: [Signed Signature(List(),Z$.COptions) @] + 108: COptions[ModuleClass] + 109: Z[ModuleClass][Qualified . COptions][ModuleClass] + 110: [Signed Signature(List(),Z$.COptions$) @] + 111: fromOrdinal + 112: util + 113: java[Qualified . util] + 114: NoSuchElementException + 115: java[Qualified . util][Qualified . NoSuchElementException] + 116: [Signed Signature(List(java.lang.String),java.util.NoSuchElementException) @] + 117: + + 118: +[Signed Signature(List(java.lang.Object),java.lang.String) @+] + 119: enum Z has no case with ordinal: + 120: Positions + 121: Comments + 122: Attributes + +Trees (1886 bytes, starting from ): + 0: PACKAGE(1883) + 3: TERMREFpkg 1 [] + 5: TYPEDEF(132) 2 [Z] + 9: TEMPLATE(39) + 11: APPLY(10) + 13: SELECTin(8) 9 [[Signed Signature(List(),java.lang.Object) @]] + 16: NEW + 17: TYPEREF 7 [Object] + 19: TERMREFpkg 6 [java[Qualified . lang]] + 21: SHAREDtype 17 + 23: TYPEREF 10 [Enum] + 25: TERMREFpkg 13 [scala[Qualified . reflect]] + 27: DEFDEF(7) 3 [] + 30: EMPTYCLAUSE + 31: TYPEREF 14 [Unit] + 33: TERMREFpkg 11 [scala] + 35: STABLE + 36: IMPORT(12) + 38: TERMREFsymbol 140 + 41: THIS + 42: TYPEREFpkg 1 [] + 44: IMPORTED 15 [AOptions] + 46: IMPORTED 16 [BOptions] + 48: IMPORTED 17 [COptions] + 50: ENUM + 51: SEALED + 52: ABSTRACT + 53: ANNOTATION(16) + 55: TYPEREF 18 [SourceFile] + 57: TERMREFpkg 22 [scala[Qualified . annotation][Qualified . internal]] + 59: APPLY(10) + 61: SELECTin(6) 26 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] + 64: NEW + 65: SHAREDtype 55 + 67: SHAREDtype 55 + 69: STRINGconst 27 [] + 71: ANNOTATION(25) + 73: TYPEREF 28 [Child] + 75: SHAREDtype 57 + 77: APPLY(19) + 79: TYPEAPPLY(17) + 81: SELECTin(6) 30 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] + 84: NEW + 85: SHAREDtype 73 + 87: SHAREDtype 73 + 89: TYPEREFsymbol 1280 + 92: THIS + 93: TYPEREFsymbol 160 + 96: SHAREDtype 41 + 98: ANNOTATION(19) + 100: SHAREDtype 73 + 102: APPLY(15) + 104: TYPEAPPLY(13) + 106: SELECTin(6) 30 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] + 109: NEW + 110: SHAREDtype 73 + 112: SHAREDtype 73 + 114: TYPEREFsymbol 769 + 117: SHAREDtype 92 + 119: ANNOTATION(19) + 121: SHAREDtype 73 + 123: APPLY(15) + 125: TYPEAPPLY(13) + 127: SELECTin(6) 30 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] + 130: NEW + 131: SHAREDtype 73 + 133: SHAREDtype 73 + 135: TYPEREFsymbol 223 + 138: SHAREDtype 92 + 140: VALDEF(18) 2 [Z] + 143: IDENTtpt 31 [Z[ModuleClass]] + 145: SHAREDtype 93 + 147: APPLY(9) + 149: SELECTin(7) 32 [[Signed Signature(List(),Z$) @]] + 152: NEW + 153: SHAREDterm 143 + 156: SHAREDtype 93 + 158: OBJECT + 159: SYNTHETIC + 160: TYPEDEF(1723) 31 [Z[ModuleClass]] + 164: TEMPLATE(1701) + 167: APPLY(10) + 169: SELECTin(8) 9 [[Signed Signature(List(),java.lang.Object) @]] + 172: NEW + 173: TYPEREF 33 [AnyRef] + 175: SHAREDtype 33 + 177: SHAREDtype 17 + 179: TYPEREF 34 [Sum] + 181: THIS + 182: TYPEREF 36 [Mirror[ModuleClass]] + 184: TERMREFpkg 38 [scala[Qualified . 
deriving]] + 186: SELFDEF 39 [_] + 188: SINGLETONtpt + 189: SHAREDtype 38 + 191: DEFDEF(5) 3 [] + 194: EMPTYCLAUSE + 195: SHAREDtype 31 + 197: STABLE + 198: DEFDEF(23) 40 [writeReplace] + 201: EMPTYCLAUSE + 202: SHAREDtype 173 + 205: APPLY(14) + 207: SELECTin(9) 47 [[Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] + 210: NEW + 211: TYPEREF 43 [ModuleSerializationProxy] + 213: TERMREFpkg 42 [scala[Qualified . runtime]] + 215: SHAREDtype 211 + 218: CLASSconst + 219: SHAREDtype 38 + 221: PRIVATE + 222: SYNTHETIC + 223: TYPEDEF(369) 15 [AOptions] + 227: TEMPLATE(362) + 230: APPLY(11) + 232: SELECTin(9) 48 [[Signed Signature(List(),Z) @]] + 235: NEW + 236: TYPEREFsymbol 5 + 238: SHAREDtype 41 + 240: SHAREDtype 236 + 243: DEFDEF(5) 3 [] + 246: EMPTYCLAUSE + 247: SHAREDtype 31 + 249: STABLE + 250: DEFDEF(24) 49 [hashCode] + 253: EMPTYCLAUSE + 254: TYPEREF 50 [Int] + 256: SHAREDtype 33 + 258: APPLY(14) + 260: TERMREF 55 [_hashCode[Signed Signature(List(scala.Product),scala.Int) @_hashCode]] + 262: THIS + 263: TYPEREF 57 [ScalaRunTime[ModuleClass]] + 265: SHAREDtype 213 + 268: QUALTHIS + 269: IDENTtpt 15 [AOptions] + 271: SHAREDtype 135 + 274: OVERRIDE + 275: SYNTHETIC + 276: DEFDEF(96) 58 [equals] + 279: PARAM(5) 59 [x$0] + 282: TYPEREF 60 [Any] + 284: SHAREDtype 33 + 286: TYPEREF 61 [Boolean] + 288: SHAREDtype 33 + 290: APPLY(80) + 292: SELECTin(30) 64 [||[Signed Signature(List(scala.Boolean),scala.Boolean) @||]] + 295: APPLY(24) + 297: SELECTin(9) 66 [eq[Signed Signature(List(java.lang.Object),scala.Boolean) @eq]] + 300: QUALTHIS + 301: IDENTtpt 15 [AOptions] + 303: SHAREDtype 135 + 306: SHAREDtype 17 + 308: TYPEAPPLY(11) + 310: SELECTin(7) 68 [$asInstanceOf$[Signed Signature(List(1),java.lang.Object) @$asInstanceOf$]] + 313: TERMREFdirect 279 + 316: SHAREDtype 282 + 319: SHAREDtype 17 + 321: SHAREDtype 286 + 324: MATCH(46) + 326: SHAREDterm 313 + 329: CASEDEF(33) + 331: BIND(30) 59 [x$0] + 334: SHAREDtype 135 + 337: TYPED(23) + 339: IDENT 39 [_] + 341: ANNOTATEDtype(16) + 343: SHAREDtype 135 + 346: APPLY(11) + 348: SELECTin(9) 71 [[Signed Signature(List(),scala.unchecked) @]] + 351: NEW + 352: TYPEREF 69 [unchecked] + 354: SHAREDtype 33 + 356: SHAREDtype 352 + 359: SHAREDtype 341 + 362: SYNTHETIC + 363: TRUEconst + 364: CASEDEF(6) + 366: IDENT 39 [_] + 368: SHAREDtype 282 + 371: FALSEconst + 372: OVERRIDE + 373: SYNTHETIC + 374: DEFDEF(21) 72 [toString] + 377: EMPTYCLAUSE + 378: TYPEREF 24 [String] + 380: SHAREDtype 19 + 382: APPLY(11) + 384: TERMREF 74 [_toString[Signed Signature(List(scala.Product),java.lang.String) @_toString]] + 386: SHAREDtype 262 + 389: QUALTHIS + 390: IDENTtpt 15 [AOptions] + 392: SHAREDtype 135 + 395: OVERRIDE + 396: SYNTHETIC + 397: DEFDEF(40) 75 [canEqual] + 400: PARAM(4) 76 [that] + 403: SHAREDtype 282 + 406: SHAREDtype 286 + 409: TYPEAPPLY(26) + 411: SELECTin(7) 78 [isInstanceOf[Signed Signature(List(1),scala.Boolean) @isInstanceOf]] + 414: TERMREFdirect 400 + 417: SHAREDtype 282 + 420: ANNOTATEDtype(15) + 422: SHAREDtype 135 + 425: APPLY(10) + 427: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] + 430: NEW + 431: SHAREDtype 352 + 434: SHAREDtype 352 + 437: OVERRIDE + 438: SYNTHETIC + 439: DEFDEF(8) 79 [productArity] + 442: SHAREDtype 254 + 445: INTconst 0 + 447: OVERRIDE + 448: SYNTHETIC + 449: DEFDEF(11) 80 [productPrefix] + 452: TYPEREF 24 [String] + 454: TERMREF 81 [Predef] + 456: SHAREDtype 33 + 458: STRINGconst 15 [AOptions] + 460: OVERRIDE + 461: SYNTHETIC + 462: DEFDEF(49) 82 [productElement] + 465: PARAM(4) 
83 [n] + 468: SHAREDtype 254 + 471: SHAREDtype 282 + 474: MATCH(35) + 476: TERMREFdirect 465 + 479: CASEDEF(30) + 481: IDENT 39 [_] + 483: SHAREDtype 254 + 486: THROW + 487: APPLY(22) + 489: SELECTin(9) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] + 492: NEW + 493: TYPEREF 84 [IndexOutOfBoundsException] + 495: SHAREDtype 19 + 497: SHAREDtype 493 + 500: APPLY(9) + 502: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] + 505: SHAREDterm 476 + 508: SHAREDtype 282 + 511: OVERRIDE + 512: SYNTHETIC + 513: DEFDEF(48) 88 [productElementName] + 516: PARAM(4) 83 [n] + 519: SHAREDtype 254 + 522: SHAREDtype 452 + 525: MATCH(34) + 527: TERMREFdirect 516 + 530: CASEDEF(29) + 532: IDENT 39 [_] + 534: SHAREDtype 254 + 537: THROW + 538: APPLY(21) + 540: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] + 543: NEW + 544: SHAREDtype 493 + 547: SHAREDtype 493 + 550: APPLY(9) + 552: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] + 555: SHAREDterm 527 + 558: SHAREDtype 282 + 561: OVERRIDE + 562: SYNTHETIC + 563: DEFDEF(18) 89 [copy] + 566: EMPTYCLAUSE + 567: SHAREDtype 135 + 570: APPLY(10) + 572: SELECTin(8) 91 [[Signed Signature(List(),Z$.AOptions) @]] + 575: NEW + 576: SHAREDtype 135 + 579: SHAREDtype 135 + 582: SYNTHETIC + 583: DEFDEF(7) 92 [ordinal] + 586: SHAREDtype 254 + 589: INTconst 0 + 591: SYNTHETIC + 592: FINAL + 593: CASE + 594: ENUM + 595: VALDEF(22) 15 [AOptions] + 598: IDENTtpt 93 [AOptions[ModuleClass]] + 600: TYPEREFsymbol 619 + 603: SHAREDtype 92 + 605: APPLY(10) + 607: SELECTin(8) 95 [[Signed Signature(List(),Z$.AOptions$) @]] + 610: NEW + 611: SHAREDterm 598 + 614: SHAREDtype 600 + 617: OBJECT + 618: SYNTHETIC + 619: TYPEDEF(147) 93 [AOptions[ModuleClass]] + 623: TEMPLATE(141) + 626: APPLY(9) + 628: SELECTin(7) 9 [[Signed Signature(List(),java.lang.Object) @]] + 631: NEW + 632: SHAREDtype 173 + 635: SHAREDtype 17 + 637: TYPEREF 53 [Product] + 639: SHAREDtype 181 + 642: SELFDEF 39 [_] + 644: SINGLETONtpt + 645: TERMREFsymbol 595 + 648: SHAREDtype 92 + 650: DEFDEF(5) 3 [] + 653: EMPTYCLAUSE + 654: SHAREDtype 31 + 656: STABLE + 657: DEFDEF(23) 40 [writeReplace] + 660: EMPTYCLAUSE + 661: SHAREDtype 173 + 664: APPLY(14) + 666: SELECTin(8) 47 [[Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] + 669: NEW + 670: SHAREDtype 211 + 673: SHAREDtype 211 + 676: CLASSconst + 677: SHAREDtype 645 + 680: PRIVATE + 681: SYNTHETIC + 682: DEFDEF(18) 96 [apply] + 685: EMPTYCLAUSE + 686: SHAREDtype 135 + 689: APPLY(10) + 691: SELECTin(8) 91 [[Signed Signature(List(),Z$.AOptions) @]] + 694: NEW + 695: SHAREDtype 135 + 698: SHAREDtype 135 + 701: SYNTHETIC + 702: DEFDEF(12) 97 [unapply] + 705: PARAM(5) 98 [x$1] + 708: SHAREDtype 135 + 711: SYNTHETIC + 712: SINGLETONtpt + 713: TRUEconst + 714: TRUEconst + 715: SYNTHETIC + 716: DEFDEF(8) 72 [toString] + 719: SHAREDtype 378 + 722: STRINGconst 15 [AOptions] + 724: OVERRIDE + 725: SYNTHETIC + 726: TYPEDEF(9) 99 [MirroredMonoType] + 729: TYPEBOUNDS(5) + 731: TYPEREFsymbol 223 + 734: SHAREDtype 38 + 736: SYNTHETIC + 737: DEFDEF(28) 100 [fromProduct] + 740: PARAM(5) 59 [x$0] + 743: TYPEREF 53 [Product] + 745: SHAREDtype 33 + 747: TYPEREFsymbol 726 + 750: THIS + 751: SHAREDtype 600 + 754: APPLY(10) + 756: SELECTin(8) 91 [[Signed Signature(List(),Z$.AOptions) @]] + 759: NEW + 760: SHAREDtype 135 + 763: SHAREDtype 135 + 766: SYNTHETIC + 767: OBJECT + 768: SYNTHETIC + 769: TYPEDEF(343) 16 
[BOptions] + 773: TEMPLATE(336) + 776: APPLY(10) + 778: SELECTin(8) 48 [[Signed Signature(List(),Z) @]] + 781: NEW + 782: SHAREDtype 236 + 785: SHAREDtype 236 + 788: DEFDEF(5) 3 [] + 791: EMPTYCLAUSE + 792: SHAREDtype 31 + 794: STABLE + 795: DEFDEF(17) 49 [hashCode] + 798: EMPTYCLAUSE + 799: SHAREDtype 254 + 802: APPLY(8) + 804: SHAREDtype 260 + 807: QUALTHIS + 808: IDENTtpt 16 [BOptions] + 810: SHAREDtype 114 + 812: OVERRIDE + 813: SYNTHETIC + 814: DEFDEF(90) 58 [equals] + 817: PARAM(4) 59 [x$0] + 820: SHAREDtype 282 + 823: SHAREDtype 286 + 826: APPLY(76) + 828: SELECTin(29) 64 [||[Signed Signature(List(scala.Boolean),scala.Boolean) @||]] + 831: APPLY(23) + 833: SELECTin(8) 66 [eq[Signed Signature(List(java.lang.Object),scala.Boolean) @eq]] + 836: QUALTHIS + 837: IDENTtpt 16 [BOptions] + 839: SHAREDtype 114 + 841: SHAREDtype 17 + 843: TYPEAPPLY(11) + 845: SELECTin(7) 68 [$asInstanceOf$[Signed Signature(List(1),java.lang.Object) @$asInstanceOf$]] + 848: TERMREFdirect 817 + 851: SHAREDtype 282 + 854: SHAREDtype 17 + 856: SHAREDtype 286 + 859: MATCH(43) + 861: SHAREDterm 848 + 864: CASEDEF(30) + 866: BIND(27) 59 [x$0] + 869: SHAREDtype 114 + 871: TYPED(21) + 873: IDENT 39 [_] + 875: ANNOTATEDtype(14) + 877: SHAREDtype 114 + 879: APPLY(10) + 881: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] + 884: NEW + 885: SHAREDtype 352 + 888: SHAREDtype 352 + 891: SHAREDtype 875 + 894: SYNTHETIC + 895: TRUEconst + 896: CASEDEF(6) + 898: IDENT 39 [_] + 900: SHAREDtype 282 + 903: FALSEconst + 904: OVERRIDE + 905: SYNTHETIC + 906: DEFDEF(17) 72 [toString] + 909: EMPTYCLAUSE + 910: SHAREDtype 378 + 913: APPLY(8) + 915: SHAREDtype 384 + 918: QUALTHIS + 919: IDENTtpt 16 [BOptions] + 921: SHAREDtype 114 + 923: OVERRIDE + 924: SYNTHETIC + 925: DEFDEF(39) 75 [canEqual] + 928: PARAM(4) 76 [that] + 931: SHAREDtype 282 + 934: SHAREDtype 286 + 937: TYPEAPPLY(25) + 939: SELECTin(7) 78 [isInstanceOf[Signed Signature(List(1),scala.Boolean) @isInstanceOf]] + 942: TERMREFdirect 928 + 945: SHAREDtype 282 + 948: ANNOTATEDtype(14) + 950: SHAREDtype 114 + 952: APPLY(10) + 954: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] + 957: NEW + 958: SHAREDtype 352 + 961: SHAREDtype 352 + 964: OVERRIDE + 965: SYNTHETIC + 966: DEFDEF(8) 79 [productArity] + 969: SHAREDtype 254 + 972: INTconst 0 + 974: OVERRIDE + 975: SYNTHETIC + 976: DEFDEF(8) 80 [productPrefix] + 979: SHAREDtype 452 + 982: STRINGconst 16 [BOptions] + 984: OVERRIDE + 985: SYNTHETIC + 986: DEFDEF(48) 82 [productElement] + 989: PARAM(4) 83 [n] + 992: SHAREDtype 254 + 995: SHAREDtype 282 + 998: MATCH(34) + 1000: TERMREFdirect 989 + 1003: CASEDEF(29) + 1005: IDENT 39 [_] + 1007: SHAREDtype 254 + 1010: THROW + 1011: APPLY(21) + 1013: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] + 1016: NEW + 1017: SHAREDtype 493 + 1020: SHAREDtype 493 + 1023: APPLY(9) + 1025: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] + 1028: SHAREDterm 1000 + 1031: SHAREDtype 282 + 1034: OVERRIDE + 1035: SYNTHETIC + 1036: DEFDEF(48) 88 [productElementName] + 1039: PARAM(4) 83 [n] + 1042: SHAREDtype 254 + 1045: SHAREDtype 452 + 1048: MATCH(34) + 1050: TERMREFdirect 1039 + 1053: CASEDEF(29) + 1055: IDENT 39 [_] + 1057: SHAREDtype 254 + 1060: THROW + 1061: APPLY(21) + 1063: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] + 1066: NEW + 1067: SHAREDtype 493 + 1070: SHAREDtype 493 + 1073: APPLY(9) + 1075: SELECTin(7) 87 [toString[Signed 
Signature(List(),java.lang.String) @toString]] + 1078: SHAREDterm 1050 + 1081: SHAREDtype 282 + 1084: OVERRIDE + 1085: SYNTHETIC + 1086: DEFDEF(15) 89 [copy] + 1089: EMPTYCLAUSE + 1090: SHAREDtype 114 + 1092: APPLY(8) + 1094: SELECTin(6) 102 [[Signed Signature(List(),Z$.BOptions) @]] + 1097: NEW + 1098: SHAREDtype 114 + 1100: SHAREDtype 114 + 1102: SYNTHETIC + 1103: DEFDEF(7) 92 [ordinal] + 1106: SHAREDtype 254 + 1109: INTconst 1 + 1111: SYNTHETIC + 1112: FINAL + 1113: CASE + 1114: ENUM + 1115: VALDEF(22) 16 [BOptions] + 1118: IDENTtpt 103 [BOptions[ModuleClass]] + 1120: TYPEREFsymbol 1139 + 1123: SHAREDtype 92 + 1125: APPLY(10) + 1127: SELECTin(8) 105 [[Signed Signature(List(),Z$.BOptions$) @]] + 1130: NEW + 1131: SHAREDterm 1118 + 1134: SHAREDtype 1120 + 1137: OBJECT + 1138: SYNTHETIC + 1139: TYPEDEF(138) 103 [BOptions[ModuleClass]] + 1143: TEMPLATE(132) + 1146: APPLY(9) + 1148: SELECTin(7) 9 [[Signed Signature(List(),java.lang.Object) @]] + 1151: NEW + 1152: SHAREDtype 173 + 1155: SHAREDtype 17 + 1157: SHAREDtype 637 + 1160: SELFDEF 39 [_] + 1162: SINGLETONtpt + 1163: TERMREFsymbol 1115 + 1166: SHAREDtype 92 + 1168: DEFDEF(5) 3 [] + 1171: EMPTYCLAUSE + 1172: SHAREDtype 31 + 1174: STABLE + 1175: DEFDEF(23) 40 [writeReplace] + 1178: EMPTYCLAUSE + 1179: SHAREDtype 173 + 1182: APPLY(14) + 1184: SELECTin(8) 47 [[Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] + 1187: NEW + 1188: SHAREDtype 211 + 1191: SHAREDtype 211 + 1194: CLASSconst + 1195: SHAREDtype 1163 + 1198: PRIVATE + 1199: SYNTHETIC + 1200: DEFDEF(15) 96 [apply] + 1203: EMPTYCLAUSE + 1204: SHAREDtype 114 + 1206: APPLY(8) + 1208: SELECTin(6) 102 [[Signed Signature(List(),Z$.BOptions) @]] + 1211: NEW + 1212: SHAREDtype 114 + 1214: SHAREDtype 114 + 1216: SYNTHETIC + 1217: DEFDEF(11) 97 [unapply] + 1220: PARAM(4) 98 [x$1] + 1223: SHAREDtype 114 + 1225: SYNTHETIC + 1226: SINGLETONtpt + 1227: TRUEconst + 1228: TRUEconst + 1229: SYNTHETIC + 1230: DEFDEF(8) 72 [toString] + 1233: SHAREDtype 378 + 1236: STRINGconst 16 [BOptions] + 1238: OVERRIDE + 1239: SYNTHETIC + 1240: TYPEDEF(9) 99 [MirroredMonoType] + 1243: TYPEBOUNDS(5) + 1245: TYPEREFsymbol 769 + 1248: SHAREDtype 38 + 1250: SYNTHETIC + 1251: DEFDEF(25) 100 [fromProduct] + 1254: PARAM(4) 59 [x$0] + 1257: SHAREDtype 743 + 1260: TYPEREFsymbol 1240 + 1263: THIS + 1264: SHAREDtype 1120 + 1267: APPLY(8) + 1269: SELECTin(6) 102 [[Signed Signature(List(),Z$.BOptions) @]] + 1272: NEW + 1273: SHAREDtype 114 + 1275: SHAREDtype 114 + 1277: SYNTHETIC + 1278: OBJECT + 1279: SYNTHETIC + 1280: TYPEDEF(343) 17 [COptions] + 1284: TEMPLATE(336) + 1287: APPLY(10) + 1289: SELECTin(8) 48 [[Signed Signature(List(),Z) @]] + 1292: NEW + 1293: SHAREDtype 236 + 1296: SHAREDtype 236 + 1299: DEFDEF(5) 3 [] + 1302: EMPTYCLAUSE + 1303: SHAREDtype 31 + 1305: STABLE + 1306: DEFDEF(17) 49 [hashCode] + 1309: EMPTYCLAUSE + 1310: SHAREDtype 254 + 1313: APPLY(8) + 1315: SHAREDtype 260 + 1318: QUALTHIS + 1319: IDENTtpt 17 [COptions] + 1321: SHAREDtype 89 + 1323: OVERRIDE + 1324: SYNTHETIC + 1325: DEFDEF(90) 58 [equals] + 1328: PARAM(4) 59 [x$0] + 1331: SHAREDtype 282 + 1334: SHAREDtype 286 + 1337: APPLY(76) + 1339: SELECTin(29) 64 [||[Signed Signature(List(scala.Boolean),scala.Boolean) @||]] + 1342: APPLY(23) + 1344: SELECTin(8) 66 [eq[Signed Signature(List(java.lang.Object),scala.Boolean) @eq]] + 1347: QUALTHIS + 1348: IDENTtpt 17 [COptions] + 1350: SHAREDtype 89 + 1352: SHAREDtype 17 + 1354: TYPEAPPLY(11) + 1356: SELECTin(7) 68 [$asInstanceOf$[Signed Signature(List(1),java.lang.Object) 
@$asInstanceOf$]] + 1359: TERMREFdirect 1328 + 1362: SHAREDtype 282 + 1365: SHAREDtype 17 + 1367: SHAREDtype 286 + 1370: MATCH(43) + 1372: SHAREDterm 1359 + 1375: CASEDEF(30) + 1377: BIND(27) 59 [x$0] + 1380: SHAREDtype 89 + 1382: TYPED(21) + 1384: IDENT 39 [_] + 1386: ANNOTATEDtype(14) + 1388: SHAREDtype 89 + 1390: APPLY(10) + 1392: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] + 1395: NEW + 1396: SHAREDtype 352 + 1399: SHAREDtype 352 + 1402: SHAREDtype 1386 + 1405: SYNTHETIC + 1406: TRUEconst + 1407: CASEDEF(6) + 1409: IDENT 39 [_] + 1411: SHAREDtype 282 + 1414: FALSEconst + 1415: OVERRIDE + 1416: SYNTHETIC + 1417: DEFDEF(17) 72 [toString] + 1420: EMPTYCLAUSE + 1421: SHAREDtype 378 + 1424: APPLY(8) + 1426: SHAREDtype 384 + 1429: QUALTHIS + 1430: IDENTtpt 17 [COptions] + 1432: SHAREDtype 89 + 1434: OVERRIDE + 1435: SYNTHETIC + 1436: DEFDEF(39) 75 [canEqual] + 1439: PARAM(4) 76 [that] + 1442: SHAREDtype 282 + 1445: SHAREDtype 286 + 1448: TYPEAPPLY(25) + 1450: SELECTin(7) 78 [isInstanceOf[Signed Signature(List(1),scala.Boolean) @isInstanceOf]] + 1453: TERMREFdirect 1439 + 1456: SHAREDtype 282 + 1459: ANNOTATEDtype(14) + 1461: SHAREDtype 89 + 1463: APPLY(10) + 1465: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] + 1468: NEW + 1469: SHAREDtype 352 + 1472: SHAREDtype 352 + 1475: OVERRIDE + 1476: SYNTHETIC + 1477: DEFDEF(8) 79 [productArity] + 1480: SHAREDtype 254 + 1483: INTconst 0 + 1485: OVERRIDE + 1486: SYNTHETIC + 1487: DEFDEF(8) 80 [productPrefix] + 1490: SHAREDtype 452 + 1493: STRINGconst 17 [COptions] + 1495: OVERRIDE + 1496: SYNTHETIC + 1497: DEFDEF(48) 82 [productElement] + 1500: PARAM(4) 83 [n] + 1503: SHAREDtype 254 + 1506: SHAREDtype 282 + 1509: MATCH(34) + 1511: TERMREFdirect 1500 + 1514: CASEDEF(29) + 1516: IDENT 39 [_] + 1518: SHAREDtype 254 + 1521: THROW + 1522: APPLY(21) + 1524: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] + 1527: NEW + 1528: SHAREDtype 493 + 1531: SHAREDtype 493 + 1534: APPLY(9) + 1536: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] + 1539: SHAREDterm 1511 + 1542: SHAREDtype 282 + 1545: OVERRIDE + 1546: SYNTHETIC + 1547: DEFDEF(48) 88 [productElementName] + 1550: PARAM(4) 83 [n] + 1553: SHAREDtype 254 + 1556: SHAREDtype 452 + 1559: MATCH(34) + 1561: TERMREFdirect 1550 + 1564: CASEDEF(29) + 1566: IDENT 39 [_] + 1568: SHAREDtype 254 + 1571: THROW + 1572: APPLY(21) + 1574: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] + 1577: NEW + 1578: SHAREDtype 493 + 1581: SHAREDtype 493 + 1584: APPLY(9) + 1586: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] + 1589: SHAREDterm 1561 + 1592: SHAREDtype 282 + 1595: OVERRIDE + 1596: SYNTHETIC + 1597: DEFDEF(15) 89 [copy] + 1600: EMPTYCLAUSE + 1601: SHAREDtype 89 + 1603: APPLY(8) + 1605: SELECTin(6) 107 [[Signed Signature(List(),Z$.COptions) @]] + 1608: NEW + 1609: SHAREDtype 89 + 1611: SHAREDtype 89 + 1613: SYNTHETIC + 1614: DEFDEF(7) 92 [ordinal] + 1617: SHAREDtype 254 + 1620: INTconst 2 + 1622: SYNTHETIC + 1623: FINAL + 1624: CASE + 1625: ENUM + 1626: VALDEF(22) 17 [COptions] + 1629: IDENTtpt 108 [COptions[ModuleClass]] + 1631: TYPEREFsymbol 1650 + 1634: SHAREDtype 92 + 1636: APPLY(10) + 1638: SELECTin(8) 110 [[Signed Signature(List(),Z$.COptions$) @]] + 1641: NEW + 1642: SHAREDterm 1629 + 1645: SHAREDtype 1631 + 1648: OBJECT + 1649: SYNTHETIC + 1650: TYPEDEF(138) 108 [COptions[ModuleClass]] + 1654: TEMPLATE(132) + 1657: APPLY(9) 
+ 1659: SELECTin(7) 9 [[Signed Signature(List(),java.lang.Object) @]] + 1662: NEW + 1663: SHAREDtype 173 + 1666: SHAREDtype 17 + 1668: SHAREDtype 637 + 1671: SELFDEF 39 [_] + 1673: SINGLETONtpt + 1674: TERMREFsymbol 1626 + 1677: SHAREDtype 92 + 1679: DEFDEF(5) 3 [] + 1682: EMPTYCLAUSE + 1683: SHAREDtype 31 + 1685: STABLE + 1686: DEFDEF(23) 40 [writeReplace] + 1689: EMPTYCLAUSE + 1690: SHAREDtype 173 + 1693: APPLY(14) + 1695: SELECTin(8) 47 [[Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] + 1698: NEW + 1699: SHAREDtype 211 + 1702: SHAREDtype 211 + 1705: CLASSconst + 1706: SHAREDtype 1674 + 1709: PRIVATE + 1710: SYNTHETIC + 1711: DEFDEF(15) 96 [apply] + 1714: EMPTYCLAUSE + 1715: SHAREDtype 89 + 1717: APPLY(8) + 1719: SELECTin(6) 107 [[Signed Signature(List(),Z$.COptions) @]] + 1722: NEW + 1723: SHAREDtype 89 + 1725: SHAREDtype 89 + 1727: SYNTHETIC + 1728: DEFDEF(11) 97 [unapply] + 1731: PARAM(4) 98 [x$1] + 1734: SHAREDtype 89 + 1736: SYNTHETIC + 1737: SINGLETONtpt + 1738: TRUEconst + 1739: TRUEconst + 1740: SYNTHETIC + 1741: DEFDEF(8) 72 [toString] + 1744: SHAREDtype 378 + 1747: STRINGconst 17 [COptions] + 1749: OVERRIDE + 1750: SYNTHETIC + 1751: TYPEDEF(9) 99 [MirroredMonoType] + 1754: TYPEBOUNDS(5) + 1756: TYPEREFsymbol 1280 + 1759: SHAREDtype 38 + 1761: SYNTHETIC + 1762: DEFDEF(25) 100 [fromProduct] + 1765: PARAM(4) 59 [x$0] + 1768: SHAREDtype 743 + 1771: TYPEREFsymbol 1751 + 1774: THIS + 1775: SHAREDtype 1631 + 1778: APPLY(8) + 1780: SELECTin(6) 107 [[Signed Signature(List(),Z$.COptions) @]] + 1783: NEW + 1784: SHAREDtype 89 + 1786: SHAREDtype 89 + 1788: SYNTHETIC + 1789: OBJECT + 1790: SYNTHETIC + 1791: DEFDEF(46) 111 [fromOrdinal] + 1794: PARAM(4) 92 [ordinal] + 1797: SHAREDtype 254 + 1800: SHAREDtype 236 + 1803: THROW + 1804: APPLY(32) + 1806: SELECTin(9) 116 [[Signed Signature(List(java.lang.String),java.util.NoSuchElementException) @]] + 1809: NEW + 1810: TYPEREF 114 [NoSuchElementException] + 1812: TERMREFpkg 113 [java[Qualified . util]] + 1814: SHAREDtype 1810 + 1817: APPLY(19) + 1819: SELECTin(6) 118 [+[Signed Signature(List(java.lang.Object),java.lang.String) @+]] + 1822: STRINGconst 119 [enum Z has no case with ordinal: ] + 1824: SHAREDtype 378 + 1827: APPLY(9) + 1829: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] + 1832: TERMREFdirect 1794 + 1835: SHAREDtype 282 + 1838: SYNTHETIC + 1839: TYPEDEF(7) 99 [MirroredMonoType] + 1842: TYPEBOUNDS(3) + 1844: SHAREDtype 236 + 1847: SYNTHETIC + 1848: DEFDEF(18) 92 [ordinal] + 1851: PARAM(6) 59 [x$0] + 1854: TYPEREFsymbol 1839 + 1857: SHAREDtype 92 + 1859: SHAREDtype 254 + 1862: SELECT 92 [ordinal] + 1864: TERMREFdirect 1851 + 1867: SYNTHETIC + 1868: OBJECT + 1869: SYNTHETIC + 1870: ANNOTATION(14) + 1872: SHAREDtype 55 + 1874: APPLY(10) + 1876: SELECTin(6) 26 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] + 1879: NEW + 1880: SHAREDtype 55 + 1882: SHAREDtype 55 + 1884: STRINGconst 27 [] + 1886: + +Positions (535 bytes, starting from ): + lines: 10 + line sizes: + 38, 0, 98, 90, 106, 7, 17, 17, 17, 0 + positions: + 0: 337 .. 398 + 5: 337 .. 398 + 9: 347 .. 398 + 17: 342 .. 342 + 23: 347 .. 347 + 27: 347 .. 347 + 31: 347 .. 347 + 38: 347 .. 347 + 44: 347 .. 347 + 46: 347 .. 347 + 48: 347 .. 347 + 59: 337 .. 398 + 65: 337 .. 337 + 69: 337 .. 337 + 85: 342 .. 342 + 89: 342 .. 342 + 110: 342 .. 342 + 114: 342 .. 342 + 131: 342 .. 342 + 135: 342 .. 342 + 140: 337 .. 398 + 143: 347 .. 347 + 160: 337 .. 398 + 164: 347 .. 398 + 173: 347 .. 
347 + 179: 347 .. 347 + 189: 347 .. 347 + 191: 347 .. 347 + 195: 347 .. 347 + 198: 342 .. 342 + 202: 342 .. 342 + 211: 342 .. 342 + 218: 342 .. 342 + 223: 347 .. 362 + 227: 360 .. 362 + 236: 362 .. 362 + 243: 360 .. 362 + 247: 360 .. 360 + 250: 352 .. 352 + 254: 352 .. 352 + 260: 352 .. 352 + 269: 352 .. 352 + 276: 352 .. 352 + 279: 352 .. 352 + 282: 352 .. 352 + 286: 352 .. 352 + 301: 352 .. 352 + 313: 352 .. 352 + 319: 352 .. 352 + 331: 352 .. 352 + 339: 352 .. 352 + 359: 352 .. 352 + 363: 352 .. 352 + 366: 352 .. 352 + 371: 352 .. 352 + 374: 352 .. 352 + 378: 352 .. 352 + 384: 352 .. 352 + 390: 352 .. 352 + 397: 352 .. 352 + 400: 352 .. 352 + 403: 352 .. 352 + 406: 352 .. 352 + 414: 352 .. 352 + 420: 352 .. 352 + 439: 352 .. 352 + 442: 352 .. 352 + 445: 352 .. 352 + 449: 352 .. 352 + 452: 352 .. 352 + 458: 352 .. 352 + 462: 352 .. 352 + 465: 352 .. 352 + 468: 352 .. 352 + 471: 352 .. 352 + 476: 352 .. 352 + 481: 352 .. 352 + 493: 352 .. 352 + 513: 352 .. 352 + 516: 352 .. 352 + 519: 352 .. 352 + 522: 352 .. 352 + 527: 352 .. 352 + 532: 352 .. 352 + 544: 352 .. 352 + 563: 347 .. 347 + 567: 347 .. 347 + 576: 347 .. 347 + 583: 362 .. 362 + 586: 362 .. 362 + 589: 362 .. 362 + 595: 347 .. 362 + 598: 347 .. 347 + 619: 347 .. 362 + 623: 347 .. 347 + 632: 347 .. 347 + 637: 347 .. 347 + 645: 347 .. 347 + 650: 347 .. 347 + 654: 347 .. 347 + 657: 352 .. 352 + 661: 352 .. 352 + 670: 352 .. 352 + 676: 352 .. 352 + 682: 347 .. 347 + 686: 347 .. 347 + 695: 347 .. 347 + 702: 347 .. 347 + 705: 347 .. 347 + 708: 347 .. 347 + 713: 347 .. 347 + 714: 347 .. 347 + 716: 347 .. 347 + 719: 347 .. 347 + 722: 347 .. 347 + 726: 352 .. 352 + 729: 352 .. 352 + 737: 352 .. 352 + 740: 352 .. 352 + 743: 352 .. 352 + 747: 352 .. 352 + 760: 352 .. 352 + 769: 365 .. 380 + 773: 378 .. 380 + 782: 380 .. 380 + 788: 378 .. 380 + 792: 378 .. 378 + 795: 370 .. 370 + 799: 370 .. 370 + 804: 370 .. 370 + 808: 370 .. 370 + 814: 370 .. 370 + 817: 370 .. 370 + 820: 370 .. 370 + 823: 370 .. 370 + 837: 370 .. 370 + 848: 370 .. 370 + 854: 370 .. 370 + 866: 370 .. 370 + 873: 370 .. 370 + 891: 370 .. 370 + 895: 370 .. 370 + 898: 370 .. 370 + 903: 370 .. 370 + 906: 370 .. 370 + 910: 370 .. 370 + 915: 370 .. 370 + 919: 370 .. 370 + 925: 370 .. 370 + 928: 370 .. 370 + 931: 370 .. 370 + 934: 370 .. 370 + 942: 370 .. 370 + 948: 370 .. 370 + 966: 370 .. 370 + 969: 370 .. 370 + 972: 370 .. 370 + 976: 370 .. 370 + 979: 370 .. 370 + 982: 370 .. 370 + 986: 370 .. 370 + 989: 370 .. 370 + 992: 370 .. 370 + 995: 370 .. 370 + 1000: 370 .. 370 + 1005: 370 .. 370 + 1017: 370 .. 370 + 1036: 370 .. 370 + 1039: 370 .. 370 + 1042: 370 .. 370 + 1045: 370 .. 370 + 1050: 370 .. 370 + 1055: 370 .. 370 + 1067: 370 .. 370 + 1086: 365 .. 365 + 1090: 365 .. 365 + 1098: 365 .. 365 + 1103: 380 .. 380 + 1106: 380 .. 380 + 1109: 380 .. 380 + 1115: 365 .. 380 + 1118: 365 .. 365 + 1139: 365 .. 380 + 1143: 365 .. 365 + 1152: 365 .. 365 + 1157: 365 .. 365 + 1163: 365 .. 365 + 1168: 365 .. 365 + 1172: 365 .. 365 + 1175: 370 .. 370 + 1179: 370 .. 370 + 1188: 370 .. 370 + 1194: 370 .. 370 + 1200: 365 .. 365 + 1204: 365 .. 365 + 1212: 365 .. 365 + 1217: 365 .. 365 + 1220: 365 .. 365 + 1223: 365 .. 365 + 1227: 365 .. 365 + 1228: 365 .. 365 + 1230: 365 .. 365 + 1233: 365 .. 365 + 1236: 365 .. 365 + 1240: 370 .. 370 + 1243: 370 .. 370 + 1251: 370 .. 370 + 1254: 370 .. 370 + 1257: 370 .. 370 + 1260: 370 .. 370 + 1273: 370 .. 370 + 1280: 383 .. 398 + 1284: 396 .. 398 + 1293: 398 .. 398 + 1299: 396 .. 398 + 1303: 396 .. 396 + 1306: 388 .. 388 + 1310: 388 .. 388 + 1315: 388 .. 
388 + 1319: 388 .. 388 + 1325: 388 .. 388 + 1328: 388 .. 388 + 1331: 388 .. 388 + 1334: 388 .. 388 + 1348: 388 .. 388 + 1359: 388 .. 388 + 1365: 388 .. 388 + 1377: 388 .. 388 + 1384: 388 .. 388 + 1402: 388 .. 388 + 1406: 388 .. 388 + 1409: 388 .. 388 + 1414: 388 .. 388 + 1417: 388 .. 388 + 1421: 388 .. 388 + 1426: 388 .. 388 + 1430: 388 .. 388 + 1436: 388 .. 388 + 1439: 388 .. 388 + 1442: 388 .. 388 + 1445: 388 .. 388 + 1453: 388 .. 388 + 1459: 388 .. 388 + 1477: 388 .. 388 + 1480: 388 .. 388 + 1483: 388 .. 388 + 1487: 388 .. 388 + 1490: 388 .. 388 + 1493: 388 .. 388 + 1497: 388 .. 388 + 1500: 388 .. 388 + 1503: 388 .. 388 + 1506: 388 .. 388 + 1511: 388 .. 388 + 1516: 388 .. 388 + 1528: 388 .. 388 + 1547: 388 .. 388 + 1550: 388 .. 388 + 1553: 388 .. 388 + 1556: 388 .. 388 + 1561: 388 .. 388 + 1566: 388 .. 388 + 1578: 388 .. 388 + 1597: 383 .. 383 + 1601: 383 .. 383 + 1609: 383 .. 383 + 1614: 398 .. 398 + 1617: 398 .. 398 + 1620: 398 .. 398 + 1626: 383 .. 398 + 1629: 383 .. 383 + 1650: 383 .. 398 + 1654: 383 .. 383 + 1663: 383 .. 383 + 1668: 383 .. 383 + 1674: 383 .. 383 + 1679: 383 .. 383 + 1683: 383 .. 383 + 1686: 388 .. 388 + 1690: 388 .. 388 + 1699: 388 .. 388 + 1705: 388 .. 388 + 1711: 383 .. 383 + 1715: 383 .. 383 + 1723: 383 .. 383 + 1728: 383 .. 383 + 1731: 383 .. 383 + 1734: 383 .. 383 + 1738: 383 .. 383 + 1739: 383 .. 383 + 1741: 383 .. 383 + 1744: 383 .. 383 + 1747: 383 .. 383 + 1751: 388 .. 388 + 1754: 388 .. 388 + 1762: 388 .. 388 + 1765: 388 .. 388 + 1768: 388 .. 388 + 1771: 388 .. 388 + 1784: 388 .. 388 + 1791: 398 .. 398 + 1794: 398 .. 398 + 1797: 398 .. 398 + 1800: 398 .. 398 + 1810: 398 .. 398 + 1822: 398 .. 398 + 1832: 398 .. 398 + 1839: 342 .. 342 + 1842: 342 .. 342 + 1848: 342 .. 342 + 1851: 342 .. 342 + 1854: 342 .. 342 + 1859: 342 .. 342 + 1864: 342 .. 342 + 1874: 337 .. 398 + 1880: 337 .. 337 + 1884: 337 .. 337 + + source paths: + 0: 27 [] + +Attributes (2 bytes, starting from ): + SOURCEFILEattr 27 [] From 04dba38662388aa493c525b1fb4c30388865e500 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 18 Jul 2024 11:03:27 +0200 Subject: [PATCH 348/827] Widen non-trackable singleton types before computing their dcs --- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 87638b078040..7b639ee64cdf 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -1073,7 +1073,7 @@ object CaptureSet: case parent: SingletonCaptureRef if parent.isTrackableRef => tp.singletonCaptureSet case _ => - CaptureSet.ofTypeDeeply(parent) + CaptureSet.ofTypeDeeply(parent.widen) case tpd @ defn.RefinedFunctionOf(rinfo: MethodType) if followResult => ofType(tpd.parent, followResult = false) // pick up capture set from parent type ++ (recur(rinfo.resType) // add capture set of result From 9391b9a38e876d2dcb10ba99da1f22bec24c60b1 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 18 Jul 2024 12:24:14 +0200 Subject: [PATCH 349/827] Give some explanation if a capture set was under-approximated Give some explanation if an empty capture set was the result of an under-approximation. 
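To make the new addendum concrete, the snippet below condenses the `explain-under-approx.scala` test added further down in this patch; the `language.experimental.captureChecking` import is an assumption standing in for the custom compiler flags the test suite supplies, and the snippet is meant to be rejected so that the new note is printed.

```scala
import language.experimental.captureChecking

trait Async extends caps.Capability

class Future[+T](x: () => T)(using val a: Async)

// `futs` may capture arbitrary capabilities; `add` only accepts futures
// whose captured references are covered by the reach capability `futs*`.
class Collector[T](val futs: Seq[Future[T]^]):
  def add(fut: Future[T]^{futs*}) = ???

def main() =
  given async: Async = ???
  val futs = (1 to 20).map(x => Future(() => x))
  val col = Collector(futs)
  col.add(Future(() => 25)) // error: expected type ends up as Future[Int]^{}
```

On the last line, the expected capture set `{futs*}` has to be mapped from `Collector.this.futs*` to `col.futs*`, which is not a capability, so in contravariant position it is under-approximated to the empty set; the new note explains that instead of leaving the bare `Future[Int]^{}` mismatch unexplained.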
--- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 3 ++- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 15 +++++++++--- .../dotty/tools/dotc/cc/CapturingType.scala | 2 +- .../dotty/tools/dotc/cc/CheckCaptures.scala | 23 ++++++++++++++++++- .../captures/class-contra.check | 7 ++++-- .../captures/explain-under-approx.check | 20 ++++++++++++++++ .../captures/explain-under-approx.scala | 17 ++++++++++++++ 7 files changed, 79 insertions(+), 8 deletions(-) create mode 100644 tests/neg-custom-args/captures/explain-under-approx.check create mode 100644 tests/neg-custom-args/captures/explain-under-approx.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 116b35e34aea..1f19641e3b08 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -244,7 +244,8 @@ extension (tp: Type) * the two capture sets are combined. */ def capturing(cs: CaptureSet)(using Context): Type = - if cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(tp.captureSet, frozen = true).isOK + if (cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(tp.captureSet, frozen = true).isOK) + && !cs.keepAlways then tp else tp match case CapturingType(parent, cs1) => parent.capturing(cs1 ++ cs) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 7b639ee64cdf..690d9b4411aa 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -88,6 +88,8 @@ sealed abstract class CaptureSet extends Showable: final def isUnboxable(using Context) = elems.exists(elem => elem.isRootCapability || Existential.isExistentialVar(elem)) + final def keepAlways: Boolean = this.isInstanceOf[EmptyWithProvenance] + /** Try to include an element in this capture set. * @param elem The element to be added * @param origin The set that originated the request, or `empty` if the request came from outside. @@ -219,7 +221,8 @@ sealed abstract class CaptureSet extends Showable: * `this` and `that` */ def ++ (that: CaptureSet)(using Context): CaptureSet = - if this.subCaptures(that, frozen = true).isOK then that + if this.subCaptures(that, frozen = true).isOK then + if that.isAlwaysEmpty && this.keepAlways then this else that else if that.subCaptures(this, frozen = true).isOK then this else if this.isConst && that.isConst then Const(this.elems ++ that.elems) else Union(this, that) @@ -294,7 +297,7 @@ sealed abstract class CaptureSet extends Showable: case _ => val mapped = mapRefs(elems, tm, tm.variance) if isConst then - if mapped.isConst && mapped.elems == elems then this + if mapped.isConst && mapped.elems == elems && !mapped.keepAlways then this else mapped else Mapped(asVar, tm, tm.variance, mapped) @@ -398,6 +401,12 @@ object CaptureSet: override def toString = elems.toString end Const + case class EmptyWithProvenance(ref: CaptureRef, mapped: Type) extends Const(SimpleIdentitySet.empty): + override def optionalInfo(using Context): String = + if ctx.settings.YccDebug.value + then i" under-approximating the result of mapping $ref to $mapped" + else "" + /** A special capture set that gets added to the types of symbols that were not * themselves capture checked, in order to admit arbitrary corresponding capture * sets in subcapturing comparisons. 
Similar to platform types for explicit @@ -863,7 +872,7 @@ object CaptureSet: || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) || r.derivesFrom(defn.Caps_CapSet) if variance > 0 || isExact then upper - else if variance < 0 then CaptureSet.empty + else if variance < 0 then CaptureSet.EmptyWithProvenance(r, r1) else upper.maybe /** Apply `f` to each element in `xs`, and join result sets with `++` */ diff --git a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala index f859b0d110aa..bb79e52f1060 100644 --- a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala +++ b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala @@ -33,7 +33,7 @@ object CapturingType: * boxing status is the same or if A is boxed. */ def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type = - if refs.isAlwaysEmpty then parent + if refs.isAlwaysEmpty && !refs.keepAlways then parent else parent match case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed => apply(parent1, refs ++ refs1, boxed) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 667d91a10330..511c978a8e32 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -990,6 +990,25 @@ class CheckCaptures extends Recheck, SymTransformer: | |Note that ${msg.toString}""" + private def addApproxAddenda(using Context) = + new TypeAccumulator[Addenda]: + def apply(add: Addenda, t: Type) = t match + case CapturingType(t, CaptureSet.EmptyWithProvenance(ref, mapped)) => + /* val (origCore, kind) = original match + case tp @ AnnotatedType(parent, ann) if ann.hasSymbol(defn.ReachCapabilityAnnot) => + (parent, " deep") + case _ => + (original, "")*/ + add ++ new Addenda: + override def toAdd(using Context): List[String] = + i""" + | + |Note that a capability $ref in a capture set appearing in contravariant position + |was mapped to $mapped which is not a capability. Therefore, it was under-approximated to the empty set.""" + :: Nil + case _ => + foldOver(add, t) + /** Massage `actual` and `expected` types before checking conformance. * Massaging is done by the methods following this one: * - align dependent function types and add outer references in the expected type @@ -1015,7 +1034,9 @@ class CheckCaptures extends Recheck, SymTransformer: else capt.println(i"conforms failed for ${tree}: $actual vs $expected") err.typeMismatch(tree.withType(actualBoxed), expected1, - addenda ++ CaptureSet.levelErrors ++ boxErrorAddenda(boxErrors)) + addApproxAddenda( + addenda ++ CaptureSet.levelErrors ++ boxErrorAddenda(boxErrors), + expected1)) actual end checkConformsExpr diff --git a/tests/neg-custom-args/captures/class-contra.check b/tests/neg-custom-args/captures/class-contra.check index 6d4c89f872ad..9fc009ac3d48 100644 --- a/tests/neg-custom-args/captures/class-contra.check +++ b/tests/neg-custom-args/captures/class-contra.check @@ -1,7 +1,10 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/class-contra.scala:12:39 --------------------------------- 12 | def fun(x: K{val f: T^{a}}) = x.setf(a) // error | ^ - | Found: (a : T^{x, y}) - | Required: T + | Found: (a : T^{x, y}) + | Required: T^{} + | + | Note that a capability (K.this.f : T^) in a capture set appearing in contravariant position + | was mapped to (x.f : T^{a}) which is not a capability. Therefore, it was under-approximated to the empty set. 
| | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/explain-under-approx.check b/tests/neg-custom-args/captures/explain-under-approx.check new file mode 100644 index 000000000000..2d2b05b4b95a --- /dev/null +++ b/tests/neg-custom-args/captures/explain-under-approx.check @@ -0,0 +1,20 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/explain-under-approx.scala:12:10 ------------------------- +12 | col.add(Future(() => 25)) // error + | ^^^^^^^^^^^^^^^^ + | Found: Future[Int]{val a: (async : Async^)}^{async} + | Required: Future[Int]^{} + | + | Note that a capability Collector.this.futs* in a capture set appearing in contravariant position + | was mapped to col.futs* which is not a capability. Therefore, it was under-approximated to the empty set. + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/explain-under-approx.scala:15:11 ------------------------- +15 | col1.add(Future(() => 25)) // error + | ^^^^^^^^^^^^^^^^ + | Found: Future[Int]{val a: (async : Async^)}^{async} + | Required: Future[Int]^{} + | + | Note that a capability Collector.this.futs* in a capture set appearing in contravariant position + | was mapped to col1.futs* which is not a capability. Therefore, it was under-approximated to the empty set. + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/explain-under-approx.scala b/tests/neg-custom-args/captures/explain-under-approx.scala new file mode 100644 index 000000000000..816465e4af34 --- /dev/null +++ b/tests/neg-custom-args/captures/explain-under-approx.scala @@ -0,0 +1,17 @@ +trait Async extends caps.Capability + +class Future[+T](x: () => T)(using val a: Async) + +class Collector[T](val futs: Seq[Future[T]^]): + def add(fut: Future[T]^{futs*}) = ??? + +def main() = + given async: Async = ??? + val futs = (1 to 20).map(x => Future(() => x)) + val col = Collector(futs) + col.add(Future(() => 25)) // error + val col1: Collector[Int] { val futs: Seq[Future[Int]^{async}] } + = Collector(futs) + col1.add(Future(() => 25)) // error + + From 442c0e942ce905d6d4696df1ce4ac3bc3afdb944 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Thu, 18 Jul 2024 14:04:37 +0200 Subject: [PATCH 350/827] Add `LAUNCHER_SHA256` description Co-authored-by: Jamie Thompson --- pkgs/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/pkgs/README.md b/pkgs/README.md index 86b0dc6b6fe6..9369fb822da1 100644 --- a/pkgs/README.md +++ b/pkgs/README.md @@ -7,6 +7,7 @@ Official support for Chocolatey started by the release of Scala 3.6.0 > The `scala.nuspec` and `chocolateyInstall.ps1` files needs to be rewritten by changing the following placeholders: > - @LAUNCHER_VERSION@ : Placeholder for the current scala version to deploy > - @LAUNCHER_URL@ : Placeholder for the URL to the windows zip released on GitHub +> - @LAUNCHER_SHA256@ : Placeholder for the SHA256 of the msi file released on GitHub ## Important information From fac643a3ffd062c9d76593c0bba260f99f897a50 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 18 Jul 2024 14:14:57 +0200 Subject: [PATCH 351/827] Split postcheck phase in two Perform bounds checking before checking self types. Checking self types interpolates them, which may give an upper approximation solution that fails subsequent bounds checks.
On the other hand, well-formedness checking should come after self types checking since otherwise we get spurious "has empty capture set, cannot be tracked" messages. --- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 4 +++- .../dotty/tools/dotc/cc/CheckCaptures.scala | 18 ++++++++++----- .../captures/checkbounds.scala | 22 +++++++++++++++++++ 3 files changed, 38 insertions(+), 6 deletions(-) create mode 100644 tests/pos-custom-args/captures/checkbounds.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 690d9b4411aa..1d09b9dc5f20 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -594,7 +594,9 @@ object CaptureSet: override def optionalInfo(using Context): String = for vars <- ctx.property(ShownVars) do vars += this val debugInfo = - if !isConst && ctx.settings.YccDebug.value then ids else "" + if !ctx.settings.YccDebug.value then "" + else if isConst then ids ++ "(solved)" + else ids val limitInfo = if ctx.settings.YprintLevel.value && level.isDefined then i"" diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 511c978a8e32..9f6cb278f012 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1363,8 +1363,9 @@ class CheckCaptures extends Recheck, SymTransformer: withCaptureSetsExplained: super.checkUnit(unit) checkOverrides.traverse(unit.tpdTree) - checkSelfTypes(unit.tpdTree) postCheck(unit.tpdTree) + checkSelfTypes(unit.tpdTree) + postCheckWF(unit.tpdTree) if ctx.settings.YccDebug.value then show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing @@ -1514,7 +1515,6 @@ class CheckCaptures extends Recheck, SymTransformer: check.traverse(tp) /** Perform the following kinds of checks - * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. */ @@ -1542,10 +1542,8 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => end check end checker - checker.traverse(unit)(using ctx.withOwner(defn.RootClass)) - for chk <- todoAtPostCheck do chk() - setup.postCheck() + checker.traverse(unit)(using ctx.withOwner(defn.RootClass)) if !ctx.reporter.errorsReported then // We dont report errors here if previous errors were reported, because other // errors often result in bad applied types, but flagging these bad types gives @@ -1557,5 +1555,15 @@ class CheckCaptures extends Recheck, SymTransformer: case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) case _ => traverseChildren(t) checkApplied.traverse(unit) + end postCheck + + /** Perform the following kinds of checks: + * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. + * - Check that publicly visible inferred types correspond to the type + * they have without capture checking.
+ */ + def postCheckWF(unit: tpd.Tree)(using Context): Unit = + for chk <- todoAtPostCheck do chk() + setup.postCheck() end CaptureChecker end CheckCaptures diff --git a/tests/pos-custom-args/captures/checkbounds.scala b/tests/pos-custom-args/captures/checkbounds.scala new file mode 100644 index 000000000000..f9cd76ce8b1a --- /dev/null +++ b/tests/pos-custom-args/captures/checkbounds.scala @@ -0,0 +1,22 @@ +trait Dsl: + + sealed trait Nat + case object Zero extends Nat + case class Succ[N <: Nat](n: N) extends Nat + + type Stable[+l <: Nat, +b <: Nat, +A] + type Now[+l <: Nat, +b <: Nat, +A] + type Box[+A] + def stable[l <: Nat, b <: Nat, A](e: Stable[l, b, A]): Now[l, b, Box[A]] + + def program[A](prog: Now[Zero.type, Zero.type, A]): Now[Zero.type, Zero.type, A] + + //val conforms: Zero.type <:< Nat = summon + // ^ need to uncomment this line to compile with captureChecking enabled + + def test: Any = + program[Box[Int]]: + val v : Stable[Zero.type, Zero.type, Int] = ??? + stable[Zero.type, Zero.type, Int](v) +// ^ +// Type argument Dsl.this.Zero.type does not conform to upper bound Dsl.this.Nat \ No newline at end of file From 5d5af45eb524f208e12ac84417d73611cf749ed2 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Thu, 18 Jul 2024 15:15:40 +0200 Subject: [PATCH 352/827] Add SECURITY.md [skip ci] --- SECURITY.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000000..ddc7be95bf71 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,5 @@ +# Security Policy + +The details about the security policy of the Scala Programming Language organisation can be found in [https://scala-lang.org/security](https://scala-lang.org/security). + +For any additional information related to our security policy, please contact [security@scala-lang.org](mailto:security@scala-lang.org). 
From 87ce8d4f72c1587d9a66ac9a35b5c178a131ffb8 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Thu, 18 Jul 2024 15:28:02 +0200 Subject: [PATCH 353/827] Add the merge commit hash to the icon's url --- pkgs/chocolatey/scala.nuspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/chocolatey/scala.nuspec b/pkgs/chocolatey/scala.nuspec index 2fff36a83d5b..bb2e0e07ce70 100644 --- a/pkgs/chocolatey/scala.nuspec +++ b/pkgs/chocolatey/scala.nuspec @@ -14,7 +14,7 @@ https://scala-lang.org/ https://github.com/scala/scala3/issues © 2002-2024, LAMP/EPFL - https://cdn.jsdelivr.net/gh/scala/scala3@version/pkgs/chocolatey/icon.svg + https://cdn.jsdelivr.net/gh/scala/scala3@a046b0014ffd9536144d67a48f8759901b96d12f/pkgs/chocolatey/icon.svg https://github.com/scala/scala3/blob/main/LICENSE true https://github.com/scala/scala3/releases From ba0d8cbdd35762e11289b6ec3ae10ab379d3e4fd Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 18 Jul 2024 16:11:51 +0200 Subject: [PATCH 354/827] Optimize caching for computing atoms and widened in OrTypes --- .../src/dotty/tools/dotc/core/Types.scala | 11 +- tests/pos/i20521.scala | 800 ++++++++++++++++++ 2 files changed, 805 insertions(+), 6 deletions(-) create mode 100644 tests/pos/i20521.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 9aca8a9b4b60..c3a81bf71571 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3701,6 +3701,7 @@ object Types extends TypeUtils { myUnion private var atomsRunId: RunId = NoRunId + private var widenedRunId: RunId = NoRunId private var myAtoms: Atoms = uninitialized private var myWidened: Type = uninitialized @@ -3716,20 +3717,18 @@ object Types extends TypeUtils { val tp2w = tp2.widenSingletons() if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) - private def ensureAtomsComputed()(using Context): Unit = + override def atoms(using Context): Atoms = if atomsRunId != ctx.runId then myAtoms = computeAtoms() - myWidened = computeWidenSingletons() if !isProvisional then atomsRunId = ctx.runId - - override def atoms(using Context): Atoms = - ensureAtomsComputed() myAtoms override def widenSingletons(skipSoftUnions: Boolean)(using Context): Type = if isSoft && skipSoftUnions then this else - ensureAtomsComputed() + if widenedRunId != ctx.runId then + myWidened = computeWidenSingletons() + if !isProvisional then widenedRunId = ctx.runId myWidened def derivedOrType(tp1: Type, tp2: Type, soft: Boolean = isSoft)(using Context): Type = diff --git a/tests/pos/i20521.scala b/tests/pos/i20521.scala new file mode 100644 index 000000000000..8fe79255572f --- /dev/null +++ b/tests/pos/i20521.scala @@ -0,0 +1,800 @@ +object LFSR { + lazy val tapsMaxPeriod: Map[Int, Seq[Set[Int]]] = tapsFirst ++ tapsSecond + + /** First portion of known taps (a combined map hits the 64KB JVM method limit) */ + private def tapsFirst = Map( + 2 -> Seq(Set(2, 1)), + 3 -> Seq(Set(3, 2)), + 4 -> Seq(Set(4, 3)), + 5 -> Seq(Set(5, 3), Set(5, 4, 3, 2)), + 6 -> Seq(Set(6, 5), Set(6, 5, 3, 2)), + 7 -> Seq(Set(7, 6), Set(7, 6, 5, 4)), + 8 -> Seq(Set(8, 6, 5, 4)), + 9 -> Seq(Set(9, 5), Set(9, 8, 6, 5)), + 10 -> Seq(Set(10, 7), Set(10, 9, 7, 6)), + 11 -> Seq(Set(11, 9), Set(11, 10, 9, 7)), + 12 -> Seq(Set(12, 11, 8, 6)), + 13 -> Seq(Set(13, 12, 10, 9)), + 14 -> Seq(Set(14, 13, 11, 9)), + 15 -> Seq(Set(15, 14), Set(15, 14, 13, 11)), + 16 -> Seq(Set(16, 14, 13, 11)), + 17 -> Seq(Set(17, 14), 
Set(17, 16, 15, 14)), + 18 -> Seq(Set(18, 11), Set(18, 17, 16, 13)), + 19 -> Seq(Set(19, 18, 17, 14)), + 20 -> Seq(Set(20, 17), Set(20, 19, 16, 14)), + 21 -> Seq(Set(21, 19), Set(21, 20, 19, 16)), + 22 -> Seq(Set(22, 21), Set(22, 19, 18, 17)), + 23 -> Seq(Set(23, 18), Set(23, 22, 20, 18)), + 24 -> Seq(Set(24, 23, 21, 20)), + 25 -> Seq(Set(25, 22), Set(25, 24, 23, 22)), + 26 -> Seq(Set(26, 25, 24, 20)), + 27 -> Seq(Set(27, 26, 25, 22)), + 28 -> Seq(Set(28, 25), Set(28, 27, 24, 22)), + 29 -> Seq(Set(29, 27), Set(29, 28, 27, 25)), + 30 -> Seq(Set(30, 29, 26, 24)), + 31 -> Seq(Set(31, 28), Set(31, 30, 29, 28)), + 32 -> Seq(Set(32, 30, 26, 25)), + 33 -> Seq(Set(33, 20), Set(33, 32, 29, 27)), + 34 -> Seq(Set(34, 31, 30, 26)), + 35 -> Seq(Set(35, 33), Set(35, 34, 28, 27)), + 36 -> Seq(Set(36, 25), Set(36, 35, 29, 28)), + 37 -> Seq(Set(37, 36, 33, 31)), + 38 -> Seq(Set(38, 37, 33, 32)), + 39 -> Seq(Set(39, 35), Set(39, 38, 35, 32)), + 40 -> Seq(Set(40, 37, 36, 35)), + 41 -> Seq(Set(41, 38), Set(41, 40, 39, 38)), + 42 -> Seq(Set(42, 40, 37, 35)), + 43 -> Seq(Set(43, 42, 38, 37)), + 44 -> Seq(Set(44, 42, 39, 38)), + 45 -> Seq(Set(45, 44, 42, 41)), + 46 -> Seq(Set(46, 40, 39, 38)), + 47 -> Seq(Set(47, 42), Set(47, 46, 43, 42)), + 48 -> Seq(Set(48, 44, 41, 39)), + 49 -> Seq(Set(49, 40), Set(49, 45, 44, 43)), + 50 -> Seq(Set(50, 48, 47, 46)), + 51 -> Seq(Set(51, 50, 48, 45)), + 52 -> Seq(Set(52, 49), Set(52, 51, 49, 46)), + 53 -> Seq(Set(53, 52, 51, 47)), + 54 -> Seq(Set(54, 51, 48, 46)), + 55 -> Seq(Set(55, 31), Set(55, 54, 53, 49)), + 56 -> Seq(Set(56, 54, 52, 49)), + 57 -> Seq(Set(57, 50), Set(57, 55, 54, 52)), + 58 -> Seq(Set(58, 39), Set(58, 57, 53, 52)), + 59 -> Seq(Set(59, 57, 55, 52)), + 60 -> Seq(Set(60, 59), Set(60, 58, 56, 55)), + 61 -> Seq(Set(61, 60, 59, 56)), + 62 -> Seq(Set(62, 59, 57, 56)), + 63 -> Seq(Set(63, 62), Set(63, 62, 59, 58)), + 64 -> Seq(Set(64, 63, 61, 60)), + 65 -> Seq(Set(65, 47), Set(65, 64, 62, 61)), + 66 -> Seq(Set(66, 60, 58, 57)), + 67 -> Seq(Set(67, 66, 65, 62)), + 68 -> Seq(Set(68, 59), Set(68, 67, 63, 61)), + 69 -> Seq(Set(69, 67, 64, 63)), + 70 -> Seq(Set(70, 69, 67, 65)), + 71 -> Seq(Set(71, 65), Set(71, 70, 68, 66)), + 72 -> Seq(Set(72, 69, 63, 62)), + 73 -> Seq(Set(73, 48), Set(73, 71, 70, 69)), + 74 -> Seq(Set(74, 71, 70, 67)), + 75 -> Seq(Set(75, 74, 72, 69)), + 76 -> Seq(Set(76, 74, 72, 71)), + 77 -> Seq(Set(77, 75, 72, 71)), + 78 -> Seq(Set(78, 77, 76, 71)), + 79 -> Seq(Set(79, 70), Set(79, 77, 76, 75)), + 80 -> Seq(Set(80, 78, 76, 71)), + 81 -> Seq(Set(81, 77), Set(81, 79, 78, 75)), + 82 -> Seq(Set(82, 78, 76, 73)), + 83 -> Seq(Set(83, 81, 79, 76)), + 84 -> Seq(Set(84, 71), Set(84, 83, 77, 75)), + 85 -> Seq(Set(85, 84, 83, 77)), + 86 -> Seq(Set(86, 84, 81, 80)), + 87 -> Seq(Set(87, 74), Set(87, 86, 82, 80)), + 88 -> Seq(Set(88, 80, 79, 77)), + 89 -> Seq(Set(89, 51), Set(89, 86, 84, 83)), + 90 -> Seq(Set(90, 88, 87, 85)), + 91 -> Seq(Set(91, 90, 86, 83)), + 92 -> Seq(Set(92, 90, 87, 86)), + 93 -> Seq(Set(93, 91), Set(93, 91, 90, 87)), + 94 -> Seq(Set(94, 73), Set(94, 93, 89, 88)), + 95 -> Seq(Set(95, 84), Set(95, 94, 90, 88)), + 96 -> Seq(Set(96, 90, 87, 86)), + 97 -> Seq(Set(97, 91), Set(97, 95, 93, 91)), + 98 -> Seq(Set(98, 87), Set(98, 97, 91, 90)), + 99 -> Seq(Set(99, 95, 94, 92)), + 100 -> Seq(Set(100, 63), Set(100, 98, 93, 92)), + 101 -> Seq(Set(101, 100, 95, 94)), + 102 -> Seq(Set(102, 99, 97, 96)), + 103 -> Seq(Set(103, 94), Set(103, 102, 99, 94)), + 104 -> Seq(Set(104, 103, 94, 93)), + 105 -> Seq(Set(105, 89), Set(105, 104, 99, 98)), + 106 -> 
Seq(Set(106, 91), Set(106, 105, 101, 100)), + 107 -> Seq(Set(107, 105, 99, 98)), + 108 -> Seq(Set(108, 77), Set(108, 103, 97, 96)), + 109 -> Seq(Set(109, 107, 105, 104)), + 110 -> Seq(Set(110, 109, 106, 104)), + 111 -> Seq(Set(111, 101), Set(111, 109, 107, 104)), + 112 -> Seq(Set(112, 108, 106, 101)), + 113 -> Seq(Set(113, 104), Set(113, 111, 110, 108)), + 114 -> Seq(Set(114, 113, 112, 103)), + 115 -> Seq(Set(115, 110, 108, 107)), + 116 -> Seq(Set(116, 114, 111, 110)), + 117 -> Seq(Set(117, 116, 115, 112)), + 118 -> Seq(Set(118, 85), Set(118, 116, 113, 112)), + 119 -> Seq(Set(119, 111), Set(119, 116, 111, 110)), + 120 -> Seq(Set(120, 118, 114, 111)), + 121 -> Seq(Set(121, 103), Set(121, 120, 116, 113)), + 122 -> Seq(Set(122, 121, 120, 116)), + 123 -> Seq(Set(123, 121), Set(123, 122, 119, 115)), + 124 -> Seq(Set(124, 87), Set(124, 119, 118, 117)), + 125 -> Seq(Set(125, 120, 119, 118)), + 126 -> Seq(Set(126, 124, 122, 119)), + 127 -> Seq(Set(127, 126), Set(127, 126, 124, 120)), + 128 -> Seq(Set(128, 127, 126, 121)), + 129 -> Seq(Set(129, 124), Set(129, 128, 125, 124)), + 130 -> Seq(Set(130, 127), Set(130, 129, 128, 125)), + 131 -> Seq(Set(131, 129, 128, 123)), + 132 -> Seq(Set(132, 103), Set(132, 130, 127, 123)), + 133 -> Seq(Set(133, 131, 125, 124)), + 134 -> Seq(Set(134, 77), Set(134, 133, 129, 127)), + 135 -> Seq(Set(135, 124), Set(135, 132, 131, 129)), + 136 -> Seq(Set(136, 134, 133, 128)), + 137 -> Seq(Set(137, 116), Set(137, 136, 133, 126)), + 138 -> Seq(Set(138, 137, 131, 130)), + 139 -> Seq(Set(139, 136, 134, 131)), + 140 -> Seq(Set(140, 111), Set(140, 139, 136, 132)), + 141 -> Seq(Set(141, 140, 135, 128)), + 142 -> Seq(Set(142, 121), Set(142, 141, 139, 132)), + 143 -> Seq(Set(143, 141, 140, 138)), + 144 -> Seq(Set(144, 142, 140, 137)), + 145 -> Seq(Set(145, 93), Set(145, 144, 140, 139)), + 146 -> Seq(Set(146, 144, 143, 141)), + 147 -> Seq(Set(147, 145, 143, 136)), + 148 -> Seq(Set(148, 121), Set(148, 145, 143, 141)), + 149 -> Seq(Set(149, 142, 140, 139)), + 150 -> Seq(Set(150, 97), Set(150, 148, 147, 142)), + 151 -> Seq(Set(151, 148), Set(151, 150, 149, 148)), + 152 -> Seq(Set(152, 150, 149, 146)), + 153 -> Seq(Set(153, 152), Set(153, 149, 148, 145)), + 154 -> Seq(Set(154, 153, 149, 145)), + 155 -> Seq(Set(155, 151, 150, 148)), + 156 -> Seq(Set(156, 153, 151, 147)), + 157 -> Seq(Set(157, 155, 152, 151)), + 158 -> Seq(Set(158, 153, 152, 150)), + 159 -> Seq(Set(159, 128), Set(159, 156, 153, 148)), + 160 -> Seq(Set(160, 158, 157, 155)), + 161 -> Seq(Set(161, 143), Set(161, 159, 158, 155)), + 162 -> Seq(Set(162, 158, 155, 154)), + 163 -> Seq(Set(163, 160, 157, 156)), + 164 -> Seq(Set(164, 159, 158, 152)), + 165 -> Seq(Set(165, 162, 157, 156)), + 166 -> Seq(Set(166, 164, 163, 156)), + 167 -> Seq(Set(167, 161), Set(167, 165, 163, 161)), + 168 -> Seq(Set(168, 162, 159, 152)), + 169 -> Seq(Set(169, 135), Set(169, 164, 163, 161)), + 170 -> Seq(Set(170, 147), Set(170, 169, 166, 161)), + 171 -> Seq(Set(171, 169, 166, 165)), + 172 -> Seq(Set(172, 165), Set(172, 169, 165, 161)), + 173 -> Seq(Set(173, 171, 168, 165)), + 174 -> Seq(Set(174, 161), Set(174, 169, 166, 165)), + 175 -> Seq(Set(175, 169), Set(175, 173, 171, 169)), + 176 -> Seq(Set(176, 167, 165, 164)), + 177 -> Seq(Set(177, 169), Set(177, 175, 174, 172)), + 178 -> Seq(Set(178, 91), Set(178, 176, 171, 170)), + 179 -> Seq(Set(179, 178, 177, 175)), + 180 -> Seq(Set(180, 173, 170, 168)), + 181 -> Seq(Set(181, 180, 175, 174)), + 182 -> Seq(Set(182, 181, 176, 174)), + 183 -> Seq(Set(183, 127), Set(183, 179, 176, 175)), + 184 -> Seq(Set(184, 
177, 176, 175)), + 185 -> Seq(Set(185, 161), Set(185, 184, 182, 177)), + 186 -> Seq(Set(186, 180, 178, 177)), + 187 -> Seq(Set(187, 182, 181, 180)), + 188 -> Seq(Set(188, 186, 183, 182)), + 189 -> Seq(Set(189, 187, 184, 183)), + 190 -> Seq(Set(190, 188, 184, 177)), + 191 -> Seq(Set(191, 182), Set(191, 187, 185, 184)), + 192 -> Seq(Set(192, 190, 178, 177)), + 193 -> Seq(Set(193, 178), Set(193, 189, 186, 184)), + 194 -> Seq(Set(194, 107), Set(194, 192, 191, 190)), + 195 -> Seq(Set(195, 193, 192, 187)), + 196 -> Seq(Set(196, 194, 187, 185)), + 197 -> Seq(Set(197, 195, 193, 188)), + 198 -> Seq(Set(198, 133), Set(198, 193, 190, 183)), + 199 -> Seq(Set(199, 165), Set(199, 198, 195, 190)), + 200 -> Seq(Set(200, 198, 197, 195)), + 201 -> Seq(Set(201, 187), Set(201, 199, 198, 195)), + 202 -> Seq(Set(202, 147), Set(202, 198, 196, 195)), + 203 -> Seq(Set(203, 202, 196, 195)), + 204 -> Seq(Set(204, 201, 200, 194)), + 205 -> Seq(Set(205, 203, 200, 196)), + 206 -> Seq(Set(206, 201, 197, 196)), + 207 -> Seq(Set(207, 164), Set(207, 206, 201, 198)), + 208 -> Seq(Set(208, 207, 205, 199)), + 209 -> Seq(Set(209, 203), Set(209, 207, 206, 204)), + 210 -> Seq(Set(210, 207, 206, 198)), + 211 -> Seq(Set(211, 203, 201, 200)), + 212 -> Seq(Set(212, 107), Set(212, 209, 208, 205)), + 213 -> Seq(Set(213, 211, 208, 207)), + 214 -> Seq(Set(214, 213, 211, 209)), + 215 -> Seq(Set(215, 192), Set(215, 212, 210, 209)), + 216 -> Seq(Set(216, 215, 213, 209)), + 217 -> Seq(Set(217, 172), Set(217, 213, 212, 211)), + 218 -> Seq(Set(218, 207), Set(218, 217, 211, 210)), + 219 -> Seq(Set(219, 218, 215, 211)), + 220 -> Seq(Set(220, 211, 210, 208)), + 221 -> Seq(Set(221, 219, 215, 213)), + 222 -> Seq(Set(222, 220, 217, 214)), + 223 -> Seq(Set(223, 190), Set(223, 221, 219, 218)), + 224 -> Seq(Set(224, 222, 217, 212)), + 225 -> Seq(Set(225, 193), Set(225, 224, 220, 215)), + 226 -> Seq(Set(226, 223, 219, 216)), + 227 -> Seq(Set(227, 223, 218, 217)), + 228 -> Seq(Set(228, 226, 217, 216)), + 229 -> Seq(Set(229, 228, 225, 219)), + 230 -> Seq(Set(230, 224, 223, 222)), + 231 -> Seq(Set(231, 205), Set(231, 229, 227, 224)), + 232 -> Seq(Set(232, 228, 223, 221)), + 233 -> Seq(Set(233, 159), Set(233, 232, 229, 224)), + 234 -> Seq(Set(234, 203), Set(234, 232, 225, 223)), + 235 -> Seq(Set(235, 234, 229, 226)), + 236 -> Seq(Set(236, 231), Set(236, 229, 228, 226)), + 237 -> Seq(Set(237, 236, 233, 230)), + 238 -> Seq(Set(238, 237, 236, 233)), + 239 -> Seq(Set(239, 203), Set(239, 238, 232, 227)), + 240 -> Seq(Set(240, 237, 235, 232)), + 241 -> Seq(Set(241, 171), Set(241, 237, 233, 232)), + 242 -> Seq(Set(242, 241, 236, 231)), + 243 -> Seq(Set(243, 242, 238, 235)), + 244 -> Seq(Set(244, 243, 240, 235)), + 245 -> Seq(Set(245, 244, 241, 239)), + 246 -> Seq(Set(246, 245, 244, 235)), + 247 -> Seq(Set(247, 165), Set(247, 245, 243, 238)), + 248 -> Seq(Set(248, 238, 234, 233)), + 249 -> Seq(Set(249, 163), Set(249, 248, 245, 242)), + 250 -> Seq(Set(250, 147), Set(250, 247, 245, 240)), + 251 -> Seq(Set(251, 249, 247, 244)), + 252 -> Seq(Set(252, 185), Set(252, 251, 247, 241)), + 253 -> Seq(Set(253, 252, 247, 246)), + 254 -> Seq(Set(254, 253, 252, 247)), + 255 -> Seq(Set(255, 203), Set(255, 253, 252, 250)), + 256 -> Seq(Set(256, 254, 251, 246)), + 257 -> Seq(Set(257, 245), Set(257, 255, 251, 250)), + 258 -> Seq(Set(258, 175), Set(258, 254, 252, 249)), + 259 -> Seq(Set(259, 257, 253, 249)), + 260 -> Seq(Set(260, 253, 252, 250)), + 261 -> Seq(Set(261, 257, 255, 254)), + 262 -> Seq(Set(262, 258, 254, 253)), + 263 -> Seq(Set(263, 170), Set(263, 261, 258, 252)), + 264 
-> Seq(Set(264, 263, 255, 254)), + 265 -> Seq(Set(265, 223), Set(265, 263, 262, 260)), + 266 -> Seq(Set(266, 219), Set(266, 265, 260, 259)), + 267 -> Seq(Set(267, 264, 261, 259)), + 268 -> Seq(Set(268, 243), Set(268, 267, 264, 258)), + 269 -> Seq(Set(269, 268, 263, 262)), + 270 -> Seq(Set(270, 217), Set(270, 267, 263, 260)), + 271 -> Seq(Set(271, 213), Set(271, 265, 264, 260)), + 272 -> Seq(Set(272, 270, 266, 263)), + 273 -> Seq(Set(273, 250), Set(273, 272, 271, 266)), + 274 -> Seq(Set(274, 207), Set(274, 272, 267, 265)), + 275 -> Seq(Set(275, 266, 265, 264)), + 276 -> Seq(Set(276, 275, 273, 270)), + 277 -> Seq(Set(277, 274, 271, 265)), + 278 -> Seq(Set(278, 273), Set(278, 277, 274, 273)), + 279 -> Seq(Set(279, 274), Set(279, 278, 275, 274)), + 280 -> Seq(Set(280, 278, 275, 271)), + 281 -> Seq(Set(281, 188), Set(281, 280, 277, 272)), + 282 -> Seq(Set(282, 247), Set(282, 278, 277, 272)), + 283 -> Seq(Set(283, 278, 276, 271)), + 284 -> Seq(Set(284, 165), Set(284, 279, 278, 276)), + 285 -> Seq(Set(285, 280, 278, 275)), + 286 -> Seq(Set(286, 217), Set(286, 285, 276, 271)), + 287 -> Seq(Set(287, 216), Set(287, 285, 282, 281)), + 288 -> Seq(Set(288, 287, 278, 277)), + 289 -> Seq(Set(289, 268), Set(289, 286, 285, 277)), + 290 -> Seq(Set(290, 288, 287, 285)), + 291 -> Seq(Set(291, 286, 280, 279)), + 292 -> Seq(Set(292, 195), Set(292, 291, 289, 285)), + 293 -> Seq(Set(293, 292, 287, 282)), + 294 -> Seq(Set(294, 233), Set(294, 292, 291, 285)), + 295 -> Seq(Set(295, 247), Set(295, 293, 291, 290)), + 296 -> Seq(Set(296, 292, 287, 285)), + 297 -> Seq(Set(297, 292), Set(297, 296, 293, 292)), + 298 -> Seq(Set(298, 294, 290, 287)), + 299 -> Seq(Set(299, 295, 293, 288)), + 300 -> Seq(Set(300, 293), Set(300, 290, 288, 287)), + 301 -> Seq(Set(301, 299, 296, 292)), + 302 -> Seq(Set(302, 261), Set(302, 297, 293, 290)), + 303 -> Seq(Set(303, 297, 291, 290)), + 304 -> Seq(Set(304, 303, 302, 293)), + 305 -> Seq(Set(305, 203), Set(305, 303, 299, 298)), + 306 -> Seq(Set(306, 305, 303, 299)), + 307 -> Seq(Set(307, 305, 303, 299)), + 308 -> Seq(Set(308, 306, 299, 293)), + 309 -> Seq(Set(309, 307, 302, 299)), + 310 -> Seq(Set(310, 309, 305, 302)), + 311 -> Seq(Set(311, 308, 306, 304)), + 312 -> Seq(Set(312, 307, 302, 301)), + 313 -> Seq(Set(313, 234), Set(313, 312, 310, 306)), + 314 -> Seq(Set(314, 299), Set(314, 311, 305, 300)), + 315 -> Seq(Set(315, 314, 306, 305)), + 316 -> Seq(Set(316, 181), Set(316, 309, 305, 304)), + 317 -> Seq(Set(317, 315, 313, 310)), + 318 -> Seq(Set(318, 313, 312, 310)), + 319 -> Seq(Set(319, 283), Set(319, 318, 317, 308)), + 320 -> Seq(Set(320, 319, 317, 316)), + 321 -> Seq(Set(321, 290), Set(321, 319, 316, 314)), + 322 -> Seq(Set(322, 255), Set(322, 321, 320, 305)), + 323 -> Seq(Set(323, 322, 320, 313)), + 324 -> Seq(Set(324, 321, 320, 318)), + 325 -> Seq(Set(325, 323, 320, 315)), + 326 -> Seq(Set(326, 325, 323, 316)), + 327 -> Seq(Set(327, 293), Set(327, 325, 322, 319)), + 328 -> Seq(Set(328, 323, 321, 319)), + 329 -> Seq(Set(329, 279), Set(329, 326, 323, 321)), + 330 -> Seq(Set(330, 328, 323, 322)), + 331 -> Seq(Set(331, 329, 325, 321)), + 332 -> Seq(Set(332, 209), Set(332, 325, 321, 320)), + 333 -> Seq(Set(333, 331), Set(333, 331, 329, 325)), + 334 -> Seq(Set(334, 333, 330, 327)), + 335 -> Seq(Set(335, 333, 328, 325)), + 336 -> Seq(Set(336, 335, 332, 329)), + 337 -> Seq(Set(337, 282), Set(337, 336, 331, 327)), + 338 -> Seq(Set(338, 336, 335, 332)), + 339 -> Seq(Set(339, 332, 329, 323)), + 340 -> Seq(Set(340, 337, 336, 329)), + 341 -> Seq(Set(341, 336, 330, 327)), + 342 -> Seq(Set(342, 
217), Set(342, 341, 340, 331)), + 343 -> Seq(Set(343, 268), Set(343, 338, 335, 333)), + 344 -> Seq(Set(344, 338, 334, 333)), + 345 -> Seq(Set(345, 323), Set(345, 343, 341, 337)), + 346 -> Seq(Set(346, 344, 339, 335)), + 347 -> Seq(Set(347, 344, 337, 336)), + 348 -> Seq(Set(348, 344, 341, 340)), + 349 -> Seq(Set(349, 347, 344, 343)), + 350 -> Seq(Set(350, 297), Set(350, 340, 337, 336)), + 351 -> Seq(Set(351, 317), Set(351, 348, 345, 343)), + 352 -> Seq(Set(352, 346, 341, 339)), + 353 -> Seq(Set(353, 284), Set(353, 349, 346, 344)), + 354 -> Seq(Set(354, 349, 341, 340)), + 355 -> Seq(Set(355, 354, 350, 349)), + 356 -> Seq(Set(356, 349, 347, 346)), + 357 -> Seq(Set(357, 355, 347, 346)), + 358 -> Seq(Set(358, 351, 350, 344)), + 359 -> Seq(Set(359, 291), Set(359, 358, 352, 350)), + 360 -> Seq(Set(360, 359, 335, 334)), + 361 -> Seq(Set(361, 360, 357, 354)), + 362 -> Seq(Set(362, 299), Set(362, 360, 351, 344)), + 363 -> Seq(Set(363, 362, 356, 355)), + 364 -> Seq(Set(364, 297), Set(364, 363, 359, 352)), + 365 -> Seq(Set(365, 360, 359, 356)), + 366 -> Seq(Set(366, 337), Set(366, 362, 359, 352)), + 367 -> Seq(Set(367, 346), Set(367, 365, 363, 358)), + 368 -> Seq(Set(368, 361, 359, 351)), + 369 -> Seq(Set(369, 278), Set(369, 367, 359, 358)), + 370 -> Seq(Set(370, 231), Set(370, 368, 367, 365)), + 371 -> Seq(Set(371, 369, 368, 363)), + 372 -> Seq(Set(372, 369, 365, 357)), + 373 -> Seq(Set(373, 371, 366, 365)), + 374 -> Seq(Set(374, 369, 368, 366)), + 375 -> Seq(Set(375, 359), Set(375, 374, 368, 367)), + 376 -> Seq(Set(376, 371, 369, 368)), + 377 -> Seq(Set(377, 336), Set(377, 376, 374, 369)), + 378 -> Seq(Set(378, 335), Set(378, 374, 365, 363)), + 379 -> Seq(Set(379, 375, 370, 369)), + 380 -> Seq(Set(380, 333), Set(380, 377, 374, 366)), + 381 -> Seq(Set(381, 380, 379, 376)), + 382 -> Seq(Set(382, 301), Set(382, 379, 375, 364)), + 383 -> Seq(Set(383, 293), Set(383, 382, 378, 374)), + 384 -> Seq(Set(384, 378, 369, 368)), + 385 -> Seq(Set(385, 379), Set(385, 383, 381, 379)), + 386 -> Seq(Set(386, 303), Set(386, 381, 380, 376)), + 387 -> Seq(Set(387, 385, 379, 378)), + 388 -> Seq(Set(388, 387, 385, 374)), + 389 -> Seq(Set(389, 384, 380, 379)), + 390 -> Seq(Set(390, 301), Set(390, 388, 380, 377)), + 391 -> Seq(Set(391, 363), Set(391, 390, 389, 385)), + 392 -> Seq(Set(392, 386, 382, 379)), + 393 -> Seq(Set(393, 386), Set(393, 392, 391, 386)), + 394 -> Seq(Set(394, 259), Set(394, 392, 387, 386)) + ) + + /** Second portion of known taps (a combined map hits the 64KB JVM method limit) */ + private def tapsSecond = Map( + 395 -> Seq(Set(395, 390, 389, 384)), + 396 -> Seq(Set(396, 371), Set(396, 392, 390, 389)), + 397 -> Seq(Set(397, 392, 387, 385)), + 398 -> Seq(Set(398, 393, 392, 384)), + 399 -> Seq(Set(399, 313), Set(399, 397, 390, 388)), + 400 -> Seq(Set(400, 398, 397, 395)), + 401 -> Seq(Set(401, 249), Set(401, 399, 392, 389)), + 402 -> Seq(Set(402, 399, 398, 393)), + 403 -> Seq(Set(403, 398, 395, 394)), + 404 -> Seq(Set(404, 215), Set(404, 400, 398, 397)), + 405 -> Seq(Set(405, 398, 397, 388)), + 406 -> Seq(Set(406, 249), Set(406, 402, 397, 393)), + 407 -> Seq(Set(407, 336), Set(407, 402, 400, 398)), + 408 -> Seq(Set(408, 407, 403, 401)), + 409 -> Seq(Set(409, 322), Set(409, 406, 404, 402)), + 410 -> Seq(Set(410, 407, 406, 400)), + 411 -> Seq(Set(411, 408, 401, 399)), + 412 -> Seq(Set(412, 265), Set(412, 409, 404, 401)), + 413 -> Seq(Set(413, 407, 406, 403)), + 414 -> Seq(Set(414, 405, 401, 398)), + 415 -> Seq(Set(415, 313), Set(415, 413, 411, 406)), + 416 -> Seq(Set(416, 414, 411, 407)), + 417 -> 
Seq(Set(417, 310), Set(417, 416, 414, 407)), + 418 -> Seq(Set(418, 417, 415, 403)), + 419 -> Seq(Set(419, 415, 414, 404)), + 420 -> Seq(Set(420, 412, 410, 407)), + 421 -> Seq(Set(421, 419, 417, 416)), + 422 -> Seq(Set(422, 273), Set(422, 421, 416, 412)), + 423 -> Seq(Set(423, 398), Set(423, 420, 418, 414)), + 424 -> Seq(Set(424, 422, 417, 415)), + 425 -> Seq(Set(425, 413), Set(425, 422, 421, 418)), + 426 -> Seq(Set(426, 415, 414, 412)), + 427 -> Seq(Set(427, 422, 421, 416)), + 428 -> Seq(Set(428, 323), Set(428, 426, 425, 417)), + 429 -> Seq(Set(429, 422, 421, 419)), + 430 -> Seq(Set(430, 419, 417, 415)), + 431 -> Seq(Set(431, 311), Set(431, 430, 428, 426)), + 432 -> Seq(Set(432, 429, 428, 419)), + 433 -> Seq(Set(433, 400), Set(433, 430, 428, 422)), + 434 -> Seq(Set(434, 429, 423, 422)), + 435 -> Seq(Set(435, 430, 426, 423)), + 436 -> Seq(Set(436, 271), Set(436, 432, 431, 430)), + 437 -> Seq(Set(437, 436, 435, 431)), + 438 -> Seq(Set(438, 373), Set(438, 436, 432, 421)), + 439 -> Seq(Set(439, 390), Set(439, 437, 436, 431)), + 440 -> Seq(Set(440, 439, 437, 436)), + 441 -> Seq(Set(441, 410), Set(441, 440, 433, 430)), + 442 -> Seq(Set(442, 440, 437, 435)), + 443 -> Seq(Set(443, 442, 437, 433)), + 444 -> Seq(Set(444, 435, 432, 431)), + 445 -> Seq(Set(445, 441, 439, 438)), + 446 -> Seq(Set(446, 341), Set(446, 442, 439, 431)), + 447 -> Seq(Set(447, 374), Set(447, 446, 441, 438)), + 448 -> Seq(Set(448, 444, 442, 437)), + 449 -> Seq(Set(449, 315), Set(449, 446, 440, 438)), + 450 -> Seq(Set(450, 371), Set(450, 443, 438, 434)), + 451 -> Seq(Set(451, 450, 441, 435)), + 452 -> Seq(Set(452, 448, 447, 446)), + 453 -> Seq(Set(453, 449, 447, 438)), + 454 -> Seq(Set(454, 449, 445, 444)), + 455 -> Seq(Set(455, 417), Set(455, 453, 449, 444)), + 456 -> Seq(Set(456, 454, 445, 433)), + 457 -> Seq(Set(457, 441), Set(457, 454, 449, 446)), + 458 -> Seq(Set(458, 255), Set(458, 453, 448, 445)), + 459 -> Seq(Set(459, 457, 454, 447)), + 460 -> Seq(Set(460, 399), Set(460, 459, 455, 451)), + 461 -> Seq(Set(461, 460, 455, 454)), + 462 -> Seq(Set(462, 389), Set(462, 457, 451, 450)), + 463 -> Seq(Set(463, 370), Set(463, 456, 455, 452)), + 464 -> Seq(Set(464, 460, 455, 441)), + 465 -> Seq(Set(465, 406), Set(465, 463, 462, 457)), + 466 -> Seq(Set(466, 460, 455, 452)), + 467 -> Seq(Set(467, 466, 461, 456)), + 468 -> Seq(Set(468, 464, 459, 453)), + 469 -> Seq(Set(469, 467, 464, 460)), + 470 -> Seq(Set(470, 321), Set(470, 468, 462, 461)), + 471 -> Seq(Set(471, 470), Set(471, 469, 468, 465)), + 472 -> Seq(Set(472, 470, 469, 461)), + 473 -> Seq(Set(473, 470, 467, 465)), + 474 -> Seq(Set(474, 283), Set(474, 465, 463, 456)), + 475 -> Seq(Set(475, 471, 467, 466)), + 476 -> Seq(Set(476, 461), Set(476, 475, 468, 466)), + 477 -> Seq(Set(477, 470, 462, 461)), + 478 -> Seq(Set(478, 357), Set(478, 477, 474, 472)), + 479 -> Seq(Set(479, 375), Set(479, 475, 472, 470)), + 480 -> Seq(Set(480, 473, 467, 464)), + 481 -> Seq(Set(481, 343), Set(481, 480, 472, 471)), + 482 -> Seq(Set(482, 477, 476, 473)), + 483 -> Seq(Set(483, 479, 477, 474)), + 484 -> Seq(Set(484, 379), Set(484, 483, 482, 470)), + 485 -> Seq(Set(485, 479, 469, 468)), + 486 -> Seq(Set(486, 481, 478, 472)), + 487 -> Seq(Set(487, 393), Set(487, 485, 483, 478)), + 488 -> Seq(Set(488, 487, 485, 484)), + 489 -> Seq(Set(489, 406), Set(489, 484, 483, 480)), + 490 -> Seq(Set(490, 271), Set(490, 485, 483, 481)), + 491 -> Seq(Set(491, 488, 485, 480)), + 492 -> Seq(Set(492, 491, 485, 484)), + 493 -> Seq(Set(493, 490, 488, 483)), + 494 -> Seq(Set(494, 357), Set(494, 493, 489, 481)), + 495 -> 
Seq(Set(495, 419), Set(495, 494, 486, 480)), + 496 -> Seq(Set(496, 494, 491, 480)), + 497 -> Seq(Set(497, 419), Set(497, 493, 488, 486)), + 498 -> Seq(Set(498, 495, 489, 487)), + 499 -> Seq(Set(499, 494, 493, 488)), + 500 -> Seq(Set(500, 499, 494, 490)), + 501 -> Seq(Set(501, 499, 497, 496)), + 502 -> Seq(Set(502, 498, 497, 494)), + 503 -> Seq(Set(503, 500), Set(503, 502, 501, 500)), + 504 -> Seq(Set(504, 502, 490, 483)), + 505 -> Seq(Set(505, 349), Set(505, 500, 497, 493)), + 506 -> Seq(Set(506, 411), Set(506, 501, 494, 491)), + 507 -> Seq(Set(507, 504, 501, 494)), + 508 -> Seq(Set(508, 399), Set(508, 505, 500, 495)), + 509 -> Seq(Set(509, 506, 502, 501)), + 510 -> Seq(Set(510, 501, 500, 498)), + 511 -> Seq(Set(511, 501), Set(511, 509, 503, 501)), + 512 -> Seq(Set(512, 510, 507, 504)), + 513 -> Seq(Set(513, 428), Set(513, 505, 503, 500)), + 514 -> Seq(Set(514, 511, 509, 507)), + 515 -> Seq(Set(515, 511, 508, 501)), + 516 -> Seq(Set(516, 514, 511, 509)), + 517 -> Seq(Set(517, 515, 507, 505)), + 518 -> Seq(Set(518, 485), Set(518, 516, 515, 507)), + 519 -> Seq(Set(519, 440), Set(519, 517, 511, 507)), + 520 -> Seq(Set(520, 509, 507, 503)), + 521 -> Seq(Set(521, 489), Set(521, 519, 514, 512)), + 522 -> Seq(Set(522, 518, 509, 507)), + 523 -> Seq(Set(523, 521, 517, 510)), + 524 -> Seq(Set(524, 357), Set(524, 523, 519, 515)), + 525 -> Seq(Set(525, 524, 521, 519)), + 526 -> Seq(Set(526, 525, 521, 517)), + 527 -> Seq(Set(527, 480), Set(527, 526, 520, 518)), + 528 -> Seq(Set(528, 526, 522, 517)), + 529 -> Seq(Set(529, 487), Set(529, 528, 525, 522)), + 530 -> Seq(Set(530, 527, 523, 520)), + 531 -> Seq(Set(531, 529, 525, 519)), + 532 -> Seq(Set(532, 531), Set(532, 529, 528, 522)), + 533 -> Seq(Set(533, 531, 530, 529)), + 534 -> Seq(Set(534, 533, 529, 527)), + 535 -> Seq(Set(535, 533, 529, 527)), + 536 -> Seq(Set(536, 533, 531, 529)), + 537 -> Seq(Set(537, 443), Set(537, 536, 535, 527)), + 538 -> Seq(Set(538, 537, 536, 533)), + 539 -> Seq(Set(539, 535, 534, 529)), + 540 -> Seq(Set(540, 361), Set(540, 537, 534, 529)), + 541 -> Seq(Set(541, 537, 531, 528)), + 542 -> Seq(Set(542, 540, 539, 533)), + 543 -> Seq(Set(543, 527), Set(543, 538, 536, 532)), + 544 -> Seq(Set(544, 538, 535, 531)), + 545 -> Seq(Set(545, 423), Set(545, 539, 537, 532)), + 546 -> Seq(Set(546, 545, 544, 538)), + 547 -> Seq(Set(547, 543, 540, 534)), + 548 -> Seq(Set(548, 545, 543, 538)), + 549 -> Seq(Set(549, 546, 545, 533)), + 550 -> Seq(Set(550, 357), Set(550, 546, 533, 529)), + 551 -> Seq(Set(551, 416), Set(551, 550, 547, 542)), + 552 -> Seq(Set(552, 550, 547, 532)), + 553 -> Seq(Set(553, 514), Set(553, 550, 549, 542)), + 554 -> Seq(Set(554, 551, 546, 543)), + 555 -> Seq(Set(555, 551, 546, 545)), + 556 -> Seq(Set(556, 403), Set(556, 549, 546, 540)), + 557 -> Seq(Set(557, 552, 551, 550)), + 558 -> Seq(Set(558, 553, 549, 544)), + 559 -> Seq(Set(559, 525), Set(559, 557, 552, 550)), + 560 -> Seq(Set(560, 554, 551, 549)), + 561 -> Seq(Set(561, 490), Set(561, 558, 552, 550)), + 562 -> Seq(Set(562, 560, 558, 551)), + 563 -> Seq(Set(563, 561, 554, 549)), + 564 -> Seq(Set(564, 401), Set(564, 563, 561, 558)), + 565 -> Seq(Set(565, 564, 559, 554)), + 566 -> Seq(Set(566, 413), Set(566, 564, 561, 560)), + 567 -> Seq(Set(567, 424), Set(567, 563, 557, 556)), + 568 -> Seq(Set(568, 558, 557, 551)), + 569 -> Seq(Set(569, 492), Set(569, 568, 559, 557)), + 570 -> Seq(Set(570, 503), Set(570, 563, 558, 552)), + 571 -> Seq(Set(571, 569, 566, 561)), + 572 -> Seq(Set(572, 571, 564, 560)), + 573 -> Seq(Set(573, 569, 567, 563)), + 574 -> Seq(Set(574, 561), 
Set(574, 569, 565, 560)), + 575 -> Seq(Set(575, 429), Set(575, 572, 570, 569)), + 576 -> Seq(Set(576, 573, 572, 563)), + 577 -> Seq(Set(577, 552), Set(577, 575, 574, 569)), + 578 -> Seq(Set(578, 562, 556, 555)), + 579 -> Seq(Set(579, 572, 570, 567)), + 580 -> Seq(Set(580, 579, 576, 574)), + 581 -> Seq(Set(581, 575, 574, 568)), + 582 -> Seq(Set(582, 497), Set(582, 579, 576, 571)), + 583 -> Seq(Set(583, 453), Set(583, 581, 577, 575)), + 584 -> Seq(Set(584, 581, 571, 570)), + 585 -> Seq(Set(585, 464), Set(585, 583, 582, 577)), + 586 -> Seq(Set(586, 584, 581, 579)), + 587 -> Seq(Set(587, 586, 581, 576)), + 588 -> Seq(Set(588, 437), Set(588, 577, 572, 571)), + 589 -> Seq(Set(589, 586, 585, 579)), + 590 -> Seq(Set(590, 497), Set(590, 588, 587, 578)), + 591 -> Seq(Set(591, 587, 585, 582)), + 592 -> Seq(Set(592, 591, 573, 568)), + 593 -> Seq(Set(593, 507), Set(593, 588, 585, 584)), + 594 -> Seq(Set(594, 575), Set(594, 586, 584, 583)), + 595 -> Seq(Set(595, 594, 593, 586)), + 596 -> Seq(Set(596, 592, 591, 590)), + 597 -> Seq(Set(597, 588, 585, 583)), + 598 -> Seq(Set(598, 597, 592, 591)), + 599 -> Seq(Set(599, 569), Set(599, 593, 591, 590)), + 600 -> Seq(Set(600, 599, 590, 589)), + 601 -> Seq(Set(601, 400), Set(601, 600, 597, 589)), + 602 -> Seq(Set(602, 596, 594, 591)), + 603 -> Seq(Set(603, 600, 599, 597)), + 604 -> Seq(Set(604, 600, 598, 589)), + 605 -> Seq(Set(605, 600, 598, 595)), + 606 -> Seq(Set(606, 602, 599, 591)), + 607 -> Seq(Set(607, 502), Set(607, 600, 598, 595)), + 608 -> Seq(Set(608, 606, 602, 585)), + 609 -> Seq(Set(609, 578), Set(609, 601, 600, 597)), + 610 -> Seq(Set(610, 483), Set(610, 602, 600, 599)), + 611 -> Seq(Set(611, 609, 607, 601)), + 612 -> Seq(Set(612, 607, 602, 598)), + 613 -> Seq(Set(613, 609, 603, 594)), + 614 -> Seq(Set(614, 613, 612, 607)), + 615 -> Seq(Set(615, 404), Set(615, 614, 609, 608)), + 616 -> Seq(Set(616, 614, 602, 597)), + 617 -> Seq(Set(617, 417), Set(617, 612, 608, 607)), + 618 -> Seq(Set(618, 615, 604, 598)), + 619 -> Seq(Set(619, 614, 611, 610)), + 620 -> Seq(Set(620, 619, 618, 611)), + 621 -> Seq(Set(621, 616, 615, 609)), + 622 -> Seq(Set(622, 325), Set(622, 612, 610, 605)), + 623 -> Seq(Set(623, 555), Set(623, 614, 613, 612)), + 624 -> Seq(Set(624, 617, 615, 612)), + 625 -> Seq(Set(625, 492), Set(625, 620, 617, 613)), + 626 -> Seq(Set(626, 623, 621, 613)), + 627 -> Seq(Set(627, 622, 617, 613)), + 628 -> Seq(Set(628, 405), Set(628, 626, 617, 616)), + 629 -> Seq(Set(629, 627, 624, 623)), + 630 -> Seq(Set(630, 628, 626, 623)), + 631 -> Seq(Set(631, 324), Set(631, 625, 623, 617)), + 632 -> Seq(Set(632, 629, 619, 613)), + 633 -> Seq(Set(633, 532), Set(633, 632, 631, 626)), + 634 -> Seq(Set(634, 319), Set(634, 631, 629, 627)), + 635 -> Seq(Set(635, 631, 625, 621)), + 636 -> Seq(Set(636, 632, 628, 623)), + 637 -> Seq(Set(637, 636, 628, 623)), + 638 -> Seq(Set(638, 637, 633, 632)), + 639 -> Seq(Set(639, 623), Set(639, 636, 635, 629)), + 640 -> Seq(Set(640, 638, 637, 626)), + 641 -> Seq(Set(641, 630), Set(641, 640, 636, 622)), + 642 -> Seq(Set(642, 523), Set(642, 636, 633, 632)), + 643 -> Seq(Set(643, 641, 640, 632)), + 644 -> Seq(Set(644, 634, 633, 632)), + 645 -> Seq(Set(645, 641, 637, 634)), + 646 -> Seq(Set(646, 397), Set(646, 635, 634, 633)), + 647 -> Seq(Set(647, 642), Set(647, 646, 643, 642)), + 648 -> Seq(Set(648, 647, 626, 625)), + 649 -> Seq(Set(649, 612), Set(649, 648, 644, 638)), + 650 -> Seq(Set(650, 647), Set(650, 644, 635, 632)), + 651 -> Seq(Set(651, 646, 638, 637)), + 652 -> Seq(Set(652, 559), Set(652, 647, 643, 641)), + 653 -> Seq(Set(653, 
646, 645, 643)), + 654 -> Seq(Set(654, 649, 643, 640)), + 655 -> Seq(Set(655, 567), Set(655, 653, 639, 638)), + 656 -> Seq(Set(656, 646, 638, 637)), + 657 -> Seq(Set(657, 619), Set(657, 656, 650, 649)), + 658 -> Seq(Set(658, 603), Set(658, 651, 648, 646)), + 659 -> Seq(Set(659, 657, 655, 644)), + 660 -> Seq(Set(660, 657, 656, 648)), + 661 -> Seq(Set(661, 657, 650, 649)), + 662 -> Seq(Set(662, 365), Set(662, 659, 656, 650)), + 663 -> Seq(Set(663, 406), Set(663, 655, 652, 649)), + 664 -> Seq(Set(664, 662, 660, 649)), + 665 -> Seq(Set(665, 632), Set(665, 661, 659, 654)), + 666 -> Seq(Set(666, 664, 659, 656)), + 667 -> Seq(Set(667, 664, 660, 649)), + 668 -> Seq(Set(668, 658, 656, 651)), + 669 -> Seq(Set(669, 667, 665, 664)), + 670 -> Seq(Set(670, 517), Set(670, 669, 665, 664)), + 671 -> Seq(Set(671, 656), Set(671, 669, 665, 662)), + 672 -> Seq(Set(672, 667, 666, 661)), + 673 -> Seq(Set(673, 645), Set(673, 666, 664, 663)), + 674 -> Seq(Set(674, 671, 665, 660)), + 675 -> Seq(Set(675, 674, 672, 669)), + 676 -> Seq(Set(676, 435), Set(676, 675, 671, 664)), + 677 -> Seq(Set(677, 674, 673, 669)), + 678 -> Seq(Set(678, 675, 673, 663)), + 679 -> Seq(Set(679, 613), Set(679, 676, 667, 661)), + 680 -> Seq(Set(680, 679, 650, 645)), + 681 -> Seq(Set(681, 678, 672, 670)), + 682 -> Seq(Set(682, 681, 679, 675)), + 683 -> Seq(Set(683, 682, 677, 672)), + 684 -> Seq(Set(684, 681, 671, 666)), + 685 -> Seq(Set(685, 684, 682, 681)), + 686 -> Seq(Set(686, 489), Set(686, 684, 674, 673)), + 687 -> Seq(Set(687, 674), Set(687, 682, 675, 673)), + 688 -> Seq(Set(688, 682, 674, 669)), + 689 -> Seq(Set(689, 675), Set(689, 686, 683, 681)), + 690 -> Seq(Set(690, 687, 683, 680)), + 691 -> Seq(Set(691, 689, 685, 678)), + 692 -> Seq(Set(692, 393), Set(692, 687, 686, 678)), + 693 -> Seq(Set(693, 691, 685, 678)), + 694 -> Seq(Set(694, 691, 681, 677)), + 695 -> Seq(Set(695, 483), Set(695, 694, 691, 686)), + 696 -> Seq(Set(696, 694, 686, 673)), + 697 -> Seq(Set(697, 430), Set(697, 689, 685, 681)), + 698 -> Seq(Set(698, 483), Set(698, 690, 689, 688)), + 699 -> Seq(Set(699, 698, 689, 684)), + 700 -> Seq(Set(700, 698, 695, 694)), + 701 -> Seq(Set(701, 699, 697, 685)), + 702 -> Seq(Set(702, 665), Set(702, 701, 699, 695)), + 703 -> Seq(Set(703, 702, 696, 691)), + 704 -> Seq(Set(704, 701, 699, 692)), + 705 -> Seq(Set(705, 686), Set(705, 704, 698, 697)), + 706 -> Seq(Set(706, 697, 695, 692)), + 707 -> Seq(Set(707, 702, 699, 692)), + 708 -> Seq(Set(708, 421), Set(708, 706, 704, 703)), + 709 -> Seq(Set(709, 708, 706, 705)), + 710 -> Seq(Set(710, 709, 696, 695)), + 711 -> Seq(Set(711, 619), Set(711, 704, 703, 700)), + 712 -> Seq(Set(712, 709, 708, 707)), + 713 -> Seq(Set(713, 672), Set(713, 706, 703, 696)), + 714 -> Seq(Set(714, 691), Set(714, 709, 707, 701)), + 715 -> Seq(Set(715, 714, 711, 708)), + 716 -> Seq(Set(716, 533), Set(716, 706, 705, 704)), + 717 -> Seq(Set(717, 716, 710, 701)), + 718 -> Seq(Set(718, 717, 716, 713)), + 719 -> Seq(Set(719, 569), Set(719, 711, 710, 707)), + 720 -> Seq(Set(720, 718, 712, 709)), + 721 -> Seq(Set(721, 712), Set(721, 720, 713, 712)), + 722 -> Seq(Set(722, 491), Set(722, 721, 718, 707)), + 723 -> Seq(Set(723, 717, 710, 707)), + 724 -> Seq(Set(724, 719, 716, 711)), + 725 -> Seq(Set(725, 720, 719, 716), Set(758)), + 726 -> Seq(Set(726, 721), Set(726, 725, 722, 721)), + 727 -> Seq(Set(727, 547), Set(727, 721, 719, 716)), + 728 -> Seq(Set(728, 726, 725, 724), Set(761)), + 729 -> Seq(Set(729, 671), Set(729, 726, 724, 718)), + 730 -> Seq(Set(730, 583), Set(730, 726, 715, 711)), + 731 -> Seq(Set(731, 729, 725, 
723), Set(764)), + 732 -> Seq(Set(732, 729, 728, 725), Set(765)), + 733 -> Seq(Set(733, 731, 726, 725), Set(766)), + 734 -> Seq(Set(734, 724, 721, 720), Set(767)), + 735 -> Seq(Set(735, 691), Set(735, 733, 728, 727)), + 736 -> Seq(Set(736, 730, 728, 723), Set(769)), + 737 -> Seq(Set(737, 732), Set(737, 736, 733, 732)), + 738 -> Seq(Set(738, 391), Set(738, 730, 729, 727)), + 739 -> Seq(Set(739, 731, 723, 721), Set(772)), + 740 -> Seq(Set(740, 587), Set(740, 737, 728, 716)), + 741 -> Seq(Set(741, 738, 733, 732), Set(774)), + 742 -> Seq(Set(742, 741, 738, 730), Set(775)), + 743 -> Seq(Set(743, 653), Set(743, 742, 731, 730)), + 744 -> Seq(Set(744, 743, 733, 731), Set(777)), + 745 -> Seq(Set(745, 487), Set(745, 740, 738, 737)), + 746 -> Seq(Set(746, 395), Set(746, 738, 733, 728)), + 747 -> Seq(Set(747, 743, 741, 737), Set(780)), + 748 -> Seq(Set(748, 744, 743, 733), Set(781)), + 749 -> Seq(Set(749, 748, 743, 742), Set(782)), + 750 -> Seq(Set(750, 746, 741, 734), Set(783)), + 751 -> Seq(Set(751, 733), Set(751, 750, 748, 740)), + 752 -> Seq(Set(752, 749, 732, 731), Set(785)), + 753 -> Seq(Set(753, 595), Set(753, 748, 745, 740)), + 754 -> Seq(Set(754, 735), Set(754, 742, 740, 735)), + 755 -> Seq(Set(755, 754, 745, 743), Set(2048)), + 756 -> Seq(Set(756, 407), Set(756, 755, 747, 740)), + 757 -> Seq(Set(757, 756, 751, 750)), + 758 -> Seq(Set(758, 757, 746, 741)), + 759 -> Seq(Set(759, 661), Set(759, 757, 756, 750)), + 760 -> Seq(Set(760, 757, 747, 734)), + 761 -> Seq(Set(761, 758), Set(761, 760, 759, 758)), + 762 -> Seq(Set(762, 679), Set(762, 761, 755, 745)), + 763 -> Seq(Set(763, 754, 749, 747)), + 764 -> Seq(Set(764, 761, 759, 758)), + 765 -> Seq(Set(765, 760, 755, 754)), + 766 -> Seq(Set(766, 757, 747, 744)), + 767 -> Seq(Set(767, 599), Set(767, 763, 760, 759)), + 768 -> Seq(Set(768, 764, 751, 749)), + 769 -> Seq(Set(769, 649), Set(769, 763, 762, 760)), + 770 -> Seq(Set(770, 768, 765, 756)), + 771 -> Seq(Set(771, 765, 756, 754)), + 772 -> Seq(Set(772, 765), Set(772, 767, 766, 764)), + 773 -> Seq(Set(773, 767, 765, 763)), + 774 -> Seq(Set(774, 589), Set(774, 767, 760, 758)), + 775 -> Seq(Set(775, 408), Set(775, 771, 769, 768)), + 776 -> Seq(Set(776, 773, 764, 759)), + 777 -> Seq(Set(777, 748), Set(777, 776, 767, 761)), + 778 -> Seq(Set(778, 403), Set(778, 775, 762, 759)), + 779 -> Seq(Set(779, 776, 771, 769)), + 780 -> Seq(Set(780, 775, 772, 764)), + 781 -> Seq(Set(781, 779, 765, 764)), + 782 -> Seq(Set(782, 453), Set(782, 780, 779, 773)), + 783 -> Seq(Set(783, 715), Set(783, 782, 776, 773)), + 784 -> Seq(Set(784, 778, 775, 771)), + 785 -> Seq(Set(785, 693), Set(785, 780, 776, 775)), + 786 -> Seq(Set(786, 782, 780, 771)), + 1024 -> Seq(Set(1024, 1015, 1002, 1001)), + 2048 -> Seq(Set(2048, 2035, 2034, 2029)), + 4096 -> Seq(Set(4096, 4095, 4081, 4069)) + ) + +} From 5ac157277ec1a9703777956bc75bd616ffd13a9e Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 19 Jul 2024 09:47:53 +0200 Subject: [PATCH 355/827] Add _spec to dependabot --- .github/dependabot.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f9cb18a0ad00..cce85f675a12 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,3 +8,12 @@ updates: - hamzaremmal reviewers: - hamzaremmal + - package-ecosystem: bundler + directory: '/docs/_spec' + schedule: + interval: weekly + assignees: + - hamzaremmal + reviewers: + - hamzaremmal + From 02153d6b67021b9b7cafd8ae73b267b77ddc8fe7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Jul 2024 11:25:01 +0000 Subject: [PATCH 356/827] Bump redcarpet from 3.5.1 to 3.6.0 in /docs/_spec Bumps [redcarpet](https://github.com/vmg/redcarpet) from 3.5.1 to 3.6.0. - [Release notes](https://github.com/vmg/redcarpet/releases) - [Changelog](https://github.com/vmg/redcarpet/blob/master/CHANGELOG.md) - [Commits](https://github.com/vmg/redcarpet/compare/v3.5.1...v3.6.0) --- updated-dependencies: - dependency-name: redcarpet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- docs/_spec/Gemfile | 2 +- docs/_spec/Gemfile.lock | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/_spec/Gemfile b/docs/_spec/Gemfile index ec15529ceb37..dcb5701fd8e1 100644 --- a/docs/_spec/Gemfile +++ b/docs/_spec/Gemfile @@ -6,4 +6,4 @@ gem "jekyll", "3.6.3" gem "webrick" gem "rouge" # gem 's3_website' -gem "redcarpet", "3.5.1" +gem "redcarpet", "3.6.0" diff --git a/docs/_spec/Gemfile.lock b/docs/_spec/Gemfile.lock index 48efd373725e..cb5edf940721 100644 --- a/docs/_spec/Gemfile.lock +++ b/docs/_spec/Gemfile.lock @@ -33,7 +33,7 @@ GEM rb-fsevent (0.11.2) rb-inotify (0.10.1) ffi (~> 1.0) - redcarpet (3.5.1) + redcarpet (3.6.0) rouge (2.2.1) safe_yaml (1.0.5) sass (3.7.4) @@ -49,7 +49,7 @@ PLATFORMS DEPENDENCIES jekyll (= 3.6.3) - redcarpet (= 3.5.1) + redcarpet (= 3.6.0) rouge webrick From 60c210584314606788220696b84b60bc88c4139a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Jul 2024 11:25:18 +0000 Subject: [PATCH 357/827] Bump VirtusLab/scala-cli-setup from 1.4.0 to 1.4.1 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.4.0 to 1.4.1. - [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.4.0...v1.4.1) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 6c6f1f67bc9c..4eb2474855ce 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.4.0 + - uses: VirtusLab/scala-cli-setup@v1.4.1 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From 0f0b703a0eb82c693a2f2edfe0932232ae44b10f Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Fri, 19 Jul 2024 13:51:35 +0200 Subject: [PATCH 358/827] Drop previous scala-cli management on windows --- dist/bin-native-overrides/cli-common-platform | 15 +------------ .../cli-common-platform.bat | 21 +------------------ project/Build.scala | 8 +------ 3 files changed, 3 insertions(+), 41 deletions(-) diff --git a/dist/bin-native-overrides/cli-common-platform b/dist/bin-native-overrides/cli-common-platform index 1a11c770f91a..49803d6282c5 100644 --- a/dist/bin-native-overrides/cli-common-platform +++ b/dist/bin-native-overrides/cli-common-platform @@ -1,16 +1,3 @@ #!/usr/bin/env bash -if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then - SCALA_CLI_VERSION="" - # iterate through lines in VERSION_SRC - while IFS= read -r line; do - # if line starts with "version:=" then extract the version - if [[ "$line" == cli_version:=* ]]; then - SCALA_CLI_VERSION="${line#cli_version:=}" - break - fi - done < "$PROG_HOME/EXTRA_PROPERTIES" - SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"" "--cli-version \"$SCALA_CLI_VERSION\"") -else - SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"") -fi +SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"") diff --git a/dist/bin-native-overrides/cli-common-platform.bat b/dist/bin-native-overrides/cli-common-platform.bat index d1c4f1c4716b..24ab08086007 100644 --- a/dist/bin-native-overrides/cli-common-platform.bat +++ b/dist/bin-native-overrides/cli-common-platform.bat @@ -1,22 +1,3 @@ @echo off -setlocal enabledelayedexpansion - -set "_SCALA_CLI_VERSION=" -@rem read for cli_version:=_SCALA_CLI_VERSION in EXTRA_PROPERTIES file -FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\EXTRA_PROPERTIES") DO ( - SET "line=%%G" - IF "!line:~0,13!"=="cli_version:=" ( - SET "_SCALA_CLI_VERSION=!line:~13!" 
- GOTO :foundCliVersion - ) -) - -@REM we didn't find it, so we should fail -echo "ERROR: cli_version not found in EXTRA_PROPERTIES file" -exit /b 1 - -:foundCliVersion -endlocal & set "SCALA_CLI_VERSION=%_SCALA_CLI_VERSION%" - -set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" "--cli-version" "%SCALA_CLI_VERSION%" +set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" diff --git a/project/Build.scala b/project/Build.scala index b329f79eaca7..be81537da8fe 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -125,8 +125,6 @@ object Build { /** Version of Scala CLI to download */ val scalaCliLauncherVersion = "1.4.0" - /** Version of Scala CLI to download (on Windows - last known validated version) */ - val scalaCliLauncherVersionWindows = "1.4.0" /** Version of Coursier to download for initializing the local maven repo of Scala command */ val coursierJarVersion = "2.1.10" @@ -2185,12 +2183,8 @@ object Build { republishBinDir := (dist / republishBinDir).value, republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, - republishExtraProps += ("cli_version" -> scalaCliLauncherVersion), republishLaunchers += - ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersionWindows/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") - ) - .settings( - Universal / mappings += (republishRepo.value / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), + ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") ) .settings( Windows / name := "scala", From 7695d9bae9c78c1b618c281bcd9e72e3f7ee75e8 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Fri, 19 Jul 2024 13:52:18 +0200 Subject: [PATCH 359/827] Bump scala-cli to 1.4.1 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index be81537da8fe..86b0b0d50c03 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -124,7 +124,7 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.4.0" + val scalaCliLauncherVersion = "1.4.1" /** Version of Coursier to download for initializing the local maven repo of Scala command */ val coursierJarVersion = "2.1.10" From 2facda005615361a701616adccd2d77fb5bea1d1 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 18 Jul 2024 17:56:04 +0200 Subject: [PATCH 360/827] Fix `Applications#compare#isAsGood#isGiven` to use parameter to apply the logic prioritizing givens over implicits as intended in #19300 Fix #21212 --- .../dotty/tools/dotc/typer/Applications.scala | 4 +-- tests/pos/i13044.scala | 2 +- tests/pos/i21212.scala | 33 +++++++++++++++++++ 3 files changed, 36 insertions(+), 3 deletions(-) create mode 100644 tests/pos/i21212.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 74c20812893b..dc2f02653d16 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1822,7 +1822,7 @@ trait Applications extends Compatibility { } case _ => // (3) def isGiven(alt: TermRef) = - alt1.symbol.is(Given) && alt.symbol != defn.NotGivenClass + alt.symbol.is(Given) && alt.symbol != defn.NotGivenClass def compareValues(tp1: Type, tp2: Type)(using Context) = isAsGoodValueType(tp1, 
tp2, isGiven(alt1), isGiven(alt2)) tp2 match @@ -1842,7 +1842,7 @@ trait Applications extends Compatibility { * available in 3.0-migration if mode `Mode.OldImplicitResolution` is turned on as well. * It is used to highlight differences between Scala 2 and 3 behavior. * - * - In Scala 3.0-3.5, the behavior is as follows: `T <:p U` iff there is an impliit conversion + * - In Scala 3.0-3.5, the behavior is as follows: `T <:p U` iff there is an implicit conversion * from `T` to `U`, or * * flip(T) <: flip(U) diff --git a/tests/pos/i13044.scala b/tests/pos/i13044.scala index 4c9b8b914062..36299d9e8366 100644 --- a/tests/pos/i13044.scala +++ b/tests/pos/i13044.scala @@ -1,4 +1,4 @@ -//> using options -Xmax-inlines:33 +//> using options -Xmax-inlines:35 import scala.deriving.Mirror import scala.compiletime._ diff --git a/tests/pos/i21212.scala b/tests/pos/i21212.scala new file mode 100644 index 000000000000..2116beb72012 --- /dev/null +++ b/tests/pos/i21212.scala @@ -0,0 +1,33 @@ + +trait Functor[F[_]]: + def map[A, B](fa: F[A])(f: A => B): F[B] = ??? +trait Monad[F[_]] extends Functor[F] +trait MonadError[F[_], E] extends Monad[F]: + def raiseError[A](e: E): F[A] +trait Temporal[F[_]] extends MonadError[F, Throwable] + +trait FunctorOps[F[_], A]: + def map[B](f: A => B): F[B] = ??? +implicit def toFunctorOps[F[_], A](target: F[A])(implicit tc: Functor[F]): FunctorOps[F, A] = ??? + +class ContextBounds[F[_]: Temporal](using err: MonadError[F, Throwable]): + def useCase = err.raiseError(new RuntimeException()) + val bool: F[Boolean] = ??? + def fails = toFunctorOps(bool).map(_ => ()) // warns under -source:3.5, // error under -source:3.6 + +class UsingArguments[F[_]](using Temporal[F])(using err: MonadError[F, Throwable]): + def useCase = err.raiseError(new RuntimeException()) + val bool: F[Boolean] = ??? + def works = toFunctorOps(bool).map(_ => ()) // warns under -source:3.5 + + +object Minimization: + + trait A + trait B extends A + + def test1(using a1: A)(using b1: B) = summon[A] // picks (most general) a1 + def test2(using a2: A)(implicit b2: B) = summon[A] // picks (most general) a2, was ambiguous + def test3(implicit a3: A, b3: B) = summon[A] // picks (most specific) b3 + +end Minimization From 184bdc2f600499d2b9ae44e7018ed24372b5b6f3 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Thu, 18 Jul 2024 18:35:43 +0200 Subject: [PATCH 361/827] Prefer extensions over conversions and implicits for member selection Before the changes, if `isAsGoodValueType` was called with an extension and a given conversion, it would prefer the conversion over the extension, because only the former yielded true in `isGiven`. Which contradicted the logic from searchImplicit which preferred extension over conversions for member selection. 
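To make the intended behaviour concrete, a minimal sketch (the `Wrapper`/`size` names are invented for illustration and are not part of this patch): a member reachable both through an extension method and through a `given Conversion` should now resolve to the extension, matching what `searchImplicit` already did for member selection.

```scala
class Wrapper(val underlying: Vector[Int])

object Wrapper:
  // route 1: an extension method defined directly for Wrapper
  extension (w: Wrapper) def size: Int = w.underlying.size
  // route 2: a conversion to Vector[Int], which also provides a `size` member
  given Conversion[Wrapper, Vector[Int]] = _.underlying

def test(w: Wrapper): Int =
  w.size // with this change, the extension is selected rather than going through the conversion
```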
--- .../src/dotty/tools/dotc/typer/Applications.scala | 14 ++++++-------- tests/pos/i19715.scala | 3 ++- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index dc2f02653d16..dccd018d72cf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1821,10 +1821,8 @@ trait Applications extends Compatibility { isAsGood(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) } case _ => // (3) - def isGiven(alt: TermRef) = - alt.symbol.is(Given) && alt.symbol != defn.NotGivenClass def compareValues(tp1: Type, tp2: Type)(using Context) = - isAsGoodValueType(tp1, tp2, isGiven(alt1), isGiven(alt2)) + isAsGoodValueType(tp1, tp2, alt1.symbol.is(Implicit), alt2.symbol.is(Implicit)) tp2 match case tp2: MethodType => true // (3a) case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a) @@ -1861,7 +1859,7 @@ trait Applications extends Compatibility { * for overloading resolution (when `preferGeneral is false), and the opposite relation * `U <: T` or `U convertible to `T` for implicit disambiguation between givens * (when `preferGeneral` is true). For old-style implicit values, the 3.4 behavior is kept. - * If one of the alternatives is a given and the other is an implicit, the given wins. + * If one of the alternatives is an implicit and the other is a given (or an extension), the implicit loses. * * - In Scala 3.5 and Scala 3.6-migration, we issue a warning if the result under * Scala 3.6 differ wrt to the old behavior up to 3.5. @@ -1869,7 +1867,7 @@ trait Applications extends Compatibility { * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. 
*/ - def isAsGoodValueType(tp1: Type, tp2: Type, alt1isGiven: Boolean, alt2isGiven: Boolean)(using Context): Boolean = + def isAsGoodValueType(tp1: Type, tp2: Type, alt1IsImplicit: Boolean, alt2IsImplicit: Boolean)(using Context): Boolean = val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) if !preferGeneral || Feature.migrateTo3 && oldResolution then // Normal specificity test for overloading resolution (where `preferGeneral` is false) @@ -1887,7 +1885,7 @@ trait Applications extends Compatibility { if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) || oldResolution - || !alt1isGiven && !alt2isGiven + || alt1IsImplicit && alt2IsImplicit then // Intermediate rules: better means specialize, but map all type arguments downwards // These are enabled for 3.0-3.5, and for all comparisons between old-style implicits, @@ -1902,8 +1900,8 @@ trait Applications extends Compatibility { case _ => mapOver(t) (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) else - // New rules: better means generalize, givens always beat implicits - if alt1isGiven != alt2isGiven then alt1isGiven + // New rules: better means generalize, givens (and extensions) always beat implicits + if alt1IsImplicit != alt2IsImplicit then alt2IsImplicit else (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) end isAsGoodValueType diff --git a/tests/pos/i19715.scala b/tests/pos/i19715.scala index 91aeda5c1698..be5471ffa9b3 100644 --- a/tests/pos/i19715.scala +++ b/tests/pos/i19715.scala @@ -6,7 +6,8 @@ class NT(t: Tup): object NT: extension (x: NT) def app(n: Int): Boolean = true - given Conversion[NT, Tup] = _.toTup + given c1: Conversion[NT, Tup] = _.toTup + implicit def c2(t: NT): Tup = c1(t) def test = val nt = new NT(Tup()) From e5fb143acf55b2977acdbdeb5f077d329e4c1c4e Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 19 Jul 2024 15:32:28 +0200 Subject: [PATCH 362/827] Make `Namer#ClassCompleter#completerCtx` a given instead of an `implicit val`, to account for the changes in given prioritization from #19300 and #21226, which made it ambiguous with the `Completer#creationContext` given. --- compiler/src/dotty/tools/dotc/typer/Namer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 83964417a6f1..bd0d826bc17d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1106,7 +1106,7 @@ class Namer { typer: Typer => class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) { withDecls(newScope(using ictx)) - protected implicit val completerCtx: Context = localContext(cls) + protected given completerCtx: Context = localContext(cls) private var localCtx: Context = uninitialized From 1bce8a962aa6b0e2e112b26553d0c01d223fe721 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Jul 2024 15:26:12 +0000 Subject: [PATCH 363/827] Bump webrick from 1.7.0 to 1.8.1 in /docs/_spec Bumps [webrick](https://github.com/ruby/webrick) from 1.7.0 to 1.8.1. - [Release notes](https://github.com/ruby/webrick/releases) - [Commits](https://github.com/ruby/webrick/compare/v1.7.0...v1.8.1) --- updated-dependencies: - dependency-name: webrick dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- docs/_spec/Gemfile.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/_spec/Gemfile.lock b/docs/_spec/Gemfile.lock index cb5edf940721..b8e54fb6b4cb 100644 --- a/docs/_spec/Gemfile.lock +++ b/docs/_spec/Gemfile.lock @@ -41,7 +41,7 @@ GEM sass-listen (4.0.0) rb-fsevent (~> 0.9, >= 0.9.4) rb-inotify (~> 0.9, >= 0.9.7) - webrick (1.7.0) + webrick (1.8.1) PLATFORMS ruby From 8c07ebd0c07cfe53a52a95adf1bd81bfde40a414 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 20 Jul 2024 11:39:58 +0200 Subject: [PATCH 364/827] Clean up categorization of type parameters There are 4 categories, depending on whether (1) context bounds are allowed and (2) variances are allowed ``` Class: yes/yes Type: no/no Hk: no/yes Def: yes/no ``` Furthermore, Type and Hk parameters can be wildcards. The previous grammars confused Type and Hk parameters and contained other errors as well. --- .../dotty/tools/dotc/parsing/Parsers.scala | 51 ++++++++++--------- docs/_docs/internals/syntax.md | 22 ++++---- docs/_docs/reference/syntax.md | 26 +++++----- 3 files changed, 52 insertions(+), 47 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 5d36bd230b7e..091077a7a195 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -54,9 +54,9 @@ object Parsers { enum ParamOwner: case Class // class or trait or enum case CaseClass // case class or enum case - case Type // type alias or abstract type - case TypeParam // type parameter case Def // method + case Type // type alias or abstract type or polyfunction type/expr + case Hk // type parameter (i.e. current parameter is higher-kinded) case Given // given definition case ExtensionPrefix // extension clause, up to and including extension parameter case ExtensionFollow // extension clause, following extension parameter @@ -66,7 +66,11 @@ object Parsers { def takesOnlyUsingClauses = // only using clauses allowed for this owner this == Given || this == ExtensionFollow def acceptsVariance = - this == Class || this == CaseClass || this == Type + this == Class || this == CaseClass || this == Hk + def acceptsCtxBounds = + !(this == Type || this == Hk) + def acceptsWildcard = + this == Type || this == Hk end ParamOwner @@ -1569,15 +1573,15 @@ object Parsers { else core() /** Type ::= FunType - * | HkTypeParamClause ‘=>>’ Type + * | TypTypeParamClause ‘=>>’ Type * | FunParamClause ‘=>>’ Type * | MatchType * | InfixType * FunType ::= (MonoFunType | PolyFunType) * MonoFunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - * | (‘->’ | ‘?->’ ) [CaptureSet] Type -- under pureFunctions - * PolyFunType ::= HKTypeParamClause '=>' Type - * | HKTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions + * | (‘->’ | ‘?->’ ) [CaptureSet] Type -- under pureFunctions + * PolyFunType ::= TypTypeParamClause '=>' Type + * | TypTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions * FunTypeArgs ::= InfixType * | `(' [ FunArgType {`,' FunArgType } ] `)' * | '(' [ TypedFunParam {',' TypedFunParam } ')' @@ -1743,7 +1747,7 @@ object Parsers { simpleTypeRest(tuple) else if in.token == LBRACKET then val start = in.offset - val tparams = typeParamClause(ParamOwner.TypeParam) + val tparams = typeParamClause(ParamOwner.Type) if in.token == TLARROW then atSpan(start, in.skipToken()): LambdaTypeTree(tparams, toplevelTyp()) @@ -2296,7 +2300,7 @@ object Parsers { t /** Expr ::= [`implicit'] FunParams (‘=>’ | 
‘?=>’) Expr - * | HkTypeParamClause ‘=>’ Expr + * | TypTypeParamClause ‘=>’ Expr * | Expr1 * FunParams ::= Bindings * | id @@ -2304,7 +2308,7 @@ object Parsers { * ExprInParens ::= PostfixExpr `:' Type * | Expr * BlockResult ::= [‘implicit’] FunParams (‘=>’ | ‘?=>’) Block - * | HkTypeParamClause ‘=>’ Block + * | TypTypeParamClause ‘=>’ Block * | Expr1 * Expr1 ::= [‘inline’] `if' `(' Expr `)' {nl} Expr [[semi] else Expr] * | [‘inline’] `if' Expr `then' Expr [[semi] else Expr] @@ -2340,7 +2344,7 @@ object Parsers { closure(start, location, modifiers(BitSet(IMPLICIT))) case LBRACKET => val start = in.offset - val tparams = typeParamClause(ParamOwner.TypeParam) + val tparams = typeParamClause(ParamOwner.Type) val arrowOffset = accept(ARROW) val body = expr(location) atSpan(start, arrowOffset) { @@ -2673,7 +2677,7 @@ object Parsers { * ColonArgument ::= colon [LambdaStart] * indent (CaseClauses | Block) outdent * LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - * | HkTypeParamClause ‘=>’ + * | TypTypeParamClause ‘=>’ * ColonArgBody ::= indent (CaseClauses | Block) outdent * Quoted ::= ‘'’ ‘{’ Block ‘}’ * | ‘'’ ‘[’ Type ‘]’ @@ -3390,17 +3394,19 @@ object Parsers { /** ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ * ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] - * id [HkTypeParamClause] TypeParamBounds + * id [HkTypeParamClause] TypeAndCtxBounds * * DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ * DefTypeParam ::= {Annotation} - * id [HkTypeParamClause] TypeParamBounds + * id [HkTypeParamClause] TypeAndCtxBounds * * TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ - * TypTypeParam ::= {Annotation} id [HkTypePamClause] TypeBounds + * TypTypeParam ::= {Annotation} + * (id | ‘_’) [HkTypeParamClause] TypeBounds * * HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ - * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypePamClause] | ‘_’) TypeBounds + * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] + * (id | ‘_’) [HkTypePamClause] TypeBounds */ def typeParamClause(paramOwner: ParamOwner): List[TypeDef] = inBracketsWithCommas { @@ -3411,7 +3417,6 @@ object Parsers { ok def typeParam(): TypeDef = { - val isAbstractOwner = paramOwner == ParamOwner.Type || paramOwner == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param if paramOwner.isClass then @@ -3422,13 +3427,13 @@ object Parsers { mods |= Contravariant atSpan(start, nameStart) { val name = - if (isAbstractOwner && in.token == USCORE) { + if paramOwner.acceptsWildcard && in.token == USCORE then in.nextToken() WildcardParamName.fresh().toTypeName - } else ident().toTypeName - val hkparams = typeParamClauseOpt(ParamOwner.Type) - val bounds = if (isAbstractOwner) typeBounds() else typeAndCtxBounds(name) + val hkparams = typeParamClauseOpt(ParamOwner.Hk) + val bounds = + if paramOwner.acceptsCtxBounds then typeAndCtxBounds(name) else typeBounds() TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) } } @@ -3938,14 +3943,14 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] + /** TypeDef ::= id [HkTypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atSpan(start, nameStart) { val nameIdent = typeIdent() val tname = nameIdent.name.asTypeName - val tparams = typeParamClauseOpt(ParamOwner.Type) + val tparams = typeParamClauseOpt(ParamOwner.Hk) val vparamss = funParamClauses() def 
makeTypeDef(rhs: Tree): Tree = { diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 1036397eed7b..941951ab9916 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -177,12 +177,12 @@ ClassQualifier ::= ‘[’ id ‘]’ ### Types ```ebnf Type ::= FunType - | HkTypeParamClause ‘=>>’ Type LambdaTypeTree(ps, t) + | TypTypeParamClause ‘=>>’ Type LambdaTypeTree(ps, t) | FunParamClause ‘=>>’ Type TermLambdaTypeTree(ps, t) | MatchType | InfixType FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) | FunctionWithMods(ts, t, mods, erasedParams) - | HKTypeParamClause '=>' Type PolyFunction(ps, t) + | TypTypeParamClause '=>' Type PolyFunction(ps, t) FunTypeArgs ::= InfixType | ‘(’ [ FunArgTypes ] ‘)’ | FunParamClause @@ -233,10 +233,10 @@ NameAndType ::= id ':' Type ### Expressions ```ebnf Expr ::= FunParams (‘=>’ | ‘?=>’) Expr Function(args, expr), Function(ValDef([implicit], id, TypeTree(), EmptyTree), expr) - | HkTypeParamClause ‘=>’ Expr PolyFunction(ts, expr) + | TypTypeParamClause ‘=>’ Expr PolyFunction(ts, expr) | Expr1 BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block - | HkTypeParamClause ‘=>’ Block + | TypTypeParamClause ‘=>’ Block | Expr1 FunParams ::= Bindings | id @@ -286,7 +286,7 @@ SimpleExpr ::= SimpleRef ColonArgument ::= colon [LambdaStart] indent (CaseClauses | Block) outdent LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - | HkTypeParamClause ‘=>’ + | TypTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ | ‘'’ ‘[’ TypeBlock ‘]’ ExprSplice ::= spliceId -- if inside quoted block @@ -364,11 +364,14 @@ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) id [HkTypeParamClause] TypeAndCtxBounds Bound(below, above, context) +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds + TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ -TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds +TypTypeParam ::= {Annotation} (id | ‘_’) [HkTypeParamClause] TypeBounds HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ -HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypeParamClause] | ‘_’) +HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id | ‘_’) [HkTypeParamClause] TypeBounds ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] @@ -385,9 +388,6 @@ DefParamClause ::= DefTypeParamClause TypelessClauses ::= TypelessClause {TypelessClause} TypelessClause ::= DefTermParamClause | UsingParamClause - -DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ @@ -458,7 +458,7 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [HkTypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef diff --git a/docs/_docs/reference/syntax.md 
b/docs/_docs/reference/syntax.md index 36970bb95306..5d984c762a89 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -178,12 +178,11 @@ ClassQualifier ::= ‘[’ id ‘]’ ### Types ``` Type ::= FunType - | HkTypeParamClause ‘=>>’ Type - | FunParamClause ‘=>>’ Type + | TypTypeParamClause ‘=>>’ Type | MatchType | InfixType FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - | HKTypeParamClause '=>' Type + | TypTypeParamClause '=>' Type FunTypeArgs ::= InfixType | ‘(’ [ FunArgTypes ] ‘)’ | FunParamClause @@ -215,17 +214,17 @@ ParamValueType ::= Type [‘*’] TypeArgs ::= ‘[’ Types ‘]’ Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> TypeBounds ::= [‘>:’ Type] [‘<:’ Type] -TypeParamBounds ::= TypeBounds {‘:’ Type} +TypeAndCtxBounds ::= TypeBounds {‘:’ Type} Types ::= Type {‘,’ Type} ``` ### Expressions ``` Expr ::= FunParams (‘=>’ | ‘?=>’) Expr - | HkTypeParamClause ‘=>’ Expr + | TypTypeParamClause ‘=>’ Expr | Expr1 BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block - | HkTypeParamClause ‘=>’ Block + | TypTypeParamClause ‘=>’ Block | Expr1 FunParams ::= Bindings | id @@ -273,7 +272,7 @@ SimpleExpr ::= SimpleRef ColonArgument ::= colon [LambdaStart] indent (CaseClauses | Block) outdent LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - | HkTypeParamClause ‘=>’ + | TypTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ | ‘'’ ‘[’ TypeBlock ‘]’ ExprSplice ::= spliceId -- if inside quoted block @@ -339,13 +338,16 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ### Type and Value Parameters ``` ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ -ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeParamBounds +ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeAndCtxBounds + +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ -TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds +TypTypeParam ::= {Annotation} (id | ‘_’) [HkTypeParamClause] TypeBounds HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ -HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypeParamClause] | ‘_’) TypeBounds +HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id | ‘_’) [HkTypeParamClause] TypeBounds ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ @@ -361,8 +363,6 @@ TypelessClauses ::= TypelessClause {TypelessClause} TypelessClause ::= DefTermParamClause | UsingParamClause -DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ @@ -431,7 +431,7 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [HkTypeParamClause] {FunParamClause}TypeBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef From 41e2d52f661182b55efa07ed2dcd9b4bd7051985 Mon Sep 17 00:00:00 2001 From: Yuito Murase Date: Sun, 27 Aug 2023 03:35:38 
+0900 Subject: [PATCH 365/827] Generalize HOAS patterns to take type parameters (experimental feature) --- .../src/dotty/tools/dotc/ast/Desugar.scala | 2 +- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 2 +- compiler/src/dotty/tools/dotc/ast/Trees.scala | 17 +- compiler/src/dotty/tools/dotc/ast/tpd.scala | 4 +- compiler/src/dotty/tools/dotc/ast/untpd.scala | 2 +- .../src/dotty/tools/dotc/config/Feature.scala | 4 + .../dotty/tools/dotc/core/Definitions.scala | 1 + .../tools/dotc/core/tasty/TreePickler.scala | 3 +- .../tools/dotc/core/tasty/TreeUnpickler.scala | 3 +- .../dotty/tools/dotc/parsing/Parsers.scala | 2 +- .../tools/dotc/printing/RefinedPrinter.scala | 7 +- .../tools/dotc/quoted/QuotePatterns.scala | 142 ++++++++--- .../dotty/tools/dotc/typer/Applications.scala | 9 + .../tools/dotc/typer/QuotesAndSplices.scala | 68 ++++- .../src/dotty/tools/dotc/typer/ReTyper.scala | 7 +- .../quoted/runtime/impl/QuoteMatcher.scala | 233 +++++++++++++----- ...ted-patterns-with-polymorphic-functions.md | 54 ++++ .../src/scala/quoted/runtime/Patterns.scala | 9 + .../runtime/stdLibPatches/language.scala | 8 + ...-with-bounded-type-params-regression.check | 6 + ...-with-bounded-type-params-regression.scala | 12 + ...ted-pattern-with-bounded-type-params.check | 4 + ...ted-pattern-with-bounded-type-params.scala | 12 + ...-pattern-with-type-params-regression.check | 16 ++ ...-pattern-with-type-params-regression.scala | 11 + .../quoted-pattern-with-type-params.check | 12 + .../quoted-pattern-with-type-params.scala | 9 + .../quoted-patten-with-type-params.scala | 14 ++ ...ote-match-poly-function-1-regression.check | 3 + .../Macro_1.scala | 8 + .../Test_2.scala | 5 + .../quote-match-poly-function-1.check | 3 + .../quote-match-poly-function-1/Macro_1.scala | 9 + .../quote-match-poly-function-1/Test_2.scala | 5 + .../quote-match-poly-function-2.check | 7 + .../quote-match-poly-function-2/Macro_1.scala | 23 ++ .../quote-match-poly-function-2/Test_2.scala | 9 + .../stdlibExperimentalDefinitions.scala | 4 + 38 files changed, 624 insertions(+), 125 deletions(-) create mode 100644 docs/_docs/reference/experimental/quoted-patterns-with-polymorphic-functions.md create mode 100644 tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.check create mode 100644 tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala create mode 100644 tests/neg-macros/quoted-pattern-with-bounded-type-params.check create mode 100644 tests/neg-macros/quoted-pattern-with-bounded-type-params.scala create mode 100644 tests/neg-macros/quoted-pattern-with-type-params-regression.check create mode 100644 tests/neg-macros/quoted-pattern-with-type-params-regression.scala create mode 100644 tests/neg-macros/quoted-pattern-with-type-params.check create mode 100644 tests/neg-macros/quoted-pattern-with-type-params.scala create mode 100644 tests/pos-macros/quoted-patten-with-type-params.scala create mode 100644 tests/run-macros/quote-match-poly-function-1-regression.check create mode 100644 tests/run-macros/quote-match-poly-function-1-regression/Macro_1.scala create mode 100644 tests/run-macros/quote-match-poly-function-1-regression/Test_2.scala create mode 100644 tests/run-macros/quote-match-poly-function-1.check create mode 100644 tests/run-macros/quote-match-poly-function-1/Macro_1.scala create mode 100644 tests/run-macros/quote-match-poly-function-1/Test_2.scala create mode 100644 tests/run-macros/quote-match-poly-function-2.check create mode 100644 tests/run-macros/quote-match-poly-function-2/Macro_1.scala create mode 
100644 tests/run-macros/quote-match-poly-function-2/Test_2.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 237dd4fe86e0..c360712999e2 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -2218,7 +2218,7 @@ object desugar { case Quote(body, _) => new UntypedTreeTraverser { def traverse(tree: untpd.Tree)(using Context): Unit = tree match { - case SplicePattern(body, _) => collect(body) + case SplicePattern(body, _, _) => collect(body) case _ => traverseChildren(tree) } }.traverse(body) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 97de434ba9d5..385917f9b368 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -871,7 +871,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => } private object quotePatVars extends TreeAccumulator[List[Symbol]] { def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { - case SplicePattern(pat, _) => outer.apply(syms, pat) + case SplicePattern(pat, _, _) => outer.apply(syms, pat) case _ => foldOver(syms, tree) } } diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 41899ed661f5..942fd6c9b0c7 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -763,9 +763,10 @@ object Trees { * `SplicePattern` can only be contained within a `QuotePattern`. * * @param body The tree that was spliced + * @param typeargs The type arguments of the splice (the HOAS arguments) * @param args The arguments of the splice (the HOAS arguments) */ - case class SplicePattern[+T <: Untyped] private[ast] (body: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class SplicePattern[+T <: Untyped] private[ast] (body: Tree[T], typeargs: List[Tree[T]], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { type ThisTree[+T <: Untyped] = SplicePattern[T] } @@ -1372,9 +1373,9 @@ object Trees { case tree: QuotePattern if (bindings eq tree.bindings) && (body eq tree.body) && (quotes eq tree.quotes) => tree case _ => finalize(tree, untpd.QuotePattern(bindings, body, quotes)(sourceFile(tree))) } - def SplicePattern(tree: Tree)(body: Tree, args: List[Tree])(using Context): SplicePattern = tree match { - case tree: SplicePattern if (body eq tree.body) && (args eq tree.args) => tree - case _ => finalize(tree, untpd.SplicePattern(body, args)(sourceFile(tree))) + def SplicePattern(tree: Tree)(body: Tree, typeargs: List[Tree], args: List[Tree])(using Context): SplicePattern = tree match { + case tree: SplicePattern if (body eq tree.body) && (typeargs eq tree.typeargs) & (args eq tree.args) => tree + case _ => finalize(tree, untpd.SplicePattern(body, typeargs, args)(sourceFile(tree))) } def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { case tree: SingletonTypeTree if (ref eq tree.ref) => tree @@ -1622,8 +1623,8 @@ object Trees { cpy.Splice(tree)(transform(expr)(using spliceContext)) case tree @ QuotePattern(bindings, body, quotes) => cpy.QuotePattern(tree)(transform(bindings), transform(body)(using quoteContext), transform(quotes)) - case tree @ SplicePattern(body, args) => - cpy.SplicePattern(tree)(transform(body)(using spliceContext), transform(args)) + case tree @ 
SplicePattern(body, targs, args) => + cpy.SplicePattern(tree)(transform(body)(using spliceContext), transform(targs), transform(args)) case tree @ Hole(isTerm, idx, args, content) => cpy.Hole(tree)(isTerm, idx, transform(args), transform(content)) case _ => @@ -1771,8 +1772,8 @@ object Trees { this(x, expr)(using spliceContext) case QuotePattern(bindings, body, quotes) => this(this(this(x, bindings), body)(using quoteContext), quotes) - case SplicePattern(body, args) => - this(this(x, body)(using spliceContext), args) + case SplicePattern(body, typeargs, args) => + this(this(this(x, body)(using spliceContext), typeargs), args) case Hole(_, _, args, content) => this(this(x, args), content) case _ => diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 514ac46170e1..3ce2d1d038dd 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -181,8 +181,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Splice(expr: Tree)(using Context): Splice = ta.assignType(untpd.Splice(expr), expr) - def SplicePattern(pat: Tree, args: List[Tree], tpe: Type)(using Context): SplicePattern = - untpd.SplicePattern(pat, args).withType(tpe) + def SplicePattern(pat: Tree, targs: List[Tree], args: List[Tree], tpe: Type)(using Context): SplicePattern = + untpd.SplicePattern(pat, targs, args).withType(tpe) def Hole(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpe: Type)(using Context): Hole = untpd.Hole(isTerm, idx, args, content).withType(tpe) diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index c42e8f71246d..8efc9f459ee8 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -415,7 +415,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Quote(body: Tree, tags: List[Tree])(implicit src: SourceFile): Quote = new Quote(body, tags) def Splice(expr: Tree)(implicit src: SourceFile): Splice = new Splice(expr) def QuotePattern(bindings: List[Tree], body: Tree, quotes: Tree)(implicit src: SourceFile): QuotePattern = new QuotePattern(bindings, body, quotes) - def SplicePattern(body: Tree, args: List[Tree])(implicit src: SourceFile): SplicePattern = new SplicePattern(body, args) + def SplicePattern(body: Tree, typeargs: List[Tree], args: List[Tree])(implicit src: SourceFile): SplicePattern = new SplicePattern(body, typeargs, args) def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() def SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index fed67b380092..8c1021e91e38 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -37,6 +37,7 @@ object Feature: val namedTuples = experimental("namedTuples") val modularity = experimental("modularity") val betterMatchTypeExtractors = experimental("betterMatchTypeExtractors") + val quotedPatternsWithPolymorphicFunctions = experimental("quotedPatternsWithPolymorphicFunctions") def experimentalAutoEnableFeatures(using Context): List[TermName] = defn.languageExperimentalFeatures @@ -130,6 +131,9 @@ object Feature: def betterMatchTypeExtractorsEnabled(using Context) = 
enabled(betterMatchTypeExtractors) + def quotedPatternsWithPolymorphicFunctionsEnabled(using Context) = + enabled(quotedPatternsWithPolymorphicFunctions) + /** Is pureFunctions enabled for this compilation unit? */ def pureFunsEnabled(using Context) = enabledBySetting(pureFunctions) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 1f0a673f90b1..3059ba35cca5 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -903,6 +903,7 @@ class Definitions { @tu lazy val QuotedRuntimePatterns: Symbol = requiredModule("scala.quoted.runtime.Patterns") @tu lazy val QuotedRuntimePatterns_patternHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHole") @tu lazy val QuotedRuntimePatterns_higherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHole") + @tu lazy val QuotedRuntimePatterns_higherOrderHoleWithTypes: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHoleWithTypes") @tu lazy val QuotedRuntimePatterns_patternTypeAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("patternType") @tu lazy val QuotedRuntimePatterns_fromAboveAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("fromAbove") diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 4e32db2ae602..eeeaaaf72bf1 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -776,8 +776,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleType(tree.tpe) bindings.foreach(pickleTree) } - case SplicePattern(pat, args) => - val targs = Nil // SplicePattern `targs` will be added with #18271 + case SplicePattern(pat, targs, args) => writeByte(SPLICEPATTERN) withLength { pickleTree(pat) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 4750276f4553..e62db9af520a 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1668,8 +1668,7 @@ class TreeUnpickler(reader: TastyReader, val pat = readTree() val patType = readType() val (targs, args) = until(end)(readTree()).span(_.isType) - assert(targs.isEmpty, "unexpected type arguments in SPLICEPATTERN") // `targs` will be needed for #18271. Until this fearure is added they should be empty. 
- SplicePattern(pat, args, patType) + SplicePattern(pat, targs, args, patType) case HOLE => readHole(end, isTerm = true) case _ => diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index ae8e16ac9ea4..f5ac90cbc773 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1962,7 +1962,7 @@ object Parsers { syntaxError(em"$msg\n\nHint: $hint", Span(start, in.lastOffset)) Ident(nme.ERROR.toTypeName) else if inPattern then - SplicePattern(expr, Nil) + SplicePattern(expr, Nil, Nil) else Splice(expr) } diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 0c6e36c8f18f..0a7f258584e7 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -789,11 +789,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val open = if (body.isTerm) keywordStr("{") else keywordStr("[") val close = if (body.isTerm) keywordStr("}") else keywordStr("]") keywordStr("'") ~ quotesText ~ open ~ bindingsText ~ toTextGlobal(body) ~ close - case SplicePattern(pattern, args) => + case SplicePattern(pattern, typeargs, args) => val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) keywordStr("$") ~ spliceTypeText ~ { - if args.isEmpty then keywordStr("{") ~ inPattern(toText(pattern)) ~ keywordStr("}") - else toText(pattern) ~ "(" ~ toTextGlobal(args, ", ") ~ ")" + if typeargs.isEmpty && args.isEmpty then keywordStr("{") ~ inPattern(toText(pattern)) ~ keywordStr("}") + else if typeargs.isEmpty then toText(pattern) ~ "(" ~ toTextGlobal(args, ", ") ~ ")" + else toText(pattern) ~ "[" ~ toTextGlobal(typeargs, ", ")~ "]" ~ "(" ~ toTextGlobal(args, ", ") ~ ")" } case Hole(isTerm, idx, args, content) => val (prefix, postfix) = if isTerm then ("{{{", "}}}") else ("[[[", "]]]") diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala index 1ebf2ae5714b..82701dafd2c9 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala @@ -26,33 +26,93 @@ object QuotePatterns: import tpd.* /** Check for restricted patterns */ - def checkPattern(quotePattern: QuotePattern)(using Context): Unit = new tpd.TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = tree match { - case tree: SplicePattern => - if !tree.body.typeOpt.derivesFrom(defn.QuotedExprClass) then - report.error(i"Splice pattern must match an Expr[...]", tree.body.srcPos) - case tdef: TypeDef if tdef.symbol.isClass => - val kind = if tdef.symbol.is(Module) then "objects" else "classes" - report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) - case tree: NamedDefTree => - if tree.name.is(NameKinds.WildcardParamName) then - report.warning( - "Use of `_` for lambda in quoted pattern. 
Use explicit lambda instead or use `$_` to match any term.", - tree.srcPos) - if tree.name.isTermName && !tree.nameSpan.isSynthetic && tree.name != nme.ANON_FUN && tree.name.startsWith("$") then - report.error("Names cannot start with $ quote pattern", tree.namePos) - traverseChildren(tree) - case _: Match => - report.error("Implementation restriction: cannot match `match` expressions", tree.srcPos) - case _: Try => - report.error("Implementation restriction: cannot match `try` expressions", tree.srcPos) - case _: Return => - report.error("Implementation restriction: cannot match `return` statements", tree.srcPos) - case _ => - traverseChildren(tree) - } + def checkPattern(quotePattern: QuotePattern)(using Context): Unit = + def validatePatternAndCollectTypeVars(): Set[Symbol] = new tpd.TreeAccumulator[Set[Symbol]] { + override def apply(typevars: Set[Symbol], tree: tpd.Tree)(using Context): Set[Symbol] = + // Collect type variables + val typevars1 = tree match + case tree @ DefDef(_, paramss, _, _) => + typevars union paramss.flatMap{ params => params match + case TypeDefs(tdefs) => tdefs.map(_.symbol) + case _ => List.empty + }.toSet union typevars + case _ => typevars + + // Validate pattern + tree match + case tree: SplicePattern => + if !tree.body.typeOpt.derivesFrom(defn.QuotedExprClass) then + report.error(i"Splice pattern must match an Expr[...]", tree.body.srcPos) + typevars1 + case tdef: TypeDef if tdef.symbol.isClass => + val kind = if tdef.symbol.is(Module) then "objects" else "classes" + report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) + typevars1 + case tree: NamedDefTree => + if tree.name.is(NameKinds.WildcardParamName) then + report.warning( + "Use of `_` for lambda in quoted pattern. Use explicit lambda instead or use `$_` to match any term.", + tree.srcPos) + if tree.name.isTermName && !tree.nameSpan.isSynthetic && tree.name != nme.ANON_FUN && tree.name.startsWith("$") then + report.error("Names cannot start with $ quote pattern", tree.namePos) + foldOver(typevars1, tree) + case _: Match => + report.error("Implementation restriction: cannot match `match` expressions", tree.srcPos) + typevars1 + case _: Try => + report.error("Implementation restriction: cannot match `try` expressions", tree.srcPos) + typevars1 + case _: Return => + report.error("Implementation restriction: cannot match `return` statements", tree.srcPos) + typevars1 + case _ => + foldOver(typevars1, tree) + }.apply(Set.empty, quotePattern.body) + + val boundTypeVars = validatePatternAndCollectTypeVars() - }.traverse(quotePattern.body) + /* + * This part checks well-formedness of arguments to hoas patterns. 
+ * (1) Type arguments of a hoas patterns must be introduced in the quote pattern.ctxShow + * Examples + * well-formed: '{ [A] => (x : A) => $a[A](x) } // A is introduced in the quote pattern + * ill-formed: '{ (x : Int) => $a[Int](x) } // Int is defined outside of the quote pattern + * (2) If value arguments of a hoas pattern has a type with type variables that are introduced in + * the quote pattern, those type variables should be in type arguments to the hoas patternHole + * Examples + * well-formed: '{ [A] => (x : A) => $a[A](x) } // a : [A] => (x:A) => A + * ill-formed: '{ [A] => (x : A) => $a(x) } // a : (x:A) => A ...but A is undefined; hence ill-formed + */ + new tpd.TreeTraverser { + override def traverse(tree: tpd.Tree)(using Context): Unit = tree match { + case tree: SplicePattern => + def uncapturedTypeVars(arg: tpd.Tree, capturedTypeVars: List[tpd.Tree]): Set[Type] = + /* Sometimes arg is untyped when a splice pattern is ill-formed. + * Return early in such case. + * Refer to QuoteAndSplices::typedSplicePattern + */ + if !arg.hasType then return Set.empty + + val capturedTypeVarsSet = capturedTypeVars.map(_.symbol).toSet + new TypeAccumulator[Set[Type]] { + def apply(x: Set[Type], tp: Type): Set[Type] = + if boundTypeVars.contains(tp.typeSymbol) && !capturedTypeVarsSet.contains(tp.typeSymbol) then + foldOver(x + tp, tp) + else + foldOver(x, tp) + }.apply(Set.empty, arg.tpe) + + for (typearg <- tree.typeargs) // case (1) + do + if !boundTypeVars.contains(typearg.symbol) then + report.error("Type arguments of a hoas pattern needs to be defined inside the quoted pattern", typearg.srcPos) + for (arg <- tree.args) // case (2) + do + if !uncapturedTypeVars(arg, tree.typeargs).isEmpty then + report.error("Type variables that this argument depends on are not captured in this hoas pattern", arg.srcPos) + case _ => traverseChildren(tree) + } + }.traverse(quotePattern.body) /** Encode the quote pattern into an `unapply` that the pattern matcher can handle. * @@ -76,7 +136,7 @@ object QuotePatterns: * .ExprMatch // or TypeMatch * .unapply[ * KCons[t1 >: l1 <: b1, ...KCons[tn >: ln <: bn, KNil]...], // scala.quoted.runtime.{KCons, KNil} - * (T1, T2, (A1, ..., An) => T3, ...) + * (Expr[T1], Expr[T2], Expr[(A1, ..., An) => T3], ...) 
* ]( * '{ * type t1' >: l1' <: b1' @@ -199,16 +259,24 @@ object QuotePatterns: val patBuf = new mutable.ListBuffer[Tree] val shape = new tpd.TreeMap { override def transform(tree: Tree)(using Context) = tree match { - case Typed(splice @ SplicePattern(pat, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => + case Typed(splice @ SplicePattern(pat, Nil, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => transform(tpt) // Collect type bindings transform(splice) - case SplicePattern(pat, args) => + case SplicePattern(pat, typeargs, args) => val patType = pat.tpe.widen val patType1 = patType.translateFromRepeated(toArray = false) val pat1 = if (patType eq patType1) pat else pat.withType(patType1) patBuf += pat1 - if args.isEmpty then ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) - else ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef).appliedToType(tree.tpe).appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))).withSpan(tree.span) + if typeargs.isEmpty && args.isEmpty then ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) + else if typeargs.isEmpty then + ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef) + .appliedToType(tree.tpe) + .appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))) + .withSpan(tree.span) + else ref(defn.QuotedRuntimePatterns_higherOrderHoleWithTypes.termRef) + .appliedToTypeTrees(List(TypeTree(tree.tpe), tpd.hkNestedPairsTypeTree(typeargs))) + .appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))) + .withSpan(tree.span) case _ => super.transform(tree) } @@ -234,7 +302,7 @@ object QuotePatterns: fun match // .asInstanceOf[QuoteMatching].{ExprMatch,TypeMatch}.unapply[, ] case TypeApply(Select(Select(TypeApply(Select(quotes, _), _), _), _), typeBindings :: resTypes :: Nil) => - val bindings = unrollBindings(typeBindings) + val bindings = unrollHkNestedPairsTypeTree(typeBindings) val addPattenSplice = new TreeMap { private val patternIterator = patterns.iterator.filter { case pat: Bind => !pat.symbol.name.is(PatMatGivenVarName) @@ -242,9 +310,11 @@ object QuotePatterns: } override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match case TypeApply(patternHole, _) if patternHole.symbol == defn.QuotedRuntimePatterns_patternHole => - cpy.SplicePattern(tree)(patternIterator.next(), Nil) + cpy.SplicePattern(tree)(patternIterator.next(), Nil, Nil) case Apply(patternHole, SeqLiteral(args, _) :: Nil) if patternHole.symbol == defn.QuotedRuntimePatterns_higherOrderHole => - cpy.SplicePattern(tree)(patternIterator.next(), args) + cpy.SplicePattern(tree)(patternIterator.next(), Nil, args) + case Apply(TypeApply(patternHole, List(_, targsTpe)), SeqLiteral(args, _) :: Nil) if patternHole.symbol == defn.QuotedRuntimePatterns_higherOrderHoleWithTypes => + cpy.SplicePattern(tree)(patternIterator.next(), unrollHkNestedPairsTypeTree(targsTpe), args) case _ => super.transform(tree) } val body = addPattenSplice.transform(shape) match @@ -262,7 +332,7 @@ object QuotePatterns: case body => body cpy.QuotePattern(tree)(bindings, body, quotes) - private def unrollBindings(tree: Tree)(using Context): List[Tree] = tree match + private def unrollHkNestedPairsTypeTree(tree: Tree)(using Context): List[Tree] = tree match case AppliedTypeTree(tupleN, bindings) if defn.isTupleClass(tupleN.symbol) => bindings // TupleN, 1 <= N <= 22 - case AppliedTypeTree(_, head :: tail :: Nil) => head :: unrollBindings(tail) // KCons or *: + case AppliedTypeTree(_, head :: tail :: Nil) 
=> head :: unrollHkNestedPairsTypeTree(tail) // KCons or *: case _ => Nil // KNil or EmptyTuple diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 74c20812893b..81a80ea28af3 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1232,6 +1232,8 @@ trait Applications extends Compatibility { } else { val app = tree.fun match + case untpd.TypeApply(_: untpd.SplicePattern, _) if Feature.quotedPatternsWithPolymorphicFunctionsEnabled => + typedAppliedSpliceWithTypes(tree, pt) case _: untpd.SplicePattern => typedAppliedSplice(tree, pt) case _ => realApply app match { @@ -1283,9 +1285,16 @@ trait Applications extends Compatibility { if (ctx.mode.is(Mode.Pattern)) return errorTree(tree, em"invalid pattern") + tree.fun match { + case _: untpd.SplicePattern if Feature.quotedPatternsWithPolymorphicFunctionsEnabled => + return errorTree(tree, em"Implementation restriction: A higher-order pattern must carry value arguments") + case _ => + } + val isNamed = hasNamedArg(tree.args) val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) record("typedTypeApply") + typedExpr(tree.fun, PolyProto(typedArgs, pt)) match { case fun: TypeApply if !ctx.isAfterTyper => val function = fun.fun diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index fb9176526e42..59993a69797d 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -119,14 +119,31 @@ trait QuotesAndSplices { EmptyTree } } + val typedTypeargs = tree.typeargs.map { + case typearg: untpd.Ident => + val typedTypearg = typedType(typearg) + val bounds = ctx.gadt.fullBounds(typedTypearg.symbol) + if bounds != null && bounds != TypeBounds.empty then + report.error("Implementation restriction: Type arguments to Open pattern are expected to have no bounds", typearg.srcPos) + typedTypearg + case arg => + report.error("Open pattern expected an identifier", arg.srcPos) + EmptyTree + } for arg <- typedArgs if arg.symbol.is(Mutable) do // TODO support these patterns. Possibly using scala.quoted.util.Var report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val patType = if tree.args.isEmpty then pt else defn.FunctionNOf(argTypes, pt) + val patType = (tree.typeargs.isEmpty, tree.args.isEmpty) match + case (true, true) => pt + case (true, false) => + defn.FunctionNOf(argTypes, pt) + case (false, _) => + PolyFunctionOf(typedTypeargs.tpes, argTypes, pt) + val pat = typedPattern(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patType))(using quotePatternSpliceContext) val baseType = pat.tpe.baseType(defn.QuotedExprClass) val argType = if baseType.exists then baseType.argTypesHi.head else defn.NothingType - untpd.cpy.SplicePattern(tree)(pat, typedArgs).withType(pt) + untpd.cpy.SplicePattern(tree)(pat, typedTypeargs, typedArgs).withType(pt) else errorTree(tree, em"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.body.srcPos) } @@ -153,7 +170,34 @@ trait QuotesAndSplices { else // $x(...) 
higher-order quasipattern if args.isEmpty then report.error("Missing arguments for open pattern", tree.srcPos) - typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, args), pt) + typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, Nil, args), pt) + } + + /** Types a splice applied to some type arguments and arguments + * `$f[targs1, ..., targsn](arg1, ..., argn)` in a quote pattern. + * + * Refer to: typedAppliedSplice + */ + def typedAppliedSpliceWithTypes(tree: untpd.Apply, pt: Type)(using Context): Tree = { + assert(ctx.mode.isQuotedPattern) + val untpd.Apply(typeApplyTree @ untpd.TypeApply(splice: untpd.SplicePattern, typeargs), args) = tree: @unchecked + def isInBraces: Boolean = splice.span.end != splice.body.span.end + if isInBraces then // ${x}[...](...) match an application + val typedTypeargs = typeargs.map(arg => typedType(arg)) + val typedArgs = args.map(arg => typedExpr(arg)) + val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) + val splice1 = typedSplicePattern(splice, ProtoTypes.PolyProto(typedArgs, defn.FunctionOf(argTypes, pt))) + val typedTypeApply = untpd.cpy.TypeApply(typeApplyTree)(splice1.select(nme.apply), typedTypeargs) + untpd.cpy.Apply(tree)(typedTypeApply, typedArgs).withType(pt) + else // $x[...](...) higher-order quasipattern + // Empty args is allowed + if typeargs.isEmpty then + report.error("Missing type arguments for open pattern", tree.srcPos) + typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, typeargs, args), pt) + } + + def typedTypeAppliedSplice(tree: untpd.TypeApply, pt: Type)(using Context): Tree = { + typedAppliedSpliceWithTypes(untpd.Apply(tree, Nil), pt) } /** Type check a type binding reference in a quoted pattern. @@ -322,4 +366,22 @@ object QuotesAndSplices { case _ => super.transform(tree) end TreeMapWithVariance + + object PolyFunctionOf { + /** + * Return a poly-type + method type [$typeargs] => ($args) => ($resultType) + * where typeargs occur in args and resulttype + */ + def apply(typeargs: List[Type], args: List[Type], resultType: Type)(using Context): Type = + val typeargs1 = PolyType.syntheticParamNames(typeargs.length) + + val bounds = typeargs map (_ => TypeBounds.empty) + val resultTypeExp = (pt: PolyType) => { + val fromSymbols = typeargs map (_.typeSymbol) + val args1 = args map (_.subst(fromSymbols, pt.paramRefs)) + val resultType1 = resultType.subst(fromSymbols, pt.paramRefs) + MethodType(args1, resultType1) + } + defn.PolyFunctionOf(PolyType(typeargs1)(_ => bounds, resultTypeExp)) + } } diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 7a5c838848ac..ed8919661860 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -130,14 +130,15 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree = assertTyped(tree) + val typeargs1 = tree.typeargs.mapconserve(typedType(_)) val args1 = tree.args.mapconserve(typedExpr(_)) val patternTpe = - if args1.isEmpty then tree.typeOpt + if !typeargs1.isEmpty then QuotesAndSplices.PolyFunctionOf(typeargs1.map(_.tpe), args1.map(_.tpe), tree.typeOpt) + else if args1.isEmpty then tree.typeOpt else defn.FunctionType(args1.size).appliedTo(args1.map(_.tpe) :+ tree.typeOpt) val bodyCtx = spliceContext.addMode(Mode.Pattern).retractMode(Mode.QuotedPatternBits) val body1 = typed(tree.body, 
defn.QuotedExprClass.typeRef.appliedTo(patternTpe))(using bodyCtx) - val args = tree.args.mapconserve(typedExpr(_)) - untpd.cpy.SplicePattern(tree)(body1, args1).withType(tree.typeOpt) + untpd.cpy.SplicePattern(tree)(body1, typeargs1, args1).withType(tree.typeOpt) override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = promote(tree) diff --git a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala index ac1cbbfb6bb5..3790174526b3 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala @@ -11,6 +11,7 @@ import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.util.optional +import dotty.tools.dotc.ast.TreeTypeMap /** Matches a quoted tree against a quoted pattern tree. * A quoted pattern tree may have type and term holes in addition to normal terms. @@ -112,16 +113,17 @@ class QuoteMatcher(debug: Boolean) { /** Sequence of matched expressions. * These expressions are part of the scrutinee and will be bound to the quote pattern term splices. */ - type MatchingExprs = Seq[MatchResult] + private type MatchingExprs = Seq[MatchResult] - /** A map relating equivalent symbols from the scrutinee and the pattern + /** TODO-18271: update + * A map relating equivalent symbols from the scrutinee and the pattern * For example in * ``` * '{val a = 4; a * a} match case '{ val x = 4; x * x } * ``` * when matching `a * a` with `x * x` the environment will contain `Map(a -> x)`. */ - private type Env = Map[Symbol, Symbol] + private case class Env(val termEnv: Map[Symbol, Symbol], val typeEnv: Map[Symbol, Symbol]) private def withEnv[T](env: Env)(body: Env ?=> T): T = body(using env) @@ -132,7 +134,7 @@ class QuoteMatcher(debug: Boolean) { val (pat1, typeHoles, ctx1) = instrumentTypeHoles(pattern) inContext(ctx1) { optional { - given Env = Map.empty + given Env = new Env(Map.empty, Map.empty) scrutinee =?= pat1 }.map { matchings => lazy val spliceScope = SpliceScope.getCurrent @@ -236,6 +238,26 @@ class QuoteMatcher(debug: Boolean) { case _ => None end TypeTreeTypeTest + /* Some of method symbols in arguments of higher-order term hole are eta-expanded. + * e.g. + * g: (Int) => Int + * => { + * def $anonfun(y: Int): Int = g(y) + * closure($anonfun) + * } + * + * f: (using Int) => Int + * => f(using x) + * This function restores the symbol of the original method from + * the eta-expanded function. 
+ */ + def getCapturedIdent(arg: Tree)(using Context): Ident = + arg match + case id: Ident => id + case Apply(fun, _) => getCapturedIdent(fun) + case Block((ddef: DefDef) :: _, _: Closure) => getCapturedIdent(ddef.rhs) + case Typed(expr, _) => getCapturedIdent(expr) + def runMatch(): optional[MatchingExprs] = pattern match /* Term hole */ @@ -244,14 +266,14 @@ class QuoteMatcher(debug: Boolean) { if patternHole.symbol.eq(defn.QuotedRuntimePatterns_patternHole) && tpt2.tpe.derivesFrom(defn.RepeatedParamClass) => scrutinee match - case Typed(s, tpt1) if s.tpe <:< tpt.tpe => matched(scrutinee) + case Typed(s, tpt1) if isSubTypeUnderEnv(s, tpt) => matched(scrutinee) case _ => notMatched /* Term hole */ // Match a scala.internal.Quoted.patternHole and return the scrutinee tree case TypeApply(patternHole, tpt :: Nil) if patternHole.symbol.eq(defn.QuotedRuntimePatterns_patternHole) && - scrutinee.tpe <:< tpt.tpe => + isSubTypeUnderEnv(scrutinee, tpt) => scrutinee match case ClosedPatternTerm(scrutinee) => matched(scrutinee) case _ => notMatched @@ -262,33 +284,32 @@ class QuoteMatcher(debug: Boolean) { case Apply(TypeApply(Ident(_), List(TypeTree())), SeqLiteral(args, _) :: Nil) if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHole) => - /* Some of method symbols in arguments of higher-order term hole are eta-expanded. - * e.g. - * g: (Int) => Int - * => { - * def $anonfun(y: Int): Int = g(y) - * closure($anonfun) - * } - * - * f: (using Int) => Int - * => f(using x) - * This function restores the symbol of the original method from - * the eta-expanded function. - */ - def getCapturedIdent(arg: Tree)(using Context): Ident = - arg match - case id: Ident => id - case Apply(fun, _) => getCapturedIdent(fun) - case Block((ddef: DefDef) :: _, _: Closure) => getCapturedIdent(ddef.rhs) - case Typed(expr, _) => getCapturedIdent(expr) - val env = summon[Env] val capturedIds = args.map(getCapturedIdent) val capturedSymbols = capturedIds.map(_.symbol) - val captureEnv = env.filter((k, v) => !capturedSymbols.contains(v)) + val captureEnv = Env( + termEnv = env.termEnv.filter((k, v) => !capturedIds.map(_.symbol).contains(v)), + typeEnv = env.typeEnv) withEnv(captureEnv) { scrutinee match - case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, capturedIds, args.map(_.tpe), env) + case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, capturedIds, args.map(_.tpe), Nil, env) + case _ => notMatched + } + + /* Higher order term hole */ + // Matches an open term and wraps it into a lambda that provides the free variables + case Apply(TypeApply(Ident(_), List(TypeTree(), targs)), SeqLiteral(args, _) :: Nil) + if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHoleWithTypes) => + + val env = summon[Env] + val capturedIds = args.map(getCapturedIdent) + val capturedTargs = unrollHkNestedPairsTypeTree(targs) + val captureEnv = Env( + termEnv = env.termEnv.filter((k, v) => !capturedIds.map(_.symbol).contains(v)), + typeEnv = env.typeEnv.filter((k, v) => !capturedTargs.map(_.symbol).contains(v))) + withEnv(captureEnv) { + scrutinee match + case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, capturedIds, args.map(_.tpe), capturedTargs.map(_.tpe), env) case _ => notMatched } @@ -324,7 +345,7 @@ class QuoteMatcher(debug: Boolean) { /* Match reference */ case _: Ident if symbolMatch(scrutinee, pattern) => matched /* Match type */ - case TypeTreeTypeTest(pattern) if scrutinee.tpe <:< pattern.tpe => matched + case TypeTreeTypeTest(pattern) if 
isSubTypeUnderEnv(scrutinee, pattern) => matched case _ => notMatched /* Match application */ @@ -346,8 +367,12 @@ class QuoteMatcher(debug: Boolean) { pattern match case Block(stat2 :: stats2, expr2) => val newEnv = (stat1, stat2) match { - case (stat1: MemberDef, stat2: MemberDef) => - summon[Env] + (stat1.symbol -> stat2.symbol) + case (stat1: ValOrDefDef, stat2: ValOrDefDef) => + val Env(termEnv, typeEnv) = summon[Env] + new Env(termEnv + (stat1.symbol -> stat2.symbol), typeEnv) + case (stat1: TypeDef, stat2: TypeDef) => + val Env(termEnv, typeEnv) = summon[Env] + new Env(termEnv, typeEnv + (stat1.symbol -> stat2.symbol)) case _ => summon[Env] } @@ -403,14 +428,16 @@ class QuoteMatcher(debug: Boolean) { // TODO remove this? case TypeTreeTypeTest(scrutinee) => pattern match - case TypeTreeTypeTest(pattern) if scrutinee.tpe <:< pattern.tpe => matched + case TypeTreeTypeTest(pattern) if isSubTypeUnderEnv(scrutinee, pattern) => matched case _ => notMatched /* Match val */ case scrutinee @ ValDef(_, tpt1, _) => pattern match case pattern @ ValDef(_, tpt2, _) if checkValFlags() => - def rhsEnv = summon[Env] + (scrutinee.symbol -> pattern.symbol) + def rhsEnv = + val Env(termEnv, typeEnv) = summon[Env] + new Env(termEnv + (scrutinee.symbol -> pattern.symbol), typeEnv) tpt1 =?= tpt2 &&& withEnv(rhsEnv)(scrutinee.rhs =?= pattern.rhs) case _ => notMatched @@ -427,11 +454,38 @@ class QuoteMatcher(debug: Boolean) { notMatched case _ => matched + /** + * Implementation restriction: The current implementation matches type parameters + * only when they have empty bounds (>: Nothing <: Any) + */ + def matchTypeDef(sctypedef: TypeDef, pttypedef: TypeDef): MatchingExprs = sctypedef match + case TypeDef(_, TypeBoundsTree(sclo, schi, EmptyTree)) + if sclo.tpe == defn.NothingType && schi.tpe == defn.AnyType => + pttypedef match + case TypeDef(_, TypeBoundsTree(ptlo, pthi, EmptyTree)) + if sclo.tpe == defn.NothingType && schi.tpe == defn.AnyType => + matched + case _ => notMatched + case _ => notMatched + def matchParamss(scparamss: List[ParamClause], ptparamss: List[ParamClause])(using Env): optional[(Env, MatchingExprs)] = (scparamss, ptparamss) match { - case (scparams :: screst, ptparams :: ptrest) => + case (ValDefs(scparams) :: screst, ValDefs(ptparams) :: ptrest) => val mr1 = matchLists(scparams, ptparams)(_ =?= _) - val newEnv = summon[Env] ++ scparams.map(_.symbol).zip(ptparams.map(_.symbol)) + val Env(termEnv, typeEnv) = summon[Env] + val newEnv = new Env( + termEnv = termEnv ++ scparams.map(_.symbol).zip(ptparams.map(_.symbol)), + typeEnv = typeEnv + ) + val (resEnv, mrrest) = withEnv(newEnv)(matchParamss(screst, ptrest)) + (resEnv, mr1 &&& mrrest) + case (TypeDefs(scparams) :: screst, TypeDefs(ptparams) :: ptrest) => + val mr1 = matchLists(scparams, ptparams)(matchTypeDef) + val Env(termEnv, typeEnv) = summon[Env] + val newEnv = new Env( + termEnv = termEnv, + typeEnv = typeEnv ++ scparams.map(_.symbol).zip(ptparams.map(_.symbol)), + ) val (resEnv, mrrest) = withEnv(newEnv)(matchParamss(screst, ptrest)) (resEnv, mr1 &&& mrrest) case (Nil, Nil) => (summon[Env], matched) @@ -439,8 +493,8 @@ class QuoteMatcher(debug: Boolean) { } val ematch = matchErasedParams(scrutinee.tpe.widenTermRefExpr, pattern.tpe.widenTermRefExpr) - val (pEnv, pmatch) = matchParamss(paramss1, paramss2) - val defEnv = pEnv + (scrutinee.symbol -> pattern.symbol) + val (Env(termEnv, typeEnv), pmatch) = matchParamss(paramss1, paramss2) + val defEnv = Env(termEnv + (scrutinee.symbol -> pattern.symbol), typeEnv) ematch &&& pmatch 
@@ -514,11 +568,19 @@ class QuoteMatcher(debug: Boolean) { else scrutinee case _ => scrutinee val pattern = patternTree.symbol + val Env(termEnv, typeEnv) = summon[Env] devirtualizedScrutinee == pattern - || summon[Env].get(devirtualizedScrutinee).contains(pattern) + || termEnv.get(devirtualizedScrutinee).contains(pattern) + || typeEnv.get(devirtualizedScrutinee).contains(pattern) || devirtualizedScrutinee.allOverriddenSymbols.contains(pattern) + private def isSubTypeUnderEnv(scrutinee: Tree, pattern: Tree)(using Env, Context): Boolean = + val env = summon[Env].typeEnv + val scType = if env.isEmpty then scrutinee.tpe + else scrutinee.subst(env.keys.toList, env.values.toList).tpe + scType <:< pattern.tpe + private object ClosedPatternTerm { /** Matches a term that does not contain free variables defined in the pattern (i.e. not defined in `Env`) */ def unapply(term: Tree)(using Env, Context): Option[term.type] = @@ -526,16 +588,24 @@ class QuoteMatcher(debug: Boolean) { /** Return all free variables of the term defined in the pattern (i.e. defined in `Env`) */ def freePatternVars(term: Tree)(using Env, Context): Set[Symbol] = - val accumulator = new TreeAccumulator[Set[Symbol]] { + val Env(termEnv, typeEnv) = summon[Env] + val typeAccumulator = new TypeAccumulator[Set[Symbol]] { + def apply(x: Set[Symbol], tp: Type): Set[Symbol] = tp match + case tp: TypeRef if typeEnv.contains(tp.typeSymbol) => foldOver(x + tp.typeSymbol, tp) + case tp: TermRef if termEnv.contains(tp.termSymbol) => foldOver(x + tp.termSymbol, tp) + case _ => foldOver(x, tp) + } + val treeAccumulator = new TreeAccumulator[Set[Symbol]] { def apply(x: Set[Symbol], tree: Tree)(using Context): Set[Symbol] = tree match - case tree: Ident if summon[Env].contains(tree.symbol) => foldOver(x + tree.symbol, tree) + case tree: Ident if termEnv.contains(tree.symbol) => foldOver(typeAccumulator(x, tree.tpe) + tree.symbol, tree) + case tree: TypeTree => typeAccumulator(x, tree.tpe) case _ => foldOver(x, tree) } - accumulator.apply(Set.empty, term) + treeAccumulator(Set.empty, term) } - enum MatchResult: + private enum MatchResult: /** Closed pattern extracted value * @param tree Scrutinee sub-tree that matched */ @@ -546,9 +616,10 @@ class QuoteMatcher(debug: Boolean) { * @param patternTpe Type of the pattern hole (from the pattern) * @param argIds Identifiers of HOAS arguments (from the pattern) * @param argTypes Eta-expanded types of HOAS arguments (from the pattern) + * @param typeArgs type arguments from the pattern * @param env Mapping between scrutinee and pattern variables */ - case OpenTree(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], env: Env) + case OpenTree(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], typeArgs: List[Type], env: Env) /** Return the expression that was extracted from a hole. 
* @@ -561,28 +632,61 @@ class QuoteMatcher(debug: Boolean) { def toExpr(mapTypeHoles: Type => Type, spliceScope: Scope)(using Context): Expr[Any] = this match case MatchResult.ClosedTree(tree) => new ExprImpl(tree, spliceScope) - case MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, env) => + case MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, typeArgs, Env(termEnv, typeEnv)) => val names: List[TermName] = argIds.map(_.symbol.name.asTermName) val paramTypes = argTypes.map(tpe => mapTypeHoles(tpe.widenTermRefExpr)) - val methTpe = MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) + val ptTypeVarSymbols = typeArgs.map(_.typeSymbol) + val isNotPoly = typeArgs.isEmpty + + val methTpe = if isNotPoly then + MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) + else + val typeArgs1 = PolyType.syntheticParamNames(typeArgs.length) + val bounds = typeArgs map (_ => TypeBounds.empty) + val resultTypeExp = (pt: PolyType) => { + val argTypes1 = paramTypes.map(_.subst(ptTypeVarSymbols, pt.paramRefs)) + val resultType1 = mapTypeHoles(patternTpe).subst(ptTypeVarSymbols, pt.paramRefs) + MethodType(argTypes1, resultType1) + } + PolyType(typeArgs1)(_ => bounds, resultTypeExp) + val meth = newAnonFun(ctx.owner, methTpe) + def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { - val argsMap = argIds.view.map(_.symbol).zip(lambdaArgss.head).toMap - val body = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = - tree match - /* - * When matching a method call `f(0)` against a HOAS pattern `p(g)` where - * f has a method type `(x: Int): Int` and `f` maps to `g`, `p` should hold - * `g.apply(0)` because the type of `g` is `Int => Int` due to eta expansion. - */ - case Apply(fun, args) if env.contains(tree.symbol) => transform(fun).select(nme.apply).appliedToArgs(args.map(transform)) - case tree: Ident => env.get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) - case tree => super.transform(tree) - }.transform(tree) + val (typeParams, params) = if isNotPoly then + (List.empty, lambdaArgss.head) + else + (lambdaArgss.head.map(_.tpe), lambdaArgss.tail.head) + + val typeArgsMap = ptTypeVarSymbols.zip(typeParams).toMap + val argsMap = argIds.view.map(_.symbol).zip(params).toMap + + val body = new TreeTypeMap( + typeMap = if isNotPoly then IdentityTypeMap + else new TypeMap() { + override def apply(tp: Type): Type = tp match { + case tr: TypeRef if tr.prefix.eq(NoPrefix) => + typeEnv.get(tr.symbol).flatMap(typeArgsMap.get).getOrElse(tr) + case tp => mapOver(tp) + } + }, + treeMap = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = + tree match + /* + * When matching a method call `f(0)` against a HOAS pattern `p(g)` where + * f has a method type `(x: Int): Int` and `f` maps to `g`, `p` should hold + * `g.apply(0)` because the type of `g` is `Int => Int` due to eta expansion. 
+ */ + case Apply(fun, args) if termEnv.contains(tree.symbol) => transform(fun).select(nme.apply).appliedToArgs(args.map(transform)) + case tree: Ident => termEnv.get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) + case tree => super.transform(tree) + }.transform + ).transform(tree) + + TreeOps(body).changeNonLocalOwners(meth) } - val hoasClosure = Closure(meth, bodyFn) + val hoasClosure = Closure(meth, bodyFn).withSpan(tree.span) new ExprImpl(hoasClosure, spliceScope) private inline def notMatched[T]: optional[T] = @@ -594,12 +698,17 @@ class QuoteMatcher(debug: Boolean) { private inline def matched(tree: Tree)(using Context): MatchingExprs = Seq(MatchResult.ClosedTree(tree)) - private def matchedOpen(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], env: Env)(using Context): MatchingExprs = - Seq(MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, env)) + private def matchedOpen(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], typeArgs: List[Type], env: Env)(using Context): MatchingExprs = + Seq(MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, typeArgs, env)) extension (self: MatchingExprs) /** Concatenates the contents of two successful matchings */ - def &&& (that: MatchingExprs): MatchingExprs = self ++ that + private def &&& (that: MatchingExprs): MatchingExprs = self ++ that end extension + + // TODO-18271: Duplicate with QuotePatterns.unrollHkNestedPairsTypeTree + private def unrollHkNestedPairsTypeTree(tree: Tree)(using Context): List[Tree] = tree match + case AppliedTypeTree(tupleN, bindings) if defn.isTupleClass(tupleN.symbol) => bindings // TupleN, 1 <= N <= 22 + case AppliedTypeTree(_, head :: tail :: Nil) => head :: unrollHkNestedPairsTypeTree(tail) // KCons or *: + case _ => Nil // KNil or EmptyTuple } diff --git a/docs/_docs/reference/experimental/quoted-patterns-with-polymorphic-functions.md b/docs/_docs/reference/experimental/quoted-patterns-with-polymorphic-functions.md new file mode 100644 index 000000000000..0c30a867b189 --- /dev/null +++ b/docs/_docs/reference/experimental/quoted-patterns-with-polymorphic-functions.md @@ -0,0 +1,54 @@ +--- +layout: doc-page +title: "Quoted Patterns with Polymorphic Functions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/quoted-patterns-with-polymorphic-functions.html +--- + +This feature extends the capability of quoted patterns with regard to polymorphic functions. It is not yet part of the Scala language standard. To use this feature, turn on the language feature [`experimental.quotedPatternsWithPolymorphicFunctions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$quotedPatternsWithPolymorphicFunctions$.html). This can be done with a language import +```scala +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions +``` +or by setting the command line option `-language:experimental.quotedPatternsWithPolymorphicFunctions`. + +## Background +Quoted patterns allow us to use quoted code as a pattern. Using quoted patterns, we can check if an expression is equivalent to another, or decompose it. In particular, higher-order patterns are useful for extracting code fragments inside function bodies. + +```scala +def decomposeFunc(x: Expr[Any])(using Quotes): Expr[Int] = + x match + case '{ (a: Int, b: Int) => $y(a, b) : Int } => + '{ $y(0, 0) } + case _ => Expr(0) +``` + +In the example above, the first case matches the case where `x` is a function and `y` is bound to the body of the function.
The higher-order pattern `$y(a, b)` states that it matches any code with free occurrences of the variables `a` and `b`. If it is `$y(a)` instead, an expression like `(a: Int, b: Int) => a + b` will not match because `a + b` has an occurrence of `b`, which is not included in the higher-order pattern. + +## Motivation +This experimental feature extends the higher-order pattern syntax to allow type variables. + +```scala +def decomposePoly(x: Expr[Any])(using Quotes): Expr[Int] = + x match + case '{ [A] => (x: List[A]) => $y[A](x) : Int } => + '{ $y[Int](List(1, 2, 3)) } + case _ => Expr(0) +``` + +Now we can use a higher-order pattern `$y[A](x)` with type variables. `y` is bound to the body of code with occurrences of `A` and `x`, and has the type `[A] => (x: List[A]) => Int`. + +## Type Dependency +If a higher-order pattern carries a value parameter whose type mentions type parameters defined in the quoted pattern, those type parameters must also be captured in the higher-order pattern. For example, the following pattern will not typecheck. + +``` +case '{ [A] => (x: List[A]) => $y(x) : Int } => +``` + +In this case, `x` has the type `List[A]`, which includes a type variable `A` that is defined in the pattern. However, the higher-order pattern `$y(x)` does not have any type parameters, so it is ill-typed. One can always avoid this kind of type error by adding type parameters, as in `$y[A](x)`. + +## Implementation Restriction +The current implementation only allows type parameters that do not have bounds, because sound typing rules for such patterns are not yet clear. + +```scala +case '{ [A] => (x: List[A]) => $y(x) : Int } => // Allowed +case '{ [A <: Int] => (x: List[A]) => $y(x) : Int } => // Disallowed +``` diff --git a/library/src/scala/quoted/runtime/Patterns.scala b/library/src/scala/quoted/runtime/Patterns.scala index 91ad23c62a98..f8e172d30f62 100644 --- a/library/src/scala/quoted/runtime/Patterns.scala +++ b/library/src/scala/quoted/runtime/Patterns.scala @@ -1,6 +1,7 @@ package scala.quoted.runtime import scala.annotation.{Annotation, compileTimeOnly} +import scala.annotation.experimental @compileTimeOnly("Illegal reference to `scala.quoted.runtime.Patterns`") object Patterns { @@ -26,6 +27,14 @@ object Patterns { @compileTimeOnly("Illegal reference to `scala.quoted.runtime.Patterns.higherOrderHole`") def higherOrderHole[U](args: Any*): U = ??? + /** A higher order splice in a quoted pattern is desugared by the compiler into a call to this method. + * + * Calling this method in source has undefined behavior at compile-time + */ + @experimental + @compileTimeOnly("Illegal reference to `scala.quoted.runtime.Patterns.higherOrderHoleWithTypes`") + def higherOrderHoleWithTypes[U, T](args: Any*): U = ??? + /** A splice of a name in a quoted pattern is that marks the definition of a type splice.
* * Adding this annotation in source has undefined behavior at compile-time diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index c9343286328d..7db326350fa1 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -125,6 +125,14 @@ object language: */ @compileTimeOnly("`betterMatchTypeExtractors` can only be used at compile time in import statements") object betterMatchTypeExtractors + + /** Experimental support for quote pattern matching with polymorphic functions + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/quoted-patterns-with-polymorphic-functions]] + */ + @compileTimeOnly("`quotedPatternsWithPolymorphicFunctions` can only be used at compile time in import statements") + object quotedPatternsWithPolymorphicFunctions + end experimental /** The deprecated object contains features that are no longer officially suypported in Scala. diff --git a/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.check b/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.check new file mode 100644 index 000000000000..860482f2e552 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.check @@ -0,0 +1,6 @@ +-- Error: tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala:11:48 ------------------------------ +11 | case '{ [A <: Int, B] => (x : A, y : A) => $b[A](x, y) : A } => ??? // error + | ^ + | Type must be fully defined. + | Consider annotating the splice using a type ascription: + | (${b}: XYZ). diff --git a/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala b/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala new file mode 100644 index 000000000000..6797ae926367 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala @@ -0,0 +1,12 @@ +/** + * Supporting hoas quote pattern with bounded type variable + * is future todo. + * Refer to: quoted-pattern-with-bounded-type-params.scala + */ + +import scala.quoted.* + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A <: Int, B] => (x : A, y : A) => $b[A](x, y) : A } => ??? // error + case _ => Expr("not matched") diff --git a/tests/neg-macros/quoted-pattern-with-bounded-type-params.check b/tests/neg-macros/quoted-pattern-with-bounded-type-params.check new file mode 100644 index 000000000000..0e787377bfc5 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-bounded-type-params.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-macros/quoted-pattern-with-bounded-type-params.scala:11:50 ----------------------------------------- +11 | case '{ [A <: Int, B] => (x : A, y : A) => $b[A](x, y) : A } => ??? // error + | ^ + | Implementation restriction: Type arguments to Open pattern are expected to have no bounds diff --git a/tests/neg-macros/quoted-pattern-with-bounded-type-params.scala b/tests/neg-macros/quoted-pattern-with-bounded-type-params.scala new file mode 100644 index 000000000000..567efa9ee35d --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-bounded-type-params.scala @@ -0,0 +1,12 @@ +/* + * Supporting hoas quote pattern with bounded type variable + * is future todo. 
+ */ + +import scala.quoted.* +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A <: Int, B] => (x : A, y : A) => $b[A](x, y) : A } => ??? // error + case _ => Expr("not matched") diff --git a/tests/neg-macros/quoted-pattern-with-type-params-regression.check b/tests/neg-macros/quoted-pattern-with-type-params-regression.check new file mode 100644 index 000000000000..543c119b3d33 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-type-params-regression.check @@ -0,0 +1,16 @@ +-- Error: tests/neg-macros/quoted-pattern-with-type-params-regression.scala:8:31 --------------------------------------- +8 | case '{ [A] => (x : A) => $b[A] : (A => A) } => ??? // error + | ^ + | Type must be fully defined. + | Consider annotating the splice using a type ascription: + | (${b}: XYZ). +-- Error: tests/neg-macros/quoted-pattern-with-type-params-regression.scala:9:33 --------------------------------------- +9 | case '{ [A] => (x : A) => $b(x) : (A => A) } => ??? // error + | ^ + | Type variables that this argument depends on are not captured in this hoas pattern +-- Error: tests/neg-macros/quoted-pattern-with-type-params-regression.scala:10:24 -------------------------------------- +10 | case '{ (a:Int) => $b[Int](a) : String } => ??? // error + | ^ + | Type must be fully defined. + | Consider annotating the splice using a type ascription: + | (${b}: XYZ). diff --git a/tests/neg-macros/quoted-pattern-with-type-params-regression.scala b/tests/neg-macros/quoted-pattern-with-type-params-regression.scala new file mode 100644 index 000000000000..aa2489bc440b --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-type-params-regression.scala @@ -0,0 +1,11 @@ +/** + * Refer to: quoted-pattern-with-type-params.scala + */ +import scala.quoted.* + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : A) => $b[A] : (A => A) } => ??? // error + case '{ [A] => (x : A) => $b(x) : (A => A) } => ??? // error + case '{ (a:Int) => $b[Int](a) : String } => ??? // error + case _ => Expr("not matched") diff --git a/tests/neg-macros/quoted-pattern-with-type-params.check b/tests/neg-macros/quoted-pattern-with-type-params.check new file mode 100644 index 000000000000..37e8f611d5a9 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-type-params.check @@ -0,0 +1,12 @@ +-- Error: tests/neg-macros/quoted-pattern-with-type-params.scala:6:32 -------------------------------------------------- +6 | case '{ [A] => (x : A) => $b[A] : (A => A) } => ??? // error + | ^^^^^ + | Implementation restriction: A higher-order pattern must carry value arguments +-- Error: tests/neg-macros/quoted-pattern-with-type-params.scala:7:33 -------------------------------------------------- +7 | case '{ [A] => (x : A) => $b(x) : (A => A) } => ??? // error + | ^ + | Type variables that this argument depends on are not captured in this hoas pattern +-- Error: tests/neg-macros/quoted-pattern-with-type-params.scala:8:26 -------------------------------------------------- +8 | case '{ (a:Int) => $b[Int](a) : String } => ??? 
// error + | ^^^ + | Type arguments of a hoas pattern needs to be defined inside the quoted pattern diff --git a/tests/neg-macros/quoted-pattern-with-type-params.scala b/tests/neg-macros/quoted-pattern-with-type-params.scala new file mode 100644 index 000000000000..2e4a059ee23a --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-type-params.scala @@ -0,0 +1,9 @@ +import scala.quoted.* +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : A) => $b[A] : (A => A) } => ??? // error + case '{ [A] => (x : A) => $b(x) : (A => A) } => ??? // error + case '{ (a:Int) => $b[Int](a) : String } => ??? // error + case _ => Expr("not matched") diff --git a/tests/pos-macros/quoted-patten-with-type-params.scala b/tests/pos-macros/quoted-patten-with-type-params.scala new file mode 100644 index 000000000000..030e3415476e --- /dev/null +++ b/tests/pos-macros/quoted-patten-with-type-params.scala @@ -0,0 +1,14 @@ +import scala.quoted.* +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : A, y : A) => (x, y) } => ??? + // Bounded type parameters are allowed when they are not used in + // higher-order patterns + case '{ [A <: Iterable[Int]] => (x : A) => x } => ??? + case '{ [A] => (x : A, y : A) => $b[A](x, y) : A } => + '{ $b[String]("truthy", "falsy") } + case '{ [A, B] => (x : A, f : A => B) => $b[A, B](x, f) : B} => + '{ $b[Int, String](10, (x:Int)=>x.toHexString) } + case _ => Expr("not matched") diff --git a/tests/run-macros/quote-match-poly-function-1-regression.check b/tests/run-macros/quote-match-poly-function-1-regression.check new file mode 100644 index 000000000000..d871d3004550 --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-1-regression.check @@ -0,0 +1,3 @@ +Case 1 matched +not matched +not matched diff --git a/tests/run-macros/quote-match-poly-function-1-regression/Macro_1.scala b/tests/run-macros/quote-match-poly-function-1-regression/Macro_1.scala new file mode 100644 index 000000000000..a148fdee3d27 --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-1-regression/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +inline def testExpr(inline body: Any) = ${ testExprImpl1('body) } +def testExprImpl1(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : A, y : A) => (x, y) } => Expr("Case 1 matched") + case '{ [A <: Iterable[Int]] => (x : A) => x } => Expr("Case 2 matched") + case _ => Expr("not matched") diff --git a/tests/run-macros/quote-match-poly-function-1-regression/Test_2.scala b/tests/run-macros/quote-match-poly-function-1-regression/Test_2.scala new file mode 100644 index 000000000000..10c014514a30 --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-1-regression/Test_2.scala @@ -0,0 +1,5 @@ +//> using options -experimental +@main def Test: Unit = + println(testExpr([B] => (x : B, y : B) => (x, y))) + println(testExpr([B <: Iterable[Int]] => (x : B) => x)) + println(testExpr([B <: List[Int]] => (x : B) => x)) diff --git a/tests/run-macros/quote-match-poly-function-1.check b/tests/run-macros/quote-match-poly-function-1.check new file mode 100644 index 000000000000..d871d3004550 --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-1.check @@ -0,0 +1,3 @@ +Case 1 matched +not matched +not matched diff --git a/tests/run-macros/quote-match-poly-function-1/Macro_1.scala 
b/tests/run-macros/quote-match-poly-function-1/Macro_1.scala new file mode 100644 index 000000000000..07fd18ccabb7 --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-1/Macro_1.scala @@ -0,0 +1,9 @@ +import scala.quoted.* +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions + +inline def testExpr(inline body: Any) = ${ testExprImpl1('body) } +def testExprImpl1(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : A, y : A) => (x, y) } => Expr("Case 1 matched") + case '{ [A <: Iterable[Int]] => (x : A) => x } => Expr("Case 2 matched") + case _ => Expr("not matched") diff --git a/tests/run-macros/quote-match-poly-function-1/Test_2.scala b/tests/run-macros/quote-match-poly-function-1/Test_2.scala new file mode 100644 index 000000000000..10c014514a30 --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-1/Test_2.scala @@ -0,0 +1,5 @@ +//> using options -experimental +@main def Test: Unit = + println(testExpr([B] => (x : B, y : B) => (x, y))) + println(testExpr([B <: Iterable[Int]] => (x : B) => x)) + println(testExpr([B <: List[Int]] => (x : B) => x)) diff --git a/tests/run-macros/quote-match-poly-function-2.check b/tests/run-macros/quote-match-poly-function-2.check new file mode 100644 index 000000000000..a9ad3170d8fb --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-2.check @@ -0,0 +1,7 @@ +case 2 matched => 5 +case 3 matched => truthy +case 4 matched => truthy +case 5 matchd => 1 +case 7 matchd => 1 +case 8 matched => (1,str) +case 9 matched => zero diff --git a/tests/run-macros/quote-match-poly-function-2/Macro_1.scala b/tests/run-macros/quote-match-poly-function-2/Macro_1.scala new file mode 100644 index 000000000000..8b5d5a85942a --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-2/Macro_1.scala @@ -0,0 +1,23 @@ +import scala.quoted.* +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions + +inline def testExpr(inline body: Any) = ${ testExprImpl1('body) } +def testExprImpl1(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : Int, y : Int) => $b(x, y) : Int } => + '{ "case 2 matched => " + $b(2, 3) } + case '{ [A] => (x : A, y : A) => $b[A](x, y) : A } => + '{ "case 3 matched => " + $b[String]("truthy", "falsy") } + case '{ [A] => (x : A, y : A) => $b[A](x, y) : (A, A) } => + '{ "case 4 matched => " + $b[String]("truthy", "falsy")._2 } + case '{ [A, B] => (x : A, y : A => B) => $a[A, B](x, y) : B } => + '{ "case 5 matchd => " + $a[Int, Int](0, x => x + 1) } + case '{ [A] => (x : List[A], y : A) => $a[A](x) : Int } => + '{ "case 6 matchd => " + $a[Int](List(1, 2, 3)) } + case '{ [A] => (x : List[A], y : A) => $a[A](x, y) : Int } => + '{ "case 7 matchd => " + $a[Int](List(1, 2, 3), 2) } + case '{ [A] => (x : A) => [B] => (y : B) => $a[A, B](x, y) : (A, B) } => + '{ "case 8 matched => " + $a[Int, String](1, "str")} + case '{ [A, B] => (x : Map[A, B], y: A) => $a[A, B](x, y) : Option[B] } => + '{ "case 9 matched => " + $a[Int, String](Map(0 -> "zero", 1 -> "one"), 0).getOrElse("failed") } + case _ => Expr("not matched") diff --git a/tests/run-macros/quote-match-poly-function-2/Test_2.scala b/tests/run-macros/quote-match-poly-function-2/Test_2.scala new file mode 100644 index 000000000000..c33c3caa7f8d --- /dev/null +++ b/tests/run-macros/quote-match-poly-function-2/Test_2.scala @@ -0,0 +1,9 @@ +//> using options -experimental +@main def Test: Unit = + println(testExpr([B] => (x : Int, y : Int) => x + y)) // Should match case 2 + println(testExpr([B] 
=> (x : B, y : B) => x)) // Should match case 3 + println(testExpr([B] => (x : B, y : B) => (y, x))) // Should match case 4 + println(testExpr([C, D] => (x : C, f : C => D) => f(x))) // Should match case 4 + println(testExpr([B] => (x : List[B], y : B) => x.indexOf(y))) // Should match case 7 + println(testExpr([B] => (x : B) => [C] => (y : C) => (x, y))) // Should match case 8 + println(testExpr([C, D] => (x : Map[C, D], y: C) => x.get(y))) diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 7079c7320ba0..22da2ace1e52 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -85,6 +85,10 @@ val experimentalDefinitionInLibrary = Set( "scala.annotation.internal.WitnessNames", "scala.compiletime.package$package$.deferred", "scala.runtime.stdLibPatches.Predef$.is", + + // New feature: functions with erased parameters. + // Need quotedPatternsWithPolymorphicFunctions enabled. + "scala.quoted.runtime.Patterns$.higherOrderHoleWithTypes" ) From b7846c497b4dae18b7a2484fe5ea1acf55a16487 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 22 Jul 2024 11:22:49 +0200 Subject: [PATCH 366/827] =?UTF-8?q?Fix=20#20897:=20Make=20`Nothing=20?= =?UTF-8?q?=E2=8B=94=20Nothing`,=20as=20per=20spec.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `derivesFrom`, used in `provablyDisjointClasses`, normally returns `false` when the receiver is `Nothing`. However, it returns `true` if the right-hand-side happens to be exactly `Nothing` as well. For the purpose of computing `provablyDisjoint`, that is not what we want. The root issue was that we let the previous algorithm handle `Nothing` like a class type, which it *is* in dotc but not in the spec. That led to this mistake. `AnyKind` suffers a similar issue, but already had special-cases in various places to mitigate it. Instead of adding a new special-case for `Nothing` inside `provablyDisjointClasses`, we address the root issue. Now we deal with `Nothing` and `AnyKind` early, before trying any of the code paths that handle (real) class types. --- .../dotty/tools/dotc/core/TypeComparer.scala | 24 +++++++++++++------ .../test/dotc/pos-test-pickling.blacklist | 2 +- tests/pos/i20897.scala | 10 ++++++++ 3 files changed, 28 insertions(+), 8 deletions(-) create mode 100644 tests/pos/i20897.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index c8e00686e62b..0f74ca40843b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3064,6 +3064,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case pair if pending != null && pending.contains(pair) => false + /* Nothing is not a class type in the spec but dotc represents it as if it were one. + * Get it out of the way early to avoid mistakes (see for example #20897). + * Nothing ⋔ T and T ⋔ Nothing for all T. 
+ */ + case (tp1, tp2) if tp1.isExactlyNothing || tp2.isExactlyNothing => + true + // Cases where there is an intersection or union on the right case (tp1, tp2: OrType) => provablyDisjoint(tp1, tp2.tp1, pending) && provablyDisjoint(tp1, tp2.tp2, pending) @@ -3076,14 +3083,21 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case (tp1: AndType, tp2) => provablyDisjoint(tp1.tp1, tp2, pending) || provablyDisjoint(tp1.tp2, tp2, pending) + /* Handle AnyKind now for the same reason as Nothing above: it is not a real class type. + * Other than the rules with Nothing, unions and intersections, there is structurally + * no rule such that AnyKind ⋔ T or T ⋔ AnyKind for any T. + */ + case (tp1, tp2) if tp1.isDirectRef(AnyKindClass) || tp2.isDirectRef(AnyKindClass) => + false + // Cases involving type lambdas case (tp1: HKTypeLambda, tp2: HKTypeLambda) => tp1.paramNames.sizeCompare(tp2.paramNames) != 0 || provablyDisjoint(tp1.resultType, tp2.resultType, pending) case (tp1: HKTypeLambda, tp2) => - !tp2.isDirectRef(defn.AnyKindClass) + true case (tp1, tp2: HKTypeLambda) => - !tp1.isDirectRef(defn.AnyKindClass) + true /* Cases where both are unique values (enum cases or constant types) * @@ -3187,17 +3201,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else child }.filter(child => child.exists && child != cls) - // TODO? Special-case for Nothing and Null? We probably need Nothing/Null disjoint from Nothing/Null def eitherDerivesFromOther(cls1: Symbol, cls2: Symbol): Boolean = cls1.derivesFrom(cls2) || cls2.derivesFrom(cls1) def smallestNonTraitBase(cls: Symbol): Symbol = cls.asClass.baseClasses.find(!_.is(Trait)).get - if cls1 == defn.AnyKindClass || cls2 == defn.AnyKindClass then - // For some reason, A.derivesFrom(AnyKind) returns false, so we have to handle it specially - false - else if (eitherDerivesFromOther(cls1, cls2)) + if (eitherDerivesFromOther(cls1, cls2)) false else if (cls1.is(Final) || cls2.is(Final)) diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index d6f962176ecc..b68ac7fc3b6e 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -67,6 +67,7 @@ mt-redux-norm.perspective.scala i18211.scala 10867.scala named-tuples1.scala +i20897.scala # Opaque type i5720.scala @@ -134,4 +135,3 @@ parsercombinators-new-syntax.scala hylolib-deferred-given hylolib-cb hylolib - diff --git a/tests/pos/i20897.scala b/tests/pos/i20897.scala new file mode 100644 index 000000000000..ecfac5b1615e --- /dev/null +++ b/tests/pos/i20897.scala @@ -0,0 +1,10 @@ +object Test: + type Disj[A, B] = + A match + case B => true + case _ => false + + def f(a: Disj[1 | Nothing, 2 | Nothing]): Unit = () + + val t = f(false) +end Test From bd0aa525d9773ed2f54d60c772bee8ad2528fd6e Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 15 Jan 2023 13:18:20 +0100 Subject: [PATCH 367/827] Streamline translation of for expressions - [] Avoid redundant map call if the yielded value is the same as the last result. This makes for expressions more efficient and provides more opportunities for tail recursion. 
--- .../src/dotty/tools/dotc/ast/Desugar.scala | 59 ++++++++++++------- compiler/src/dotty/tools/dotc/ast/untpd.scala | 3 +- tests/run/fors.check | 3 + tests/run/fors.scala | 14 +++++ 4 files changed, 56 insertions(+), 23 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index c360712999e2..df5b7c1501d8 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1807,38 +1807,44 @@ object desugar { * * 1. * - * for (P <- G) E ==> G.foreach (P => E) + * for (P <- G) E ==> G.foreach (P => E) * - * Here and in the following (P => E) is interpreted as the function (P => E) - * if P is a variable pattern and as the partial function { case P => E } otherwise. + * Here and in the following (P => E) is interpreted as the function (P => E) + * if P is a variable pattern and as the partial function { case P => E } otherwise. * * 2. * - * for (P <- G) yield E ==> G.map (P => E) + * for (P <- G) yield P ==> G + * + * if P is a variable or a tuple of variables and G is not a withFilter. + * + * for (P <- G) yield E ==> G.map (P => E) + * + * otherwise * * 3. * - * for (P_1 <- G_1; P_2 <- G_2; ...) ... - * ==> - * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) + * for (P_1 <- G_1; P_2 <- G_2; ...) ... + * ==> + * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) * * 4. * - * for (P <- G; E; ...) ... - * => - * for (P <- G.filter (P => E); ...) ... + * for (P <- G; E; ...) ... + * => + * for (P <- G.filter (P => E); ...) ... * * 5. For any N: * - * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) - * ==> - * for (TupleN(P_1, P_2, ... P_N) <- - * for (x_1 @ P_1 <- G) yield { - * val x_2 @ P_2 = E_2 - * ... - * val x_N & P_N = E_N - * TupleN(x_1, ..., x_N) - * } ...) + * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * ==> + * for (TupleN(P_1, P_2, ... P_N) <- + * for (x_1 @ P_1 <- G) yield { + * val x_2 @ P_2 = E_2 + * ... + * val x_N & P_N = E_N + * TupleN(x_1, ..., x_N) + * } ...) * * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated * and the variable constituting P_i is used instead of x_i @@ -1951,7 +1957,7 @@ object desugar { case GenCheckMode.FilterAlways => false // pattern was prefixed by `case` case GenCheckMode.FilterNow | GenCheckMode.CheckAndFilter => isVarBinding(gen.pat) || isIrrefutable(gen.pat, gen.expr) case GenCheckMode.Check => true - case GenCheckMode.Ignore => true + case GenCheckMode.Ignore | GenCheckMode.Filtered => true /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when * matched against `rhs`. 
@@ -1961,9 +1967,18 @@ object desugar { Select(rhs, name) } + def deepEquals(t1: Tree, t2: Tree): Boolean = + (unsplice(t1), unsplice(t2)) match + case (Ident(n1), Ident(n2)) => n1 == n2 + case (Tuple(ts1), Tuple(ts2)) => ts1.corresponds(ts2)(deepEquals) + case _ => false + enums match { case (gen: GenFrom) :: Nil => - Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) + if gen.checkMode != GenCheckMode.Filtered // results of withFilter have the wrong type + && deepEquals(gen.pat, body) + then gen.expr // avoid a redundant map with identity + else Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => val cont = makeFor(mapName, flatMapName, rest, body) Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) @@ -1985,7 +2000,7 @@ object desugar { makeFor(mapName, flatMapName, vfrom1 :: rest1, body) case (gen: GenFrom) :: test :: rest => val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) - val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) + val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Filtered) makeFor(mapName, flatMapName, genFrom :: rest, body) case _ => EmptyTree //may happen for erroneous input diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 81228b1588d0..a3aee4dc17d2 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -183,7 +183,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** An enum to control checking or filtering of patterns in GenFrom trees */ enum GenCheckMode { - case Ignore // neither filter nor check since filtering was done before + case Ignore // neither filter since pattern is trivially irrefutable + case Filtered // neither filter nor check since filtering was done before case Check // check that pattern is irrefutable case CheckAndFilter // both check and filter (transitional period starting with 3.2) case FilterNow // filter out non-matching elements if we are not in 3.2 or later diff --git a/tests/run/fors.check b/tests/run/fors.check index 50f6385e5845..7b7e8d076108 100644 --- a/tests/run/fors.check +++ b/tests/run/fors.check @@ -45,6 +45,9 @@ hello world hello/1~2 hello/3~4 /1~2 /3~4 world/1~2 world/3~4 (2,1) (4,3) +testTailrec +List((4,Symbol(a)), (5,Symbol(b)), (6,Symbol(c))) + testGivens 123 456 diff --git a/tests/run/fors.scala b/tests/run/fors.scala index 682978b5b3d8..bd7de7d32263 100644 --- a/tests/run/fors.scala +++ b/tests/run/fors.scala @@ -4,6 +4,8 @@ //############################################################################ +import annotation.tailrec + object Test extends App { val xs = List(1, 2, 3) val ys = List(Symbol("a"), Symbol("b"), Symbol("c")) @@ -108,6 +110,17 @@ object Test extends App { for case (x, y) <- xs do print(s"${(y, x)} "); println() } + /////////////////// elimination of map /////////////////// + + @tailrec + def pair[B](xs: List[Int], ys: List[B], n: Int): List[(Int, B)] = + if n == 0 then xs.zip(ys) + else for (x, y) <- pair(xs.map(_ + 1), ys, n - 1) yield (x, y) + + def testTailrec() = + println("\ntestTailrec") + println(pair(xs, ys, 3)) + def testGivens(): Unit = { println("\ntestGivens") @@ -141,5 +154,6 @@ object Test extends App { testOld() testNew() testFiltering() + testTailrec() testGivens() } From 9c3e454f045a5bb886e344e3d4ba2910af5334f3 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Mon, 22 Jul 2024 20:08:49 +0200 Subject: [PATCH 368/827] Add 
improvements to for comprehensions - Allow `for`-comprehensions to start with aliases desugaring them into valdefs in a new block - Desugar aliases into simple valdefs, instead of patterns when they are not followed by a guard - Add an experimental language flag that enables the new desugaring method --- .../src/dotty/tools/dotc/ast/Desugar.scala | 161 +++++++++++++----- .../src/dotty/tools/dotc/config/Feature.scala | 3 + .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../dotty/tools/dotc/parsing/Parsers.scala | 18 +- .../runtime/stdLibPatches/language.scala | 6 + tests/run/better-fors.check | 12 ++ tests/run/better-fors.scala | 105 ++++++++++++ tests/run/fors.scala | 2 + 8 files changed, 263 insertions(+), 45 deletions(-) create mode 100644 tests/run/better-fors.check create mode 100644 tests/run/better-fors.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index df5b7c1501d8..4231505dce62 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -11,6 +11,7 @@ import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, D import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, SrcPos, Chars} import config.{Feature, Config} +import config.Feature.{sourceVersion, migrateTo3, enabled, betterForsEnabled} import config.SourceVersion.* import collection.mutable import reporting.* @@ -1807,7 +1808,7 @@ object desugar { * * 1. * - * for (P <- G) E ==> G.foreach (P => E) + * for (P <- G) do E ==> G.foreach (P => E) * * Here and in the following (P => E) is interpreted as the function (P => E) * if P is a variable pattern and as the partial function { case P => E } otherwise. @@ -1816,11 +1817,11 @@ object desugar { * * for (P <- G) yield P ==> G * - * if P is a variable or a tuple of variables and G is not a withFilter. + * If P is a variable or a tuple of variables and G is not a withFilter. * * for (P <- G) yield E ==> G.map (P => E) * - * otherwise + * Otherwise * * 3. * @@ -1830,25 +1831,48 @@ object desugar { * * 4. * - * for (P <- G; E; ...) ... - * => - * for (P <- G.filter (P => E); ...) ... + * for (P <- G; if E; ...) ... + * ==> + * for (P <- G.withFilter (P => E); ...) ... * * 5. For any N: * - * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * for (P <- G; P_1 = E_1; ... P_N = E_N; rest) * ==> - * for (TupleN(P_1, P_2, ... P_N) <- - * for (x_1 @ P_1 <- G) yield { - * val x_2 @ P_2 = E_2 + * G.flatMap (P => for (P_1 = E_1; ... P_N = E_N; ...)) if rest contains (<-) + * G.map (P => for (P_1 = E_1; ... P_N = E_N; ...)) otherwise + * + * 6. For any N: + * + * for (P <- G; P_1 = E_1; ... P_N = E_N; if E; ...) + * ==> + * for (TupleN(P, P_1, ... P_N) <- + * for (x @ P <- G) yield { + * val x_1 @ P_1 = E_2 * ... - * val x_N & P_N = E_N - * TupleN(x_1, ..., x_N) - * } ...) + * val x_N @ P_N = E_N + * TupleN(x, x_1, ..., x_N) + * }; if E; ...) * * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated * and the variable constituting P_i is used instead of x_i * + * 7. For any N: + * + * for (P_1 = E_1; ... P_N = E_N; ...) + * ==> + * { + * val x_N @ P_N = E_N + * for (...) + * } + * + * 8. + * for () yield E ==> E + * + * (Where empty for-comprehensions are excluded by the parser) + * + * If the aliases are not followed by a guard, otherwise an error. 
+ * * @param mapName The name to be used for maps (either map or foreach) * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) * @param enums The enumerators in the for expression @@ -1973,37 +1997,86 @@ object desugar { case (Tuple(ts1), Tuple(ts2)) => ts1.corresponds(ts2)(deepEquals) case _ => false - enums match { - case (gen: GenFrom) :: Nil => - if gen.checkMode != GenCheckMode.Filtered // results of withFilter have the wrong type - && deepEquals(gen.pat, body) - then gen.expr // avoid a redundant map with identity - else Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) - case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => - val cont = makeFor(mapName, flatMapName, rest, body) - Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) - case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => - val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) - val pats = valeqs map { case GenAlias(pat, _) => pat } - val rhss = valeqs map { case GenAlias(_, rhs) => rhs } - val (defpat0, id0) = makeIdPat(gen.pat) - val (defpats, ids) = (pats map makeIdPat).unzip - val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => - val mods = defpat match - case defTree: DefTree => defTree.mods - case _ => Modifiers() - makePatDef(valeq, mods, defpat, rhs) - } - val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) - val allpats = gen.pat :: pats - val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) - makeFor(mapName, flatMapName, vfrom1 :: rest1, body) - case (gen: GenFrom) :: test :: rest => - val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) - val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Filtered) - makeFor(mapName, flatMapName, genFrom :: rest, body) - case _ => - EmptyTree //may happen for erroneous input + if betterForsEnabled then + enums match { + case Nil => body + case (gen: GenFrom) :: Nil => + if gen.checkMode != GenCheckMode.Filtered // results of withFilter have the wrong type + && deepEquals(gen.pat, body) + then gen.expr // avoid a redundant map with identity + else Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) + case (gen: GenFrom) :: rest + if rest.dropWhile(_.isInstanceOf[GenAlias]).headOption.forall(e => e.isInstanceOf[GenFrom]) => + val cont = makeFor(mapName, flatMapName, rest, body) + val selectName = + if rest.exists(_.isInstanceOf[GenFrom]) then flatMapName + else mapName + Apply(rhsSelect(gen, selectName), makeLambda(gen, cont)) + case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => + val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) + val pats = valeqs map { case GenAlias(pat, _) => pat } + val rhss = valeqs map { case GenAlias(_, rhs) => rhs } + val (defpat0, id0) = makeIdPat(gen.pat) + val (defpats, ids) = (pats map makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) + val allpats = gen.pat :: pats + val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, vfrom1 :: rest1, body) + case (gen: GenFrom) :: test :: rest => + val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) + val genFrom = 
GenFrom(gen.pat, filtered, GenCheckMode.Filtered) + makeFor(mapName, flatMapName, genFrom :: rest, body) + case GenAlias(_, _) :: _ => + val (valeqs, rest) = enums.span(_.isInstanceOf[GenAlias]) + val pats = valeqs.map { case GenAlias(pat, _) => pat } + val rhss = valeqs.map { case GenAlias(_, rhs) => rhs } + val (defpats, ids) = pats.map(makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + Block(pdefs, makeFor(mapName, flatMapName, rest, body)) + case _ => + EmptyTree //may happen for erroneous input + } + else { + enums match { + case (gen: GenFrom) :: Nil => + Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) + case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => + val cont = makeFor(mapName, flatMapName, rest, body) + Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) + case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => + val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) + val pats = valeqs map { case GenAlias(pat, _) => pat } + val rhss = valeqs map { case GenAlias(_, rhs) => rhs } + val (defpat0, id0) = makeIdPat(gen.pat) + val (defpats, ids) = (pats map makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) + val allpats = gen.pat :: pats + val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, vfrom1 :: rest1, body) + case (gen: GenFrom) :: test :: rest => + val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) + val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, genFrom :: rest, body) + case _ => + EmptyTree //may happen for erroneous input + } } } diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 8c1021e91e38..cad9b4e76ca9 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -38,6 +38,7 @@ object Feature: val modularity = experimental("modularity") val betterMatchTypeExtractors = experimental("betterMatchTypeExtractors") val quotedPatternsWithPolymorphicFunctions = experimental("quotedPatternsWithPolymorphicFunctions") + val betterFors = experimental("betterFors") def experimentalAutoEnableFeatures(using Context): List[TermName] = defn.languageExperimentalFeatures @@ -125,6 +126,8 @@ object Feature: def clauseInterleavingEnabled(using Context) = sourceVersion.isAtLeast(`3.6`) || enabled(clauseInterleaving) + def betterForsEnabled(using Context) = enabled(betterFors) + def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index d3e198a7e7a7..bbe405b46bf1 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -435,6 +435,7 @@ object StdNames { val asInstanceOfPM: N = "$asInstanceOf$" val assert_ : N = "assert" val assume_ : N = "assume" + val 
betterFors: N = "betterFors" val box: N = "box" val break: N = "break" val build : N = "build" diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 37587868da58..f4a6b5b76aa0 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -2891,7 +2891,11 @@ object Parsers { /** Enumerators ::= Generator {semi Enumerator | Guard} */ - def enumerators(): List[Tree] = generator() :: enumeratorsRest() + def enumerators(): List[Tree] = + if in.featureEnabled(Feature.betterFors) then + aliasesUntilGenerator() ++ enumeratorsRest() + else + generator() :: enumeratorsRest() def enumeratorsRest(): List[Tree] = if (isStatSep) { @@ -2933,6 +2937,18 @@ object Parsers { GenFrom(pat, subExpr(), checkMode) } + def aliasesUntilGenerator(): List[Tree] = + if in.token == CASE then generator() :: Nil + else { + val pat = pattern1() + if in.token == EQUALS then + atSpan(startOffset(pat), in.skipToken()) { GenAlias(pat, subExpr()) } :: { + if (isStatSep) in.nextToken() + aliasesUntilGenerator() + } + else generatorRest(pat, casePat = false) :: Nil + } + /** ForExpr ::= ‘for’ ‘(’ Enumerators ‘)’ {nl} [‘do‘ | ‘yield’] Expr * | ‘for’ ‘{’ Enumerators ‘}’ {nl} [‘do‘ | ‘yield’] Expr * | ‘for’ Enumerators (‘do‘ | ‘yield’) Expr diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 7db326350fa1..3e8c2ab15cd2 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -133,6 +133,12 @@ object language: @compileTimeOnly("`quotedPatternsWithPolymorphicFunctions` can only be used at compile time in import statements") object quotedPatternsWithPolymorphicFunctions + /** Experimental support for improvements in `for` comprehensions + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/better-fors]] + */ + @compileTimeOnly("`betterFors` can only be used at compile time in import statements") + object betterFors end experimental /** The deprecated object contains features that are no longer officially suypported in Scala. 
diff --git a/tests/run/better-fors.check b/tests/run/better-fors.check new file mode 100644 index 000000000000..8b75db2f56ad --- /dev/null +++ b/tests/run/better-fors.check @@ -0,0 +1,12 @@ +List((1,3), (1,4), (2,3), (2,4)) +List((1,2,3), (1,2,4)) +List((1,3), (1,4), (2,3), (2,4)) +List((2,3), (2,4)) +List((2,3), (2,4)) +List((1,2), (2,4)) +List(1, 2, 3) +List((2,3,6)) +List(6) +List(3, 6) +List(6) +List(2) diff --git a/tests/run/better-fors.scala b/tests/run/better-fors.scala new file mode 100644 index 000000000000..8c0bff230632 --- /dev/null +++ b/tests/run/better-fors.scala @@ -0,0 +1,105 @@ +import scala.language.experimental.betterFors + +def for1 = + for { + a = 1 + b <- List(a, 2) + c <- List(3, 4) + } yield (b, c) + +def for2 = + for + a = 1 + b = 2 + c <- List(3, 4) + yield (a, b, c) + +def for3 = + for { + a = 1 + b <- List(a, 2) + c = 3 + d <- List(c, 4) + } yield (b, d) + +def for4 = + for { + a = 1 + b <- List(a, 2) + if b > 1 + c <- List(3, 4) + } yield (b, c) + +def for5 = + for { + a = 1 + b <- List(a, 2) + c = 3 + if b > 1 + d <- List(c, 4) + } yield (b, d) + +def for6 = + for { + a = 1 + b = 2 + c <- for { + x <- List(a, b) + y = x * 2 + } yield (x, y) + } yield c + +def for7 = + for { + a <- List(1, 2, 3) + } yield a + +def for8 = + for { + a <- List(1, 2) + b = a + 1 + if b > 2 + c = b * 2 + if c < 8 + } yield (a, b, c) + +def for9 = + for { + a <- List(1, 2) + b = a * 2 + if b > 2 + } yield a + b + +def for10 = + for { + a <- List(1, 2) + b = a * 2 + } yield a + b + +def for11 = + for { + a <- List(1, 2) + b = a * 2 + if b > 2 && b % 2 == 0 + } yield a + b + +def for12 = + for { + a <- List(1, 2) + if a > 1 + } yield a + +object Test extends App { + println(for1) + println(for2) + println(for3) + println(for4) + println(for5) + println(for6) + println(for7) + println(for8) + println(for9) + println(for10) + println(for11) + println(for12) +} diff --git a/tests/run/fors.scala b/tests/run/fors.scala index bd7de7d32263..af04beb311b1 100644 --- a/tests/run/fors.scala +++ b/tests/run/fors.scala @@ -112,6 +112,8 @@ object Test extends App { /////////////////// elimination of map /////////////////// + import scala.language.experimental.betterFors + @tailrec def pair[B](xs: List[Int], ys: List[B], n: Int): List[(Int, B)] = if n == 0 then xs.zip(ys) From 6ef7d8e856cc0acceacab27138613a362b2dc5d6 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 4 Jun 2024 15:49:09 +0200 Subject: [PATCH 369/827] Cleanup for experimental SIP-62 implementation --- .../src/dotty/tools/dotc/ast/Desugar.scala | 42 ++++++++++++++++++- .../src/dotty/tools/dotc/config/Feature.scala | 3 +- .../runtime/stdLibPatches/language.scala | 2 +- project/MiMaFilters.scala | 3 +- tests/run/fors.scala | 1 + 5 files changed, 47 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 4231505dce62..30868fac4475 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1804,7 +1804,7 @@ object desugar { /** Create tree for for-comprehension `` or * `` where mapName and flatMapName are chosen * corresponding to whether this is a for-do or a for-yield. - * The creation performs the following rewrite rules: + * If betterFors are enabled, the creation performs the following rewrite rules: * * 1. * @@ -1872,6 +1872,46 @@ object desugar { * (Where empty for-comprehensions are excluded by the parser) * * If the aliases are not followed by a guard, otherwise an error. 
+ * + * With betterFors disabled, the translation is as follows: + * + * 1. + * + * for (P <- G) E ==> G.foreach (P => E) + * + * Here and in the following (P => E) is interpreted as the function (P => E) + * if P is a variable pattern and as the partial function { case P => E } otherwise. + * + * 2. + * + * for (P <- G) yield E ==> G.map (P => E) + * + * 3. + * + * for (P_1 <- G_1; P_2 <- G_2; ...) ... + * ==> + * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) + * + * 4. + * + * for (P <- G; E; ...) ... + * => + * for (P <- G.filter (P => E); ...) ... + * + * 5. For any N: + * + * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * ==> + * for (TupleN(P_1, P_2, ... P_N) <- + * for (x_1 @ P_1 <- G) yield { + * val x_2 @ P_2 = E_2 + * ... + * val x_N & P_N = E_N + * TupleN(x_1, ..., x_N) + * } ...) + * + * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated + * and the variable constituting P_i is used instead of x_i * * @param mapName The name to be used for maps (either map or foreach) * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index cad9b4e76ca9..fa82f14a81fe 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -68,7 +68,8 @@ object Feature: (into, "Allow into modifier on parameter types"), (namedTuples, "Allow named tuples"), (modularity, "Enable experimental modularity features"), - (betterMatchTypeExtractors, "Enable better match type extractors") + (betterMatchTypeExtractors, "Enable better match type extractors"), + (betterFors, "Enable improvements in `for` comprehensions") ) // legacy language features from Scala 2 that are no longer supported. 
diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 3e8c2ab15cd2..3d71c0da1481 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -135,7 +135,7 @@ object language: /** Experimental support for improvements in `for` comprehensions * - * @see [[https://dotty.epfl.ch/docs/reference/experimental/better-fors]] + * @see [[https://github.com/scala/improvement-proposals/pull/79]] */ @compileTimeOnly("`betterFors` can only be used at compile time in import statements") object betterFors diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index bf652cb0ee33..88e3f2b27a84 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -8,7 +8,8 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of the library Build.mimaPreviousDottyVersion -> Seq( - + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.betterFors"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$betterFors$"), ), // Additions since last LTS diff --git a/tests/run/fors.scala b/tests/run/fors.scala index af04beb311b1..a12d0e977157 100644 --- a/tests/run/fors.scala +++ b/tests/run/fors.scala @@ -6,6 +6,7 @@ import annotation.tailrec +@scala.annotation.experimental object Test extends App { val xs = List(1, 2, 3) val ys = List(Symbol("a"), Symbol("b"), Symbol("c")) From 4bc0a4a51426d493624d170830bbec1dc9503387 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Mon, 22 Jul 2024 16:48:08 +0200 Subject: [PATCH 370/827] Merge betterFors desugaring with the default implementation --- .../src/dotty/tools/dotc/ast/Desugar.scala | 255 +++++++----------- compiler/src/dotty/tools/dotc/ast/untpd.scala | 2 +- .../src/dotty/tools/dotc/core/StdNames.scala | 1 - 3 files changed, 98 insertions(+), 160 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 30868fac4475..b892e963ea51 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1806,113 +1806,79 @@ object desugar { * corresponding to whether this is a for-do or a for-yield. * If betterFors are enabled, the creation performs the following rewrite rules: * - * 1. + * 1. if betterFors is enabled: * - * for (P <- G) do E ==> G.foreach (P => E) + * for () do E ==> E + * or + * for () yield E ==> E * - * Here and in the following (P => E) is interpreted as the function (P => E) - * if P is a variable pattern and as the partial function { case P => E } otherwise. + * (Where empty for-comprehensions are excluded by the parser) * * 2. * - * for (P <- G) yield P ==> G - * - * If P is a variable or a tuple of variables and G is not a withFilter. + * for (P <- G) do E ==> G.foreach (P => E) * - * for (P <- G) yield E ==> G.map (P => E) - * - * Otherwise + * Here and in the following (P => E) is interpreted as the function (P => E) + * if P is a variable pattern and as the partial function { case P => E } otherwise. * * 3. * - * for (P_1 <- G_1; P_2 <- G_2; ...) ... - * ==> - * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) - * - * 4. - * - * for (P <- G; if E; ...) ... - * ==> - * for (P <- G.withFilter (P => E); ...) ... - * - * 5. For any N: - * - * for (P <- G; P_1 = E_1; ... 
P_N = E_N; rest) - * ==> - * G.flatMap (P => for (P_1 = E_1; ... P_N = E_N; ...)) if rest contains (<-) - * G.map (P => for (P_1 = E_1; ... P_N = E_N; ...)) otherwise + * for (P <- G) yield P ==> G * - * 6. For any N: + * If betterFors is enabled, P is a variable or a tuple of variables and G is not a withFilter. * - * for (P <- G; P_1 = E_1; ... P_N = E_N; if E; ...) - * ==> - * for (TupleN(P, P_1, ... P_N) <- - * for (x @ P <- G) yield { - * val x_1 @ P_1 = E_2 - * ... - * val x_N @ P_N = E_N - * TupleN(x, x_1, ..., x_N) - * }; if E; ...) - * - * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated - * and the variable constituting P_i is used instead of x_i - * - * 7. For any N: - * - * for (P_1 = E_1; ... P_N = E_N; ...) - * ==> - * { - * val x_N @ P_N = E_N - * for (...) - * } - * - * 8. - * for () yield E ==> E - * - * (Where empty for-comprehensions are excluded by the parser) + * for (P <- G) yield E ==> G.map (P => E) * - * If the aliases are not followed by a guard, otherwise an error. - * - * With betterFors disabled, the translation is as follows: - * - * 1. + * Otherwise * - * for (P <- G) E ==> G.foreach (P => E) + * 4. * - * Here and in the following (P => E) is interpreted as the function (P => E) - * if P is a variable pattern and as the partial function { case P => E } otherwise. + * for (P_1 <- G_1; P_2 <- G_2; ...) ... + * ==> + * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) * - * 2. + * 5. * - * for (P <- G) yield E ==> G.map (P => E) + * for (P <- G; if E; ...) ... + * ==> + * for (P <- G.withFilter (P => E); ...) ... * - * 3. + * 6. For any N, if betterFors is enabled: * - * for (P_1 <- G_1; P_2 <- G_2; ...) ... + * for (P <- G; P_1 = E_1; ... P_N = E_N; P1 <- G1; ...) ... * ==> - * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) + * G.flatMap (P => for (P_1 = E_1; ... P_N = E_N; ...)) * - * 4. + * 7. For any N, if betterFors is enabled: * - * for (P <- G; E; ...) ... - * => - * for (P <- G.filter (P => E); ...) ... + * for (P <- G; P_1 = E_1; ... P_N = E_N) ... + * ==> + * G.map (P => for (P_1 = E_1; ... P_N = E_N) ...) * - * 5. For any N: + * 8. For any N: * - * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * for (P <- G; P_1 = E_1; ... P_N = E_N; ...) * ==> - * for (TupleN(P_1, P_2, ... P_N) <- - * for (x_1 @ P_1 <- G) yield { - * val x_2 @ P_2 = E_2 + * for (TupleN(P, P_1, ... P_N) <- + * for (x @ P <- G) yield { + * val x_1 @ P_1 = E_2 * ... - * val x_N & P_N = E_N - * TupleN(x_1, ..., x_N) - * } ...) + * val x_N @ P_N = E_N + * TupleN(x, x_1, ..., x_N) + * }; if E; ...) * * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated * and the variable constituting P_i is used instead of x_i * + * 9. For any N, if betterFors is enabled: + * + * for (P_1 = E_1; ... P_N = E_N; ...) + * ==> + * { + * val x_N @ P_N = E_N + * for (...) 
+ * } + * * @param mapName The name to be used for maps (either map or foreach) * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) * @param enums The enumerators in the for expression @@ -2037,86 +2003,59 @@ object desugar { case (Tuple(ts1), Tuple(ts2)) => ts1.corresponds(ts2)(deepEquals) case _ => false - if betterForsEnabled then - enums match { - case Nil => body - case (gen: GenFrom) :: Nil => - if gen.checkMode != GenCheckMode.Filtered // results of withFilter have the wrong type - && deepEquals(gen.pat, body) - then gen.expr // avoid a redundant map with identity - else Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) - case (gen: GenFrom) :: rest - if rest.dropWhile(_.isInstanceOf[GenAlias]).headOption.forall(e => e.isInstanceOf[GenFrom]) => - val cont = makeFor(mapName, flatMapName, rest, body) - val selectName = - if rest.exists(_.isInstanceOf[GenFrom]) then flatMapName - else mapName - Apply(rhsSelect(gen, selectName), makeLambda(gen, cont)) - case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => - val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) - val pats = valeqs map { case GenAlias(pat, _) => pat } - val rhss = valeqs map { case GenAlias(_, rhs) => rhs } - val (defpat0, id0) = makeIdPat(gen.pat) - val (defpats, ids) = (pats map makeIdPat).unzip - val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => - val mods = defpat match - case defTree: DefTree => defTree.mods - case _ => Modifiers() - makePatDef(valeq, mods, defpat, rhs) - } - val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) - val allpats = gen.pat :: pats - val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) - makeFor(mapName, flatMapName, vfrom1 :: rest1, body) - case (gen: GenFrom) :: test :: rest => - val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) - val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Filtered) - makeFor(mapName, flatMapName, genFrom :: rest, body) - case GenAlias(_, _) :: _ => - val (valeqs, rest) = enums.span(_.isInstanceOf[GenAlias]) - val pats = valeqs.map { case GenAlias(pat, _) => pat } - val rhss = valeqs.map { case GenAlias(_, rhs) => rhs } - val (defpats, ids) = pats.map(makeIdPat).unzip - val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => - val mods = defpat match - case defTree: DefTree => defTree.mods - case _ => Modifiers() - makePatDef(valeq, mods, defpat, rhs) - } - Block(pdefs, makeFor(mapName, flatMapName, rest, body)) - case _ => - EmptyTree //may happen for erroneous input - } - else { - enums match { - case (gen: GenFrom) :: Nil => - Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) - case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => - val cont = makeFor(mapName, flatMapName, rest, body) - Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) - case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => - val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) - val pats = valeqs map { case GenAlias(pat, _) => pat } - val rhss = valeqs map { case GenAlias(_, rhs) => rhs } - val (defpat0, id0) = makeIdPat(gen.pat) - val (defpats, ids) = (pats map makeIdPat).unzip - val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => - val mods = defpat match - case defTree: DefTree => defTree.mods - case _ => Modifiers() - makePatDef(valeq, mods, defpat, rhs) - } - val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, 
gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) - val allpats = gen.pat :: pats - val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) - makeFor(mapName, flatMapName, vfrom1 :: rest1, body) - case (gen: GenFrom) :: test :: rest => - val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) - val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) - makeFor(mapName, flatMapName, genFrom :: rest, body) - case _ => - EmptyTree //may happen for erroneous input - } + enums match { + case Nil if betterForsEnabled => body + case (gen: GenFrom) :: Nil => + if betterForsEnabled + && gen.checkMode != GenCheckMode.Filtered // results of withFilter have the wrong type + && deepEquals(gen.pat, body) + then gen.expr // avoid a redundant map with identity + else Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) + case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => + val cont = makeFor(mapName, flatMapName, rest, body) + Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) + case (gen: GenFrom) :: rest + if betterForsEnabled + && rest.dropWhile(_.isInstanceOf[GenAlias]).headOption.forall(e => e.isInstanceOf[GenFrom]) => // possible aliases followed by a generator or end of for + val cont = makeFor(mapName, flatMapName, rest, body) + val selectName = + if rest.exists(_.isInstanceOf[GenFrom]) then flatMapName + else mapName + Apply(rhsSelect(gen, selectName), makeLambda(gen, cont)) + case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => + val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) + val pats = valeqs map { case GenAlias(pat, _) => pat } + val rhss = valeqs map { case GenAlias(_, rhs) => rhs } + val (defpat0, id0) = makeIdPat(gen.pat) + val (defpats, ids) = (pats map makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) + val allpats = gen.pat :: pats + val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, vfrom1 :: rest1, body) + case (gen: GenFrom) :: test :: rest => + val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) + val genFrom = GenFrom(gen.pat, filtered, if betterForsEnabled then GenCheckMode.Filtered else GenCheckMode.Ignore) + makeFor(mapName, flatMapName, genFrom :: rest, body) + case GenAlias(_, _) :: _ if betterForsEnabled => + val (valeqs, rest) = enums.span(_.isInstanceOf[GenAlias]) + val pats = valeqs.map { case GenAlias(pat, _) => pat } + val rhss = valeqs.map { case GenAlias(_, rhs) => rhs } + val (defpats, ids) = pats.map(makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + Block(pdefs, makeFor(mapName, flatMapName, rest, body)) + case _ => + EmptyTree //may happen for erroneous input } } diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index a3aee4dc17d2..60309d4d83bd 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -183,7 +183,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** An enum to control checking or 
filtering of patterns in GenFrom trees */ enum GenCheckMode { - case Ignore // neither filter since pattern is trivially irrefutable + case Ignore // neither filter nor check since pattern is trivially irrefutable case Filtered // neither filter nor check since filtering was done before case Check // check that pattern is irrefutable case CheckAndFilter // both check and filter (transitional period starting with 3.2) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index bbe405b46bf1..d3e198a7e7a7 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -435,7 +435,6 @@ object StdNames { val asInstanceOfPM: N = "$asInstanceOf$" val assert_ : N = "assert" val assume_ : N = "assume" - val betterFors: N = "betterFors" val box: N = "box" val break: N = "break" val build : N = "build" From c973d9bfe248b67e5ed6966c3d46848635430907 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Mon, 22 Jul 2024 20:02:12 +0200 Subject: [PATCH 371/827] Fail when a poly function value has a different number of type params than the expected poly function --- .../src/dotty/tools/dotc/typer/Typer.scala | 52 ++++++++++--------- tests/neg/i20533.check | 5 ++ tests/neg/i20533.scala | 6 +++ 3 files changed, 38 insertions(+), 25 deletions(-) create mode 100644 tests/neg/i20533.check create mode 100644 tests/neg/i20533.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 5113a6380a78..db3bab6f766a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1912,32 +1912,34 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val untpd.Function(vparams: List[untpd.ValDef] @unchecked, body) = fun: @unchecked val dpt = pt.dealias - // If the expected type is a polymorphic function with the same number of - // type and value parameters, then infer the types of value parameters from the expected type. - val inferredVParams = dpt match - case defn.PolyFunctionOf(poly @ PolyType(_, mt: MethodType)) - if tparams.lengthCompare(poly.paramNames) == 0 && vparams.lengthCompare(mt.paramNames) == 0 => - vparams.zipWithConserve(mt.paramInfos): (vparam, formal) => - // Unlike in typedFunctionValue, `formal` cannot be a TypeBounds since - // it must be a valid method parameter type. - if vparam.tpt.isEmpty && isFullyDefined(formal, ForceDegree.failBottom) then - cpy.ValDef(vparam)(tpt = new untpd.InLambdaTypeTree(isResult = false, (tsyms, vsyms) => - // We don't need to substitute `mt` by `vsyms` because we currently disallow - // dependencies between value parameters of a closure. - formal.substParams(poly, tsyms.map(_.typeRef))) - ) - else vparam - case _ => - vparams - - val resultTpt = dpt match + dpt match case defn.PolyFunctionOf(poly @ PolyType(_, mt: MethodType)) => - untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => - mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) - case _ => untpd.TypeTree() - - val desugared = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) - typed(desugared, pt) + if tparams.lengthCompare(poly.paramNames) == 0 && vparams.lengthCompare(mt.paramNames) == 0 then + // If the expected type is a polymorphic function with the same number of + // type and value parameters, then infer the types of value parameters from the expected type. 
+ val inferredVParams = vparams.zipWithConserve(mt.paramInfos): (vparam, formal) => + // Unlike in typedFunctionValue, `formal` cannot be a TypeBounds since + // it must be a valid method parameter type. + if vparam.tpt.isEmpty && isFullyDefined(formal, ForceDegree.failBottom) then + cpy.ValDef(vparam)(tpt = new untpd.InLambdaTypeTree(isResult = false, (tsyms, vsyms) => + // We don't need to substitute `mt` by `vsyms` because we currently disallow + // dependencies between value parameters of a closure. + formal.substParams(poly, tsyms.map(_.typeRef))) + ) + else vparam + val resultTpt = + untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => + mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) + val desugared = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) + typed(desugared, pt) + else + val msg = + em"""|Provided polymorphic function value doesn't match the expected type $dpt. + |Expected type should be a polymorphic function with the same number of type and value parameters.""" + errorTree(EmptyTree, msg, tree.srcPos) + case _ => + val desugared = desugar.makeClosure(tparams, vparams, body, untpd.TypeTree(), tree.span) + typed(desugared, pt) end typedPolyFunctionValue def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = { diff --git a/tests/neg/i20533.check b/tests/neg/i20533.check new file mode 100644 index 000000000000..45dfbd7f4b92 --- /dev/null +++ b/tests/neg/i20533.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i20533.scala:5:8 ----------------------------------------------------------------------------------- +5 | [X] => (x, y) => Map(x -> y) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Provided polymorphic function value doesn't match the expected type [X, Y] => (x$1: X, x$2: Y) => Map[X, Y]. + | Expected type should be a polymorphic function with the same number of type and value parameters. diff --git a/tests/neg/i20533.scala b/tests/neg/i20533.scala new file mode 100644 index 000000000000..20059bd795c6 --- /dev/null +++ b/tests/neg/i20533.scala @@ -0,0 +1,6 @@ +def mapF(h: [X, Y] => (X, Y) => Map[X, Y]): Unit = ??? 
+ +def test = + mapF( + [X] => (x, y) => Map(x -> y) // error + ) From 8f490e1d68c495fd6f765c4a5a6f32e58c715b04 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 23 Jul 2024 21:11:07 +0200 Subject: [PATCH 372/827] Update PPrint community-project --- community-build/community-projects/PPrint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/community-projects/PPrint b/community-build/community-projects/PPrint index 2203dc6081f5..34a777f687bc 160000 --- a/community-build/community-projects/PPrint +++ b/community-build/community-projects/PPrint @@ -1 +1 @@ -Subproject commit 2203dc6081f5e8fa89f552b155724b0a8fdcec03 +Subproject commit 34a777f687bc851953e682f99edcae9d2875babc From a3d11ffcc4bbfe65015464c7f763dcca3a172295 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 2 Jul 2024 15:19:06 +0200 Subject: [PATCH 373/827] add method, annotation and test cases --- .../dotty/tools/dotc/core/Definitions.scala | 1 + .../dotc/semanticdb/ExtractSemanticDB.scala | 7 +- .../tools/dotc/transform/patmat/Space.scala | 1 + .../src/dotty/tools/dotc/typer/Checking.scala | 1 + .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- .../dotty/tools/repl/TabcompleteTests.scala | 5 +- .../reference/experimental/runtimeChecked.md | 133 ++++++++++++++++++ docs/sidebar.yml | 1 + .../tools/languageserver/CompletionTest.scala | 1 + .../annotation/internal/RuntimeChecked.scala | 11 ++ .../scala/runtime/stdLibPatches/Predef.scala | 16 +++ .../pc/tests/completion/CompletionSuite.scala | 1 + tests/neg/runtimeChecked-2.check | 5 + tests/neg/runtimeChecked-2.scala | 13 ++ tests/neg/runtimeChecked.check | 7 + tests/neg/runtimeChecked.scala | 14 ++ .../stdlibExperimentalDefinitions.scala | 5 +- tests/run/runtimeChecked-2.scala | 8 ++ tests/run/runtimeChecked.scala | 12 ++ 19 files changed, 237 insertions(+), 7 deletions(-) create mode 100644 docs/_docs/reference/experimental/runtimeChecked.md create mode 100644 library/src/scala/annotation/internal/RuntimeChecked.scala create mode 100644 tests/neg/runtimeChecked-2.check create mode 100644 tests/neg/runtimeChecked-2.scala create mode 100644 tests/neg/runtimeChecked.check create mode 100644 tests/neg/runtimeChecked.scala create mode 100644 tests/run/runtimeChecked-2.scala create mode 100644 tests/run/runtimeChecked.scala diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index e9d0f68b79c1..fda12a5488ce 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1037,6 +1037,7 @@ class Definitions { @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") + @tu lazy val RuntimeCheckedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.RuntimeChecked") @tu lazy val SourceFileAnnot: ClassSymbol = requiredClass("scala.annotation.internal.SourceFile") @tu lazy val ScalaSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaSignature") @tu lazy val ScalaLongSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaLongSignature") diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 357202229e50..8c1f22005af3 100644 --- 
a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -458,14 +458,15 @@ object ExtractSemanticDB: def unapply(tree: ValDef)(using Context): Option[(Tree, Tree)] = tree.rhs match case Match(Typed(selected: Tree, tpt: TypeTree), CaseDef(pat: Tree, _, _) :: Nil) - if tpt.span.exists && !tpt.span.hasLength && tpt.tpe.isAnnotatedByUnchecked => + if tpt.span.exists && !tpt.span.hasLength && tpt.tpe.isAnnotatedByUncheckedOrRuntimeChecked => Some((pat, selected)) case _ => None extension (tpe: Types.Type) - private inline def isAnnotatedByUnchecked(using Context) = tpe match - case Types.AnnotatedType(_, annot) => annot.symbol == defn.UncheckedAnnot + private inline def isAnnotatedByUncheckedOrRuntimeChecked(using Context) = tpe match + case Types.AnnotatedType(_, annot) => + annot.symbol == defn.UncheckedAnnot || annot.symbol == defn.RuntimeCheckedAnnot case _ => false def collectPats(pat: Tree): List[Tree] = diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 97816bd71b84..eb74058dfb10 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -794,6 +794,7 @@ object SpaceEngine { } !sel.tpe.hasAnnotation(defn.UncheckedAnnot) + && !sel.tpe.hasAnnotation(defn.RuntimeCheckedAnnot) && { ctx.settings.YcheckAllPatmat.value || isCheckable(sel.tpe) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 1f82b9ddc084..421f00e61584 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -981,6 +981,7 @@ trait Checking { def recur(pat: Tree, pt: Type): Boolean = !sourceVersion.isAtLeast(`3.2`) || pt.hasAnnotation(defn.UncheckedAnnot) + || pt.hasAnnotation(defn.RuntimeCheckedAnnot) || { patmatch.println(i"check irrefutable $pat: ${pat.tpe} against $pt") pat match diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index db3bab6f766a..f2a7124f9fa4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2073,7 +2073,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer result match { case result @ Match(sel, CaseDef(pat, _, _) :: _) => tree.selector.removeAttachment(desugar.CheckIrrefutable) match { - case Some(checkMode) if !sel.tpe.hasAnnotation(defn.UncheckedAnnot) => + case Some(checkMode) if !(sel.tpe.hasAnnotation(defn.UncheckedAnnot) || sel.tpe.hasAnnotation(defn.RuntimeCheckedAnnot)) => val isPatDef = checkMode == desugar.MatchCheck.IrrefutablePatDef if !checkIrrefutable(sel, pat, isPatDef) && sourceVersion.isAtLeast(`3.2`) diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index f719752be353..95419824d9d1 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -9,7 +9,7 @@ import org.junit.Test class TabcompleteTests extends ReplTest { @Test def tabCompleteList = initially { - val comp = tabComplete("List.r") + val comp = tabComplete("List.ra") assertEquals(List("range"), comp.distinct) } @@ -112,7 +112,7 @@ class TabcompleteTests extends ReplTest { val comp = tabComplete("(null: AnyRef).") assertEquals( List("!=", "##", "->", "==", 
"asInstanceOf", "ensuring", "eq", "equals", "formatted", - "getClass", "hashCode", "isInstanceOf", "ne", "nn", "notify", "notifyAll", "synchronized", "toString", "wait", "→"), + "getClass", "hashCode", "isInstanceOf", "ne", "nn", "notify", "notifyAll", "runtimeChecked", "synchronized", "toString", "wait", "→"), comp.distinct.sorted) } @@ -163,6 +163,7 @@ class TabcompleteTests extends ReplTest { "nn", "notify", "notifyAll", + "runtimeChecked", "synchronized", "toString", "valueOf", diff --git a/docs/_docs/reference/experimental/runtimeChecked.md b/docs/_docs/reference/experimental/runtimeChecked.md new file mode 100644 index 000000000000..f13524d573d4 --- /dev/null +++ b/docs/_docs/reference/experimental/runtimeChecked.md @@ -0,0 +1,133 @@ +--- +layout: doc-page +title: "The runtimeChecked method" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/runtimeChecked.html +--- + +The `runtimeChecked` method is an extension method, defined in `scala.Predef`. It can be called on any expression. An expression marked as `runtimeChecked` is exempt from certain static checks in the compiler, for example pattern match exhaustivity. It is intended to replace `: @unchecked` type ascription in these cases. + +## Example + +A common use case for `runtimeChecked` is to assert that a pattern will always match, either for convenience, or because there is a known invariant that the types can not express. + +e.g. looking up an expected entry in a dynamically loaded dictionary-like structure +```scala +// example 1 +trait AppConfig: + def get(key: String): Option[String] + +val config: AppConfig = ??? + +val Some(appVersion) = config.get("appVersion").runtimeChecked +``` + +or to assert that a value can only match some specific patterns: +```scala +// example 2 +enum Day: + case Mon, Tue, Wed, Thu, Fri, Sat, Sun + +val weekDay: Option[Day] = ??? + +weekDay.runtimeChecked match + case Some(Mon | Tue | Wed | Thu | Fri) => println("got weekday") +// case Some(Sat | Sun) => // weekend should not appear + case None => +``` + +In both of these cases, without `runtimeChecked` then there would either be an error (example 1), or a warning (example 2), because statically, the compiler knows that there could be other cases at runtime - so is right to caution the programmer. + +```scala +// warning in example 2 when we don't add `.runtimeChecked`. +-- [E029] Pattern Match Exhaustivity Warning: ---------------------------------- +6 |weekDay match + |^^^^^^^ + |match may not be exhaustive. + | + |It would fail on pattern case: Some(Sat), Some(Sun) +``` + +## Safety + +The `runtimeChecked` method only turns off static checks that can be soundly performed at runtime. This means that patterns with unchecked type-tests will still generate warnings. For example: +```scala +scala> val xs = List(1: Any) + | xs.runtimeChecked match { + | case is: ::[Int] => is.head + | } +1 warning found +-- Unchecked Warning: --------------------------------------- +3 | case is: ::[Int] => is.head + | ^ + |the type test for ::[Int] cannot be checked at runtime + |because its type arguments can't be determined from List[Any] +val res0: Int = 1 +``` +As the warning hints, the type `::[Int]` can not be tested at runtime on a value of type `List[Any]`, so using `runtimeChecked` still protects the user against assertions that can not be validated. 
+ +To fully avoid warnings, as with previous Scala versions, `@unchecked` should be put on the type argument: +```scala +scala> xs.runtimeChecked match { + | case is: ::[Int @unchecked] => is.head + | } +val res1: Int = 1 +``` + + +## Specification + +We add a new annotation `scala.internal.RuntimeChecked`, this is part of the standard Scala 3 library. A programmer is not expected to use this annotation directly. + +```scala +package scala.annotation.internal + +final class RuntimeChecked extends Annotation +``` + +Any term that is the scrutinee of a pattern match, that has a type annotated with `RuntimeChecked`, is exempt from pattern match exhaustivity checking. + + +The user facing API is provided by a new extension method `scala.Predef.runtimeChecked`, qualified for any value: +```scala +package scala + +import scala.annotation.internal.RuntimeChecked + +object Predef: + ... + extension [T](x: T) + inline def runtimeChecked: x.type @RuntimeChecked = + x: @RuntimeChecked +``` + +The `runtimeChecked` method returns its argument, refining its type with the `RuntimeChecked` annotation. + +## Motivation + +As described in [Pattern Bindings](../changed-features/pattern-bindings.md), under `-source:future` it is an error for a pattern definition to be refutable. For instance, consider: +```scala +def xs: List[Any] = ??? +val y :: ys = xs +``` + +This compiled without warning in 3.0, became a warning in 3.2, and we would like to make it an error by default in a future 3.x version. +As an escape hatch in 3.2 we recommended to use a type ascription of `: @unchecked`: +``` +-- Warning: ../../new/test.scala:6:16 ----------------------- +6 | val y :: ys = xs + | ^^ + |pattern's type ::[Any] is more specialized than the right + |hand side expression's type List[Any] + | + |If the narrowing is intentional, this can be communicated + |by adding `: @unchecked` after the expression, + |which may result in a MatchError at runtime. +``` + +We suggest that `: @unchecked` is syntactically awkward, and also a misnomer - in fact in this case the the pattern is fully checked, but the necessary checks occur at runtime. The `runtimeChecked` method is then a successor to `@unchecked` for this purpose. + +We propose that `@unchecked` will still be necessary for silencing warnings on unsound type tests. + +### Restoring Scala 2.13 semantics with runtimeChecked + +In Scala 3, the `: @unchecked` type ascription has the effect of turning off all pattern-match warnings on the match scrutinee - this differs from 2.13 in which it strictly turns off only pattern exhaustivity checking. `runtimeChecked` restores the semantics of Scala 2.13. 
diff --git a/docs/sidebar.yml b/docs/sidebar.yml index efdab80595a6..5048669ef664 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -157,6 +157,7 @@ subsection: - page: reference/experimental/named-tuples.md - page: reference/experimental/modularity.md - page: reference/experimental/typeclasses.md + - page: reference/experimental/runtimeChecked.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index 887c7a983729..8034b4c8d40b 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -1028,6 +1028,7 @@ class CompletionTest { ("ensuring", Method, "(cond: Boolean): Foo.Bar.type"), ("##", Method, "=> Int"), ("nn", Method, "=> Foo.Bar.type"), + ("runtimeChecked", Method, "=> Foo.Bar.type"), ("==", Method, "(x$0: Any): Boolean"), ("ensuring", Method, "(cond: Boolean, msg: => Any): Foo.Bar.type"), ("ne", Method, "(x$0: Object): Boolean"), diff --git a/library/src/scala/annotation/internal/RuntimeChecked.scala b/library/src/scala/annotation/internal/RuntimeChecked.scala new file mode 100644 index 000000000000..d2106d720156 --- /dev/null +++ b/library/src/scala/annotation/internal/RuntimeChecked.scala @@ -0,0 +1,11 @@ +package scala.annotation.internal + +import scala.annotation.Annotation +import scala.annotation.experimental + +/**An annotation marking an intention that all checks on a value can be reliably performed at runtime. + * + * The compiler will remove certain static checks except those that can't be performed at runtime. + */ +@experimental +final class RuntimeChecked() extends Annotation diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 77b014b80466..996f68d4e122 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -1,6 +1,7 @@ package scala.runtime.stdLibPatches import scala.annotation.experimental +import scala.annotation.internal.RuntimeChecked object Predef: import compiletime.summonFrom @@ -80,4 +81,19 @@ object Predef: @experimental infix type is[A <: AnyKind, B <: Any{type Self <: AnyKind}] = B { type Self = A } + extension [T](x: T) + /**Asserts that a term should be exempt from static checks that can be reliably checked at runtime. 
+ * @example {{{ + * val xs: Option[Int] = Option(1) + * xs.runtimeChecked match + * case Some(x) => x // `Some(_)` can be checked at runtime, so no warning + * }}} + * @example {{{ + * val xs: List[Int] = List(1,2,3) + * val y :: ys = xs.runtimeChecked // `_ :: _` can be checked at runtime, so no warning + * }}} + */ + @experimental + inline def runtimeChecked: x.type @RuntimeChecked = x: @RuntimeChecked + end Predef diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index f281f42d9db3..6cccc923a5f5 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -117,6 +117,7 @@ class CompletionSuite extends BaseCompletionSuite: |fromSpecific(from: Any)(it: IterableOnce[Nothing]): List[Nothing] |fromSpecific(it: IterableOnce[Nothing]): List[Nothing] |nn: List.type & List.type + |runtimeChecked scala.collection.immutable |toFactory(from: Any): Factory[Nothing, List[Nothing]] |formatted(fmtstr: String): String |→[B](y: B): (List.type, B) diff --git a/tests/neg/runtimeChecked-2.check b/tests/neg/runtimeChecked-2.check new file mode 100644 index 000000000000..1b30d637a6b9 --- /dev/null +++ b/tests/neg/runtimeChecked-2.check @@ -0,0 +1,5 @@ +-- [E030] Match case Unreachable Warning: tests/neg/runtimeChecked-2.scala:10:11 --------------------------------------- +10 | case is: Some[t] => ??? // unreachable + | ^^^^^^^^^^^ + | Unreachable case +No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/runtimeChecked-2.scala b/tests/neg/runtimeChecked-2.scala new file mode 100644 index 000000000000..bfb5aff2b1ba --- /dev/null +++ b/tests/neg/runtimeChecked-2.scala @@ -0,0 +1,13 @@ +//> using options -Werror -source:future -experimental + +object Foo { + + val xs: Option[Int] = Some(1) + + def test: Int = + xs.runtimeChecked match { // this test asserts that reachability is not avoided by runtimeChecked + case is: Some[t] => is.get + case is: Some[t] => ??? // unreachable + } +} +// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/runtimeChecked.check b/tests/neg/runtimeChecked.check new file mode 100644 index 000000000000..3d984e08517d --- /dev/null +++ b/tests/neg/runtimeChecked.check @@ -0,0 +1,7 @@ +-- [E092] Pattern Match Unchecked Warning: tests/neg/runtimeChecked.scala:11:11 ---------------------------------------- +11 | case is: ::[Int/* can not be checked so still err */] => is.head + | ^ + |the type test for ::[Int] cannot be checked at runtime because its type arguments can't be determined from List[Any] + | + | longer explanation available when compiling with `-explain` +No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/runtimeChecked.scala b/tests/neg/runtimeChecked.scala new file mode 100644 index 000000000000..d3c1a91844cc --- /dev/null +++ b/tests/neg/runtimeChecked.scala @@ -0,0 +1,14 @@ +//> using options -Werror -source:future -experimental + +object Foo { + + val xs: List[Any] = List(1: Any) + + def test: Int = + xs.runtimeChecked match { // this test asserts that unsound type tests still require @unchecked + // tests/run/runtimeChecked.scala adds @unchecked to the + // unsound type test to avoid the warning. 
+ case is: ::[Int/* can not be checked so still err */] => is.head + } +} +// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 22da2ace1e52..15ccd38f860c 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -88,7 +88,10 @@ val experimentalDefinitionInLibrary = Set( // New feature: functions with erased parameters. // Need quotedPatternsWithPolymorphicFunctions enabled. - "scala.quoted.runtime.Patterns$.higherOrderHoleWithTypes" + "scala.quoted.runtime.Patterns$.higherOrderHoleWithTypes", + + // New feature: SIP 57 - runtimeChecked replacement of @unchecked + "scala.Predef$.runtimeChecked", "scala.annotation.internal.RuntimeChecked" ) diff --git a/tests/run/runtimeChecked-2.scala b/tests/run/runtimeChecked-2.scala new file mode 100644 index 000000000000..d34ead3d6695 --- /dev/null +++ b/tests/run/runtimeChecked-2.scala @@ -0,0 +1,8 @@ +//> using options -Werror -source:future -experimental + +val xs: List[Any] = List(1: Any) + +@main +def Test: Unit = + val head :: _ = xs.runtimeChecked + assert(head == 1) diff --git a/tests/run/runtimeChecked.scala b/tests/run/runtimeChecked.scala new file mode 100644 index 000000000000..e0a5ee042381 --- /dev/null +++ b/tests/run/runtimeChecked.scala @@ -0,0 +1,12 @@ +//> using options -Werror -source:future -experimental + +val xs: List[Any] = List(1: Any) + +@main +def Test: Unit = + val head = xs.runtimeChecked match { + // tests/neg/runtimeChecked.scala asserts that @unchecked is + // still needed for unsound type tests. + case is: ::[Int @unchecked] => is.head + } + assert(head == 1) From 5595bddd855cb4579ea6e7a6dea472b87ccb28a8 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 19 Jul 2024 21:59:37 +0200 Subject: [PATCH 374/827] Apply suggestions from code review improve documentation based on suggestions Co-authored-by: odersky --- .../reference/experimental/runtimeChecked.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/_docs/reference/experimental/runtimeChecked.md b/docs/_docs/reference/experimental/runtimeChecked.md index f13524d573d4..bb0272ea8a3e 100644 --- a/docs/_docs/reference/experimental/runtimeChecked.md +++ b/docs/_docs/reference/experimental/runtimeChecked.md @@ -4,13 +4,13 @@ title: "The runtimeChecked method" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/runtimeChecked.html --- -The `runtimeChecked` method is an extension method, defined in `scala.Predef`. It can be called on any expression. An expression marked as `runtimeChecked` is exempt from certain static checks in the compiler, for example pattern match exhaustivity. It is intended to replace `: @unchecked` type ascription in these cases. +The `runtimeChecked` method is an extension method, defined in `scala.Predef`. It can be called on any expression. An expression ending in `.runtimeChecked` is exempt from certain static checks in the compiler, for example pattern match exhaustivity. The idiom is intended to replace a `: @unchecked` type ascription in these cases. ## Example A common use case for `runtimeChecked` is to assert that a pattern will always match, either for convenience, or because there is a known invariant that the types can not express. -e.g. looking up an expected entry in a dynamically loaded dictionary-like structure +E.g. 
looking up an expected entry in a dynamically loaded dictionary-like structure: ```scala // example 1 trait AppConfig: @@ -35,7 +35,7 @@ weekDay.runtimeChecked match case None => ``` -In both of these cases, without `runtimeChecked` then there would either be an error (example 1), or a warning (example 2), because statically, the compiler knows that there could be other cases at runtime - so is right to caution the programmer. +In both of these cases, without `runtimeChecked` there would either be an error (example 1), or a warning (example 2), because statically, the compiler knows that there could be other cases at runtime - so is right to caution the programmer. ```scala // warning in example 2 when we don't add `.runtimeChecked`. @@ -76,7 +76,7 @@ val res1: Int = 1 ## Specification -We add a new annotation `scala.internal.RuntimeChecked`, this is part of the standard Scala 3 library. A programmer is not expected to use this annotation directly. +We add a new annotation `scala.internal.RuntimeChecked` as a part of the standard Scala 3 library. A programmer is not expected to use this annotation directly. ```scala package scala.annotation.internal @@ -84,10 +84,10 @@ package scala.annotation.internal final class RuntimeChecked extends Annotation ``` -Any term that is the scrutinee of a pattern match, that has a type annotated with `RuntimeChecked`, is exempt from pattern match exhaustivity checking. +Any term that is the scrutinee of a pattern match, and that has a type annotated with `RuntimeChecked`, is exempt from pattern match exhaustivity checking. -The user facing API is provided by a new extension method `scala.Predef.runtimeChecked`, qualified for any value: +The user facing API is augmented with a new extension method `scala.Predef.runtimeChecked`, qualified for any value: ```scala package scala @@ -124,9 +124,9 @@ As an escape hatch in 3.2 we recommended to use a type ascription of `: @uncheck |which may result in a MatchError at runtime. ``` -We suggest that `: @unchecked` is syntactically awkward, and also a misnomer - in fact in this case the the pattern is fully checked, but the necessary checks occur at runtime. The `runtimeChecked` method is then a successor to `@unchecked` for this purpose. +However, `: @unchecked` is syntactically awkward, and is also a misnomer - in fact in this case the the pattern _is_ fully checked, but the necessary checks occur at runtime. The `runtimeChecked` method is intended to replace `@unchecked` for this purpose. -We propose that `@unchecked` will still be necessary for silencing warnings on unsound type tests. +The `@unchecked` annotation is still retained for silencing warnings on unsound type tests. 
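As a small illustration of that last point, here is a sketch mirroring the `tests/neg/runtimeChecked.scala` and `tests/run/runtimeChecked.scala` cases added earlier in this series (the method names are illustrative):

```scala
val xs: List[Any] = List(1)

// The element type cannot be tested at runtime, so this type test still
// warns even though the scrutinee is `runtimeChecked`.
def unsound: Int = xs.runtimeChecked match
  case is: ::[Int] => is.head

// `@unchecked` on the type argument remains the way to silence it.
def silenced: Int = xs.runtimeChecked match
  case is: ::[Int @unchecked] => is.head
```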
### Restoring Scala 2.13 semantics with runtimeChecked From b4b1541b27e7d45483e8819264ba7ab6c1fdd148 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Wed, 24 Jul 2024 12:46:24 +0200 Subject: [PATCH 375/827] fix: don't add suffix if brackets already present --- .../tools/pc/completions/Completions.scala | 4 +++ .../completion/CompletionSnippetSuite.scala | 31 +++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index db578e32663f..a657df224ab6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -65,6 +65,10 @@ class Completions( */ case (fun) :: (appl: GenericApply) :: _ if appl.fun == fun => false + /* In case of `T@@[]` we should not add snippets. + */ + case tpe :: (appl: AppliedTypeTree) :: _ if appl.tpt == tpe => + false case _ :: (withcursor @ Select(fun, name)) :: (appl: GenericApply) :: _ if appl.fun == withcursor && name.decoded == Cursor.value => false diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala index 5769304919ca..2c91f71d8d19 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala @@ -451,3 +451,34 @@ class CompletionSnippetSuite extends BaseCompletionSuite: """.stripMargin, filter = _.contains("bar: Int") ) + + @Test def `brackets-already-present` = + check( + """|package a + |case class AAA[T]() + |object O { + | val l: AA@@[Int] = ??? + |} + |""".stripMargin, + """|AAA a + |ArrowAssoc scala.Predef + |""".stripMargin, + ) + + @Test def `brackets-already-present-edit` = + checkEdit( + """|package a + |case class AAA[T]() + |object O { + | val l: AA@@[Int] = ??? + |} + |""".stripMargin, + """|package a + |case class AAA[T]() + |object O { + | val l: AAA[Int] = ??? + |} + |""".stripMargin, + assertSingleItem = false, + ) + From 4fc85641fd270aca6639b898ef453c664dfbe65c Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 24 Jul 2024 12:01:02 +0100 Subject: [PATCH 376/827] Capture the kse3 issue in test cases The underlying type of an opaque type is only visible to anything within the scope that contains that opaque type. So, for instance, a Worm opaque type in a Worms object, anything within the Worms object sees the underlying type. So Worms.Worm is an opaque abstract type with bounds, while Worms.this.Worm is an opaque type with an underlying type. But you can also reference Worms.Worm while being inside of the Worms object. The change I made to opaque types allowed for member selection to see the underlying type when in a scope that can see that, which makes it consistent with how TypeComparer allows those two types to be seen as equivalent, when in the right scope. In kse3, it seems, the fact that this wasn't done was utilised by using an "external" reference to the opaque type, which avoided the underlying type's method being selected, and the extension method being selected instead. While my change broke kse3, I believe the change is good, as it brings consistency. 
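A minimal sketch of the scoping rule described above (names are illustrative, loosely following the `Worm` example from the tests):

```scala
import java.util.concurrent.atomic.AtomicReference

object Worms:
  opaque type Worm[V] = AtomicReference[AnyRef]

  // Inside the defining scope, Worm[V] is transparent: members of the
  // underlying AtomicReference (such as `get`) can be selected directly.
  def peek[V](w: Worm[V]): AnyRef = w.get()

// Outside the defining scope, Worms.Worm is abstract; only its bounds are
// visible, so `w.get()` would not type-check here.
def outside[V](w: Worms.Worm[V]): Worms.Worm[V] = w
```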
And it is possible to fix the kse3 code, by using the "universal function call syntax" (to borrow from Rust nomenclature) for calling the extension method. Alternatively, the extension methods can be defined where they really don't see the underlying type, and then the companion object can be made to include the extension methods (to keep them in implicit scope). --- tests/neg/i21239.check | 7 +++++++ tests/neg/i21239.orig.check | 7 +++++++ tests/neg/i21239.orig.scala | 33 +++++++++++++++++++++++++++++++++ tests/neg/i21239.scala | 14 ++++++++++++++ tests/pos/i21239.alt.scala | 28 ++++++++++++++++++++++++++++ tests/pos/i21239.orig.scala | 34 ++++++++++++++++++++++++++++++++++ tests/pos/i21239.scala | 18 ++++++++++++++++++ 7 files changed, 141 insertions(+) create mode 100644 tests/neg/i21239.check create mode 100644 tests/neg/i21239.orig.check create mode 100644 tests/neg/i21239.orig.scala create mode 100644 tests/neg/i21239.scala create mode 100644 tests/pos/i21239.alt.scala create mode 100644 tests/pos/i21239.orig.scala create mode 100644 tests/pos/i21239.scala diff --git a/tests/neg/i21239.check b/tests/neg/i21239.check new file mode 100644 index 000000000000..5b6f2f8bcef5 --- /dev/null +++ b/tests/neg/i21239.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i21239.scala:14:18 ------------------------------------------------------------ +14 | def get2: V = get // error + | ^^^ + | Found: AnyRef + | Required: V + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21239.orig.check b/tests/neg/i21239.orig.check new file mode 100644 index 000000000000..26895bd50ed3 --- /dev/null +++ b/tests/neg/i21239.orig.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i21239.orig.scala:32:8 -------------------------------------------------------- +32 | get // error + | ^^^ + | Found: AnyRef + | Required: V + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21239.orig.scala b/tests/neg/i21239.orig.scala new file mode 100644 index 000000000000..3fb39d93446b --- /dev/null +++ b/tests/neg/i21239.orig.scala @@ -0,0 +1,33 @@ +// 1 +// A re-minimisated reproduction of the original issue in kse3 +// The one in the issue removes the usage of the package +// in the second extension bundle, which is crucial to +// why my change broke this code +package kse.flow + +import java.util.concurrent.atomic.AtomicReference + +opaque type Worm[V] = AtomicReference[AnyRef] +object Worm: + val notSetSentinel: AnyRef = new AnyRef {} + + extension [V](worm: Worm[V]) + inline def wormAsAtomic: AtomicReference[AnyRef] = worm + + extension [V](worm: kse.flow.Worm[V]) + + inline def setIfEmpty(v: => V): Boolean = + var old = worm.wormAsAtomic.get() + if old eq Worm.notSetSentinel then + worm.wormAsAtomic.compareAndSet(old, v.asInstanceOf[AnyRef]) + else false + + inline def get: V = worm.wormAsAtomic.get() match + case x if x eq Worm.notSetSentinel => throw new java.lang.IllegalStateException("Retrieved value before being set") + case x => x.asInstanceOf[V] + + inline def getOrSet(v: => V): V = worm.wormAsAtomic.get() match + case x if x eq Worm.notSetSentinel => + setIfEmpty(v) + get // error + case x => x.asInstanceOf[V] diff --git a/tests/neg/i21239.scala b/tests/neg/i21239.scala new file mode 100644 index 000000000000..4eb4d5808857 --- /dev/null +++ b/tests/neg/i21239.scala @@ -0,0 +1,14 @@ +// 2 +// A more minimised reproduction +package lib + +import java.util.concurrent.atomic.AtomicReference + +opaque type Worm[V] = 
AtomicReference[AnyRef] +object Worm: + extension [V](worm: Worm[V]) + inline def wormAsAtomic: AtomicReference[AnyRef] = worm + + extension [V](worm: lib.Worm[V]) + def get: V = worm.wormAsAtomic.get().asInstanceOf[V] + def get2: V = get // error diff --git a/tests/pos/i21239.alt.scala b/tests/pos/i21239.alt.scala new file mode 100644 index 000000000000..13a1647115f7 --- /dev/null +++ b/tests/pos/i21239.alt.scala @@ -0,0 +1,28 @@ +// 4 +// An alternative way to fix it, +// defining the extension method externally, +// in a scope that doesn't see through +// the opaque type definition. +// The setup here also makes sure those extension +// are on the opaque type's companion object +// (via class extension), meaning that they continue +// to be in implicit scope (as enforced by the usage test) +import java.util.concurrent.atomic.AtomicReference + +package lib: + object Worms: + opaque type Worm[V] = AtomicReference[AnyRef] + object Worm extends WormOps: + extension [V](worm: Worm[V]) + inline def wormAsAtomic: AtomicReference[AnyRef] = worm + + import Worms.Worm + trait WormOps: + extension [V](worm: Worm[V]) + def get: V = worm.wormAsAtomic.get().asInstanceOf[V] + def get2: V = get + +package test: + import lib.Worms.Worm + object Test: + def usage(worm: Worm[String]): String = worm.get2 diff --git a/tests/pos/i21239.orig.scala b/tests/pos/i21239.orig.scala new file mode 100644 index 000000000000..56666bab4b4d --- /dev/null +++ b/tests/pos/i21239.orig.scala @@ -0,0 +1,34 @@ +// 5 +// Finally, an alternative way to fix the original issue, +// by reimplementing `getOrSet` to not even need +// our `get` extension. +import java.util.concurrent.atomic.AtomicReference + +opaque type Worm[V] = AtomicReference[AnyRef] +object Worm: + val notSetSentinel: AnyRef = new AnyRef {} + + extension [V](worm: Worm[V]) + inline def wormAsAtomic: AtomicReference[AnyRef] = worm // deprecate? + + inline def setIfEmpty(v: => V): Boolean = + val x = worm.get() + if x eq notSetSentinel then + val value = v + worm.set(value.asInstanceOf[AnyRef]) + true + else false + + inline def get: V = + val x = worm.get() + if x eq notSetSentinel then + throw IllegalStateException("Retrieved value before being set") + else x.asInstanceOf[V] + + inline def getOrSet(v: => V): V = + val x = worm.get() + if x eq notSetSentinel then + val value = v + worm.set(value.asInstanceOf[AnyRef]) + value + else x.asInstanceOf[V] diff --git a/tests/pos/i21239.scala b/tests/pos/i21239.scala new file mode 100644 index 000000000000..950f90c233d8 --- /dev/null +++ b/tests/pos/i21239.scala @@ -0,0 +1,18 @@ +// 3 +// One way to fix the issue, using the +// "universal function call syntax" +// (to borrow from what Rust calls the syntax to +// disambiguate which trait's method is intended.) 
+import java.util.concurrent.atomic.AtomicReference + +package lib: + opaque type Worm[V] = AtomicReference[AnyRef] + object Worm: + extension [V](worm: Worm[V]) + def get: V = worm.get().asInstanceOf[V] + def get2: V = Worm.get(worm) + +package test: + import lib.Worm + object Test: + def usage(worm: Worm[String]): String = worm.get2 From eda25aeec27323a7616e6381cc9b8968f188750e Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 24 Jul 2024 10:09:09 +0200 Subject: [PATCH 377/827] Add regression test for #21215 fixed in #21226 --- tests/pos/i21215.scala | 45 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 tests/pos/i21215.scala diff --git a/tests/pos/i21215.scala b/tests/pos/i21215.scala new file mode 100644 index 000000000000..e810051da7b7 --- /dev/null +++ b/tests/pos/i21215.scala @@ -0,0 +1,45 @@ + +trait FlatMap[F[_]]: + def flatMap[A, B](fa: F[A])(f: A => F[B]): F[B] = ??? + def ifM[B](ifTrue: => F[B], ifFalse: => F[B])(implicit F: FlatMap[F]): F[B] = ??? +trait Monad[F[_]] extends FlatMap[F] +trait MonadError[F[_], E] extends Monad[F]: + def raiseError[A](e: E): F[A] +trait Temporal[F[_]] extends MonadError[F, Throwable] + +trait FlatMapOps[F[_], A]: + def flatMap[B](f: A => F[B]): F[B] = ??? + def map[B](f: A => B): F[B] = ??? + def ifM[B](ifTrue: => F[B], ifFalse: => F[B])(implicit F: FlatMap[F]): F[B] = ??? + +implicit def toFlatMapOps[F[_], A](target: F[A])(implicit tc: FlatMap[F]): FlatMapOps[F, A] = ??? + +abstract class Ref[F[_], A] +object Ref: + final class ApplyBuilders[F[_]]: + def of[A](a: A): F[Ref[F, A]] = ??? + def apply[F[_]]: ApplyBuilders[F] = ??? + +trait DatabaseMetaData[F[_]] +class DatabaseMetaDataImpl[F[_]]( + statementClosed: Ref[F, Boolean], + resultSetClosed: Ref[F, Boolean] + ) extends DatabaseMetaData[F] + +trait LdbcConnection[F[_]]: + def getMetaData(): F[DatabaseMetaData[F]] + +class ConnectionImpl[F[_]: Temporal](using ev: MonadError[F, Throwable]) + extends LdbcConnection[F]: + def isClosed(): F[Boolean] = ??? + override def getMetaData(): F[DatabaseMetaData[F]] = + isClosed().ifM( + ev.raiseError(???), + (for + statementClosed <- Ref[F].of[Boolean](false) + resultSetClosed <- Ref[F].of[Boolean](false) + yield DatabaseMetaDataImpl[F]( + statementClosed, + resultSetClosed + )) + ) From cecfa8c5984c2b559c0568932cbdf01709ec4365 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 24 Jul 2024 12:40:02 +0100 Subject: [PATCH 378/827] Fail early & more clearly on shaded-broken classfiles The shading utility uses for the coursier or coursierapi classes has removed the RuntimeVisibleAnnotation attribute after the ScalaSignature attribute. Parsing such a classfile caused the Scala 3 compiler to parse the file as a Java classfile. It seems there something about how we deal with packages and package objects (which don't exist in Java) when we are incorrectly sent down this code path. The Scala 2 compiler already correctly caught and failed on this early, with this detail, so look to do the same here. Automating the test for this isn't easy. Because relying on an external dependency would be a hazard for CI, long term. Perhaps we could try to recreate a classfile with a similar bytecode problem, but that would be quite involved as well. I guess we could commit some or all of the classfiles from the original, but it's generally advised not to commit binaries to the source repo. So instead I'm leaving instructions as to how I manually tested this, in preparing this change. 
I used coursier's `cs` binary to download the jars and produce a classpath string: cs fetch -p org.scalameta:scalafmt-dynamic_2.13:3.8.1 Then I invoked `scalac` in the sbt shell, passing that classpath string: scalac -classpath $classpathAbove i20405.scala scalac -classpath $classpathAbove i20555.scala Using the two minisations in the 20405 and 20555 issues. --- .../dotty/tools/dotc/core/classfile/ClassfileParser.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 22a43dd524e1..9e7b59a0cfac 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -1163,7 +1163,10 @@ class ClassfileParser( // attribute isn't, this classfile is a compilation artifact. return Some(NoEmbedded) - if (scan(tpnme.ScalaSignatureATTR) && scan(tpnme.RuntimeVisibleAnnotationATTR)) { + if (scan(tpnme.ScalaSignatureATTR)) { + if !scan(tpnme.RuntimeVisibleAnnotationATTR) then + report.error(em"No RuntimeVisibleAnnotations in classfile with ScalaSignature attribute: ${classRoot.fullName}") + return None val attrLen = in.nextInt val nAnnots = in.nextChar var i = 0 From 532c2871f9c71eb2a39c431ec3616c7df889e92a Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 16 Jul 2024 23:38:45 +0200 Subject: [PATCH 379/827] fix: Only implement a deferred given in a class if its parent won't implement it --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 7 ++++++- tests/pos/i21189-alt.scala | 12 ++++++++++++ tests/pos/i21189.scala | 10 ++++++++++ 3 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21189-alt.scala create mode 100644 tests/pos/i21189.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index c518de7dbbfe..821fd2e36b79 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3038,7 +3038,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer body /** Implement givens that were declared with a `deferred` rhs. - * The a given value matching the declared type is searched in a + * The given value matching the declared type is searched in a * context directly enclosing the current class, in which all given * parameters of the current class are also defined. 
*/ @@ -3055,6 +3055,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer false else true + def willBeimplementedInParentClass(m: TermRef) = + val superCls = cls.superClass + superCls.exists && superCls.asClass.baseClasses.contains(m.symbol.owner) + def givenImpl(mbr: TermRef): ValDef = val dcl = mbr.symbol val target = dcl.info.asSeenFrom(cls.thisType, dcl.owner) @@ -3084,6 +3088,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cls.thisType.implicitMembers //.showing(i"impl def givens for $cls/$result") .filter(_.symbol.isAllOf(DeferredGivenFlags, butNot = Param)) + .filter(!willBeimplementedInParentClass(_)) // only implement the given in the topmost class //.showing(i"impl def filtered givens for $cls/$result") .filter(isGivenValue) .map(givenImpl) diff --git a/tests/pos/i21189-alt.scala b/tests/pos/i21189-alt.scala new file mode 100644 index 000000000000..08213cd627d4 --- /dev/null +++ b/tests/pos/i21189-alt.scala @@ -0,0 +1,12 @@ +//> using options -source:future -language:experimental.modularity + +class MySortedSet[T : Ord] extends SortedSet[T] + +trait Ord[T] + +trait Sorted[T] extends ParentOfSorted[T] + +trait ParentOfSorted[T]: + given Ord[T] as ord = compiletime.deferred + +class SortedSet[T : Ord] extends Sorted[T] diff --git a/tests/pos/i21189.scala b/tests/pos/i21189.scala new file mode 100644 index 000000000000..88a0bf601476 --- /dev/null +++ b/tests/pos/i21189.scala @@ -0,0 +1,10 @@ +//> using options -source:future -language:experimental.modularity + +class MySortedSet[T : Ord] extends SortedSet[T] + +trait Ord[T] + +trait Sorted[T]: + given Ord[T] as ord = compiletime.deferred + +class SortedSet[T : Ord] extends Sorted[T] From b8f9c2c2cc17abc1caf14199dfd67ca28130aef2 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 24 Jul 2024 11:46:30 +0200 Subject: [PATCH 380/827] Switch mapping of context bounds to using clauses in 3.6 Was future before. --- .../src/dotty/tools/dotc/ast/Desugar.scala | 2 +- .../scaladoc/tasty/ClassLikeSupport.scala | 3 +- staging/test-resources/repl-staging/i6263 | 2 +- tests/neg/ctx-bounds-priority-migration.scala | 13 +++++++ tests/neg/ctx-bounds-priority.scala | 6 +++ tests/neg/i10901.check | 38 +++++++++---------- tests/pos/i20901/Foo.tastycheck | 2 +- tests/semanticdb/metac.expect | 10 ++--- 8 files changed, 48 insertions(+), 28 deletions(-) create mode 100644 tests/neg/ctx-bounds-priority-migration.scala create mode 100644 tests/neg/ctx-bounds-priority.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index c360712999e2..785dac9b4658 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -300,7 +300,7 @@ object desugar { // implicit resolution in Scala 3. 
val paramssNoContextBounds = - val iflag = if Feature.sourceVersion.isAtLeast(`future`) then Given else Implicit + val iflag = if Feature.sourceVersion.isAtLeast(`3.6`) then Given else Implicit val flags = if isPrimaryConstructor then iflag | LocalParamAccessor else iflag | Param mapParamss(paramss) { tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 8823f6cb4e5e..497b58b6ed2c 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -588,7 +588,8 @@ trait ClassLikeSupport: // `def foo[A: ClassTag] = 1`. // Scala spec states that `$` should not be used in names and behaviour may be undefiend in such case. // Documenting method slightly different then its definition is withing the 'undefiend behaviour'. - symbol.paramSymss.flatten.find(_.name == name).exists(_.flags.is(Flags.Implicit)) + symbol.paramSymss.flatten.find(_.name == name).exists(p => + p.flags.is(Flags.Given) || p.flags.is(Flags.Implicit)) def handlePolyType(memberInfo: MemberInfo, polyType: PolyType): MemberInfo = val typeParamList = MemberInfo.TypeParameterList(polyType.paramNames.zip(polyType.paramBounds).toMap) diff --git a/staging/test-resources/repl-staging/i6263 b/staging/test-resources/repl-staging/i6263 index 8d967c1c58ac..0df9a9893ae1 100644 --- a/staging/test-resources/repl-staging/i6263 +++ b/staging/test-resources/repl-staging/i6263 @@ -3,7 +3,7 @@ scala> import quoted.staging.{Compiler => StagingCompiler, _} scala> implicit def compiler: StagingCompiler = StagingCompiler.make(getClass.getClassLoader) def compiler: scala.quoted.staging.Compiler scala> def fn[T : Type](v : T) = println("ok") -def fn[T](v: T)(implicit evidence$1: scala.quoted.Type[T]): Unit +def fn[T](v: T)(using evidence$1: scala.quoted.Type[T]): Unit scala> withQuotes { fn("foo") } ok scala> withQuotes { fn((1,2)) } diff --git a/tests/neg/ctx-bounds-priority-migration.scala b/tests/neg/ctx-bounds-priority-migration.scala new file mode 100644 index 000000000000..8fc819c1e089 --- /dev/null +++ b/tests/neg/ctx-bounds-priority-migration.scala @@ -0,0 +1,13 @@ +//> using options -source 3.5 +trait Eq[A] +trait Order[A] extends Eq[A]: + def toOrdering: Ordering[A] + +def f[Element: Eq: Order] = summon[Eq[Element]].toOrdering // ok + +def Test() = + val eq: Eq[Int] = ??? + val ord: Order[Int] = ??? + f(eq, ord) // error + f(using eq, ord) // ok + diff --git a/tests/neg/ctx-bounds-priority.scala b/tests/neg/ctx-bounds-priority.scala new file mode 100644 index 000000000000..6594642d67c3 --- /dev/null +++ b/tests/neg/ctx-bounds-priority.scala @@ -0,0 +1,6 @@ +//> using options -source 3.6 +trait Eq[A] +trait Order[A] extends Eq[A]: + def toOrdering: Ordering[A] + +def Test[Element: Eq: Order] = summon[Eq[Element]].toOrdering // error diff --git a/tests/neg/i10901.check b/tests/neg/i10901.check index 4a8fa5db28bf..325cdccc6aab 100644 --- a/tests/neg/i10901.check +++ b/tests/neg/i10901.check @@ -1,23 +1,23 @@ -- [E008] Not Found Error: tests/neg/i10901.scala:45:38 ---------------------------------------------------------------- 45 | val pos1: Point2D[Int,Double] = x º y // error | ^^^ - | value º is not a member of object BugExp4Point2D.IntT. - | An extension method was tried, but could not be fully constructed: - | - | º(x) - | - | failed with: - | - | Ambiguous overload. 
The overloaded alternatives of method º in object dsl with types - | [T1, T2] - | (x: BugExp4Point2D.ColumnType[T1]) - | (y: BugExp4Point2D.ColumnType[T2]) - | (implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | [T1, T2] - | (x: T1) - | (y: BugExp4Point2D.ColumnType[T2]) - | (implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | both match arguments ((x : BugExp4Point2D.IntT.type))((y : BugExp4Point2D.DoubleT.type)) + | value º is not a member of object BugExp4Point2D.IntT. + | An extension method was tried, but could not be fully constructed: + | + | º(x) + | + | failed with: + | + | Ambiguous overload. The overloaded alternatives of method º in object dsl with types + | [T1, T2] + | (x: BugExp4Point2D.ColumnType[T1]) + | (y: BugExp4Point2D.ColumnType[T2]) + | (using evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | [T1, T2] + | (x: T1) + | (y: BugExp4Point2D.ColumnType[T2]) + | (using evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | both match arguments ((x : BugExp4Point2D.IntT.type))((y : BugExp4Point2D.DoubleT.type)) -- [E008] Not Found Error: tests/neg/i10901.scala:48:38 ---------------------------------------------------------------- 48 | val pos4: Point2D[Int,Double] = x º 201.1 // error | ^^^ @@ -31,8 +31,8 @@ | Ambiguous overload. The overloaded alternatives of method º in object dsl with types | [T1, T2] | (x: BugExp4Point2D.ColumnType[T1]) - | (y: T2)(implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | [T1, T2](x: T1)(y: T2)(implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | (y: T2)(using evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | [T1, T2](x: T1)(y: T2)(using evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] | both match arguments ((x : BugExp4Point2D.IntT.type))((201.1d : Double)) -- [E008] Not Found Error: tests/neg/i10901.scala:62:16 ---------------------------------------------------------------- 62 | val y = "abc".foo // error diff --git a/tests/pos/i20901/Foo.tastycheck b/tests/pos/i20901/Foo.tastycheck index 565c5c793bad..583595a7eb0a 100644 --- a/tests/pos/i20901/Foo.tastycheck +++ b/tests/pos/i20901/Foo.tastycheck @@ -74,7 +74,7 @@ Trees (98 bytes, starting from ): 61: SHAREDtype 6 63: IDENTtpt 16 [T] 65: TYPEREFdirect 39 - 67: IMPLICIT + 67: GIVEN 68: IDENTtpt 17 [Nothing] 70: TYPEREF 17 [Nothing] 72: TERMREFpkg 2 [scala] diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 16f1b7c13d1f..dffed5c0d477 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2621,9 +2621,9 @@ example/Methods#m6(+1). => method m6 (param x: List[T]): Nothing example/Methods#m6(+1).(x) => param x: List[T] example/Methods#m6(+2). => method m6 (param x: List[T]): Nothing example/Methods#m6(+2).(x) => param x: List[T] -example/Methods#m7(). => method m7 [typeparam U ](param c: Methods[T], param l: List[U])(implicit param evidence$1: Ordering[U]): Nothing +example/Methods#m7(). 
=> method m7 [typeparam U ](param c: Methods[T], param l: List[U])(implicit given param evidence$1: Ordering[U]): Nothing example/Methods#m7().(c) => param c: Methods[T] -example/Methods#m7().(evidence$1) => implicit param evidence$1: Ordering[U] +example/Methods#m7().(evidence$1) => implicit given param evidence$1: Ordering[U] example/Methods#m7().(l) => param l: List[U] example/Methods#m7().[U] => typeparam U example/Methods#m9(). => method m9 (param x: m9().): Nothing @@ -3553,10 +3553,10 @@ example/Synthetic#F# => class F extends Object { self: F => +1 decls } example/Synthetic#F#``(). => primary ctor (): F example/Synthetic#J# => class J [typeparam T ] extends Object { self: J[T] => +4 decls } example/Synthetic#J#[T] => typeparam T -example/Synthetic#J#``(). => primary ctor [typeparam T ]()(implicit param evidence$1: Manifest[T]): J[T] -example/Synthetic#J#``().(evidence$1) => implicit param evidence$1: Manifest[T] +example/Synthetic#J#``(). => primary ctor [typeparam T ](implicit given param evidence$1: Manifest[T])(): J[T] +example/Synthetic#J#``().(evidence$1) => implicit given param evidence$1: Manifest[T] example/Synthetic#J#arr. => val method arr Array[T] -example/Synthetic#J#evidence$1. => private[this] implicit val method evidence$1 Manifest[T] +example/Synthetic#J#evidence$1. => private[this] implicit val given method evidence$1 Manifest[T] example/Synthetic#Name. => val method Name Regex example/Synthetic#``(). => primary ctor (): Synthetic example/Synthetic#a1. => val method a1 Int From f7996dac1c2112b5ccf220f6ca30babbc01240e5 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 24 Jul 2024 20:39:58 +0200 Subject: [PATCH 381/827] Adapt context bound evidence to implicits in last parameter list If we have a function like ```scala def foo[X: CB](...)(implicit x: T) ``` always map context bounds to implicit parameters, irrespective of version. Likewise, if we have a function ``scala def foo[X: CB](...)(using x: T) ``` always map context bounds to "using" parameters, irrespective of version. This avoids mixing implicit and using in one parameter list. --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 785dac9b4658..dba0834c95e8 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -300,7 +300,12 @@ object desugar { // implicit resolution in Scala 3. 
val paramssNoContextBounds = - val iflag = if Feature.sourceVersion.isAtLeast(`3.6`) then Given else Implicit + val iflag = paramss.lastOption.flatMap(_.headOption) match + case Some(param) if param.mods.isOneOf(GivenOrImplicit) => + param.mods.flags & GivenOrImplicit + case _ => + if Feature.sourceVersion.isAtLeast(`3.6`) then Given + else Implicit val flags = if isPrimaryConstructor then iflag | LocalParamAccessor else iflag | Param mapParamss(paramss) { tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) @@ -472,7 +477,14 @@ object desugar { case ValDefs(mparams) :: _ if mparams.exists(referencesBoundName) => params :: mparamss case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => - (params ++ mparams) :: Nil + val normParams = + if params.head.mods.flags.is(Given) != mparam.mods.flags.is(Given) then + params.map: param => + val normFlags = param.mods.flags &~ GivenOrImplicit | (mparam.mods.flags & (GivenOrImplicit)) + param.withMods(param.mods.withFlags(normFlags)) + .showing(i"ADAPTED PARAM $result ${result.mods.flags} for ${meth.name}") + else params + (normParams ++ mparams) :: Nil case mparams :: mparamss1 => mparams :: recur(mparamss1) case Nil => From ba3c94cbb10de794b220dd5c64e8e9df14f5a760 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 24 Jul 2024 20:42:48 +0200 Subject: [PATCH 382/827] Adapt spire to new scheme --- community-build/community-projects/spire | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire index d60fe2c38848..993e8c8c7a8e 160000 --- a/community-build/community-projects/spire +++ b/community-build/community-projects/spire @@ -1 +1 @@ -Subproject commit d60fe2c38848ef193031c18eab3a14d3306b3761 +Subproject commit 993e8c8c7a8e55be943d63c07c8263c1021add2f From 81b425061b24f2fda9cc4853bfe2b44a667ce0d5 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 25 Jul 2024 11:01:49 +0200 Subject: [PATCH 383/827] Adapt presentation compiler --- compiler/src/dotty/tools/dotc/util/Signatures.scala | 2 +- .../src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 3f7d7dd39531..ae6bc583bae8 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -499,7 +499,7 @@ object Signatures { def isSyntheticEvidence(name: String) = name.startsWith(NameKinds.ContextBoundParamName.separator) - && symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) + && symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.isOneOf(Flags.GivenOrImplicit)) def toTypeParam(tpe: PolyType): List[Param] = val evidenceParams = (tpe.paramNamess.flatten zip tpe.paramInfoss.flatten).flatMap: diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala index 19d603fcbb3b..a0dcb5276253 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala @@ -296,7 +296,7 @@ class ShortenedTypePrinter( val (methodParams, extParams) = splitExtensionParamss(gsym) val paramss = methodParams ++ extParams lazy val implicitParams: List[Symbol] = - 
paramss.flatMap(params => params.filter(p => p.is(Flags.Implicit))) + paramss.flatMap(params => params.filter(p => p.isOneOf(Flags.GivenOrImplicit))) lazy val implicitEvidenceParams: Set[Symbol] = implicitParams From 857c6c8bfc008e7b7073e099ec97fa3865402a86 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 15 Jul 2024 17:07:41 +0200 Subject: [PATCH 384/827] fix: show zero extent references when using pc --- .../src/main/dotty/tools/pc/PcCollector.scala | 12 ++- .../dotty/tools/pc/PcReferencesProvider.scala | 4 +- .../tools/pc/tests/PcReferencesSuite.scala | 86 +++++++++++++++++++ .../dotty/tools/pc/utils/RangeReplace.scala | 19 ++-- 4 files changed, 111 insertions(+), 10 deletions(-) create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala index 5de80cda4ddf..1ebfd405768e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala @@ -35,6 +35,8 @@ trait PcCollector[T]: parent: Option[Tree] )(tree: Tree| EndMarker, pos: SourcePosition, symbol: Option[Symbol]): T + def allowZeroExtentImplicits: Boolean = false + def resultAllOccurences(): Set[T] = def noTreeFilter = (_: Tree) => true def noSoughtFilter = (_: Symbol => Boolean) => true @@ -87,6 +89,10 @@ trait PcCollector[T]: def isCorrect = !span.isZeroExtent && span.exists && span.start < sourceText.size && span.end <= sourceText.size + extension (tree: Tree) + def isCorrectSpan = + tree.span.isCorrect || (allowZeroExtentImplicits && tree.symbol.is(Flags.Implicit)) + def traverseSought( filter: Tree => Boolean, soughtFilter: (Symbol => Boolean) => Boolean @@ -107,7 +113,7 @@ trait PcCollector[T]: * All indentifiers such as: * val a = <> */ - case ident: Ident if ident.span.isCorrect && filter(ident) => + case ident: Ident if ident.isCorrectSpan && filter(ident) => // symbols will differ for params in different ext methods, but source pos will be the same if soughtFilter(_.sourcePos == ident.symbol.sourcePos) then @@ -122,7 +128,7 @@ trait PcCollector[T]: * val x = new <
>(1) */ case sel @ Select(New(t), _) - if sel.span.isCorrect && + if sel.isCorrectSpan && sel.symbol.isConstructor && t.symbol == NoSymbol => if soughtFilter(_ == sel.symbol.owner) then @@ -137,7 +143,7 @@ trait PcCollector[T]: * val a = hello.<> */ case sel: Select - if sel.span.isCorrect && filter(sel) && + if sel.isCorrectSpan && filter(sel) && !sel.isForComprehensionMethod => occurrences + collect( sel, diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala index 8d22ce320eee..49ed313faec4 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala @@ -23,6 +23,8 @@ class PcReferencesProvider( request: ReferencesRequest, ) extends WithCompilationUnit(driver, request.file()) with PcCollector[Option[(String, Option[lsp4j.Range])]]: + override def allowZeroExtentImplicits: Boolean = true + private def soughtSymbols = if(request.offsetOrSymbol().isLeft()) { val offsetParams = CompilerOffsetParams( @@ -64,4 +66,4 @@ class PcReferencesProvider( } .toList case _ => Nil -end PcReferencesProvider \ No newline at end of file +end PcReferencesProvider diff --git a/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala new file mode 100644 index 000000000000..aee3fd37617f --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala @@ -0,0 +1,86 @@ +package dotty.tools.pc.tests +import dotty.tools.pc.base.BasePCSuite +import dotty.tools.pc.utils.RangeReplace +import scala.meta.internal.pc.PcReferencesRequest +import scala.meta.internal.metals.CompilerVirtualFileParams +import java.net.URI +import scala.meta.internal.metals.EmptyCancelToken +import org.eclipse.lsp4j.jsonrpc.messages.{Either => JEither} +import scala.meta.internal.jdk.CollectionConverters.* + +import org.junit.Test + +class PcReferencesSuite extends BasePCSuite with RangeReplace { + def check( + original: String, + ): Unit = + val edit = original.replaceAll("(<<|>>)", "") + val expected = original.replaceAll("@@", "") + val base = original.replaceAll("(<<|>>|@@)", "") + + val (code, offset) = params(edit, "Highlight.scala") + val ranges = presentationCompiler + .references( + PcReferencesRequest( + CompilerVirtualFileParams( + URI.create("file:/Highlight.scala"), + code, + EmptyCancelToken + ), + includeDefinition = false, + offsetOrSymbol = JEither.forLeft(offset) + ) + ) + .get() + .asScala + .flatMap(_.locations().asScala.map(_.getRange())) + .toList + + assertEquals( + renderRangesAsString(base, ranges), + expected, + "references should match" + ) + + @Test def `implicit-args` = + check( + """|package example + | + |class Bar(i: Int) + | + |object Hello { + | def m(i: Int)(implicit b: Bar) = ??? 
+ | val foo = { + | implicit val ba@@rr: Bar = new Bar(1) + | m(3)<<>> + | } + |} + |""".stripMargin + ) + + @Test def `implicit-args-2` = + check( + """|package example + | + |class Bar(i: Int) + |class Foo(implicit b: Bar) + | + |object Hello { + | implicit val ba@@rr: Bar = new Bar(1) + | val foo = new Foo<<>> + |} + |""".stripMargin + ) + + @Test def `case-class` = + check( + """|case class Ma@@in(i: Int) + |""".stripMargin + ) + + @Test def `case-class-with-implicit` = + check( + """"|case class A()(implicit val fo@@o: Int) + |""".stripMargin + ) +} diff --git a/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala b/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala index 0b41b106eb02..deafad4987ce 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala @@ -12,14 +12,21 @@ trait RangeReplace: def renderHighlightsAsString( code: String, highlights: List[DocumentHighlight] + ): String = renderRangesAsString(code, highlights.map(_.getRange())) + + def renderRangesAsString( + code: String, + highlights: List[Range], + alreadyAddedMarkings: List[(Int, Int)] = Nil, + currentBase: Option[String] = None ): String = highlights - .foldLeft((code, immutable.List.empty[(Int, Int)])) { - case ((base, alreadyAddedMarkings), location) => - replaceInRangeWithAdjustmens( + .foldLeft((currentBase.getOrElse(code), alreadyAddedMarkings)) { + case ((base, alreadyAddedMarkings), range) => + replaceInRangeWithAdjustments( code, base, - location.getRange, + range, alreadyAddedMarkings ) } @@ -31,9 +38,9 @@ trait RangeReplace: prefix: String = "<<", suffix: String = ">>" ): String = - replaceInRangeWithAdjustmens(base, base, range, List(), prefix, suffix)._1 + replaceInRangeWithAdjustments(base, base, range, List(), prefix, suffix)._1 - protected def replaceInRangeWithAdjustmens( + protected def replaceInRangeWithAdjustments( code: String, currentBase: String, range: Range, From d2c6d6049efda132a27874dab1414d6d26fecfa6 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 15 Jul 2024 17:10:10 +0200 Subject: [PATCH 385/827] fix: use already imported package aliases for auto import --- .../src/main/dotty/tools/pc/AutoImports.scala | 9 ++++++++- .../tools/pc/tests/edit/AutoImportsSuite.scala | 13 +++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala index bf814ef682e0..896954c4e1a4 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala @@ -269,7 +269,14 @@ object AutoImports: private def importName(sym: Symbol): String = if indexedContext.importContext.toplevelClashes(sym) then s"_root_.${sym.fullNameBackticked(false)}" - else sym.fullNameBackticked(false) + else + sym.ownersIterator.zipWithIndex.foldLeft((List.empty[String], false)) { case ((acc, isDone), (sym, idx)) => + if(isDone || sym.isEmptyPackage || sym.isRoot) (acc, true) + else indexedContext.rename(sym) match + case Some(renamed) => (renamed :: acc, true) + case None if !sym.isPackageObject => (sym.nameBackticked(false) :: acc, false) + case None => (acc, false) + }._1.mkString(".") end AutoImportsGenerator private def autoImportPosition( diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala 
b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala index ce5ae4a1cca4..e4ef8c0f747d 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala @@ -436,6 +436,19 @@ class AutoImportsSuite extends BaseAutoImportsSuite: |""".stripMargin ) + @Test def `use-packages-in-scope` = + checkEdit( + """|import scala.collection.mutable as mut + | + |val l = <>(2) + |""".stripMargin, + """|import scala.collection.mutable as mut + |import mut.ListBuffer + | + |val l = ListBuffer(2) + |""".stripMargin + ) + private def ammoniteWrapper(code: String): String = // Vaguely looks like a scala file that Ammonite generates // from a sc file. From 2cfc5619b5b5089527d56b11e5189918f881bcfe Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 15 Jul 2024 17:26:25 +0200 Subject: [PATCH 386/827] improvement: add info needed for running tests to symbol info --- .../tools/pc/SymbolInformationProvider.scala | 18 ++++++++++++++++++ project/Build.scala | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala index 18d6a4ec8621..da075e21f486 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala @@ -1,5 +1,6 @@ package dotty.tools.pc +import scala.collection.mutable import scala.util.control.NonFatal import scala.meta.pc.PcSymbolKind @@ -37,11 +38,25 @@ class SymbolInformationProvider(using Context): if classSym.isClass then classSym.asClass.parentSyms.map(SemanticdbSymbols.symbolName) else Nil + val allParents = + val visited = mutable.Set[Symbol]() + def collect(sym: Symbol): Unit = { + visited += sym + if sym.isClass + then sym.asClass.parentSyms.foreach { + case parent if !visited(parent) => + collect(parent) + case _ => + } + } + collect(classSym) + visited.toList.map(SemanticdbSymbols.symbolName) val dealisedSymbol = if sym.isAliasType then sym.info.deepDealias.typeSymbol else sym val classOwner = sym.ownersIterator.drop(1).find(s => s.isClass || s.is(Flags.Module)) val overridden = sym.denot.allOverriddenSymbols.toList + val memberDefAnnots = sym.info.membersBasedOnFlags(Flags.Method, Flags.EmptyFlags).flatMap(_.allSymbols).flatMap(_.denot.annotations) val pcSymbolInformation = PcSymbolInformation( @@ -56,6 +71,9 @@ class SymbolInformationProvider(using Context): properties = if sym.is(Flags.Abstract) then List(PcSymbolProperty.ABSTRACT) else Nil, + recursiveParents = allParents, + annotations = sym.denot.annotations.map(_.symbol.showFullName), + memberDefsAnnotations = memberDefAnnots.map(_.symbol.showFullName).toList ) Some(pcSymbolInformation) diff --git a/project/Build.scala b/project/Build.scala index 86b0b0d50c03..8c9a0e69f0ef 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1359,7 +1359,7 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings lazy val presentationCompilerSettings = { - val mtagsVersion = "1.3.2" + val mtagsVersion = "1.3.3" Seq( libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", From 3b3b8a77ef911825bfb0d5dc5406b641332aaa07 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Mon, 15 Jul 2024 18:17:25 +0200 Subject: [PATCH 387/827] feat: add value completions for union types --- .../pc/completions/CompletionValue.scala | 9 + 
.../tools/pc/completions/Completions.scala | 53 +--- .../pc/completions/SingletonCompletions.scala | 133 ++++++++ .../SingletonCompletionsSuite.scala | 300 ++++++++++++++++++ 4 files changed, 459 insertions(+), 36 deletions(-) create mode 100644 presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/completion/SingletonCompletionsSuite.scala diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala index 9071b2cd2a23..98cceae149d3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala @@ -354,6 +354,15 @@ object CompletionValue: description override def insertMode: Option[InsertTextMode] = Some(InsertTextMode.AsIs) + case class SingletonValue(label: String, info: Type, override val range: Option[Range]) + extends CompletionValue: + override def insertText: Option[String] = Some(label) + override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = + s"$label: ${printer.tpe(info)}" + + override def completionItemKind(using Context): CompletionItemKind = + CompletionItemKind.Constant + def namedArg(label: String, sym: ParamSymbol)(using Context ): CompletionValue = diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index db578e32663f..03bb8d03d1db 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -319,7 +319,7 @@ class Completions( val ScalaCliCompletions = new ScalaCliCompletions(coursierComplete, pos, text) - path match + val (advanced, exclusive) = path match case ScalaCliCompletions(dependency) => (ScalaCliCompletions.contribute(dependency), true) @@ -525,7 +525,10 @@ class Completions( config.isCompletionSnippetsEnabled() ) (args, false) - end match + val singletonCompletions = InterCompletionType.inferType(path).map( + SingletonCompletions.contribute(path, _, completionPos) + ).getOrElse(Nil) + (singletonCompletions ++ advanced, exclusive) end advancedCompletions private def isAmmoniteCompletionPosition( @@ -704,6 +707,7 @@ class Completions( case fileSysMember: CompletionValue.FileSystemMember => (fileSysMember.label, true) case ii: CompletionValue.IvyImport => (ii.label, true) + case sv: CompletionValue.SingletonValue => (sv.label, true) if !alreadySeen(id) && include then alreadySeen += id @@ -911,38 +915,18 @@ class Completions( else 2 } ) - - /** - * This one is used for the following case: - * ```scala - * def foo(argument: Int): Int = ??? - * val argument = 42 - * foo(arg@@) // completions should be ordered as : - * // - argument (local val) - actual value comes first - * // - argument = ... (named arg) - named arg after - * // - ... 
all other options - * ``` - */ - def compareInApplyParams(o1: CompletionValue, o2: CompletionValue): Int = - def priority(v: CompletionValue): Int = - v match - case _: CompletionValue.Compiler => 0 - case CompletionValue.ExtraMethod(_, _: CompletionValue.Compiler) => 0 - case _ => 1 - - priority(o1) - priority(o2) - end compareInApplyParams - - def prioritizeKeywords(o1: CompletionValue, o2: CompletionValue): Int = + def prioritizeByClass(o1: CompletionValue, o2: CompletionValue): Int = def priority(v: CompletionValue): Int = v match - case _: CompletionValue.CaseKeyword => 0 - case _: CompletionValue.NamedArg => 1 - case _: CompletionValue.Keyword => 2 - case _ => 3 + case _: CompletionValue.SingletonValue => 0 + case _: CompletionValue.Compiler => 1 + case _: CompletionValue.CaseKeyword => 2 + case _: CompletionValue.NamedArg => 3 + case _: CompletionValue.Keyword => 4 + case _ => 5 priority(o1) - priority(o2) - end prioritizeKeywords + end prioritizeByClass /** * Some completion values should be shown first such as CaseKeyword and * NamedArg @@ -1041,12 +1025,9 @@ class Completions( end if end if case _ => - val byApplyParams = compareInApplyParams(o1, o2) - if byApplyParams != 0 then byApplyParams - else - val keywords = prioritizeKeywords(o1, o2) - if keywords != 0 then keywords - else compareByRelevance(o1, o2) + val byClass = prioritizeByClass(o1, o2) + if byClass != 0 then byClass + else compareByRelevance(o1, o2) end compare end Completions diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala new file mode 100644 index 000000000000..769f47114c98 --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala @@ -0,0 +1,133 @@ +package dotty.tools.pc.completions + +import scala.meta.internal.metals.Fuzzy +import dotty.tools.pc.utils.InteractiveEnrichments.* +import dotty.tools.pc.completions.CompletionValue.SingletonValue + +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.Symbols +import dotty.tools.dotc.core.Types.AndType +import dotty.tools.dotc.core.Types.AppliedType +import dotty.tools.dotc.core.Types.ConstantType +import dotty.tools.dotc.core.Types.OrType +import dotty.tools.dotc.core.Types.TermRef +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Types.TypeRef +import dotty.tools.dotc.util.Spans.Span +import dotty.tools.dotc.core.Symbols.defn + +object SingletonCompletions: + def contribute( + path: List[Tree], + tpe0: Type, + completionPos: CompletionPos + )(using ctx: Context): List[CompletionValue] = + for { + (name, span) <- + path match + case (i @ Ident(name)) :: _ => List(name.toString() -> i.span) + case (l @ Literal(const)) :: _ => List(const.show -> l.span) + case _ => Nil + query = name.replace(Cursor.value, "") + tpe = tpe0 match + // for Tuple 2 we want to suggest first arg completion + case AppliedType(t: TypeRef, args) if t.classSymbol == Symbols.defn.Tuple2 && args.nonEmpty => + args.head + case t => t + singletonValues = collectSingletons(tpe).map(_.show) + range = completionPos.originalCursorPosition.withStart(span.start).withEnd(span.start + query.length).toLsp + value <- singletonValues.collect { + case name if Fuzzy.matches(query, name) => + SingletonValue(name, tpe, Some(range)) 
+ } + } yield value + + private def collectSingletons(tpe: Type)(using Context): List[Constant] = + tpe.deepDealias match + case ConstantType(value) => List(value) + case OrType(tpe1, tpe2) => + collectSingletons(tpe1) ++ collectSingletons(tpe2) + case AndType(tpe1, tpe2) => + collectSingletons(tpe1).intersect(collectSingletons(tpe2)) + case _ => Nil + +object InterCompletionType: + def inferType(path: List[Tree])(using Context): Option[Type] = + path match + case (lit: Literal) :: Select(Literal(_), _) :: Apply(Select(Literal(_), _), List(s: Select)) :: rest if s.symbol == defn.Predef_undefined => + inferType(rest, lit.span) + case ident :: rest => inferType(rest, ident.span) + case _ => None + + def inferType(path: List[Tree], span: Span)(using Context): Option[Type] = + path match + case Apply(head, List(p : Select)) :: rest if p.name == StdNames.nme.??? && p.qualifier.symbol.name == StdNames.nme.Predef && p.span.isSynthetic => + inferType(rest, span) + case Block(_, expr) :: rest if expr.span.contains(span) => + inferType(rest, span) + case If(cond, _, _) :: rest if !cond.span.contains(span) => + inferType(rest, span) + case Typed(expr, tpt) :: _ if expr.span.contains(span) && !tpt.tpe.isErroneous => Some(tpt.tpe) + case Block(_, expr) :: rest if expr.span.contains(span) => + inferType(rest, span) + case Bind(_, body) :: rest if body.span.contains(span) => inferType(rest, span) + case Alternative(_) :: rest => inferType(rest, span) + case Try(block, _, _) :: rest if block.span.contains(span) => inferType(rest, span) + case CaseDef(_, _, body) :: Try(_, cases, _) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => inferType(rest, span) + case If(cond, _, _) :: rest if !cond.span.contains(span) => inferType(rest, span) + case CaseDef(_, _, body) :: Match(_, cases) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => + inferType(rest, span) + case NamedArg(_, arg) :: rest if arg.span.contains(span) => inferType(rest, span) + // x match + // case @@ + case CaseDef(pat, _, _) :: Match(sel, cases) :: rest if pat.span.contains(span) && cases.exists(_.span.contains(span)) && !sel.tpe.isErroneous => + sel.tpe match + case tpe: TermRef => Some(tpe.symbol.info).filterNot(_.isErroneous) + case tpe => Some(tpe) + // List(@@) + case SeqLiteral(_, tpe) :: _ if !tpe.tpe.isErroneous => + Some(tpe.tpe) + // val _: T = @@ + // def _: T = @@ + case (defn: ValOrDefDef) :: rest if !defn.tpt.tpe.isErroneous => Some(defn.tpt.tpe) + // f(@@) + case (app: Apply) :: rest => + val param = + for { + ind <- app.args.zipWithIndex.collectFirst { + case (arg, id) if arg.span.contains(span) => id + } + params <- app.symbol.paramSymss.find(!_.exists(_.isTypeParam)) + param <- params.get(ind) + } yield param.info + param match + // def f[T](a: T): T = ??? 
+ // f[Int](@@) + // val _: Int = f(@@) + case Some(t : TypeRef) if t.symbol.is(Flags.TypeParam) => + for { + (typeParams, args) <- + app match + case Apply(TypeApply(fun, args), _) => + val typeParams = fun.symbol.paramSymss.headOption.filter(_.forall(_.isTypeParam)) + typeParams.map((_, args.map(_.tpe))) + // val f: (j: "a") => Int + // f(@@) + case Apply(Select(v, StdNames.nme.apply), _) => + v.symbol.info match + case AppliedType(des, args) => + Some((des.typeSymbol.typeParams, args)) + case _ => None + case _ => None + ind = typeParams.indexOf(t.symbol) + tpe <- args.get(ind) + if !tpe.isErroneous + } yield tpe + case Some(tpe) => Some(tpe) + case _ => None + case _ => None + diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/SingletonCompletionsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/SingletonCompletionsSuite.scala new file mode 100644 index 000000000000..25d1418900fd --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/SingletonCompletionsSuite.scala @@ -0,0 +1,300 @@ +package dotty.tools.pc.tests.completion + +import dotty.tools.pc.base.BaseCompletionSuite + +import org.junit.Test + +class SingletonCompletionsSuite extends BaseCompletionSuite { + + @Test def `basic` = + check( + """|val k: 1 = @@ + |""".stripMargin, + "1: 1", + topLines = Some(1) + ) + + @Test def `literal` = + check( + """|val k: 1 = 1@@ + |""".stripMargin, + "1: 1", + topLines = Some(1) + ) + + @Test def `string` = + check( + """|val k: "aaa" = "@@" + |""".stripMargin, + """|"aaa": "aaa" + |""".stripMargin + ) + + @Test def `string-edit` = + checkEdit( + """|val k: "aaa" = "@@" + |""".stripMargin, + """|val k: "aaa" = "aaa" + |""".stripMargin, + assertSingleItem = false + ) + + @Test def `string-edit-2` = + checkEdit( + """|val k: "aaa" = @@ //something + |""".stripMargin, + """|val k: "aaa" = "aaa" //something + |""".stripMargin, + assertSingleItem = false + ) + + @Test def `union` = + check( + """|val k: "aaa" | "bbb" = "@@" + |""".stripMargin, + """|"aaa": "aaa" | "bbb" + |"bbb": "aaa" | "bbb" + |""".stripMargin + ) + + @Test def `type-alias-union` = + check( + """|type Color = "red" | "green" | "blue" + |val c: Color = "r@@" + |""".stripMargin, + """|"red": Color + |""".stripMargin + ) + + @Test def `param` = + check( + """|type Color = "red" | "green" | "blue" + |def paint(c: Color) = ??? 
+ |val _ = paint(@@) + |""".stripMargin, + """|"red": Color + |"green": Color + |"blue": Color + |c = : Color + |""".stripMargin, + topLines = Some(4) + ) + + @Test def `with-block` = + check( + """|type Color = "red" | "green" | "blue" + |def c: Color = { + | "r@@" + |} + |""".stripMargin, + """|"red": Color + |""".stripMargin + ) + + @Test def `if-statement` = + check( + """|type Color = "red" | "green" | "blue" + |def c(shouldBeBlue: Boolean): Color = { + | if(shouldBeBlue) "b@@" + | else "red" + |} + |""".stripMargin, + """|"blue": Color + |""".stripMargin + ) + + @Test def `if-statement-2` = + check( + """|type Color = "red" | "green" | "blue" + |def c(shouldBeBlue: Boolean): Color = { + | if(shouldBeBlue) { + | println("is blue") + | "b@@" + | } else "red" + |} + |""".stripMargin, + """|"blue": Color + |""".stripMargin + ) + + @Test def `if-statement-3` = + check( + """|type Color = "red" | "green" | "blue" + |def c(shouldBeBlue: Boolean): Color = { + | if(shouldBeBlue) { + | "b@@" + | println("is blue") + | "blue" + | } else "red" + |} + |""".stripMargin, + """""".stripMargin + ) + + @Test def `middle-of-a-block` = + check( + """|type Color = "red" | "green" | "blue" + |def c: Color = { + | "r@@" + | ??? + |} + |""".stripMargin, + "" + ) + + @Test def overloaded = + check( + """| + |type Color = "red" | "green" | "blue" + |def foo(i: Int) = ??? + |def foo(c: Color) = ??? + | + |def c = foo(@@) + |""".stripMargin, + """|c = : Color + |i = : Int + |""".stripMargin, + topLines = Some(2) + ) + + @Test def `and-type` = + check( + """|type Color = "red" | "green" | "blue" | "black" + |type FordColor = Color & "black" + |val i: FordColor = "@@" + |""".stripMargin, + """|"black": FordColor + |""".stripMargin + ) + + @Test def list = + check( + """|type Color = "red" | "green" | "blue" + |val i: List[Color] = List("@@") + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def option = + check( + """|type Color = "red" | "green" | "blue" + |val i: Option[Color] = Some("@@") + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def map = + check( + """|type Color = "red" | "green" | "blue" + |val i: Option[Int] = Some(1) + |val g: Option[Color] = i.map { _ => "@@" } + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def `some-for-comp` = + check( + """|type Color = "red" | "green" | "blue" + |val i: Option[Int] = Some(1) + |val g: Option[Color] = + | for + | _ <- i + | yield "@@" + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def `some-for-comp-1` = + check( + """|type Color = "red" | "green" | "blue" + |val i: Option[Int] = Some(1) + |val g: Option[Color] = + | for + | _ <- i + | _ <- i + | if i > 2 + | yield "@@" + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def lambda = + check( + """|def m = + | val j = (f: "foo") => 1 + | j("f@@") + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) + + @Test def `match-case-result` = + check( + """|val h: "foo" = + | 1 match + | case _ => "@@" + |""".stripMargin, + """|"foo": "foo" + 
|""".stripMargin + ) + + @Test def `dont-show-on-select` = + check( + """|val f: "foo" = List(1,2,3).@@ + |""".stripMargin, + "", + filter = _ == "\"foo\": \"foo\"" + ) + + @Test def `match-case` = + check( + """|def h(foo: "foo") = + | foo match + | case "@@" => + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) + + @Test def `match-case2` = + check( + """|def h = + | ("foo" : "foo") match + | case "@@" => + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) + + @Test def `named-args` = + check( + """|def h(foo: "foo") = ??? + |def k = h(foo = "@@") + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) + + @Test def `map-type` = + check( + """|def m = Map["foo", Int]("@@") + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) +} From 637b1d3dc3ddddd0cbe3e2a4ac5ec3e972a6ad05 Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Tue, 16 Jul 2024 12:58:05 +0200 Subject: [PATCH 388/827] deal with unsafe nulls and fix sorting --- .../main/dotty/tools/pc/completions/Completions.scala | 1 + .../tools/pc/completions/SingletonCompletions.scala | 2 +- .../test/dotty/tools/pc/tests/PcReferencesSuite.scala | 10 +++++++--- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index 03bb8d03d1db..a517df60e833 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -920,6 +920,7 @@ class Completions( v match case _: CompletionValue.SingletonValue => 0 case _: CompletionValue.Compiler => 1 + case CompletionValue.ExtraMethod(_, _: CompletionValue.Compiler) => 1 case _: CompletionValue.CaseKeyword => 2 case _: CompletionValue.NamedArg => 3 case _: CompletionValue.Keyword => 4 diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala index 769f47114c98..6e59c9afca3a 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala @@ -32,7 +32,7 @@ object SingletonCompletions: case (i @ Ident(name)) :: _ => List(name.toString() -> i.span) case (l @ Literal(const)) :: _ => List(const.show -> l.span) case _ => Nil - query = name.replace(Cursor.value, "") + query = name.replace(Cursor.value, "").nn tpe = tpe0 match // for Tuple 2 we want to suggest first arg completion case AppliedType(t: TypeRef, args) if t.classSymbol == Symbols.defn.Tuple2 && args.nonEmpty => diff --git a/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala index aee3fd37617f..15ee35928872 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala @@ -1,12 +1,16 @@ package dotty.tools.pc.tests + +import scala.language.unsafeNulls + import dotty.tools.pc.base.BasePCSuite import dotty.tools.pc.utils.RangeReplace -import scala.meta.internal.pc.PcReferencesRequest -import scala.meta.internal.metals.CompilerVirtualFileParams + import java.net.URI -import scala.meta.internal.metals.EmptyCancelToken import org.eclipse.lsp4j.jsonrpc.messages.{Either => JEither} import 
scala.meta.internal.jdk.CollectionConverters.* +import scala.meta.internal.metals.CompilerVirtualFileParams +import scala.meta.internal.metals.EmptyCancelToken +import scala.meta.internal.pc.PcReferencesRequest import org.junit.Test From 875af444d008f84dedc39cc531779c0dc76a895c Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Thu, 25 Jul 2024 12:39:48 +0200 Subject: [PATCH 389/827] chore: bump mtags version --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 8c9a0e69f0ef..4f90566a60f9 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1359,7 +1359,7 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings lazy val presentationCompilerSettings = { - val mtagsVersion = "1.3.3" + val mtagsVersion = "1.3.4" Seq( libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", From d28bd5294ffa732fc90ed295b6ca88215c6fe965 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 25 Jul 2024 15:06:15 +0200 Subject: [PATCH 390/827] simplify trees that appear in tastycheck test --- tests/pos/i21154/Z.scala | 10 +- tests/pos/i21154/Z.tastycheck | 1555 +++++---------------------------- 2 files changed, 241 insertions(+), 1324 deletions(-) diff --git a/tests/pos/i21154/Z.scala b/tests/pos/i21154/Z.scala index e631d159f8c5..63bb50294180 100644 --- a/tests/pos/i21154/Z.scala +++ b/tests/pos/i21154/Z.scala @@ -3,7 +3,9 @@ // in the original issue https://github.com/scala/scala3/issues/21154, the non-deterministic tasty // depends on the order of compilation of files, the use-site (A.scala) has to come first, // and the file defining the enum has to come second (Z.scala), A.scala in namer will force Z to complete. -enum Z: - case AOptions() - case BOptions() - case COptions() +sealed trait Z + +object Z: + class AOptions() extends Z + class BOptions() extends Z + class COptions() extends Z diff --git a/tests/pos/i21154/Z.tastycheck b/tests/pos/i21154/Z.tastycheck index ac45ca6c4a53..8ba6e2a43a4d 100644 --- a/tests/pos/i21154/Z.tastycheck +++ b/tests/pos/i21154/Z.tastycheck @@ -3,1338 +3,253 @@ Header: tooling: UUID: -Names (936 bytes, starting from ): +Names (332 bytes, starting from ): 0: ASTs 1: 2: Z - 3: + 3: Object 4: java 5: lang 6: java[Qualified . lang] - 7: Object - 8: java[Qualified . lang][Qualified . Object] - 9: [Signed Signature(List(),java.lang.Object) @] - 10: Enum - 11: scala - 12: reflect - 13: scala[Qualified . reflect] - 14: Unit - 15: AOptions - 16: BOptions - 17: COptions - 18: SourceFile - 19: annotation - 20: scala[Qualified . annotation] - 21: internal - 22: scala[Qualified . annotation][Qualified . internal] - 23: scala[Qualified . annotation][Qualified . internal][Qualified . SourceFile] - 24: String - 25: java[Qualified . lang][Qualified . String] - 26: [Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @] - 27: - 28: Child - 29: scala[Qualified . annotation][Qualified . internal][Qualified . Child] - 30: [Signed Signature(List(1),scala.annotation.internal.Child) @] - 31: Z[ModuleClass] - 32: [Signed Signature(List(),Z$) @] - 33: AnyRef - 34: Sum - 35: Mirror - 36: Mirror[ModuleClass] - 37: deriving - 38: scala[Qualified . deriving] - 39: _ - 40: writeReplace - 41: runtime - 42: scala[Qualified . runtime] - 43: ModuleSerializationProxy - 44: scala[Qualified . runtime][Qualified . ModuleSerializationProxy] - 45: Class - 46: java[Qualified . lang][Qualified . 
Class] - 47: [Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @] - 48: [Signed Signature(List(),Z) @] - 49: hashCode - 50: Int - 51: _hashCode - 52: scala[Qualified . Int] - 53: Product - 54: scala[Qualified . Product] - 55: _hashCode[Signed Signature(List(scala.Product),scala.Int) @_hashCode] - 56: ScalaRunTime - 57: ScalaRunTime[ModuleClass] - 58: equals - 59: x$0 - 60: Any - 61: Boolean - 62: || - 63: scala[Qualified . Boolean] - 64: ||[Signed Signature(List(scala.Boolean),scala.Boolean) @||] - 65: eq - 66: eq[Signed Signature(List(java.lang.Object),scala.Boolean) @eq] - 67: $asInstanceOf$ - 68: $asInstanceOf$[Signed Signature(List(1),java.lang.Object) @$asInstanceOf$] - 69: unchecked - 70: scala[Qualified . unchecked] - 71: [Signed Signature(List(),scala.unchecked) @] - 72: toString - 73: _toString - 74: _toString[Signed Signature(List(scala.Product),java.lang.String) @_toString] - 75: canEqual - 76: that - 77: isInstanceOf - 78: isInstanceOf[Signed Signature(List(1),scala.Boolean) @isInstanceOf] - 79: productArity - 80: productPrefix - 81: Predef - 82: productElement - 83: n - 84: IndexOutOfBoundsException - 85: java[Qualified . lang][Qualified . IndexOutOfBoundsException] - 86: [Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @] - 87: toString[Signed Signature(List(),java.lang.String) @toString] - 88: productElementName - 89: copy - 90: Z[ModuleClass][Qualified . AOptions] - 91: [Signed Signature(List(),Z$.AOptions) @] - 92: ordinal - 93: AOptions[ModuleClass] - 94: Z[ModuleClass][Qualified . AOptions][ModuleClass] - 95: [Signed Signature(List(),Z$.AOptions$) @] - 96: apply - 97: unapply - 98: x$1 - 99: MirroredMonoType - 100: fromProduct - 101: Z[ModuleClass][Qualified . BOptions] - 102: [Signed Signature(List(),Z$.BOptions) @] - 103: BOptions[ModuleClass] - 104: Z[ModuleClass][Qualified . BOptions][ModuleClass] - 105: [Signed Signature(List(),Z$.BOptions$) @] - 106: Z[ModuleClass][Qualified . COptions] - 107: [Signed Signature(List(),Z$.COptions) @] - 108: COptions[ModuleClass] - 109: Z[ModuleClass][Qualified . COptions][ModuleClass] - 110: [Signed Signature(List(),Z$.COptions$) @] - 111: fromOrdinal - 112: util - 113: java[Qualified . util] - 114: NoSuchElementException - 115: java[Qualified . util][Qualified . NoSuchElementException] - 116: [Signed Signature(List(java.lang.String),java.util.NoSuchElementException) @] - 117: + - 118: +[Signed Signature(List(java.lang.Object),java.lang.String) @+] - 119: enum Z has no case with ordinal: - 120: Positions - 121: Comments - 122: Attributes + 7: + 8: Unit + 9: scala + 10: SourceFile + 11: annotation + 12: scala[Qualified . annotation] + 13: internal + 14: scala[Qualified . annotation][Qualified . internal] + 15: scala[Qualified . annotation][Qualified . internal][Qualified . SourceFile] + 16: String + 17: java[Qualified . lang][Qualified . String] + 18: [Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @] + 19: + 20: Child + 21: scala[Qualified . annotation][Qualified . internal][Qualified . Child] + 22: [Signed Signature(List(1),scala.annotation.internal.Child) @] + 23: Z[ModuleClass] + 24: [Signed Signature(List(),Z$) @] + 25: java[Qualified . lang][Qualified . Object] + 26: [Signed Signature(List(),java.lang.Object) @] + 27: _ + 28: writeReplace + 29: AnyRef + 30: runtime + 31: scala[Qualified . runtime] + 32: ModuleSerializationProxy + 33: scala[Qualified . runtime][Qualified . ModuleSerializationProxy] + 34: Class + 35: java[Qualified . 
lang][Qualified . Class] + 36: [Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @] + 37: AOptions + 38: BOptions + 39: COptions + 40: Positions + 41: Comments + 42: Attributes -Trees (1886 bytes, starting from ): - 0: PACKAGE(1883) +Trees (288 bytes, starting from ): + 0: PACKAGE(285) 3: TERMREFpkg 1 [] - 5: TYPEDEF(132) 2 [Z] - 9: TEMPLATE(39) - 11: APPLY(10) - 13: SELECTin(8) 9 [[Signed Signature(List(),java.lang.Object) @]] - 16: NEW - 17: TYPEREF 7 [Object] - 19: TERMREFpkg 6 [java[Qualified . lang]] - 21: SHAREDtype 17 - 23: TYPEREF 10 [Enum] - 25: TERMREFpkg 13 [scala[Qualified . reflect]] - 27: DEFDEF(7) 3 [] - 30: EMPTYCLAUSE - 31: TYPEREF 14 [Unit] - 33: TERMREFpkg 11 [scala] - 35: STABLE - 36: IMPORT(12) - 38: TERMREFsymbol 140 - 41: THIS - 42: TYPEREFpkg 1 [] - 44: IMPORTED 15 [AOptions] - 46: IMPORTED 16 [BOptions] - 48: IMPORTED 17 [COptions] - 50: ENUM - 51: SEALED - 52: ABSTRACT - 53: ANNOTATION(16) - 55: TYPEREF 18 [SourceFile] - 57: TERMREFpkg 22 [scala[Qualified . annotation][Qualified . internal]] - 59: APPLY(10) - 61: SELECTin(6) 26 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] - 64: NEW - 65: SHAREDtype 55 - 67: SHAREDtype 55 - 69: STRINGconst 27 [] - 71: ANNOTATION(25) - 73: TYPEREF 28 [Child] - 75: SHAREDtype 57 - 77: APPLY(19) - 79: TYPEAPPLY(17) - 81: SELECTin(6) 30 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] - 84: NEW - 85: SHAREDtype 73 - 87: SHAREDtype 73 - 89: TYPEREFsymbol 1280 - 92: THIS - 93: TYPEREFsymbol 160 - 96: SHAREDtype 41 - 98: ANNOTATION(19) - 100: SHAREDtype 73 - 102: APPLY(15) - 104: TYPEAPPLY(13) - 106: SELECTin(6) 30 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] - 109: NEW - 110: SHAREDtype 73 - 112: SHAREDtype 73 - 114: TYPEREFsymbol 769 - 117: SHAREDtype 92 - 119: ANNOTATION(19) - 121: SHAREDtype 73 - 123: APPLY(15) - 125: TYPEAPPLY(13) - 127: SELECTin(6) 30 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] - 130: NEW - 131: SHAREDtype 73 - 133: SHAREDtype 73 - 135: TYPEREFsymbol 223 - 138: SHAREDtype 92 - 140: VALDEF(18) 2 [Z] - 143: IDENTtpt 31 [Z[ModuleClass]] - 145: SHAREDtype 93 - 147: APPLY(9) - 149: SELECTin(7) 32 [[Signed Signature(List(),Z$) @]] - 152: NEW - 153: SHAREDterm 143 - 156: SHAREDtype 93 - 158: OBJECT - 159: SYNTHETIC - 160: TYPEDEF(1723) 31 [Z[ModuleClass]] - 164: TEMPLATE(1701) - 167: APPLY(10) - 169: SELECTin(8) 9 [[Signed Signature(List(),java.lang.Object) @]] - 172: NEW - 173: TYPEREF 33 [AnyRef] - 175: SHAREDtype 33 - 177: SHAREDtype 17 - 179: TYPEREF 34 [Sum] - 181: THIS - 182: TYPEREF 36 [Mirror[ModuleClass]] - 184: TERMREFpkg 38 [scala[Qualified . deriving]] - 186: SELFDEF 39 [_] - 188: SINGLETONtpt - 189: SHAREDtype 38 - 191: DEFDEF(5) 3 [] - 194: EMPTYCLAUSE - 195: SHAREDtype 31 - 197: STABLE - 198: DEFDEF(23) 40 [writeReplace] - 201: EMPTYCLAUSE - 202: SHAREDtype 173 - 205: APPLY(14) - 207: SELECTin(9) 47 [[Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] - 210: NEW - 211: TYPEREF 43 [ModuleSerializationProxy] - 213: TERMREFpkg 42 [scala[Qualified . 
runtime]] - 215: SHAREDtype 211 - 218: CLASSconst - 219: SHAREDtype 38 - 221: PRIVATE - 222: SYNTHETIC - 223: TYPEDEF(369) 15 [AOptions] - 227: TEMPLATE(362) - 230: APPLY(11) - 232: SELECTin(9) 48 [[Signed Signature(List(),Z) @]] - 235: NEW - 236: TYPEREFsymbol 5 - 238: SHAREDtype 41 - 240: SHAREDtype 236 - 243: DEFDEF(5) 3 [] - 246: EMPTYCLAUSE - 247: SHAREDtype 31 - 249: STABLE - 250: DEFDEF(24) 49 [hashCode] - 253: EMPTYCLAUSE - 254: TYPEREF 50 [Int] - 256: SHAREDtype 33 - 258: APPLY(14) - 260: TERMREF 55 [_hashCode[Signed Signature(List(scala.Product),scala.Int) @_hashCode]] - 262: THIS - 263: TYPEREF 57 [ScalaRunTime[ModuleClass]] - 265: SHAREDtype 213 - 268: QUALTHIS - 269: IDENTtpt 15 [AOptions] - 271: SHAREDtype 135 - 274: OVERRIDE - 275: SYNTHETIC - 276: DEFDEF(96) 58 [equals] - 279: PARAM(5) 59 [x$0] - 282: TYPEREF 60 [Any] - 284: SHAREDtype 33 - 286: TYPEREF 61 [Boolean] - 288: SHAREDtype 33 - 290: APPLY(80) - 292: SELECTin(30) 64 [||[Signed Signature(List(scala.Boolean),scala.Boolean) @||]] - 295: APPLY(24) - 297: SELECTin(9) 66 [eq[Signed Signature(List(java.lang.Object),scala.Boolean) @eq]] - 300: QUALTHIS - 301: IDENTtpt 15 [AOptions] - 303: SHAREDtype 135 - 306: SHAREDtype 17 - 308: TYPEAPPLY(11) - 310: SELECTin(7) 68 [$asInstanceOf$[Signed Signature(List(1),java.lang.Object) @$asInstanceOf$]] - 313: TERMREFdirect 279 - 316: SHAREDtype 282 - 319: SHAREDtype 17 - 321: SHAREDtype 286 - 324: MATCH(46) - 326: SHAREDterm 313 - 329: CASEDEF(33) - 331: BIND(30) 59 [x$0] - 334: SHAREDtype 135 - 337: TYPED(23) - 339: IDENT 39 [_] - 341: ANNOTATEDtype(16) - 343: SHAREDtype 135 - 346: APPLY(11) - 348: SELECTin(9) 71 [[Signed Signature(List(),scala.unchecked) @]] - 351: NEW - 352: TYPEREF 69 [unchecked] - 354: SHAREDtype 33 - 356: SHAREDtype 352 - 359: SHAREDtype 341 - 362: SYNTHETIC - 363: TRUEconst - 364: CASEDEF(6) - 366: IDENT 39 [_] - 368: SHAREDtype 282 - 371: FALSEconst - 372: OVERRIDE - 373: SYNTHETIC - 374: DEFDEF(21) 72 [toString] - 377: EMPTYCLAUSE - 378: TYPEREF 24 [String] - 380: SHAREDtype 19 - 382: APPLY(11) - 384: TERMREF 74 [_toString[Signed Signature(List(scala.Product),java.lang.String) @_toString]] - 386: SHAREDtype 262 - 389: QUALTHIS - 390: IDENTtpt 15 [AOptions] - 392: SHAREDtype 135 - 395: OVERRIDE - 396: SYNTHETIC - 397: DEFDEF(40) 75 [canEqual] - 400: PARAM(4) 76 [that] - 403: SHAREDtype 282 - 406: SHAREDtype 286 - 409: TYPEAPPLY(26) - 411: SELECTin(7) 78 [isInstanceOf[Signed Signature(List(1),scala.Boolean) @isInstanceOf]] - 414: TERMREFdirect 400 - 417: SHAREDtype 282 - 420: ANNOTATEDtype(15) - 422: SHAREDtype 135 - 425: APPLY(10) - 427: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] - 430: NEW - 431: SHAREDtype 352 - 434: SHAREDtype 352 - 437: OVERRIDE - 438: SYNTHETIC - 439: DEFDEF(8) 79 [productArity] - 442: SHAREDtype 254 - 445: INTconst 0 - 447: OVERRIDE - 448: SYNTHETIC - 449: DEFDEF(11) 80 [productPrefix] - 452: TYPEREF 24 [String] - 454: TERMREF 81 [Predef] - 456: SHAREDtype 33 - 458: STRINGconst 15 [AOptions] - 460: OVERRIDE - 461: SYNTHETIC - 462: DEFDEF(49) 82 [productElement] - 465: PARAM(4) 83 [n] - 468: SHAREDtype 254 - 471: SHAREDtype 282 - 474: MATCH(35) - 476: TERMREFdirect 465 - 479: CASEDEF(30) - 481: IDENT 39 [_] - 483: SHAREDtype 254 - 486: THROW - 487: APPLY(22) - 489: SELECTin(9) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] - 492: NEW - 493: TYPEREF 84 [IndexOutOfBoundsException] - 495: SHAREDtype 19 - 497: SHAREDtype 493 - 500: APPLY(9) - 502: SELECTin(7) 87 [toString[Signed 
Signature(List(),java.lang.String) @toString]] - 505: SHAREDterm 476 - 508: SHAREDtype 282 - 511: OVERRIDE - 512: SYNTHETIC - 513: DEFDEF(48) 88 [productElementName] - 516: PARAM(4) 83 [n] - 519: SHAREDtype 254 - 522: SHAREDtype 452 - 525: MATCH(34) - 527: TERMREFdirect 516 - 530: CASEDEF(29) - 532: IDENT 39 [_] - 534: SHAREDtype 254 - 537: THROW - 538: APPLY(21) - 540: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] - 543: NEW - 544: SHAREDtype 493 - 547: SHAREDtype 493 - 550: APPLY(9) - 552: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] - 555: SHAREDterm 527 - 558: SHAREDtype 282 - 561: OVERRIDE - 562: SYNTHETIC - 563: DEFDEF(18) 89 [copy] - 566: EMPTYCLAUSE - 567: SHAREDtype 135 - 570: APPLY(10) - 572: SELECTin(8) 91 [[Signed Signature(List(),Z$.AOptions) @]] - 575: NEW - 576: SHAREDtype 135 - 579: SHAREDtype 135 - 582: SYNTHETIC - 583: DEFDEF(7) 92 [ordinal] - 586: SHAREDtype 254 - 589: INTconst 0 - 591: SYNTHETIC - 592: FINAL - 593: CASE - 594: ENUM - 595: VALDEF(22) 15 [AOptions] - 598: IDENTtpt 93 [AOptions[ModuleClass]] - 600: TYPEREFsymbol 619 - 603: SHAREDtype 92 - 605: APPLY(10) - 607: SELECTin(8) 95 [[Signed Signature(List(),Z$.AOptions$) @]] - 610: NEW - 611: SHAREDterm 598 - 614: SHAREDtype 600 - 617: OBJECT - 618: SYNTHETIC - 619: TYPEDEF(147) 93 [AOptions[ModuleClass]] - 623: TEMPLATE(141) - 626: APPLY(9) - 628: SELECTin(7) 9 [[Signed Signature(List(),java.lang.Object) @]] - 631: NEW - 632: SHAREDtype 173 - 635: SHAREDtype 17 - 637: TYPEREF 53 [Product] - 639: SHAREDtype 181 - 642: SELFDEF 39 [_] - 644: SINGLETONtpt - 645: TERMREFsymbol 595 - 648: SHAREDtype 92 - 650: DEFDEF(5) 3 [] - 653: EMPTYCLAUSE - 654: SHAREDtype 31 - 656: STABLE - 657: DEFDEF(23) 40 [writeReplace] - 660: EMPTYCLAUSE - 661: SHAREDtype 173 - 664: APPLY(14) - 666: SELECTin(8) 47 [[Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] - 669: NEW - 670: SHAREDtype 211 - 673: SHAREDtype 211 - 676: CLASSconst - 677: SHAREDtype 645 - 680: PRIVATE - 681: SYNTHETIC - 682: DEFDEF(18) 96 [apply] - 685: EMPTYCLAUSE - 686: SHAREDtype 135 - 689: APPLY(10) - 691: SELECTin(8) 91 [[Signed Signature(List(),Z$.AOptions) @]] - 694: NEW - 695: SHAREDtype 135 - 698: SHAREDtype 135 - 701: SYNTHETIC - 702: DEFDEF(12) 97 [unapply] - 705: PARAM(5) 98 [x$1] - 708: SHAREDtype 135 - 711: SYNTHETIC - 712: SINGLETONtpt - 713: TRUEconst - 714: TRUEconst - 715: SYNTHETIC - 716: DEFDEF(8) 72 [toString] - 719: SHAREDtype 378 - 722: STRINGconst 15 [AOptions] - 724: OVERRIDE - 725: SYNTHETIC - 726: TYPEDEF(9) 99 [MirroredMonoType] - 729: TYPEBOUNDS(5) - 731: TYPEREFsymbol 223 - 734: SHAREDtype 38 - 736: SYNTHETIC - 737: DEFDEF(28) 100 [fromProduct] - 740: PARAM(5) 59 [x$0] - 743: TYPEREF 53 [Product] - 745: SHAREDtype 33 - 747: TYPEREFsymbol 726 - 750: THIS - 751: SHAREDtype 600 - 754: APPLY(10) - 756: SELECTin(8) 91 [[Signed Signature(List(),Z$.AOptions) @]] - 759: NEW - 760: SHAREDtype 135 - 763: SHAREDtype 135 - 766: SYNTHETIC - 767: OBJECT - 768: SYNTHETIC - 769: TYPEDEF(343) 16 [BOptions] - 773: TEMPLATE(336) - 776: APPLY(10) - 778: SELECTin(8) 48 [[Signed Signature(List(),Z) @]] - 781: NEW - 782: SHAREDtype 236 - 785: SHAREDtype 236 - 788: DEFDEF(5) 3 [] - 791: EMPTYCLAUSE - 792: SHAREDtype 31 - 794: STABLE - 795: DEFDEF(17) 49 [hashCode] - 798: EMPTYCLAUSE - 799: SHAREDtype 254 - 802: APPLY(8) - 804: SHAREDtype 260 - 807: QUALTHIS - 808: IDENTtpt 16 [BOptions] - 810: SHAREDtype 114 - 812: OVERRIDE - 813: SYNTHETIC - 814: 
DEFDEF(90) 58 [equals] - 817: PARAM(4) 59 [x$0] - 820: SHAREDtype 282 - 823: SHAREDtype 286 - 826: APPLY(76) - 828: SELECTin(29) 64 [||[Signed Signature(List(scala.Boolean),scala.Boolean) @||]] - 831: APPLY(23) - 833: SELECTin(8) 66 [eq[Signed Signature(List(java.lang.Object),scala.Boolean) @eq]] - 836: QUALTHIS - 837: IDENTtpt 16 [BOptions] - 839: SHAREDtype 114 - 841: SHAREDtype 17 - 843: TYPEAPPLY(11) - 845: SELECTin(7) 68 [$asInstanceOf$[Signed Signature(List(1),java.lang.Object) @$asInstanceOf$]] - 848: TERMREFdirect 817 - 851: SHAREDtype 282 - 854: SHAREDtype 17 - 856: SHAREDtype 286 - 859: MATCH(43) - 861: SHAREDterm 848 - 864: CASEDEF(30) - 866: BIND(27) 59 [x$0] - 869: SHAREDtype 114 - 871: TYPED(21) - 873: IDENT 39 [_] - 875: ANNOTATEDtype(14) - 877: SHAREDtype 114 - 879: APPLY(10) - 881: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] - 884: NEW - 885: SHAREDtype 352 - 888: SHAREDtype 352 - 891: SHAREDtype 875 - 894: SYNTHETIC - 895: TRUEconst - 896: CASEDEF(6) - 898: IDENT 39 [_] - 900: SHAREDtype 282 - 903: FALSEconst - 904: OVERRIDE - 905: SYNTHETIC - 906: DEFDEF(17) 72 [toString] - 909: EMPTYCLAUSE - 910: SHAREDtype 378 - 913: APPLY(8) - 915: SHAREDtype 384 - 918: QUALTHIS - 919: IDENTtpt 16 [BOptions] - 921: SHAREDtype 114 - 923: OVERRIDE - 924: SYNTHETIC - 925: DEFDEF(39) 75 [canEqual] - 928: PARAM(4) 76 [that] - 931: SHAREDtype 282 - 934: SHAREDtype 286 - 937: TYPEAPPLY(25) - 939: SELECTin(7) 78 [isInstanceOf[Signed Signature(List(1),scala.Boolean) @isInstanceOf]] - 942: TERMREFdirect 928 - 945: SHAREDtype 282 - 948: ANNOTATEDtype(14) - 950: SHAREDtype 114 - 952: APPLY(10) - 954: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] - 957: NEW - 958: SHAREDtype 352 - 961: SHAREDtype 352 - 964: OVERRIDE - 965: SYNTHETIC - 966: DEFDEF(8) 79 [productArity] - 969: SHAREDtype 254 - 972: INTconst 0 - 974: OVERRIDE - 975: SYNTHETIC - 976: DEFDEF(8) 80 [productPrefix] - 979: SHAREDtype 452 - 982: STRINGconst 16 [BOptions] - 984: OVERRIDE - 985: SYNTHETIC - 986: DEFDEF(48) 82 [productElement] - 989: PARAM(4) 83 [n] - 992: SHAREDtype 254 - 995: SHAREDtype 282 - 998: MATCH(34) - 1000: TERMREFdirect 989 - 1003: CASEDEF(29) - 1005: IDENT 39 [_] - 1007: SHAREDtype 254 - 1010: THROW - 1011: APPLY(21) - 1013: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] - 1016: NEW - 1017: SHAREDtype 493 - 1020: SHAREDtype 493 - 1023: APPLY(9) - 1025: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] - 1028: SHAREDterm 1000 - 1031: SHAREDtype 282 - 1034: OVERRIDE - 1035: SYNTHETIC - 1036: DEFDEF(48) 88 [productElementName] - 1039: PARAM(4) 83 [n] - 1042: SHAREDtype 254 - 1045: SHAREDtype 452 - 1048: MATCH(34) - 1050: TERMREFdirect 1039 - 1053: CASEDEF(29) - 1055: IDENT 39 [_] - 1057: SHAREDtype 254 - 1060: THROW - 1061: APPLY(21) - 1063: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] - 1066: NEW - 1067: SHAREDtype 493 - 1070: SHAREDtype 493 - 1073: APPLY(9) - 1075: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] - 1078: SHAREDterm 1050 - 1081: SHAREDtype 282 - 1084: OVERRIDE - 1085: SYNTHETIC - 1086: DEFDEF(15) 89 [copy] - 1089: EMPTYCLAUSE - 1090: SHAREDtype 114 - 1092: APPLY(8) - 1094: SELECTin(6) 102 [[Signed Signature(List(),Z$.BOptions) @]] - 1097: NEW - 1098: SHAREDtype 114 - 1100: SHAREDtype 114 - 1102: SYNTHETIC - 1103: DEFDEF(7) 92 [ordinal] - 1106: SHAREDtype 254 - 1109: INTconst 1 - 1111: SYNTHETIC - 1112: 
FINAL - 1113: CASE - 1114: ENUM - 1115: VALDEF(22) 16 [BOptions] - 1118: IDENTtpt 103 [BOptions[ModuleClass]] - 1120: TYPEREFsymbol 1139 - 1123: SHAREDtype 92 - 1125: APPLY(10) - 1127: SELECTin(8) 105 [[Signed Signature(List(),Z$.BOptions$) @]] - 1130: NEW - 1131: SHAREDterm 1118 - 1134: SHAREDtype 1120 - 1137: OBJECT - 1138: SYNTHETIC - 1139: TYPEDEF(138) 103 [BOptions[ModuleClass]] - 1143: TEMPLATE(132) - 1146: APPLY(9) - 1148: SELECTin(7) 9 [[Signed Signature(List(),java.lang.Object) @]] - 1151: NEW - 1152: SHAREDtype 173 - 1155: SHAREDtype 17 - 1157: SHAREDtype 637 - 1160: SELFDEF 39 [_] - 1162: SINGLETONtpt - 1163: TERMREFsymbol 1115 - 1166: SHAREDtype 92 - 1168: DEFDEF(5) 3 [] - 1171: EMPTYCLAUSE - 1172: SHAREDtype 31 - 1174: STABLE - 1175: DEFDEF(23) 40 [writeReplace] - 1178: EMPTYCLAUSE - 1179: SHAREDtype 173 - 1182: APPLY(14) - 1184: SELECTin(8) 47 [[Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] - 1187: NEW - 1188: SHAREDtype 211 - 1191: SHAREDtype 211 - 1194: CLASSconst - 1195: SHAREDtype 1163 - 1198: PRIVATE - 1199: SYNTHETIC - 1200: DEFDEF(15) 96 [apply] - 1203: EMPTYCLAUSE - 1204: SHAREDtype 114 - 1206: APPLY(8) - 1208: SELECTin(6) 102 [[Signed Signature(List(),Z$.BOptions) @]] - 1211: NEW - 1212: SHAREDtype 114 - 1214: SHAREDtype 114 - 1216: SYNTHETIC - 1217: DEFDEF(11) 97 [unapply] - 1220: PARAM(4) 98 [x$1] - 1223: SHAREDtype 114 - 1225: SYNTHETIC - 1226: SINGLETONtpt - 1227: TRUEconst - 1228: TRUEconst - 1229: SYNTHETIC - 1230: DEFDEF(8) 72 [toString] - 1233: SHAREDtype 378 - 1236: STRINGconst 16 [BOptions] - 1238: OVERRIDE - 1239: SYNTHETIC - 1240: TYPEDEF(9) 99 [MirroredMonoType] - 1243: TYPEBOUNDS(5) - 1245: TYPEREFsymbol 769 - 1248: SHAREDtype 38 - 1250: SYNTHETIC - 1251: DEFDEF(25) 100 [fromProduct] - 1254: PARAM(4) 59 [x$0] - 1257: SHAREDtype 743 - 1260: TYPEREFsymbol 1240 - 1263: THIS - 1264: SHAREDtype 1120 - 1267: APPLY(8) - 1269: SELECTin(6) 102 [[Signed Signature(List(),Z$.BOptions) @]] - 1272: NEW - 1273: SHAREDtype 114 - 1275: SHAREDtype 114 - 1277: SYNTHETIC - 1278: OBJECT - 1279: SYNTHETIC - 1280: TYPEDEF(343) 17 [COptions] - 1284: TEMPLATE(336) - 1287: APPLY(10) - 1289: SELECTin(8) 48 [[Signed Signature(List(),Z) @]] - 1292: NEW - 1293: SHAREDtype 236 - 1296: SHAREDtype 236 - 1299: DEFDEF(5) 3 [] - 1302: EMPTYCLAUSE - 1303: SHAREDtype 31 - 1305: STABLE - 1306: DEFDEF(17) 49 [hashCode] - 1309: EMPTYCLAUSE - 1310: SHAREDtype 254 - 1313: APPLY(8) - 1315: SHAREDtype 260 - 1318: QUALTHIS - 1319: IDENTtpt 17 [COptions] - 1321: SHAREDtype 89 - 1323: OVERRIDE - 1324: SYNTHETIC - 1325: DEFDEF(90) 58 [equals] - 1328: PARAM(4) 59 [x$0] - 1331: SHAREDtype 282 - 1334: SHAREDtype 286 - 1337: APPLY(76) - 1339: SELECTin(29) 64 [||[Signed Signature(List(scala.Boolean),scala.Boolean) @||]] - 1342: APPLY(23) - 1344: SELECTin(8) 66 [eq[Signed Signature(List(java.lang.Object),scala.Boolean) @eq]] - 1347: QUALTHIS - 1348: IDENTtpt 17 [COptions] - 1350: SHAREDtype 89 - 1352: SHAREDtype 17 - 1354: TYPEAPPLY(11) - 1356: SELECTin(7) 68 [$asInstanceOf$[Signed Signature(List(1),java.lang.Object) @$asInstanceOf$]] - 1359: TERMREFdirect 1328 - 1362: SHAREDtype 282 - 1365: SHAREDtype 17 - 1367: SHAREDtype 286 - 1370: MATCH(43) - 1372: SHAREDterm 1359 - 1375: CASEDEF(30) - 1377: BIND(27) 59 [x$0] - 1380: SHAREDtype 89 - 1382: TYPED(21) - 1384: IDENT 39 [_] - 1386: ANNOTATEDtype(14) - 1388: SHAREDtype 89 - 1390: APPLY(10) - 1392: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] - 1395: NEW - 1396: SHAREDtype 352 - 1399: SHAREDtype 352 - 
1402: SHAREDtype 1386 - 1405: SYNTHETIC - 1406: TRUEconst - 1407: CASEDEF(6) - 1409: IDENT 39 [_] - 1411: SHAREDtype 282 - 1414: FALSEconst - 1415: OVERRIDE - 1416: SYNTHETIC - 1417: DEFDEF(17) 72 [toString] - 1420: EMPTYCLAUSE - 1421: SHAREDtype 378 - 1424: APPLY(8) - 1426: SHAREDtype 384 - 1429: QUALTHIS - 1430: IDENTtpt 17 [COptions] - 1432: SHAREDtype 89 - 1434: OVERRIDE - 1435: SYNTHETIC - 1436: DEFDEF(39) 75 [canEqual] - 1439: PARAM(4) 76 [that] - 1442: SHAREDtype 282 - 1445: SHAREDtype 286 - 1448: TYPEAPPLY(25) - 1450: SELECTin(7) 78 [isInstanceOf[Signed Signature(List(1),scala.Boolean) @isInstanceOf]] - 1453: TERMREFdirect 1439 - 1456: SHAREDtype 282 - 1459: ANNOTATEDtype(14) - 1461: SHAREDtype 89 - 1463: APPLY(10) - 1465: SELECTin(8) 71 [[Signed Signature(List(),scala.unchecked) @]] - 1468: NEW - 1469: SHAREDtype 352 - 1472: SHAREDtype 352 - 1475: OVERRIDE - 1476: SYNTHETIC - 1477: DEFDEF(8) 79 [productArity] - 1480: SHAREDtype 254 - 1483: INTconst 0 - 1485: OVERRIDE - 1486: SYNTHETIC - 1487: DEFDEF(8) 80 [productPrefix] - 1490: SHAREDtype 452 - 1493: STRINGconst 17 [COptions] - 1495: OVERRIDE - 1496: SYNTHETIC - 1497: DEFDEF(48) 82 [productElement] - 1500: PARAM(4) 83 [n] - 1503: SHAREDtype 254 - 1506: SHAREDtype 282 - 1509: MATCH(34) - 1511: TERMREFdirect 1500 - 1514: CASEDEF(29) - 1516: IDENT 39 [_] - 1518: SHAREDtype 254 - 1521: THROW - 1522: APPLY(21) - 1524: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] - 1527: NEW - 1528: SHAREDtype 493 - 1531: SHAREDtype 493 - 1534: APPLY(9) - 1536: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] - 1539: SHAREDterm 1511 - 1542: SHAREDtype 282 - 1545: OVERRIDE - 1546: SYNTHETIC - 1547: DEFDEF(48) 88 [productElementName] - 1550: PARAM(4) 83 [n] - 1553: SHAREDtype 254 - 1556: SHAREDtype 452 - 1559: MATCH(34) - 1561: TERMREFdirect 1550 - 1564: CASEDEF(29) - 1566: IDENT 39 [_] - 1568: SHAREDtype 254 - 1571: THROW - 1572: APPLY(21) - 1574: SELECTin(8) 86 [[Signed Signature(List(java.lang.String),java.lang.IndexOutOfBoundsException) @]] - 1577: NEW - 1578: SHAREDtype 493 - 1581: SHAREDtype 493 - 1584: APPLY(9) - 1586: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] - 1589: SHAREDterm 1561 - 1592: SHAREDtype 282 - 1595: OVERRIDE - 1596: SYNTHETIC - 1597: DEFDEF(15) 89 [copy] - 1600: EMPTYCLAUSE - 1601: SHAREDtype 89 - 1603: APPLY(8) - 1605: SELECTin(6) 107 [[Signed Signature(List(),Z$.COptions) @]] - 1608: NEW - 1609: SHAREDtype 89 - 1611: SHAREDtype 89 - 1613: SYNTHETIC - 1614: DEFDEF(7) 92 [ordinal] - 1617: SHAREDtype 254 - 1620: INTconst 2 - 1622: SYNTHETIC - 1623: FINAL - 1624: CASE - 1625: ENUM - 1626: VALDEF(22) 17 [COptions] - 1629: IDENTtpt 108 [COptions[ModuleClass]] - 1631: TYPEREFsymbol 1650 - 1634: SHAREDtype 92 - 1636: APPLY(10) - 1638: SELECTin(8) 110 [[Signed Signature(List(),Z$.COptions$) @]] - 1641: NEW - 1642: SHAREDterm 1629 - 1645: SHAREDtype 1631 - 1648: OBJECT - 1649: SYNTHETIC - 1650: TYPEDEF(138) 108 [COptions[ModuleClass]] - 1654: TEMPLATE(132) - 1657: APPLY(9) - 1659: SELECTin(7) 9 [[Signed Signature(List(),java.lang.Object) @]] - 1662: NEW - 1663: SHAREDtype 173 - 1666: SHAREDtype 17 - 1668: SHAREDtype 637 - 1671: SELFDEF 39 [_] - 1673: SINGLETONtpt - 1674: TERMREFsymbol 1626 - 1677: SHAREDtype 92 - 1679: DEFDEF(5) 3 [] - 1682: EMPTYCLAUSE - 1683: SHAREDtype 31 - 1685: STABLE - 1686: DEFDEF(23) 40 [writeReplace] - 1689: EMPTYCLAUSE - 1690: SHAREDtype 173 - 1693: APPLY(14) - 1695: SELECTin(8) 47 [[Signed 
Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] - 1698: NEW - 1699: SHAREDtype 211 - 1702: SHAREDtype 211 - 1705: CLASSconst - 1706: SHAREDtype 1674 - 1709: PRIVATE - 1710: SYNTHETIC - 1711: DEFDEF(15) 96 [apply] - 1714: EMPTYCLAUSE - 1715: SHAREDtype 89 - 1717: APPLY(8) - 1719: SELECTin(6) 107 [[Signed Signature(List(),Z$.COptions) @]] - 1722: NEW - 1723: SHAREDtype 89 - 1725: SHAREDtype 89 - 1727: SYNTHETIC - 1728: DEFDEF(11) 97 [unapply] - 1731: PARAM(4) 98 [x$1] - 1734: SHAREDtype 89 - 1736: SYNTHETIC - 1737: SINGLETONtpt - 1738: TRUEconst - 1739: TRUEconst - 1740: SYNTHETIC - 1741: DEFDEF(8) 72 [toString] - 1744: SHAREDtype 378 - 1747: STRINGconst 17 [COptions] - 1749: OVERRIDE - 1750: SYNTHETIC - 1751: TYPEDEF(9) 99 [MirroredMonoType] - 1754: TYPEBOUNDS(5) - 1756: TYPEREFsymbol 1280 - 1759: SHAREDtype 38 - 1761: SYNTHETIC - 1762: DEFDEF(25) 100 [fromProduct] - 1765: PARAM(4) 59 [x$0] - 1768: SHAREDtype 743 - 1771: TYPEREFsymbol 1751 - 1774: THIS - 1775: SHAREDtype 1631 - 1778: APPLY(8) - 1780: SELECTin(6) 107 [[Signed Signature(List(),Z$.COptions) @]] - 1783: NEW - 1784: SHAREDtype 89 - 1786: SHAREDtype 89 - 1788: SYNTHETIC - 1789: OBJECT - 1790: SYNTHETIC - 1791: DEFDEF(46) 111 [fromOrdinal] - 1794: PARAM(4) 92 [ordinal] - 1797: SHAREDtype 254 - 1800: SHAREDtype 236 - 1803: THROW - 1804: APPLY(32) - 1806: SELECTin(9) 116 [[Signed Signature(List(java.lang.String),java.util.NoSuchElementException) @]] - 1809: NEW - 1810: TYPEREF 114 [NoSuchElementException] - 1812: TERMREFpkg 113 [java[Qualified . util]] - 1814: SHAREDtype 1810 - 1817: APPLY(19) - 1819: SELECTin(6) 118 [+[Signed Signature(List(java.lang.Object),java.lang.String) @+]] - 1822: STRINGconst 119 [enum Z has no case with ordinal: ] - 1824: SHAREDtype 378 - 1827: APPLY(9) - 1829: SELECTin(7) 87 [toString[Signed Signature(List(),java.lang.String) @toString]] - 1832: TERMREFdirect 1794 - 1835: SHAREDtype 282 - 1838: SYNTHETIC - 1839: TYPEDEF(7) 99 [MirroredMonoType] - 1842: TYPEBOUNDS(3) - 1844: SHAREDtype 236 - 1847: SYNTHETIC - 1848: DEFDEF(18) 92 [ordinal] - 1851: PARAM(6) 59 [x$0] - 1854: TYPEREFsymbol 1839 - 1857: SHAREDtype 92 - 1859: SHAREDtype 254 - 1862: SELECT 92 [ordinal] - 1864: TERMREFdirect 1851 - 1867: SYNTHETIC - 1868: OBJECT - 1869: SYNTHETIC - 1870: ANNOTATION(14) - 1872: SHAREDtype 55 - 1874: APPLY(10) - 1876: SELECTin(6) 26 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] - 1879: NEW - 1880: SHAREDtype 55 - 1882: SHAREDtype 55 - 1884: STRINGconst 27 [] - 1886: + 5: TYPEDEF(106) 2 [Z] + 8: TEMPLATE(13) + 10: TYPEREF 3 [Object] + 12: TERMREFpkg 6 [java[Qualified . lang]] + 14: DEFDEF(7) 7 [] + 17: EMPTYCLAUSE + 18: TYPEREF 8 [Unit] + 20: TERMREFpkg 9 [scala] + 22: STABLE + 23: SEALED + 24: TRAIT + 25: ANNOTATION(16) + 27: TYPEREF 10 [SourceFile] + 29: TERMREFpkg 14 [scala[Qualified . annotation][Qualified . 
internal]] + 31: APPLY(10) + 33: SELECTin(6) 18 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] + 36: NEW + 37: SHAREDtype 27 + 39: SHAREDtype 27 + 41: STRINGconst 19 [] + 43: ANNOTATION(26) + 45: TYPEREF 20 [Child] + 47: SHAREDtype 29 + 49: APPLY(20) + 51: TYPEAPPLY(18) + 53: SELECTin(6) 22 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] + 56: NEW + 57: SHAREDtype 45 + 59: SHAREDtype 45 + 61: TYPEREFsymbol 244 + 64: THIS + 65: TYPEREFsymbol 131 + 68: THIS + 69: TYPEREFpkg 1 [] + 71: ANNOTATION(19) + 73: SHAREDtype 45 + 75: APPLY(15) + 77: TYPEAPPLY(13) + 79: SELECTin(6) 22 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] + 82: NEW + 83: SHAREDtype 45 + 85: SHAREDtype 45 + 87: TYPEREFsymbol 217 + 90: SHAREDtype 64 + 92: ANNOTATION(19) + 94: SHAREDtype 45 + 96: APPLY(15) + 98: TYPEAPPLY(13) + 100: SELECTin(6) 22 [[Signed Signature(List(1),scala.annotation.internal.Child) @]] + 103: NEW + 104: SHAREDtype 45 + 106: SHAREDtype 45 + 108: TYPEREFsymbol 189 + 111: SHAREDtype 64 + 113: VALDEF(16) 2 [Z] + 116: IDENTtpt 23 [Z[ModuleClass]] + 118: SHAREDtype 65 + 120: APPLY(8) + 122: SELECTin(6) 24 [[Signed Signature(List(),Z$) @]] + 125: NEW + 126: SHAREDterm 116 + 128: SHAREDtype 65 + 130: OBJECT + 131: TYPEDEF(154) 23 [Z[ModuleClass]] + 135: TEMPLATE(133) + 138: APPLY(8) + 140: SELECTin(6) 26 [[Signed Signature(List(),java.lang.Object) @]] + 143: NEW + 144: SHAREDtype 10 + 146: SHAREDtype 10 + 148: SELFDEF 27 [_] + 150: SINGLETONtpt + 151: TERMREFsymbol 113 + 153: SHAREDtype 68 + 155: DEFDEF(5) 7 [] + 158: EMPTYCLAUSE + 159: SHAREDtype 18 + 161: STABLE + 162: DEFDEF(25) 28 [writeReplace] + 165: EMPTYCLAUSE + 166: TYPEREF 29 [AnyRef] + 168: SHAREDtype 20 + 170: APPLY(15) + 172: SELECTin(9) 36 [[Signed Signature(List(java.lang.Class),scala.runtime.ModuleSerializationProxy) @]] + 175: NEW + 176: TYPEREF 32 [ModuleSerializationProxy] + 178: TERMREFpkg 31 [scala[Qualified . runtime]] + 180: SHAREDtype 176 + 183: CLASSconst + 184: SHAREDtype 151 + 187: PRIVATE + 188: SYNTHETIC + 189: TYPEDEF(26) 37 [AOptions] + 192: TEMPLATE(23) + 194: APPLY(8) + 196: SELECTin(6) 26 [[Signed Signature(List(),java.lang.Object) @]] + 199: NEW + 200: SHAREDtype 10 + 202: SHAREDtype 10 + 204: IDENTtpt 2 [Z] + 206: TYPEREFsymbol 5 + 208: SHAREDtype 68 + 210: DEFDEF(5) 7 [] + 213: EMPTYCLAUSE + 214: SHAREDtype 18 + 216: STABLE + 217: TYPEDEF(25) 38 [BOptions] + 220: TEMPLATE(22) + 222: APPLY(8) + 224: SELECTin(6) 26 [[Signed Signature(List(),java.lang.Object) @]] + 227: NEW + 228: SHAREDtype 10 + 230: SHAREDtype 10 + 232: IDENTtpt 2 [Z] + 234: SHAREDtype 206 + 237: DEFDEF(5) 7 [] + 240: EMPTYCLAUSE + 241: SHAREDtype 18 + 243: STABLE + 244: TYPEDEF(25) 39 [COptions] + 247: TEMPLATE(22) + 249: APPLY(8) + 251: SELECTin(6) 26 [[Signed Signature(List(),java.lang.Object) @]] + 254: NEW + 255: SHAREDtype 10 + 257: SHAREDtype 10 + 259: IDENTtpt 2 [Z] + 261: SHAREDtype 206 + 264: DEFDEF(5) 7 [] + 267: EMPTYCLAUSE + 268: SHAREDtype 18 + 270: STABLE + 271: OBJECT + 272: ANNOTATION(14) + 274: SHAREDtype 27 + 276: APPLY(10) + 278: SELECTin(6) 18 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] + 281: NEW + 282: SHAREDtype 27 + 284: SHAREDtype 27 + 286: STRINGconst 19 [] + 288: -Positions (535 bytes, starting from ): - lines: 10 +Positions (154 bytes, starting from ): + lines: 12 line sizes: - 38, 0, 98, 90, 106, 7, 17, 17, 17, 0 + 38, 0, 98, 90, 106, 14, 0, 9, 28, 28, 28, 0 positions: - 0: 337 .. 398 - 5: 337 .. 398 - 9: 347 .. 
398 - 17: 342 .. 342 - 23: 347 .. 347 - 27: 347 .. 347 - 31: 347 .. 347 - 38: 347 .. 347 - 44: 347 .. 347 - 46: 347 .. 347 - 48: 347 .. 347 - 59: 337 .. 398 - 65: 337 .. 337 - 69: 337 .. 337 - 85: 342 .. 342 - 89: 342 .. 342 - 110: 342 .. 342 - 114: 342 .. 342 - 131: 342 .. 342 - 135: 342 .. 342 - 140: 337 .. 398 - 143: 347 .. 347 - 160: 337 .. 398 - 164: 347 .. 398 - 173: 347 .. 347 - 179: 347 .. 347 - 189: 347 .. 347 - 191: 347 .. 347 - 195: 347 .. 347 - 198: 342 .. 342 - 202: 342 .. 342 - 211: 342 .. 342 - 218: 342 .. 342 - 223: 347 .. 362 - 227: 360 .. 362 - 236: 362 .. 362 - 243: 360 .. 362 - 247: 360 .. 360 - 250: 352 .. 352 - 254: 352 .. 352 - 260: 352 .. 352 - 269: 352 .. 352 - 276: 352 .. 352 - 279: 352 .. 352 - 282: 352 .. 352 - 286: 352 .. 352 - 301: 352 .. 352 - 313: 352 .. 352 - 319: 352 .. 352 - 331: 352 .. 352 - 339: 352 .. 352 - 359: 352 .. 352 - 363: 352 .. 352 - 366: 352 .. 352 - 371: 352 .. 352 - 374: 352 .. 352 - 378: 352 .. 352 - 384: 352 .. 352 - 390: 352 .. 352 - 397: 352 .. 352 - 400: 352 .. 352 - 403: 352 .. 352 - 406: 352 .. 352 - 414: 352 .. 352 - 420: 352 .. 352 - 439: 352 .. 352 - 442: 352 .. 352 - 445: 352 .. 352 - 449: 352 .. 352 - 452: 352 .. 352 - 458: 352 .. 352 - 462: 352 .. 352 - 465: 352 .. 352 - 468: 352 .. 352 - 471: 352 .. 352 - 476: 352 .. 352 - 481: 352 .. 352 - 493: 352 .. 352 - 513: 352 .. 352 - 516: 352 .. 352 - 519: 352 .. 352 - 522: 352 .. 352 - 527: 352 .. 352 - 532: 352 .. 352 - 544: 352 .. 352 - 563: 347 .. 347 - 567: 347 .. 347 - 576: 347 .. 347 - 583: 362 .. 362 - 586: 362 .. 362 - 589: 362 .. 362 - 595: 347 .. 362 - 598: 347 .. 347 - 619: 347 .. 362 - 623: 347 .. 347 - 632: 347 .. 347 - 637: 347 .. 347 - 645: 347 .. 347 - 650: 347 .. 347 - 654: 347 .. 347 - 657: 352 .. 352 - 661: 352 .. 352 - 670: 352 .. 352 - 676: 352 .. 352 - 682: 347 .. 347 - 686: 347 .. 347 - 695: 347 .. 347 - 702: 347 .. 347 - 705: 347 .. 347 - 708: 347 .. 347 - 713: 347 .. 347 - 714: 347 .. 347 - 716: 347 .. 347 - 719: 347 .. 347 - 722: 347 .. 347 - 726: 352 .. 352 - 729: 352 .. 352 - 737: 352 .. 352 - 740: 352 .. 352 - 743: 352 .. 352 - 747: 352 .. 352 - 760: 352 .. 352 - 769: 365 .. 380 - 773: 378 .. 380 - 782: 380 .. 380 - 788: 378 .. 380 - 792: 378 .. 378 - 795: 370 .. 370 - 799: 370 .. 370 - 804: 370 .. 370 - 808: 370 .. 370 - 814: 370 .. 370 - 817: 370 .. 370 - 820: 370 .. 370 - 823: 370 .. 370 - 837: 370 .. 370 - 848: 370 .. 370 - 854: 370 .. 370 - 866: 370 .. 370 - 873: 370 .. 370 - 891: 370 .. 370 - 895: 370 .. 370 - 898: 370 .. 370 - 903: 370 .. 370 - 906: 370 .. 370 - 910: 370 .. 370 - 915: 370 .. 370 - 919: 370 .. 370 - 925: 370 .. 370 - 928: 370 .. 370 - 931: 370 .. 370 - 934: 370 .. 370 - 942: 370 .. 370 - 948: 370 .. 370 - 966: 370 .. 370 - 969: 370 .. 370 - 972: 370 .. 370 - 976: 370 .. 370 - 979: 370 .. 370 - 982: 370 .. 370 - 986: 370 .. 370 - 989: 370 .. 370 - 992: 370 .. 370 - 995: 370 .. 370 - 1000: 370 .. 370 - 1005: 370 .. 370 - 1017: 370 .. 370 - 1036: 370 .. 370 - 1039: 370 .. 370 - 1042: 370 .. 370 - 1045: 370 .. 370 - 1050: 370 .. 370 - 1055: 370 .. 370 - 1067: 370 .. 370 - 1086: 365 .. 365 - 1090: 365 .. 365 - 1098: 365 .. 365 - 1103: 380 .. 380 - 1106: 380 .. 380 - 1109: 380 .. 380 - 1115: 365 .. 380 - 1118: 365 .. 365 - 1139: 365 .. 380 - 1143: 365 .. 365 - 1152: 365 .. 365 - 1157: 365 .. 365 - 1163: 365 .. 365 - 1168: 365 .. 365 - 1172: 365 .. 365 - 1175: 370 .. 370 - 1179: 370 .. 370 - 1188: 370 .. 370 - 1194: 370 .. 370 - 1200: 365 .. 365 - 1204: 365 .. 365 - 1212: 365 .. 365 - 1217: 365 .. 365 - 1220: 365 .. 365 - 1223: 365 .. 
365 - 1227: 365 .. 365 - 1228: 365 .. 365 - 1230: 365 .. 365 - 1233: 365 .. 365 - 1236: 365 .. 365 - 1240: 370 .. 370 - 1243: 370 .. 370 - 1251: 370 .. 370 - 1254: 370 .. 370 - 1257: 370 .. 370 - 1260: 370 .. 370 - 1273: 370 .. 370 - 1280: 383 .. 398 - 1284: 396 .. 398 - 1293: 398 .. 398 - 1299: 396 .. 398 - 1303: 396 .. 396 - 1306: 388 .. 388 - 1310: 388 .. 388 - 1315: 388 .. 388 - 1319: 388 .. 388 - 1325: 388 .. 388 - 1328: 388 .. 388 - 1331: 388 .. 388 - 1334: 388 .. 388 - 1348: 388 .. 388 - 1359: 388 .. 388 - 1365: 388 .. 388 - 1377: 388 .. 388 - 1384: 388 .. 388 - 1402: 388 .. 388 - 1406: 388 .. 388 - 1409: 388 .. 388 - 1414: 388 .. 388 - 1417: 388 .. 388 - 1421: 388 .. 388 - 1426: 388 .. 388 - 1430: 388 .. 388 - 1436: 388 .. 388 - 1439: 388 .. 388 - 1442: 388 .. 388 - 1445: 388 .. 388 - 1453: 388 .. 388 - 1459: 388 .. 388 - 1477: 388 .. 388 - 1480: 388 .. 388 - 1483: 388 .. 388 - 1487: 388 .. 388 - 1490: 388 .. 388 - 1493: 388 .. 388 - 1497: 388 .. 388 - 1500: 388 .. 388 - 1503: 388 .. 388 - 1506: 388 .. 388 - 1511: 388 .. 388 - 1516: 388 .. 388 - 1528: 388 .. 388 - 1547: 388 .. 388 - 1550: 388 .. 388 - 1553: 388 .. 388 - 1556: 388 .. 388 - 1561: 388 .. 388 - 1566: 388 .. 388 - 1578: 388 .. 388 - 1597: 383 .. 383 - 1601: 383 .. 383 - 1609: 383 .. 383 - 1614: 398 .. 398 - 1617: 398 .. 398 - 1620: 398 .. 398 - 1626: 383 .. 398 - 1629: 383 .. 383 - 1650: 383 .. 398 - 1654: 383 .. 383 - 1663: 383 .. 383 - 1668: 383 .. 383 - 1674: 383 .. 383 - 1679: 383 .. 383 - 1683: 383 .. 383 - 1686: 388 .. 388 - 1690: 388 .. 388 - 1699: 388 .. 388 - 1705: 388 .. 388 - 1711: 383 .. 383 - 1715: 383 .. 383 - 1723: 383 .. 383 - 1728: 383 .. 383 - 1731: 383 .. 383 - 1734: 383 .. 383 - 1738: 383 .. 383 - 1739: 383 .. 383 - 1741: 383 .. 383 - 1744: 383 .. 383 - 1747: 383 .. 383 - 1751: 388 .. 388 - 1754: 388 .. 388 - 1762: 388 .. 388 - 1765: 388 .. 388 - 1768: 388 .. 388 - 1771: 388 .. 388 - 1784: 388 .. 388 - 1791: 398 .. 398 - 1794: 398 .. 398 - 1797: 398 .. 398 - 1800: 398 .. 398 - 1810: 398 .. 398 - 1822: 398 .. 398 - 1832: 398 .. 398 - 1839: 342 .. 342 - 1842: 342 .. 342 - 1848: 342 .. 342 - 1851: 342 .. 342 - 1854: 342 .. 342 - 1859: 342 .. 342 - 1864: 342 .. 342 - 1874: 337 .. 398 - 1880: 337 .. 337 - 1884: 337 .. 337 + 0: 337 .. 449 + 5: 337 .. 351 + 8: 337 .. 337 + 10: 350 .. 350 + 14: 337 .. 337 + 18: 337 .. 337 + 31: 337 .. 351 + 37: 337 .. 337 + 41: 337 .. 337 + 57: 350 .. 350 + 61: 350 .. 350 + 83: 350 .. 350 + 87: 350 .. 350 + 104: 350 .. 350 + 108: 350 .. 350 + 113: 353 .. 353 + 116: 353 .. 353 + 131: 353 .. 449 + 135: 365 .. 449 + 144: 360 .. 360 + 151: 365 .. 365 + 155: 365 .. 365 + 159: 365 .. 365 + 162: 360 .. 360 + 166: 360 .. 360 + 176: 360 .. 360 + 183: 360 .. 360 + 189: 365 .. 391 + 192: 379 .. 391 + 200: 371 .. 371 + 204: 390 .. 391 + 210: 379 .. 381 + 214: 379 .. 379 + 217: 394 .. 420 + 220: 408 .. 420 + 228: 400 .. 400 + 232: 419 .. 420 + 237: 408 .. 410 + 241: 408 .. 408 + 244: 423 .. 449 + 247: 437 .. 449 + 255: 429 .. 429 + 259: 448 .. 449 + 264: 437 .. 439 + 268: 437 .. 437 + 276: 353 .. 449 + 282: 353 .. 353 + 286: 353 .. 353 source paths: - 0: 27 [] + 0: 19 [] Attributes (2 bytes, starting from ): - SOURCEFILEattr 27 [] + SOURCEFILEattr 19 [] From 39f65b143834a8f208ef702e1fda345abe842e85 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 16 Jul 2024 17:26:27 +0200 Subject: [PATCH 391/827] Revised given syntax Update given syntax to latest discussed variant in the SIP tests/pos/given-syntax.scala shows a semi-systematic list of possible syntax forms. 
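
For illustration only: the sketch below uses made-up identifiers (Ord,
Context, GivenSketch are not taken from this changeset) to show the kinds
of forms the revised syntax aims at; tests/pos/given-syntax.scala is the
systematic list, and per this change the new forms are only recognized
when the experimental `modularity` language feature is enabled.

    import scala.language.experimental.modularity

    trait Ord[T]:
      def compare(x: T, y: T): Int

    class Context

    object GivenSketch:

      // unconditional instances, anonymous and named: `:` + indented body
      given Ord[Int]:
        def compare(x: Int, y: Int): Int = x - y

      given reverseIntOrd: Ord[Int]:
        def compare(x: Int, y: Int): Int = y - x

      // alias givens, anonymous and named
      given Context = Context()
      given ctx: Context = Context()

      // conditional instance: the condition precedes `=>`
      given [A: Ord] => Ord[List[A]]:
        def compare(xs: List[A], ys: List[A]): Int =
          xs.zip(ys).map((a, b) => summon[Ord[A]].compare(a, b))
            .find(_ != 0).getOrElse(xs.length - ys.length)

      // by-name given: `() =>` turns the alias into a def, re-evaluated per use
      given freshCtx: () => Context = Context()

As reflected in the changes to `givenDef` below, an unconditional alias
still elaborates to a (lazy) val, while conditional and `() =>` givens
become defs.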
--- .../dotty/tools/dotc/parsing/Parsers.scala | 160 ++++++++++++------ docs/_docs/internals/syntax.md | 14 +- tests/neg/deferred-givens-2.scala | 2 +- tests/neg/deferred-givens.scala | 4 +- tests/neg/i13580.scala | 13 ++ tests/neg/i8150.scala | 2 +- tests/pos/deferred-givens.scala | 2 +- tests/pos/given-syntax.scala | 120 +++++++++++++ tests/pos/hylolib-cb/HyArray.scala | 18 +- tests/pos/i13580.scala | 4 +- tests/pos/typeclasses-arrow.scala | 8 +- tests/pos/typeclasses-arrow0.scala | 10 +- tests/run/byname-given.scala | 9 + tests/warn/abstract-givens-new.check | 5 + tests/warn/abstract-givens-new.scala | 9 + 15 files changed, 300 insertions(+), 80 deletions(-) create mode 100644 tests/neg/i13580.scala create mode 100644 tests/pos/given-syntax.scala create mode 100644 tests/run/byname-given.scala create mode 100644 tests/warn/abstract-givens-new.check create mode 100644 tests/warn/abstract-givens-new.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 37587868da58..acb7b869b269 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -972,18 +972,16 @@ object Parsers { followedByToken(LARROW) // `<-` comes before possible statement starts } - /** Are the next token the "GivenSig" part of a given definition, - * i.e. an identifier followed by type and value parameters, followed by `:`? + /** Are the next tokens a valid continuation of a named given def? + * i.e. an identifier, possibly followed by type and value parameters, followed by `:`? * @pre The current token is an identifier */ - def followingIsOldStyleGivenSig() = + def followingIsGivenDefWithColon() = val lookahead = in.LookaheadScanner() if lookahead.isIdent then lookahead.nextToken() - var paramsSeen = false def skipParams(): Unit = if lookahead.token == LPAREN || lookahead.token == LBRACKET then - paramsSeen = true lookahead.skipParens() skipParams() else if lookahead.isNewLine then @@ -1002,6 +1000,11 @@ object Parsers { } } + def followingIsArrow() = + val lookahead = in.LookaheadScanner() + lookahead.skipParens() + lookahead.token == ARROW + def followingIsExtension() = val next = in.lookahead.token next == LBRACKET || next == LPAREN @@ -3441,7 +3444,11 @@ object Parsers { /** ContextTypes ::= FunArgType {‘,’ FunArgType} */ def contextTypes(paramOwner: ParamOwner, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = - val tps = commaSeparated(() => paramTypeOf(() => toplevelTyp())) + typesToParams( + commaSeparated(() => paramTypeOf(() => toplevelTyp())), + paramOwner, numLeadParams, impliedMods) + + def typesToParams(tps: List[Tree], paramOwner: ParamOwner, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = var counter = numLeadParams def nextIdx = { counter += 1; counter } val paramFlags = if paramOwner.isClass then LocalParamAccessor else Param @@ -3468,18 +3475,20 @@ object Parsers { def termParamClause( paramOwner: ParamOwner, numLeadParams: Int, // number of parameters preceding this clause - firstClause: Boolean = false // clause is the first in regular list of clauses + firstClause: Boolean = false, // clause is the first in regular list of clauses + initialMods: Modifiers = EmptyModifiers ): List[ValDef] = { - var impliedMods: Modifiers = EmptyModifiers + var impliedMods: Modifiers = initialMods def addParamMod(mod: () => Mod) = impliedMods = addMod(impliedMods, atSpan(in.skipToken()) { mod() }) def paramMods() = if in.token == IMPLICIT then addParamMod(() 
=> Mod.Implicit()) - else - if isIdent(nme.using) then - addParamMod(() => Mod.Given()) + else if isIdent(nme.using) then + if initialMods.is(Given) then + syntaxError(em"`using` is already implied here, should not be given explicitly", in.offset) + addParamMod(() => Mod.Given()) def param(): ValDef = { val start = in.offset @@ -4144,18 +4153,67 @@ object Parsers { * OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] * - * NewGivenDef ::= [GivenConditional '=>'] NewGivenSig - * GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} - * NewGivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) - * | ConstrApps ['as' id] TemplateBody - * + * NewGivenDef ::= [id ':'] GivenSig + * GivenSig ::= GivenImpl + * | '(' ')' '=>' GivenImpl + * | GivenConditional '=>' GivenSig + * GivenImpl ::= GivenType ([‘=’ Expr] | TemplateBody) + * | ConstrApps TemplateBody + * GivenConditional ::= DefTypeParamClause + * | DefTermParamClause + * | '(' FunArgTypes ')' + * | GivenType * GivenType ::= AnnotType1 {id [nl] AnnotType1} */ def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) val nameStart = in.offset - var name = if isIdent && followingIsOldStyleGivenSig() then ident() else EmptyTermName var newSyntaxAllowed = in.featureEnabled(Feature.modularity) + val hasEmbeddedColon = !in.isColon && followingIsGivenDefWithColon() + val name = if isIdent && hasEmbeddedColon then ident() else EmptyTermName + + def implemented(): List[Tree] = + if isSimpleLiteral then + rejectWildcardType(annotType()) :: Nil + else constrApp() match + case parent: Apply => parent :: moreConstrApps() + case parent if in.isIdent && newSyntaxAllowed => + infixTypeRest(parent, _ => annotType1()) :: Nil + case parent => parent :: moreConstrApps() + + // The term parameters and parent references */ + def newTermParamssAndParents(numLeadParams: Int): (List[List[ValDef]], List[Tree]) = + if in.token == LPAREN && followingIsArrow() then + val params = + if in.lookahead.token == RPAREN && numLeadParams == 0 then + in.nextToken() + in.nextToken() + Nil + else + termParamClause( + ParamOwner.Given, numLeadParams, firstClause = true, initialMods = Modifiers(Given)) + accept(ARROW) + if params.isEmpty then (params :: Nil, implemented()) + else + val (paramss, parents) = newTermParamssAndParents(numLeadParams + params.length) + (params :: paramss, parents) + else + val parents = implemented() + if in.token == ARROW && parents.length == 1 && parents.head.isType then + in.nextToken() + val (paramss, parents1) = newTermParamssAndParents(numLeadParams + parents.length) + (typesToParams(parents, ParamOwner.Given, numLeadParams, Modifiers(Given)) :: paramss, parents1) + else + (Nil, parents) + + /** Type parameters, term parameters and parent clauses */ + def newSignature(): (List[TypeDef], (List[List[ValDef]], List[Tree])) = + val tparams = + if in.token == LBRACKET then + try typeParamClause(ParamOwner.Given) + finally accept(ARROW) + else Nil + (tparams, newTermParamssAndParents(numLeadParams = 0)) def moreConstrApps() = if newSyntaxAllowed && in.token == COMMA then @@ -4176,47 +4234,49 @@ object Parsers { .asInstanceOf[List[ParamClause]] val gdef = - val tparams = typeParamClauseOpt(ParamOwner.Given) - newLineOpt() - val vparamss = - if in.token == LPAREN && (in.lookahead.isIdent(nme.using) || name != EmptyTermName) - then termParamClauses(ParamOwner.Given) - 
else Nil - newLinesOpt() - val noParams = tparams.isEmpty && vparamss.isEmpty - val hasParamsOrId = !name.isEmpty || !noParams - if hasParamsOrId then - if in.isColon then - newSyntaxAllowed = false + val (tparams, (vparamss0, parents)) = + if in.isColon && !name.isEmpty then in.nextToken() - else if newSyntaxAllowed then accept(ARROW) - else acceptColon() - val parents = - if isSimpleLiteral then - rejectWildcardType(annotType()) :: Nil - else constrApp() match - case parent: Apply => parent :: moreConstrApps() - case parent if in.isIdent && newSyntaxAllowed => - infixTypeRest(parent, _ => annotType1()) :: Nil - case parent => parent :: moreConstrApps() - if newSyntaxAllowed && in.isIdent(nme.as) then - in.nextToken() - name = ident() - + newSignature() + else if hasEmbeddedColon then + newSyntaxAllowed = false + val tparamsOld = typeParamClauseOpt(ParamOwner.Given) + newLineOpt() + val vparamssOld = + if in.token == LPAREN && (in.lookahead.isIdent(nme.using) || name != EmptyTermName) + then termParamClauses(ParamOwner.Given) + else Nil + acceptColon() + (tparamsOld, (vparamssOld, implemented())) + else + newSignature() + val hasParams = tparams.nonEmpty || vparamss0.nonEmpty + val vparamss = vparamss0 match + case Nil :: Nil => Nil + case _ => vparamss0 val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then // given alias accept(EQUALS) mods1 |= Final - if noParams && !mods.is(Inline) then + if !hasParams && !mods.is(Inline) then mods1 |= Lazy ValDef(name, parents.head, subExpr()) else DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) - else if (isStatSep || isStatSeqEnd) && parentsIsType && !newSyntaxAllowed then + else if (isStatSep || isStatSeqEnd) && parentsIsType + && !(name.isEmpty && newSyntaxAllowed) + // under new syntax, anonymous givens are translated to concrete classes, + // so it's treated as a structural instance. + then // old-style abstract given if name.isEmpty then - syntaxError(em"anonymous given cannot be abstract") + syntaxError(em"Anonymous given cannot be abstract, or maybe you want to define a concrete given and are missing a `()` argument?", in.lastOffset) + if newSyntaxAllowed then + warning( + em"""This defines an abstract given, which is deprecated. Use a `deferred` given instead. + |Or, if you intend to define a concrete given, follow the type with `()` arguments.""", + in.lastOffset) DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else // structural instance @@ -4228,12 +4288,16 @@ object Parsers { val templ = if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) - else if !newSyntaxAllowed || in.token == WITH then + else if !newSyntaxAllowed + || in.token == WITH && tparams.isEmpty && vparamss.isEmpty + // if new syntax is still allowed and there are parameters, they mist be new style conditions, + // so old with-style syntax would not be allowed. 
+ then withTemplate(constr, parents) else possibleTemplateStart() templateBodyOpt(constr, parents, Nil) - if noParams && !mods.is(Inline) then ModuleDef(name, templ) + if !hasParams && !mods.is(Inline) then ModuleDef(name, templ) else TypeDef(name.toTypeName, templ) end gdef finalizeDef(gdef, mods1, start) diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 1036397eed7b..a5e1427998bc 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -471,10 +471,16 @@ ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenConditional '=>'] GivenSig -GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} -GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) - | ConstrApps ['as' id] TemplateBody +GivenDef ::= [id ':'] GivenSig +GivenSig ::= GivenImpl + | '(' ')' '=>' GivenImpl + | GivenConditional '=>' GivenSig +GivenImpl ::= GivenType ([‘=’ Expr] | TemplateBody) + | ConstrApps TemplateBody +GivenConditional ::= DefTypeParamClause + | DefTermParamClause + | '(' FunArgTypes ')' + | GivenType GivenType ::= AnnotType1 {id [nl] AnnotType1} Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} diff --git a/tests/neg/deferred-givens-2.scala b/tests/neg/deferred-givens-2.scala index 4e75ceb08728..9a95271a4f46 100644 --- a/tests/neg/deferred-givens-2.scala +++ b/tests/neg/deferred-givens-2.scala @@ -12,7 +12,7 @@ object Scoped: class SortedIntCorrect2 extends Sorted: type Element = Int - override given (Int is Ord)() as given_Ord_Element + override given given_Ord_Element: (Int is Ord)() class SortedIntWrong1 extends Sorted: // error type Element = Int diff --git a/tests/neg/deferred-givens.scala b/tests/neg/deferred-givens.scala index 7ff67d784714..8a1bcb2b50fc 100644 --- a/tests/neg/deferred-givens.scala +++ b/tests/neg/deferred-givens.scala @@ -5,7 +5,7 @@ class Ctx class Ctx2 trait A: - given Ctx as ctx = deferred + given ctx: Ctx = deferred given Ctx2 = deferred class B extends A // error @@ -13,7 +13,7 @@ class B extends A // error abstract class C extends A // error class D extends A: - given Ctx as ctx = Ctx() // ok, was implemented + given ctx: Ctx = Ctx() // ok, was implemented given Ctx2 = Ctx2() // ok class Ctx3[T] diff --git a/tests/neg/i13580.scala b/tests/neg/i13580.scala new file mode 100644 index 000000000000..7388ee532526 --- /dev/null +++ b/tests/neg/i13580.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +trait IntWidth: + type Out +given IntWidth: + type Out = 155 + +trait IntCandidate: + type Out +given (using tracked val w: IntWidth) => IntCandidate: // error + type Out = w.Out + +val x = summon[IntCandidate] +val xx = summon[x.Out =:= 155] diff --git a/tests/neg/i8150.scala b/tests/neg/i8150.scala index 2f0505c6265a..b7edceec9426 100644 --- a/tests/neg/i8150.scala +++ b/tests/neg/i8150.scala @@ -1,3 +1,3 @@ trait A trait B -type T = {given(using a: A) as B} // error: refinement cannot be `given` \ No newline at end of file +type T = {given x(using a: A): B} // error: refinement cannot be `given` \ No newline at end of file diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala index b9018c97e151..0ad751fcc7e0 100644 --- a/tests/pos/deferred-givens.scala +++ b/tests/pos/deferred-givens.scala @@ -1,7 +1,7 @@ //> using options -language:experimental.modularity -source future import 
compiletime.* class Ord[Elem] -given Ord[Double] +given Ord[Double]() trait A: type Elem : Ord diff --git a/tests/pos/given-syntax.scala b/tests/pos/given-syntax.scala new file mode 100644 index 000000000000..a8b64c1030a7 --- /dev/null +++ b/tests/pos/given-syntax.scala @@ -0,0 +1,120 @@ +//> using options -language:experimental.modularity -source future + +class Context +class Ord[T] + +class Ordered: + type Self + +class Monoid[T] +import compiletime.deferred + +def IntOrd[A](): Ord[Int] = ??? +def ListOrd[A](using Ord[A]): Ord[List[A]] = ??? +def curCtx: Context = ??? + +trait anon1: + // Simple typeclass + given Ord[Int]: + def compare(x: Int, y: Int) = ??? + + // Simple type class with extension method: + given Monoid[Int]: + extension (x: Int) + def combine(y: Int) = x + y + def unit = 0 + + // Parameterized typeclass with context bound + given [A: Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ??? + +trait anon2: + // Parameterized typeclass with context parameter + given [A] => Ord[A] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ??? + +trait anon3: + // Parameterized typeclass with named context parameter + given [A] => (ord: Ord[A]) => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ??? + + // Simple alias + given Ord[Int] = IntOrd() + +trait anon4: + // Parameterized alias with context bound + given [A: Ord] => Ord[List[A]] = + ListOrd[A] + +trait anon5: + // Parameterized alias with context parameter + given [A] => Ord[A] => Ord[List[A]] = + ListOrd[A] + + given [A] => A is Ordered => List[A] is Ordered = + ??? + +trait anon6: + // Parameterized alias with named context parameter + given [A] => (ord: Ord[A]) => Ord[List[A]] = + ListOrd[A](using ord) + + given [A] => (A is Ordered) => List[A] is Ordered = + ??? + + // Concrete class instance + given Context() + +trait anon7: + // Abstract or deferred given + given Context = deferred + +trait anon8: + // By-name given + given () => Context = curCtx + +trait named: + given intOrd: Ord[Int]: + def compare(x: Int, y: Int) = ??? + + // Simple type class with extension method: + given intMonoid: Monoid[Int]: + extension (x: Int) + def combine(y: Int) = x + y + def unit = 0 + + // Parameterized typeclass with context bound + given listOrd: [A: Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ??? + + // Parameterized typeclass with context parameter + given listOrd2: [A] => Ord[A] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ??? + + // Parameterized typeclass with named context parameter + given listOrd3: [A] => (ord: Ord[A]) => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ??? 
+ + // Simple alias + given intOrd2: Ord[Int] = IntOrd() + + // Parameterized alias with context bound + given listOrd4: [A: Ord] => Ord[List[A]] = + ListOrd[A] + + // Parameterized alias with context parameter + given listOrd5: [A] => Ord[A] => Ord[List[A]] = + ListOrd[A] + + // Parameterized alias with named context parameter + given listOrd6: [A] => (ord: Ord[A]) => Ord[List[A]] = + ListOrd[A](using ord) + + // Concrete class instance + given context: Context() + + // Abstract or deferred given + given context2: Context = deferred + + // By-name given + given context3: () => Context = curCtx \ No newline at end of file diff --git a/tests/pos/hylolib-cb/HyArray.scala b/tests/pos/hylolib-cb/HyArray.scala index 0fff45e744ec..e4ccab000448 100644 --- a/tests/pos/hylolib-cb/HyArray.scala +++ b/tests/pos/hylolib-cb/HyArray.scala @@ -161,9 +161,9 @@ object HyArray { } -given [T: Value] => Value[HyArray[T]] with { +given [T: Value] => Value[HyArray[T]]: - extension (self: HyArray[T]) { + extension (self: HyArray[T]) def copy(): HyArray[T] = self.copy() @@ -173,17 +173,14 @@ given [T: Value] => Value[HyArray[T]] with { def hashInto(hasher: Hasher): Hasher = self.reduce(hasher, (h, e) => e.hashInto(h)) +end given - } - -} - -given [T: Value] => Collection[HyArray[T]] with { +given [T: Value] => Collection[HyArray[T]]: type Element = T type Position = Int - extension (self: HyArray[T]) { + extension (self: HyArray[T]) // NOTE: Having to explicitly override means that primary declaration can't automatically // specialize trait requirements. @@ -198,10 +195,7 @@ given [T: Value] => Collection[HyArray[T]] with { def positionAfter(p: Int) = p + 1 def at(p: Int) = self.at(p) - - } - -} +end given // NOTE: This should work. // given hyArrayIsStringConvertible[T](using diff --git a/tests/pos/i13580.scala b/tests/pos/i13580.scala index c3c491a19dbe..b9b3a00392a0 100644 --- a/tests/pos/i13580.scala +++ b/tests/pos/i13580.scala @@ -1,12 +1,12 @@ //> using options -language:experimental.modularity -source future trait IntWidth: type Out -given IntWidth: +given IntWidth with type Out = 155 trait IntCandidate: type Out -given (using tracked val w: IntWidth) => IntCandidate: +given (using tracked val w: IntWidth): IntCandidate with type Out = w.Out val x = summon[IntCandidate] diff --git a/tests/pos/typeclasses-arrow.scala b/tests/pos/typeclasses-arrow.scala index 379365ffa1c5..4b2a25122b0d 100644 --- a/tests/pos/typeclasses-arrow.scala +++ b/tests/pos/typeclasses-arrow.scala @@ -36,7 +36,7 @@ end Common object Instances extends Common: - given Int is Ord as intOrd: + given intOrd: Int is Ord: extension (x: Int) def compareTo(y: Int) = if x < y then -1 @@ -52,7 +52,7 @@ object Instances extends Common: val fst = x.compareTo(y) if (fst != 0) fst else xs1.compareTo(ys1) - given List is Monad as listMonad: + given listMonad: List is Monad: extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = @@ -60,7 +60,7 @@ object Instances extends Common: type Reader[Ctx] = [X] =>> Ctx => X - given [Ctx] => Reader[Ctx] is Monad as readerMonad: + given readerMonad: [Ctx] => Reader[Ctx] is Monad: extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -82,7 +82,7 @@ object Instances extends Common: def maximum[T: Ord](xs: List[T]): T = xs.reduce(_ `max` _) - given [T: Ord] => T is Ord as descending: + given descending: [T: Ord] => T is Ord: extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) def 
minimum[T: Ord](xs: List[T]) = diff --git a/tests/pos/typeclasses-arrow0.scala b/tests/pos/typeclasses-arrow0.scala index 22d84fe6478d..d7d85e6b7400 100644 --- a/tests/pos/typeclasses-arrow0.scala +++ b/tests/pos/typeclasses-arrow0.scala @@ -32,14 +32,14 @@ end Common object Instances extends Common: - given Ord[Int] as intOrd: + given intOrd: Ord[Int]: extension (x: Int) def compareTo(y: Int) = if x < y then -1 else if x > y then +1 else 0 - given [T: Ord] => Ord[List[T]]: + given listOrd: [T: Ord] => Ord[List[T]]: extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -48,7 +48,7 @@ object Instances extends Common: val fst = x.compareTo(y) if (fst != 0) fst else xs1.compareTo(ys1) - given Monad[List] as listMonad: + given listMonad: Monad[List]: extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = @@ -56,7 +56,7 @@ object Instances extends Common: type Reader[Ctx] = [X] =>> Ctx => X - given [Ctx] => Monad[Reader[Ctx]] as readerMonad: + given readerMonad: [Ctx] => Monad[Reader[Ctx]]: extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -78,7 +78,7 @@ object Instances extends Common: def maximum[T: Ord](xs: List[T]): T = xs.reduce(_ `max` _) - given [T: Ord] => Ord[T] as descending: + given descending: [T: Ord] => Ord[T]: extension (x: T) def compareTo(y: T) = summon[Ord[T]].compareTo(y)(x) def minimum[T: Ord](xs: List[T]) = diff --git a/tests/run/byname-given.scala b/tests/run/byname-given.scala new file mode 100644 index 000000000000..d18ebb221a68 --- /dev/null +++ b/tests/run/byname-given.scala @@ -0,0 +1,9 @@ +//> using options -language:experimental.modularity -source future + +@main def Test = + var x: Int = 0 + given () => Int = x + assert(summon[Int] == 0) + x += 1 + assert(summon[Int] == 1) + diff --git a/tests/warn/abstract-givens-new.check b/tests/warn/abstract-givens-new.check new file mode 100644 index 000000000000..197d9bcb4f3e --- /dev/null +++ b/tests/warn/abstract-givens-new.check @@ -0,0 +1,5 @@ +-- Warning: tests/warn/abstract-givens-new.scala:7:22 ------------------------------------------------------------------ +7 | given intC: Int is C // warn + | ^ + | This defines an abstract given, which is deprecated. Use a `deferred` given instead. + | Or, if you intend to define a concrete given, follow the type with `()` arguments. 
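The warning text above names two remedies. A minimal sketch of both, assuming the same `C` / `Int is C` shape as the test that follows and the experimental modularity and `deferred` features (trait and member names here are illustrative only, not part of the patch):

```scala
//> using options -language:experimental.modularity -source future
import compiletime.deferred

class C:
  type Self

trait Remedies:
  // Remedy 1: a deferred given; concrete subclasses get the implementation
  // filled in automatically from their context, or may write it explicitly.
  given intC: Int is C = deferred
  // Remedy 2: a concrete instance; the trailing `()` makes this an eager
  // `(Int is C)()` value rather than an abstract given.
  given intC2: (Int is C)()
```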
diff --git a/tests/warn/abstract-givens-new.scala b/tests/warn/abstract-givens-new.scala new file mode 100644 index 000000000000..b38fd11c4458 --- /dev/null +++ b/tests/warn/abstract-givens-new.scala @@ -0,0 +1,9 @@ +//> using options -language:experimental.modularity -source future +class C: + type Self + +trait T: + given Int is C // ok + given intC: Int is C // warn + given intC2: (Int is C)() // ok + given intC3: Int is C {} // also ok From 42578c0bd4d7f21808f64dd1ae49e825c320a100 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 19 Jul 2024 12:42:39 +0200 Subject: [PATCH 392/827] Revise doc page for new typeclasses --- .../reference/experimental/typeclasses.md | 246 +++++++++++++----- 1 file changed, 180 insertions(+), 66 deletions(-) diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index a78e764bbe7d..08839ffe58eb 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -329,6 +329,41 @@ The using clause in class `SortedSet` provides an implementation for the deferre **Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. It is a concrete definition where the compiler will provide the correct implementation. +### Abolish Abstract Givens + +With `deferred` givens there is no need anymore to also define abstract givens. The two mechanisms are very similar, but the user experience for +deferred givens is generally more ergonomic. Abstract givens also are uncomfortably close to concrete class instances. Their syntax clashes +with the quite common case where we want to establish a given without any nested definitions. For instance, consider a given that constructs a type tag: +```scala +class Tag[T] +``` +Then this works: +```scala +given Tag[String]() +given Tag[String] with {} +``` +But the following more natural syntax fails: +```scala +given Tag[String] +``` +The last line gives a rather cryptic error: +``` +1 |given Tag[String] + | ^ + | anonymous given cannot be abstract +``` +The underlying problem is that abstract givens are very rare (and should become completely unnecessary once deferred givens are introduced), yet occupy a syntax that looks very close to the more common case of concrete +typeclasses without nested definitions. + +**Proposal:** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. Deprecate the current version of abstract givens, and remove them in a future Scala version. + +**Benefits:** + + - Simplification of the language since a feature is dropped + - Eliminate non-obvious and misleading syntax. + +The only downside is that deferred givens are restricted to be used in traits, whereas abstract givens are also allowed in abstract classes. But I would be surprised if actual code relied on that difference, and such code could in any case be easily rewritten to accommodate the restriction. + ## New Given Syntax A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. 
Dissonances and irregularities should be avoided. @@ -340,106 +375,185 @@ given [A](using Ord[A]): Ord[List[A]] with ``` The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need of `with` sticks out like a sore thumb. -We arrived at that syntax not because of a flight of fancy but because even after trying for about a year to find other solutions it seemed like the least bad alternative. The awkwardness of the given syntax arose because we insisted that givens could be named or anonymous, with the default on anonymous, that we would not use underscore for an anonymous given, and that the name, if present, had to come first, and have the form `name [parameters] :`. In retrospect, that last requirement showed a lack of creativity on our part. - Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. Since the current syntax is unnatural and esoteric, this means it's difficult to discover and very foreign even after that. This makes it much harder to learn and apply givens than it need be. -Things become much simpler if we introduce the optional name instead with an `as name` clause at the end, just like we did for context bounds. We can then use a more intuitive syntax for givens like this: +The previous conditional given syntax was inspired from method definitions. If we add the optional name to the previous example, we obtain something akin to an implicit method in Scala 2: ```scala -given String is Ord: - def compare(x: String, y: String) = ... - -given [A : Ord] => List[A] is Ord: +given listOrd[A](using Ord[A]): Ord[List[A]] with def compare(x: List[A], y: List[A]) = ... - -given Int is Monoid: - extension (x: Int) def combine(y: Int) = x + y - def unit = 0 ``` -Here, the second given can be read as if `A` is an `Ord` then `List[A]` is also an`Ord`. Or: for all `A: Ord`, `List[A]` is `Ord`. The arrow can be seen as an implication, note also the analogy to pattern matching syntax. +The anonymous syntax was then obtained by simply dropping the name. +But without a name, the syntax looks weird and inconsistent. + +This is a problem since at least for typeclasses, anonymous givens should be the norm. +Givens are like extends clauses. We state a _fact_, that a +type implements a type class, or that a value can be used implicitly. We don't need a name for that fact. It's analogous to extends clauses, where we state that a class is a subclass of some other class or trait. We would not think it useful to name an extends clause, it's simply a fact that is stated. +It's also telling that every other language that defines type classes uses anonymous syntax. Somehow, nobody ever found it necessary to name these instances. -If explicit names are desired, we add them with `as` clauses: +A more intuitive and in my opinion cleaner alternative is to decree that a given should always look like it _implements a type_. Conditional givens should look like they implement function types. 
The `Ord` typeclass instances for `Int` and `List` would then look like this: ```scala -given String is Ord as intOrd: +given Ord[String]: def compare(x: String, y: String) = ... -given [A : Ord] => List[A] is Ord as listOrd: +given [A : Ord] => Ord[List[A]]: def compare(x: List[A], y: List[A]) = ... - -given Int is Monoid as intMonoid: - extension (x: Int) def combine(y: Int) = x + y - def unit = 0 ``` +The second, conditional instance looks like it implements the function type +```scala +[A : Ord] => Ord[List[A]] +``` +Another way to see this is as an implication: +If `A` is a type that is `Ord`, then `List[A]` is `Ord` (and the rest of the given clause gives the implementation that makes it so). +Equivalently, `A` is `Ord` _implies_ `List[A]` is `Ord`, hence the `=>`. -The underlying principles are: +Yet another related meaning is that the given clause establishes a _context function_ of type `[A: Ord] ?=> Ord[List[A]]` that is automatically applied to evidence arguments of type `Ord[A]` and that yields instances of type `Ord[List[A]]`. Since givens are in any case applied automatically to all their arguments, we don't need to specify that separately with `?=>`, a simple `=>` arrow is sufficiently clear and is easier to read. - - A `given` clause consists of the following elements: +All these viewpoints are equivalent, in a deep sense. This is exactly the Curry Howard isomorphism, which equates function types and implications. - - An optional _precondition_, which introduces type parameters and/or using clauses and which ends in `=>`, - - the implemented _type_, - - an optional name binding using `as`, - - an implementation which consists of either an `=` and an expression, - or a template body. +In the new syntax, a `given` clause consists of the following elements: - - Since there is no longer a middle `:` separating name and parameters from the implemented type, we can use a `:` to start the class body without looking unnatural, as is done everywhere else. That eliminates the special case where `with` was used before. + - An optional name binding `id :` + - Zero or more _conditions_, which introduce type or value parameters. Each precondition ends in a `=>`. + - the implemented _type_, + - an implementation which consists of either an `=` and an expression, + or a template body. -This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about a 900K definitions of `implicit def`s -in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again. +**Examples:** -Changing something introduced just recently in Scala 3 is not fun, -but I believe these adjustments are preferable to let bad syntax -sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code -starts migrating. +Here is an enumeration of common forms of given definitions in the new syntax. We show the following use cases: -Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. 
It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time. + 1. A simple typeclass instance, such as `Ord[Int]`. + 2. A parameterized type class instance, such as `Ord` for lists. + 3. A type class instance with an explicit context parameter. + 4. A type class instance with a named eexplicit context parameter. + 4. A simple given alias. + 5. A parameterized given alias + 6. A given alias with an explicit context parameter. + 8. An abstract or deferred given + 9. A by-name given, e.g. if we have a given alias of a mutable variable, and we + want to make sure that it gets re-evaluated on each access. +```scala + // Simple typeclass + given Ord[Int]: + def compare(x: Int, y: Int) = ... + // Parameterized typeclass with context bound + given [A: Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... -### Abolish Abstract Givens + // Parameterized typeclass with context parameter + given [A] => Ord[A] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... -Another simplification is possible. So far we have special syntax for abstract givens: -```scala -given x: T + // Parameterized typeclass with named context parameter + given [A] => (ord: Ord[A]) => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Simple alias + given Ord[Int] = IntOrd() + + // Parameterized alias with context bound + given [A: Ord] => Ord[List[A]] = + ListOrd[A] + + // Parameterized alias with context parameter + given [A] => Ord[A] => Ord[List[A]] = + ListOrd[A] + + // Abstract or deferred given + given Context = deferred + + // By-name given + given () => Context = curCtx ``` -The problem is that this syntax clashes with the quite common case where we want to establish a given without any nested definitions. For instance -consider a given that constructs a type tag: +Here are the same examples, with optional names provided: ```scala -class Tag[T] + // Simple typeclass + given intOrd: Ord[Int]: + def compare(x: Int, y: Int) = ... + + // Parameterized typeclass with context bound + given listOrd: [A: Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with context parameter + given listOrd: [A] => Ord[A] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with named context parameter + given listOrd: [A] => (ord: Ord[A]) => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Simple alias + given intOrd: Ord[Int] = IntOrd() + + // Parameterized alias with context bound + given listOrd: [A: Ord] => Ord[List[A]] = + ListOrd[A] + + // Parameterized alias with context parameter + given listOrd: [A] => Ord[A] => Ord[List[A]] = + ListOrd[A] + + // Abstract or deferred given + given context: Context = deferred + + // By-name given + given context: () => Context = curCtx ``` -Then this works: + +**By Name Givens** + +We sometimes find it necessary that a given alias is re-evaluated each time it is called. For instance, say we have a mutable variable `curCtx` and we want to define a given that returns the current value of that variable. A normal given alias will not do since by default given aliases are mapped to +lazy vals. + +In general, we want to avoid re-evaluation of the given. But there are situations like the one above where we want to specify _by-name_ evaluation instead. 
The proposed new syntax for this is shown in the last clause above. This is arguably the a natural way to express by-name givens. We want to use a conditional given, since these map to methods, but the set of preconditions is empty, hence the `()` parameter. Equivalently, under the context function viewpoint, we are defining a context function of the form `() ?=> T`, and these are equivalent to by-name parameters. + +Compare with the current best way to do achieve this, which is to use a dummy type parameter. ```scala -given Tag[String]() -given Tag[String] with {} + given [DummySoThatItsByName]: Context = curCtx ``` -But the following more natural syntax fails: +This has the same effect, but feels more like a hack than a clean solution. + +**Dropping `with`** + +In the new syntax, all typeclass instances introduce definitions like normal +class bodies, enclosed in braces `{...}` or following a `:`. The irregular +requirement to use `with` is dropped. In retrospect, the main reason to introduce `with` was since a definition like + ```scala -given Tag[String] -``` -The last line gives a rather cryptic error: +given [A](using Ord[A]): Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... ``` -1 |given Tag[String] - | ^ - | anonymous given cannot be abstract +was deemed to be too cryptic, with the double meaning of colons. But since that syntax is gone, we don't need `with` anymore. There's still a double meaning of colons, e.g. in +```scala +given intOrd: Ord[Int]: + ... ``` -The problem is that the compiler thinks that the last given is intended to be abstract, and complains since abstract givens need to be named. This is another annoying dissonance. Nowhere else in Scala's syntax does adding a -`()` argument to a class cause a drastic change in meaning. And it's also a violation of the principle that it should be possible to define all givens without providing names for them. +but since now both uses of `:` are very familiar (type ascription _vs_ start of nested definitions), it's manageable. Besides, the problem occurs only for named typeclass instances, which should be the exceptional case anyway. -Fortunately, abstract givens are no longer necessary since they are superseded by the new `deferred` scheme. So we can deprecate that syntax over time. Abstract givens are a highly specialized mechanism with a so far non-obvious syntax. We have seen that this syntax clashes with reasonable expectations of Scala programmers. My estimate is that maybe a dozen people world-wide have used abstract givens in anger so far. -**Proposal** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. +**Possible ambiguities** -This is less of a disruption than it might appear at first: +If one wants to define a given for an a actual function type (which is probably not advisable in practice), one needs to enclose the function type in parentheses, i.e. `given ([A] => F[A])`. This is true in the currently implemented syntax and stays true for all discussed change proposals. - - `given T` was illegal before since abstract givens could not be anonymous. - It now means a concrete given of class `T` with no member definitions. - - `given x: T` is legacy syntax for an abstract given. - - `given T as x = deferred` is the analogous new syntax, which is more powerful since - it allows for automatic instantiation. - - `given T = deferred` is the anonymous version in the new syntax, which was not expressible before. 
+The double meaning of : with optional prefix names is resolved as usual. A : at the end of a line starts a nested definition block. If for some obscure reason one wants to define a named given on multiple lines, one has to format it as follows: +```scala + given intOrd + : Ord = ... +``` -**Benefits:** +**Summary** - - Simplification of the language since a feature is dropped - - Eliminate non-obvious and misleading syntax. +This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about a 900K definitions of `implicit def`s +in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again. + +Changing something introduced just recently in Scala 3 is not fun, +but I believe these adjustments are preferable to let bad syntax +sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code +starts migrating. + +Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time. ### Bonus: Fixing Singleton @@ -586,7 +700,7 @@ Here are some standard type classes, which were mostly already introduced at the def maximum[T: Ord](xs: List[T]): T = xs.reduce(_ `max` _) - given [T: Ord] => T is Ord as descending: + given descending: [T: Ord] => T is Ord: extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) def minimum[T: Ord](xs: List[T]) = From c40760dafa651fc30063c37332c34efb5621ab24 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 22 Jul 2024 15:32:22 +0200 Subject: [PATCH 393/827] Test case: Dependency injection via Providers --- tests/run/Providers.check | 20 +++++ tests/run/Providers.scala | 177 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 197 insertions(+) create mode 100644 tests/run/Providers.check create mode 100644 tests/run/Providers.scala diff --git a/tests/run/Providers.check b/tests/run/Providers.check new file mode 100644 index 000000000000..7b0a9a8b143e --- /dev/null +++ b/tests/run/Providers.check @@ -0,0 +1,20 @@ +11 +hi +List(1, 2, 3) +hi + +Direct: +You've just been subscribed to RockTheJVM. Welcome, Daniel +Acquired connection +Executing query: insert into subscribers(name, email) values Daniel daniel@RocktheJVM.com +You've just been subscribed to RockTheJVM. Welcome, Martin +Acquired connection +Executing query: insert into subscribers(name, email) values Martin odersky@gmail.com + +Injected +You've just been subscribed to RockTheJVM. Welcome, Daniel +Acquired connection +Executing query: insert into subscribers(name, email) values Daniel daniel@RocktheJVM.com +You've just been subscribed to RockTheJVM. 
Welcome, Martin +Acquired connection +Executing query: insert into subscribers(name, email) values Martin odersky@gmail.com diff --git a/tests/run/Providers.scala b/tests/run/Providers.scala new file mode 100644 index 000000000000..3eb4b2df2207 --- /dev/null +++ b/tests/run/Providers.scala @@ -0,0 +1,177 @@ +import language.experimental.modularity +import compiletime.constValue +import compiletime.ops.int.S + +// Featherweight dependency injection library, inspired by the use case +// laid out in the ZIO course of RockTheJVM. + +/** Some things that are not part of Tuple yet, but that would be nice to have. */ +object TupleUtils: + + /** The index of the first element type of the tuple `Xs` that is a subtype of `X` */ + type IndexOf[Xs <: Tuple, X] <: Int = Xs match + case X *: _ => 0 + case _ *: ys => S[IndexOf[ys, X]] + + /** A trait describing a selection from a tuple `Xs` returning an element of type `X` */ + trait Select[Xs <: Tuple, X]: + def apply(xs: Xs): X + + /** A given implementing `Select` to return the first element of tuple `Xs` + * that has a static type matching `X`. + */ + given [Xs <: NonEmptyTuple, X] => (idx: ValueOf[IndexOf[Xs, X]]) => Select[Xs, X]: + def apply(xs: Xs) = xs.apply(idx.value).asInstanceOf[X] + +/** A featherweight library for dependency injection */ +object Providers: + import TupleUtils.* + + /** A provider is a zero-cost wrapper around a type that is intended + * to be passed implicitly + */ + opaque type Provider[T] = T + + def provide[X](x: X): Provider[X] = x + + def provided[X](using p: Provider[X]): X = p + + /** Project a provider to one of its element types */ + given [Xs <: Tuple, X] => (ps: Provider[Xs], select: Select[Xs, X]) => Provider[X] = + select(ps) + + /** Form a compound provider wrapping a tuple */ + given [X, Xs <: Tuple] => (p: Provider[X], ps: Provider[Xs]) => Provider[X *: Xs] = + p *: ps + + given Provider[EmptyTuple] = EmptyTuple + +end Providers + +@main def Test = + import TupleUtils.* + + type P = (Int, String, List[Int]) + val x: P = (11, "hi", List(1, 2, 3)) + val selectInt = summon[Select[P, Int]] + println(selectInt(x)) + val selectString = summon[Select[P, String]] + println(selectString(x)) + val selectList = summon[Select[P, List[Int]]] + println(selectList(x)) + val selectObject = summon[Select[P, Object]] + println(selectObject(x)) // prints "hi" + println(s"\nDirect:") + Explicit().test() + println(s"\nInjected") + Injected().test() + +/** Demonstrator for explicit dependency construction */ +class Explicit: + + case class User(name: String, email: String) + + class UserSubscription(emailService: EmailService, db: UserDatabase): + def subscribe(user: User) = + emailService.email(user) + db.insert(user) + + class EmailService: + def email(user: User) = + println(s"You've just been subscribed to RockTheJVM. 
Welcome, ${user.name}") + + class UserDatabase(pool: ConnectionPool): + def insert(user: User) = + val conn = pool.get() + conn.runQuery(s"insert into subscribers(name, email) values ${user.name} ${user.email}") + + class ConnectionPool(n: Int): + def get(): Connection = + println(s"Acquired connection") + Connection() + + class Connection(): + def runQuery(query: String): Unit = + println(s"Executing query: $query") + + def test() = + val subscriptionService = + UserSubscription( + EmailService(), + UserDatabase( + ConnectionPool(10) + ) + ) + + def subscribe(user: User) = + val sub = subscriptionService + sub.subscribe(user) + + subscribe(User("Daniel", "daniel@RocktheJVM.com")) + subscribe(User("Martin", "odersky@gmail.com")) + +end Explicit + +/** The same application as `Explicit` but using dependency injection */ +class Injected: + import Providers.* + + case class User(name: String, email: String) + + class UserSubscription(using Provider[(EmailService, UserDatabase)]): + def subscribe(user: User) = + provided[EmailService].email(user) + provided[UserDatabase].insert(user) + + class EmailService: + def email(user: User) = + println(s"You've just been subscribed to RockTheJVM. Welcome, ${user.name}") + + class UserDatabase(using Provider[ConnectionPool]): + def insert(user: User) = + val conn = provided[ConnectionPool].get() + conn.runQuery(s"insert into subscribers(name, email) values ${user.name} ${user.email}") + + class ConnectionPool(n: Int): + def get(): Connection = + println(s"Acquired connection") + Connection() + + class Connection(): + def runQuery(query: String): Unit = + println(s"Executing query: $query") + + def test() = + given Provider[EmailService] = provide(EmailService()) + given Provider[ConnectionPool] = provide(ConnectionPool(10)) + given Provider[UserDatabase] = provide(UserDatabase()) + given Provider[UserSubscription] = provide(UserSubscription()) + + def subscribe(user: User)(using Provider[UserSubscription]) = + val sub = provided[UserSubscription] + sub.subscribe(user) + + subscribe(User("Daniel", "daniel@RocktheJVM.com")) + subscribe(User("Martin", "odersky@gmail.com")) + end test + + // explicit version, not used here + object explicit: + val subscriptionService = + UserSubscription( + using provide( + EmailService(), + UserDatabase( + using provide( + ConnectionPool(10) + ) + ) + ) + ) + + given Provider[UserSubscription] = provide(subscriptionService) + end explicit +end Injected + + + From 62c71c08d5264a1fb9b8eae66c3a76ffa05c1628 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 25 Jul 2024 17:24:21 +0200 Subject: [PATCH 394/827] Fix tests --- tests/neg/i13580.check | 4 ++++ tests/pos/i13580.scala | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 tests/neg/i13580.check diff --git a/tests/neg/i13580.check b/tests/neg/i13580.check new file mode 100644 index 000000000000..8f91bcf9bde5 --- /dev/null +++ b/tests/neg/i13580.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i13580.scala:9:7 ----------------------------------------------------------------------------------- +9 |given (using tracked val w: IntWidth) => IntCandidate: // error + | ^^^^^ + | `using` is already implied here, should not be given explicitly diff --git a/tests/pos/i13580.scala b/tests/pos/i13580.scala index b9b3a00392a0..60e971610209 100644 --- a/tests/pos/i13580.scala +++ b/tests/pos/i13580.scala @@ -1,12 +1,12 @@ //> using options -language:experimental.modularity -source future trait IntWidth: type Out -given IntWidth with +given IntWidth: type Out = 155 trait 
IntCandidate: type Out -given (using tracked val w: IntWidth): IntCandidate with +given (tracked val w: IntWidth) => IntCandidate: type Out = w.Out val x = summon[IntCandidate] From f24abff97f1d7e48f64ac7514a7cfbc7dc45d1f3 Mon Sep 17 00:00:00 2001 From: Raphael Jolly Date: Sun, 28 Jul 2024 13:04:24 +0200 Subject: [PATCH 395/827] Update ScAS --- community-build/community-projects/scas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/community-projects/scas b/community-build/community-projects/scas index acaad1055738..83d0f62bbc57 160000 --- a/community-build/community-projects/scas +++ b/community-build/community-projects/scas @@ -1 +1 @@ -Subproject commit acaad1055738dbbcae7b18e6c6c2fc95f06eb7d6 +Subproject commit 83d0f62bbc57691e509f07186b34847bafe4b96e From eec1e35676837048f1c4499121589a38e02deabe Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Tue, 30 Jul 2024 11:44:10 +0200 Subject: [PATCH 396/827] change mock symbol search --- .../tools/pc/CompilerSearchVisitor.scala | 12 +- .../completion/CompletionWorkspaceSuite.scala | 10 ++ .../pc/utils/TestingWorkspaceSearch.scala | 125 ++++++++++++++---- 3 files changed, 113 insertions(+), 34 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala index 231960ec5116..035c1062a3e3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala @@ -12,6 +12,7 @@ import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Symbols.* +import dotty.tools.pc.utils.InteractiveEnrichments.companion class CompilerSearchVisitor( visitSymbol: Symbol => Boolean @@ -91,11 +92,12 @@ class CompilerSearchVisitor( range: org.eclipse.lsp4j.Range ): Int = val gsym = SemanticdbSymbols.inverseSemanticdbSymbol(symbol).headOption - gsym - .filter(isAccessible) - .map(visitSymbol) - .map(_ => 1) - .getOrElse(0) + val matching = for + sym0 <- gsym.toList + sym <- if sym0.companion.is(Flags.Synthetic) then List(sym0, sym0.companion) else List(sym0) + if isAccessible(sym) + yield visitSymbol(sym) + matching.size def shouldVisitPackage(pkg: String): Boolean = isAccessible(requiredPackage(normalizePackage(pkg))) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala index c8cfbd178f32..e5c81e3c044e 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala @@ -937,3 +937,13 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |""".stripMargin, "" ) + + @Test def `metals-i6593` = + check( + """|package a: + | class UniqueObject + |package b: + | val i = Uniq@@ + |""".stripMargin, + "UniqueObject(): UniqueObject - a" + ) diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala index 0b49bdf8bca8..27b9a49f9555 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala @@ -1,25 +1,63 @@ package dotty.tools.pc.utils +import 
dotty.tools.dotc.ast.untpd.* +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.pc.CompilerSearchVisitor +import dotty.tools.pc.utils.InteractiveEnrichments.decoded + import java.io.File import java.nio.file.Paths - import scala.collection.mutable -import scala.meta.internal.metals.{ - CompilerVirtualFileParams, - Fuzzy, - WorkspaceSymbolQuery -} -import scala.meta.pc.SymbolSearchVisitor import scala.language.unsafeNulls +import scala.meta.internal.metals.CompilerVirtualFileParams +import scala.meta.internal.metals.Fuzzy +import scala.meta.internal.metals.WorkspaceSymbolQuery +import scala.meta.pc.SymbolSearchVisitor -import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.interactive.InteractiveDriver -import dotty.tools.dotc.semanticdb.SemanticSymbolBuilder -import dotty.tools.pc.CompilerSearchVisitor +import TestingWorkspaceSearch.* object TestingWorkspaceSearch: def empty: TestingWorkspaceSearch = new TestingWorkspaceSearch(Nil) + class Disambiguator: + val nameMap = mutable.Map[String, Int]() + def methodPart(name: String) = + val i = nameMap.getOrElse(name, 0) + nameMap.put(name, i + 1) + if i == 0 then "()." + else s"(+$i)." + + case class ParentSymbol(symbol: SearchSymbol, fileName: String): + private val dis: Disambiguator = new Disambiguator + private def isPackage = symbol.lastOption.exists(_.suffix == "/") + private def isMethod = symbol.lastOption.exists(_.suffix.endsWith(").")) + private def isInit = symbol.lastOption.exists(_.name == "") + private def filePackage = SymbolPart(fileName, "$package.") + private def member(part: SymbolPart)= + if isPackage then Some(symbol :+ filePackage :+ part) + else if isMethod then + if isInit then Some(symbol.dropRight(1) :+ part) + else None + else Some(symbol :+ part) + def makeMethod(newPart: String) = member(SymbolPart(newPart, dis.methodPart(newPart))) + def makeVal(newPart: String) = + member(SymbolPart(newPart, ".")) + def makeTypeAlias(newPart: String) = member(SymbolPart(newPart, "#")) + def makeType(newPart: String) = symbol :+ SymbolPart(newPart, "#") + def makeTerm(newPart: String) = symbol :+ SymbolPart(newPart, ".") + def makePackage(parts: List[String], isPackageObject: Boolean = false) = + val suffix = if isPackageObject then "/package." 
else "/" + parts match + case "" :: Nil => List(SymbolPart("_empty_", suffix)) + case list if symbol.map(_.name) == List("_empty_") => list.map(SymbolPart(_, suffix)) + case list => symbol ++ list.map(SymbolPart(_, suffix)) + + object ParentSymbol: + def empty(fileName: String) = ParentSymbol(Nil, fileName) + + case class SymbolPart(name: String, suffix: String) + type SearchSymbol = List[SymbolPart] class TestingWorkspaceSearch(classpath: Seq[String]): val inputs: mutable.Map[String, String] = mutable.Map.empty[String, String] @@ -30,8 +68,41 @@ class TestingWorkspaceSearch(classpath: Seq[String]): defaultFlags ++ List("-classpath", classpath.mkString(File.pathSeparator)) + private class SymbolCollector extends UntypedTreeAccumulator[List[Tree]]: + override def apply(x: List[Tree], tree: Tree)(using Context): List[Tree] = tree :: x + + private def newSymbol(tree: Tree, parent: ParentSymbol)(using Context): Option[SearchSymbol] = + tree match + case PackageDef(name, _) => + Some(parent.makePackage(namesFromSelect(name).reverse)) + case m @ ModuleDef(name, _) if m.mods.is(Flags.Package) => + Some(parent.makePackage(List(name.decoded), isPackageObject = true)) + case ModuleDef(name, _) => + Some(parent.makeTerm(name.decoded)) + case ValDef(name, _, _) => + parent.makeVal(name.decoded) + case t @ TypeDef(name, _: Template) if !t.mods.is(Flags.Implicit) => + Some(parent.makeType(name.decoded)) + case TypeDef(name, _) => + parent.makeTypeAlias(name.decoded) + case DefDef(name, _, _, _) => + parent.makeMethod(name.decoded) + case _ => None + + def traverse(acc: List[SearchSymbol], tree: Tree, parent: ParentSymbol)(using Context): List[SearchSymbol] = + val symbol = newSymbol(tree, parent) + val res = symbol.filter(_.lastOption.exists(_.suffix != "/")).map(_ :: acc).getOrElse(acc) + val children = foldOver(Nil, tree).reverse + val newParent = symbol.map(ParentSymbol(_, parent.fileName)).getOrElse(parent) + children.foldLeft(res)((a, c) => traverse(a, c, newParent)) + val driver = new InteractiveDriver(settings) + private def namesFromSelect(select: Tree)(using Context): List[String] = + select match + case Select(qual, name) => name.decoded :: namesFromSelect(qual) + case Ident(name) => List(name.decoded) + def search( query: WorkspaceSymbolQuery, visitor: SymbolSearchVisitor, @@ -41,21 +112,17 @@ class TestingWorkspaceSearch(classpath: Seq[String]): visitor match case visitor: CompilerSearchVisitor => - inputs.map { (path, text) => - - val nioPath = Paths.get(path) - val uri = nioPath.toUri() - val symbols = DefSymbolCollector(driver, CompilerVirtualFileParams(uri, text)).namedDefSymbols - - // We have to map symbol from this Context, to one in PresentationCompiler - // To do it we are searching it with semanticdb symbol - val semanticSymbolBuilder = SemanticSymbolBuilder() - symbols - .filter((symbol, _) => filter(symbol)) - .filter((_, name) => Fuzzy.matches(query.query, name)) - .map(symbol => semanticSymbolBuilder.symbolName(symbol._1)) - .map( - visitor.visitWorkspaceSymbol(Paths.get(""), _, null, null) - ) - } + inputs.map: (path, text) => + val nio = Paths.get(path) + val uri = nio.toUri() + driver.run(uri, text) + val run = driver.currentCtx.run + val unit = run.units.head + val symbols = SymbolCollector().traverse(Nil, unit.untpdTree, ParentSymbol.empty(nio.getFileName().toString().stripSuffix(".scala"))) + symbols.foreach: sym => + val name = sym.last.name + if Fuzzy.matches(query.query, name) + then + val symbolsString = sym.map{ case SymbolPart(name, suffix) => name ++ 
suffix}.mkString + visitor.visitWorkspaceSymbol(Paths.get(""), symbolsString, null, null) case _ => From a699502ecbf80ecd05df92ac308dc9beb6a64ecd Mon Sep 17 00:00:00 2001 From: kasiaMarek Date: Tue, 30 Jul 2024 17:07:39 +0200 Subject: [PATCH 397/827] fix: completions when parenthesis already provided --- .../tools/pc/completions/Completions.scala | 21 ++++++++++++------- .../completion/CompletionSnippetSuite.scala | 6 ++++-- .../pc/tests/completion/CompletionSuite.scala | 11 ++++++++++ 3 files changed, 28 insertions(+), 10 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index 2cd8db318690..d043a2cfddbf 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -60,6 +60,14 @@ class Completions( private lazy val shouldAddSnippet = path match + case (_: (Import | Export)) :: _ => false + case _ :: (_: (Import | Export)) :: _ => false + // UnApply has patterns included in MatchCaseCompletions + case _ :: (_: UnApply) :: _ => false + case _ => true + + private lazy val shouldAddSuffix = shouldAddSnippet && + (path match /* In case of `method@@()` we should not add snippets and the path * will contain apply as the parent of the current tree. */ @@ -72,11 +80,8 @@ class Completions( case _ :: (withcursor @ Select(fun, name)) :: (appl: GenericApply) :: _ if appl.fun == withcursor && name.decoded == Cursor.value => false - case (_: (Import | Export)) :: _ => false - case _ :: (_: (Import | Export)) :: _ => false - // UnApply has patterns included in MatchCaseCompletions - case _ :: (_: UnApply) :: _ => false - case _ => true + case _ => true) + private lazy val isNew: Boolean = Completion.isInNewContext(adjustedPath) @@ -198,12 +203,12 @@ class Completions( private def findSuffix(symbol: Symbol): CompletionAffix = CompletionAffix.empty .chain { suffix => // for [] suffix - if shouldAddSnippet && symbol.info.typeParams.nonEmpty then + if shouldAddSuffix && symbol.info.typeParams.nonEmpty then suffix.withNewSuffixSnippet(Affix(SuffixKind.Bracket)) else suffix } .chain { suffix => // for () suffix - if shouldAddSnippet && symbol.is(Flags.Method) then + if shouldAddSuffix && symbol.is(Flags.Method) then val paramss = getParams(symbol) paramss match case Nil => suffix @@ -224,7 +229,7 @@ class Completions( else suffix } .chain { suffix => // for {} suffix - if shouldAddSnippet && isNew && isAbstractType(symbol) then + if shouldAddSuffix && isNew && isAbstractType(symbol) then if suffix.hasSnippet then suffix.withNewSuffix(Affix(SuffixKind.Template)) else suffix.withNewSuffixSnippet(Affix(SuffixKind.Template)) else suffix diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala index 2c91f71d8d19..381375c65131 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala @@ -289,7 +289,8 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |} |""".stripMargin, "scala.util.Try@@(1)", - "scala.util.Try(1)" + "scala.util.Try(1)", + assertSingleItem = false ) @Test def `case-class` = @@ -300,7 +301,8 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |""".stripMargin, 
"scala.util.Tr@@(1)", "scala.util.Try(1)", - filter = str => str.contains("Try") + filter = str => str.contains("Try"), + assertSingleItem = false ) @Test def `case-class2` = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 6cccc923a5f5..437fe606932b 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -2031,3 +2031,14 @@ class CompletionSuite extends BaseCompletionSuite: """.stripMargin, filter = _.contains("name") ) + + @Test def `with-parenthesis` = + check( + """|package a + |class MyClass + |val i = MyClass@@() + |""".stripMargin, + """|MyClass(): MyClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) From e2427533c563cb5ca708dbcaa51823fcc30102db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 22 Jul 2024 17:13:41 +0200 Subject: [PATCH 398/827] Fix #20856: Serialize `Waiting` and `Evaluating` as if `null`. This strategy ensures the "serializability" condition of parallel programs--not to be confused with the data being `java.io.Serializable`. Indeed, if thread A is evaluating the lazy val while thread B attempts to serialize its owner object, there is also an alternative schedule where thread B serializes the owner object *before* A starts evaluating the lazy val. Therefore, forcing B to see the non-evaluating state is correct. --- library/src/scala/runtime/LazyVals.scala | 20 ++++++- tests/run/i20856.check | 1 + tests/run/i20856.scala | 70 ++++++++++++++++++++++++ 3 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 tests/run/i20856.check create mode 100644 tests/run/i20856.scala diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index e38e016f5182..15220ea2410a 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -52,13 +52,29 @@ object LazyVals { * Used to indicate the state of a lazy val that is being * evaluated and of which other threads await the result. */ - final class Waiting extends CountDownLatch(1) with LazyValControlState + final class Waiting extends CountDownLatch(1) with LazyValControlState { + /* #20856 If not fully evaluated yet, serialize as if not-evaluat*ing* yet. + * This strategy ensures the "serializability" condition of parallel + * programs--not to be confused with the data being `java.io.Serializable`. + * Indeed, if thread A is evaluating the lazy val while thread B attempts + * to serialize its owner object, there is also an alternative schedule + * where thread B serializes the owner object *before* A starts evaluating + * the lazy val. Therefore, forcing B to see the non-evaluating state is + * correct. + */ + private def writeReplace(): Any = null + } /** * Used to indicate the state of a lazy val that is currently being * evaluated with no other thread awaiting its result. */ - object Evaluating extends LazyValControlState + object Evaluating extends LazyValControlState { + /* #20856 If not fully evaluated yet, serialize as if not-evaluat*ing* yet. + * See longer comment in `Waiting.writeReplace()`. 
+ */ + private def writeReplace(): Any = null + } /** * Used to indicate the state of a lazy val that has been evaluated to diff --git a/tests/run/i20856.check b/tests/run/i20856.check new file mode 100644 index 000000000000..a677d8bd3ca6 --- /dev/null +++ b/tests/run/i20856.check @@ -0,0 +1 @@ +succeeded: BOMB: test diff --git a/tests/run/i20856.scala b/tests/run/i20856.scala new file mode 100644 index 000000000000..893ddee73adc --- /dev/null +++ b/tests/run/i20856.scala @@ -0,0 +1,70 @@ +// scalajs: --skip + +import java.io.* + +class Message(content: String) extends Serializable: + //@transient + lazy val bomb: String = + Thread.sleep(200) + "BOMB: " + content +end Message + +object Test: + def serialize(obj: Message): Array[Byte] = + val byteStream = ByteArrayOutputStream() + val objectStream = ObjectOutputStream(byteStream) + try + objectStream.writeObject(obj) + byteStream.toByteArray + finally + objectStream.close() + byteStream.close() + end serialize + + def deserialize(bytes: Array[Byte]): Message = + val byteStream = ByteArrayInputStream(bytes) + val objectStream = ObjectInputStream(byteStream) + try + objectStream.readObject().asInstanceOf[Message] + finally + objectStream.close() + byteStream.close() + end deserialize + + def main(args: Array[String]): Unit = + val bytes = + val msg = Message("test") + + val touch = Thread(() => { + msg.bomb // start evaluation before serialization + () + }) + touch.start() + + Thread.sleep(50) // give some time for the fork to start lazy val rhs eval + + serialize(msg) // serialize in the meantime so that we capture Waiting state + end bytes + + val deserializedMsg = deserialize(bytes) + + @volatile var msg = "" + @volatile var started = false + val read = Thread(() => { + started = true + msg = deserializedMsg.bomb + () + }) + read.start() + + Thread.sleep(1000) + if !started then + throw Exception("ouch, the thread has not started yet after 1s") + + if !msg.isEmpty() then + println(s"succeeded: $msg") + else + read.interrupt() + throw new AssertionError("failed to read bomb in 1s!") + end main +end Test From 32e4056a00ce76045f2299ed312437b9bff4286e Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Jul 2024 16:19:11 +0200 Subject: [PATCH 399/827] Move `NamedTuple.head` to `NamedTupleDecomposition` This is in particular necessary for #21291, to avoid problems encountered after inlining from scopes defining opaque types (such as in the example below), as was already done for the other NamedTuple operations in #20504. 
```scala -- Error: tests/pos/named-tuple-combinators.scala:46:17 ------------------------ 46 | val res1 = x.head | ^^^^^^ |(Int, String) does not conform to bound >: | (x$proxy55 : (x : Test.NT) & | $proxy19.NamedTuple[ | Tuple.Concat[ | NamedTupleDecomposition.Names[ | $proxy19.NamedTuple[Tuple1[("hi" : String)], Tuple1[Int]]], | NamedTupleDecomposition.Names[ | $proxy19.NamedTuple[Tuple1[("bla" : String)], Tuple1[String]]] | ], | Tuple.Concat[ | NamedTupleDecomposition.DropNames[ | $proxy19.NamedTuple[Tuple1[("hi" : String)], Tuple1[Int]]], | NamedTupleDecomposition.DropNames[ | $proxy19.NamedTuple[Tuple1[("bla" : String)], Tuple1[String]]] | ] | ] | ) | <: Tuple |---------------------------------------------------------------------------- |Inline stack trace |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from NamedTuple.scala:47 47 | inline def head: Tuple.Elem[V, 0] = x.apply(0) | ^^^^^^^ ---------------------------------------------------------------------------- ``` --- library/src/scala/NamedTuple.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index 4a9d2b9f13d8..21c4c6840f5c 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -30,7 +30,7 @@ object NamedTuple: export NamedTupleDecomposition.{ Names, DropNames, - apply, size, init, last, tail, take, drop, splitAt, ++, map, reverse, zip, toList, toArray, toIArray + apply, size, init, head, last, tail, take, drop, splitAt, ++, map, reverse, zip, toList, toArray, toIArray } extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) @@ -43,9 +43,6 @@ object NamedTuple: // and should be reverted, just like NonEmptyList is also appealing at first, but a bad idea // in the end. - /** The first element value of this tuple */ - inline def head: Tuple.Elem[V, 0] = x.apply(0) - // inline def :* [L] (x: L): NamedTuple[Append[N, ???], Append[V, L] = ??? // inline def *: [H] (x: H): NamedTuple[??? *: N], H *: V] = ??? @@ -149,6 +146,9 @@ object NamedTupleDecomposition: /** The number of elements in this tuple */ inline def size: Tuple.Size[V] = x.toTuple.size + /** The first element value of this tuple */ + inline def head: Tuple.Elem[V, 0] = apply(0) + /** The last element value of this tuple */ inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] From 0000d23e7530cded8df5637f7aa295fa5fd89302 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Jul 2024 15:27:06 +0200 Subject: [PATCH 400/827] Only replace *new* errors by warnings under `-migration` This makes `errorOrMigrationWarning` monotonic from the sourceVersion onto ok < warn < error For example, ForComprehensionPatternWithoutCase: - became an error in 3.4, - was hence a warning in 3.4-migration, - but it should still be an error in 3.5-migration. 
--- compiler/src/dotty/tools/dotc/report.scala | 2 +- tests/neg/migrate-once.scala | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 tests/neg/migrate-once.scala diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 1d8ca5f208fa..c77d4eb2fc7e 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -99,7 +99,7 @@ object report: def errorOrMigrationWarning(msg: Message, pos: SrcPos, migrationVersion: MigrationVersion)(using Context): Unit = if sourceVersion.isAtLeast(migrationVersion.errorFrom) then - if !sourceVersion.isMigrating then error(msg, pos) + if sourceVersion != migrationVersion.errorFrom.prevMigrating then error(msg, pos) else if ctx.settings.rewrite.value.isEmpty then migrationWarning(msg, pos) else if sourceVersion.isAtLeast(migrationVersion.warnFrom) then warning(msg, pos) diff --git a/tests/neg/migrate-once.scala b/tests/neg/migrate-once.scala new file mode 100644 index 000000000000..da5b76e4fb8c --- /dev/null +++ b/tests/neg/migrate-once.scala @@ -0,0 +1,5 @@ +//> using options -source:3.5-migration + +object Test: + for Some(x) <- Seq(Option(1)) yield x // error + // was warn before changes, but should warn only until 3.4-migration From 053c8f9cd2c382481d147d90e37c6c8c327e2882 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 1 Aug 2024 13:36:40 +0200 Subject: [PATCH 401/827] A tweak to type improvement When we replace Nothing by a fresh type variable, we should not accidentally instantiate that type variable to Any in case it is still undetermined. We achieve this by giving the type variable a slightly disguised version of Nothing which makes the compiler believe it has a lower bound. Fixes #21275 --- compiler/src/dotty/tools/dotc/typer/Inferencing.scala | 10 +++++++++- tests/pos/i21725.scala | 10 ++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21725.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 92be3130c99d..09284d0a2874 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -180,7 +180,15 @@ object Inferencing { t match case t: TypeRef => if t.symbol == defn.NothingClass then - newTypeVar(TypeBounds.empty, nestingLevel = tvar.nestingLevel) + val notExactlyNothing = LazyRef(_ => defn.NothingType) + val bounds = TypeBounds(notExactlyNothing, defn.AnyType) + // The new type variale has a slightly disguised lower bound Nothing. + // This foils the `isExactlyNothing` test in `hasLowerBound` and + // therefore makes the new type variable have a lower bound. That way, + // we favor in `apply` below instantiating from below to `Nothing` instead + // of from above to `Any`. That avoids a spurious flip of the roginal `Nothing` + // instance to `Any`. See i21275 for a test case. + newTypeVar(bounds, nestingLevel = tvar.nestingLevel) else if t.symbol.is(ModuleClass) then tryWidened(t.parents.filter(!_.isTransparent()) .foldLeft(defn.AnyType: Type)(TypeComparer.andType(_, _))) diff --git a/tests/pos/i21725.scala b/tests/pos/i21725.scala new file mode 100644 index 000000000000..6d586aa891b6 --- /dev/null +++ b/tests/pos/i21725.scala @@ -0,0 +1,10 @@ +class Box[+O]: + def ++[O2 >: O](other: Box[O2]): Box[O2] = ??? +object Box: + val empty: Box[Nothing] = ??? 
+ +def test[T]: Box[T] = + List(Box.empty, Box.empty) + // .reduceOption[Box[T]](_ ++ _) // works + .reduceOption(_ ++ _) // fails + .getOrElse(Box.empty) \ No newline at end of file From 020587177672c4d0bdef9308bae59ad78130641a Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 1 Aug 2024 18:15:13 +0200 Subject: [PATCH 402/827] Rename test to correct issue number --- compiler/src/dotty/tools/dotc/typer/Inferencing.scala | 2 +- tests/pos/{i21725.scala => i21275.scala} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename tests/pos/{i21725.scala => i21275.scala} (100%) diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 09284d0a2874..c41fb2e60ae5 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -186,7 +186,7 @@ object Inferencing { // This foils the `isExactlyNothing` test in `hasLowerBound` and // therefore makes the new type variable have a lower bound. That way, // we favor in `apply` below instantiating from below to `Nothing` instead - // of from above to `Any`. That avoids a spurious flip of the roginal `Nothing` + // of from above to `Any`. That avoids a spurious flip of the original `Nothing` // instance to `Any`. See i21275 for a test case. newTypeVar(bounds, nestingLevel = tvar.nestingLevel) else if t.symbol.is(ModuleClass) then diff --git a/tests/pos/i21725.scala b/tests/pos/i21275.scala similarity index 100% rename from tests/pos/i21725.scala rename to tests/pos/i21275.scala From 6837445ef4f03898a9c96c82910141dcebdf362f Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Fri, 2 Aug 2024 10:26:33 +0200 Subject: [PATCH 403/827] Fix syntax errors introduced by #21206 --- tests/pos/i21189-alt.scala | 2 +- tests/pos/i21189.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pos/i21189-alt.scala b/tests/pos/i21189-alt.scala index 08213cd627d4..10a55ec25185 100644 --- a/tests/pos/i21189-alt.scala +++ b/tests/pos/i21189-alt.scala @@ -7,6 +7,6 @@ trait Ord[T] trait Sorted[T] extends ParentOfSorted[T] trait ParentOfSorted[T]: - given Ord[T] as ord = compiletime.deferred + given ord: Ord[T] = compiletime.deferred class SortedSet[T : Ord] extends Sorted[T] diff --git a/tests/pos/i21189.scala b/tests/pos/i21189.scala index 88a0bf601476..ea27f88402de 100644 --- a/tests/pos/i21189.scala +++ b/tests/pos/i21189.scala @@ -5,6 +5,6 @@ class MySortedSet[T : Ord] extends SortedSet[T] trait Ord[T] trait Sorted[T]: - given Ord[T] as ord = compiletime.deferred + given orrd: Ord[T] = compiletime.deferred class SortedSet[T : Ord] extends Sorted[T] From b4dcf7837accf04fd660b58354b0adca7d86c90f Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Fri, 2 Aug 2024 18:17:43 +0200 Subject: [PATCH 404/827] Wrap the arguments passed to test scalac task between " --- project/Build.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 4f90566a60f9..05eb164f91f3 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -844,7 +844,8 @@ object Build { extraClasspath ++= Seq(dottyCompiler, dottyInterfaces, asm, dottyStaging, dottyTastyInspector, tastyCore, compilerInterface) } - val fullArgs = main :: defaultOutputDirectory ::: (if (printTasty) args else insertClasspathInArgs(args, extraClasspath.mkString(File.pathSeparator))) + val wrappedArgs = (if (printTasty) args else insertClasspathInArgs(args, extraClasspath.mkString(File.pathSeparator))).map(arg => "\""+ 
arg + "\"") + val fullArgs = main :: defaultOutputDirectory ::: wrappedArgs (Compile / runMain).toTask(fullArgs.mkString(" ", " ", "")) }.evaluated, From f43fe1fdf6acd23e1a751adf958bc5c4b0509239 Mon Sep 17 00:00:00 2001 From: Aleksander Rainko Date: Sun, 4 Aug 2024 17:11:48 +0200 Subject: [PATCH 405/827] reject derived with explicit term params --- .../src/dotty/tools/dotc/typer/Deriving.scala | 15 +++++++-- tests/neg/i15987/DerivedIssue.check | 4 +++ tests/neg/i15987/DerivedIssue.scala | 32 +++++++++++++++++++ 3 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 tests/neg/i15987/DerivedIssue.check create mode 100644 tests/neg/i15987/DerivedIssue.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index 619dfcf4d7cb..60148319a61c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -292,10 +292,21 @@ trait Deriving { val companion = companionRef(resultType) val module = untpd.ref(companion).withSpan(sym.span) val rhs = untpd.Select(module, nme.derived) - if companion.termSymbol.exists then typed(rhs, resultType) - else errorTree(rhs, em"$resultType cannot be derived since ${resultType.typeSymbol} has no companion object") + val derivedMember = companion.member(nme.derived) + + if !companion.termSymbol.exists then + errorTree(rhs, em"$resultType cannot be derived since ${resultType.typeSymbol} has no companion object") + else if hasExplicitParams(derivedMember.symbol) then + errorTree(rhs, em"""derived instance $resultType failed to generate: + |method `derived` from object ${module} takes explicit term parameters""") + else + typed(rhs, resultType) end typeclassInstance + // checks whether any of the params of 'sym' is explicit + def hasExplicitParams(sym: Symbol) = + !sym.paramSymss.flatten.forall(sym => sym.isType || sym.is(Flags.Given) || sym.is(Flags.Implicit)) + def syntheticDef(sym: Symbol): Tree = inContext(ctx.fresh.setOwner(sym).setNewScope) { if sym.is(Method) then tpd.DefDef(sym.asTerm, typeclassInstance(sym)) else tpd.ValDef(sym.asTerm, typeclassInstance(sym)(Nil)) diff --git a/tests/neg/i15987/DerivedIssue.check b/tests/neg/i15987/DerivedIssue.check new file mode 100644 index 000000000000..f1254c1c6f57 --- /dev/null +++ b/tests/neg/i15987/DerivedIssue.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/7722.scala:2:40 ------------------------------------------------------------------------------------ +2 | @scala.annotation.targetName("E") def this() = this(3) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | @targetName annotation may not be used on a constructor diff --git a/tests/neg/i15987/DerivedIssue.scala b/tests/neg/i15987/DerivedIssue.scala new file mode 100644 index 000000000000..3da8b7765607 --- /dev/null +++ b/tests/neg/i15987/DerivedIssue.scala @@ -0,0 +1,32 @@ +import scala.language.experimental.clauseInterleaving + +trait ShowWithExplicit[A] + +object ShowWithExplicit: + def derived[A, B](explicit: String)(using DummyImplicit)(implicit dummy: DummyImplicit): ShowWithExplicit[A] = ??? + +trait ShowUsingAndImplicit[A] + +object ShowUsingAndImplicit: + def derived[A, B](using DummyImplicit)(implicit dummy: DummyImplicit): ShowUsingAndImplicit[A] = ??? + +trait ShowUsing[A] + +object ShowUsing: + def derived[A](using DummyImplicit): ShowUsing[A] = ??? + +trait ShowImplicit[A] + +object ShowImplicit: + def derived[A](implicit ev: DummyImplicit): ShowImplicit[A] = ??? 
+ +trait ShowContra[-A] + +object ShowContra: + val derived: ShowContra[Any] = ??? + +case class Person(name: String) derives ShowWithExplicit, // error + ShowUsingAndImplicit, + ShowUsing, + ShowImplicit, + ShowContra From 88c98711dfbb10c5ce2b9e247469c7f10ac80e2e Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 5 Aug 2024 11:04:27 +0200 Subject: [PATCH 406/827] Fix test to avoid error with scala2_library_tasty --- tests/warn/nonunit-statement.check | 89 ++++++++++++++++++++++++++++++ tests/warn/nonunit-statement.scala | 6 +- 2 files changed, 92 insertions(+), 3 deletions(-) create mode 100644 tests/warn/nonunit-statement.check diff --git a/tests/warn/nonunit-statement.check b/tests/warn/nonunit-statement.check new file mode 100644 index 000000000000..742a9fe911e8 --- /dev/null +++ b/tests/warn/nonunit-statement.check @@ -0,0 +1,89 @@ +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:13:4 --------------------------------------------- +13 | improved // warn + | ^^^^^^^^ + | unused value of type (improved : => scala.concurrent.Future[Int]) +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:20:4 --------------------------------------------- +20 | new E().toString // warn + | ^^^^^^^^^^^^^^^^ + | unused value of type String +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:26:2 --------------------------------------------- +26 | Future(42) // warn + | ^^^^^^^^^^ + | unused value of type scala.concurrent.Future[Int] +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:30:6 --------------------------------------------- +30 | copy() // warn + | ^^^^^^ + | unused value of type K +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:37:2 --------------------------------------------- +37 | 27 +: xs // warn + | ^^^^^^^^ + | unused value of type List[Int] +-- [E129] Potential Issue Warning: tests/warn/nonunit-statement.scala:44:2 --------------------------------------------- +44 | null // warn for purity + | ^^^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:58:19 -------------------------------------------- +58 | if (!isEmpty) f(a) // warn (if) + | ^^^^ + | discarded non-Unit value of type U +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:62:7 --------------------------------------------- +62 | f(a) // warn (if) + | ^^^^ + | discarded non-Unit value of type Boolean +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:73:25 -------------------------------------------- +73 | if (!fellback) action(z) // warn (if) + | ^^^^^^^^^ + | discarded non-Unit value of type U +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:79:6 --------------------------------------------- +79 | g // warn block statement + | ^ + | unused value of type (g : => Int) +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:81:6 --------------------------------------------- +81 | g // warn (if) + | ^ + | discarded non-Unit value of type (g : => Int) +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:84:6 --------------------------------------------- +84 | g // warn + | ^ + | unused value of type (g : => Int) +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:86:6 --------------------------------------------- +86 | g // warn + | ^ + | discarded non-Unit value of type (g : 
=> Int) +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:96:4 --------------------------------------------- + 96 | if (b) { // warn, at least one branch looks interesting + | ^ + | unused value of type Int + 97 | println("true") + 98 | i + 99 | } +100 | else { +101 | println("false") +102 | j +103 | } +-- [E176] Potential Issue Warning: tests/warn/nonunit-statement.scala:116:4 -------------------------------------------- +116 | set += a // warn because cannot know whether the `set` was supposed to be consumed or assigned + | ^^^^^^^^ + | unused value of type scala.collection.mutable.LinkedHashSet[A] +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:126:37 ------------------------------------------- +126 | if (start.length != 0) jsb.append(start) // warn (value-discard) + | ^^^^^^^^^^^^^^^^^ + | discarded non-Unit value of type StringBuilder +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:132:18 ------------------------------------------- +132 | jsb.append(it.next()) // warn (value-discard) + | ^^^^^^^^^^^^^^^^^^^^^ + | discarded non-Unit value of type StringBuilder +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:135:35 ------------------------------------------- +135 | if (end.length != 0) jsb.append(end) // warn (value-discard) + | ^^^^^^^^^^^^^^^ + | discarded non-Unit value of type StringBuilder +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:141:14 ------------------------------------------- +141 | b.append(it.next()) // warn (value-discard) + | ^^^^^^^^^^^^^^^^^^^ + | discarded non-Unit value of type StringBuilder +-- [E175] Potential Issue Warning: tests/warn/nonunit-statement.scala:146:30 ------------------------------------------- +146 | while (it.hasNext) it.next() // warn + | ^^^^^^^^^ + | discarded non-Unit value of type String diff --git a/tests/warn/nonunit-statement.scala b/tests/warn/nonunit-statement.scala index f90deb647d6e..b8e18a9a9c48 100644 --- a/tests/warn/nonunit-statement.scala +++ b/tests/warn/nonunit-statement.scala @@ -158,8 +158,8 @@ class J { class Variant { var bs = ListBuffer.empty[Int] val xs = ListBuffer.empty[Int] - private[this] val ys = ListBuffer.empty[Int] - private[this] var zs = ListBuffer.empty[Int] + private val ys = ListBuffer.empty[Int] + private var zs = ListBuffer.empty[Int] def f(i: Int): Unit = { bs.addOne(i) xs.addOne(i) @@ -175,7 +175,7 @@ final class ArrayOops[A](private val xs: Array[A]) extends AnyVal { val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) if (xs.length == 0) bb.result() else { - def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + def mkRowBuilder() = ArrayBuilder.make[B](using ClassTag[B](aClass.getComponentType)) val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) for (xs <- other) { var i = 0 From af0412ced6ae8eca55a682ef3f8d42229cbd8782 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Tue, 2 Jul 2024 02:47:05 +0200 Subject: [PATCH 407/827] Let `-Wall` override `-Wunused` --- compiler/src/dotty/tools/dotc/config/ScalaSettings.scala | 2 +- tests/warn/i18559c.check | 8 ++++++++ tests/warn/i18559c.scala | 6 +++--- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 011b31aba50a..d775a4239d1b 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ 
b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -203,7 +203,7 @@ private sealed trait WarningSettings: def nowarn(using Context) = allOr("nowarn") // Is any choice set for -Wunused? - def any(using Context): Boolean = Wunused.value.nonEmpty + def any(using Context): Boolean = Wall.value || Wunused.value.nonEmpty // overrided by strict-no-implicit-warn def imports(using Context) = diff --git a/tests/warn/i18559c.check b/tests/warn/i18559c.check index 7fd42a48db0c..1c1bd86bf15f 100644 --- a/tests/warn/i18559c.check +++ b/tests/warn/i18559c.check @@ -1,4 +1,12 @@ +-- [E198] Unused Symbol Warning: tests/warn/i18559c.scala:4:28 --------------------------------------------------------- +4 | import collection.mutable.Set // warn + | ^^^ + | unused import -- [E198] Unused Symbol Warning: tests/warn/i18559c.scala:8:8 ---------------------------------------------------------- 8 | val x = 1 // warn | ^ | unused local definition +-- [E198] Unused Symbol Warning: tests/warn/i18559c.scala:11:26 -------------------------------------------------------- +11 | import SomeGivenImports.given // warn + | ^^^^^ + | unused import diff --git a/tests/warn/i18559c.scala b/tests/warn/i18559c.scala index 3ca0c8893a66..34576cd831b0 100644 --- a/tests/warn/i18559c.scala +++ b/tests/warn/i18559c.scala @@ -1,14 +1,14 @@ //> using options -Wall -Wunused:locals -// This test checks that -Wall leaves -Wunused:... untouched if it is already set +// This test checks that -Wall overrides -Wunused:... if it is already set object FooImportUnused: - import collection.mutable.Set // not warn + import collection.mutable.Set // warn object FooUnusedLocal: def test(): Unit = val x = 1 // warn object FooGivenUnused: - import SomeGivenImports.given // not warn + import SomeGivenImports.given // warn object SomeGivenImports: given Int = 0 From e4f38d4b2c1e5b3a641e7a716362a629781ec59f Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Mon, 5 Aug 2024 16:24:34 +0200 Subject: [PATCH 408/827] fix: Dealias NamedTuple's name types when resolving NamedTuple's element types --- .../src/dotty/tools/dotc/core/TypeUtils.scala | 2 +- tests/pos/i21300.scala | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21300.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index beacf15e4afe..ca0f0d7e43bd 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -129,7 +129,7 @@ class TypeUtils: def namedTupleElementTypesUpTo(bound: Int, normalize: Boolean = true)(using Context): List[(TermName, Type)] = (if normalize then self.normalized else self).dealias match case defn.NamedTuple(nmes, vals) => - val names = nmes.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil).map: + val names = nmes.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil).map(_.dealias).map: case ConstantType(Constant(str: String)) => str.toTermName case t => throw TypeError(em"Malformed NamedTuple: names must be string types, but $t was found.") val values = vals.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil) diff --git a/tests/pos/i21300.scala b/tests/pos/i21300.scala new file mode 100644 index 000000000000..22859482ef98 --- /dev/null +++ b/tests/pos/i21300.scala @@ -0,0 +1,17 @@ +import scala.language.experimental.namedTuples + +class Test[S <: String & Singleton](name: S): + + type NT = NamedTuple.NamedTuple[(S, "foo"), (Int, Long)] + def nt: NT = ??? 
+ + type Name = S + + type NT2 = NamedTuple.NamedTuple[(Name, "foo"), (Int, Long)] + def nt2: NT2 = ??? + +def test = + val foo = new Test("bar") + + foo.nt.bar + foo.nt2.bar From 4f22672d12faf90e395769ee1028ac48b9c3d7e9 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 12 Jun 2024 16:06:10 +0100 Subject: [PATCH 409/827] Re-fix skipping match analysis & inlining --- .../tools/dotc/transform/PatternMatcher.scala | 14 ++------------ .../dotty/tools/dotc/transform/patmat/Space.scala | 5 +---- 2 files changed, 3 insertions(+), 16 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index a5c85d4f9f3a..9750c41b7252 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -35,13 +35,6 @@ class PatternMatcher extends MiniPhase { override def runsAfter: Set[String] = Set(ElimRepeated.name) - private val InInlinedCode = new util.Property.Key[Boolean] - private def inInlinedCode(using Context) = ctx.property(InInlinedCode).getOrElse(false) - - override def prepareForInlined(tree: Inlined)(using Context): Context = - if inInlinedCode then ctx - else ctx.fresh.setProperty(InInlinedCode, true) - override def transformMatch(tree: Match)(using Context): Tree = if (tree.isInstanceOf[InlineMatch]) tree else { @@ -53,13 +46,10 @@ class PatternMatcher extends MiniPhase { case rt => tree.tpe val translated = new Translator(matchType, this).translateMatch(tree) - if !inInlinedCode then + // Skip analysis on inlined code (eg pos/i19157) + if !tpd.enclosingInlineds.nonEmpty then // check exhaustivity and unreachability SpaceEngine.checkMatch(tree) - else - // only check exhaustivity, as inlining may generate unreachable code - // like in i19157.scala - SpaceEngine.checkMatchExhaustivityOnly(tree) translated.ensureConforms(matchType) } diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index eb74058dfb10..774909ee271e 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -902,9 +902,6 @@ object SpaceEngine { } def checkMatch(m: Match)(using Context): Unit = - checkMatchExhaustivityOnly(m) - if reachabilityCheckable(m.selector) then checkReachability(m) - - def checkMatchExhaustivityOnly(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then checkExhaustivity(m) + if reachabilityCheckable(m.selector) then checkReachability(m) } From 3ba51a60fad87fc80c041df0ba5cb0c2c7306baf Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 17 Jun 2024 10:25:08 +0100 Subject: [PATCH 410/827] Fix PrefixSetting --- compiler/src/dotty/tools/dotc/config/Settings.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 9250303e8cc8..7454682fba56 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -411,9 +411,10 @@ object Settings: def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation)) - 
def PrefixSetting(category: SettingCategory, name: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = + def PrefixSetting(category: SettingCategory, name0: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = + val name = prependName(name0) val prefix = name.takeWhile(_ != '<') - publish(Setting(category, "-" + name, descr, Nil, prefix = Some(prefix), deprecation = deprecation)) + publish(Setting(category, name, descr, Nil, prefix = Some(prefix), deprecation = deprecation)) def VersionSetting(category: SettingCategory, name: String, descr: String, default: ScalaVersion = NoScalaVersion, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[ScalaVersion] = publish(Setting(category, prependName(name), descr, default, legacyArgs = legacyArgs, deprecation = deprecation)) From 5994ea79dce2ce30778592effff8b00df29a930f Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 1 Jul 2024 12:27:44 +0100 Subject: [PATCH 411/827] Childless --- .../src/dotty/tools/dotc/transform/patmat/Space.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 774909ee271e..7b33c0398f00 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -616,7 +616,7 @@ object SpaceEngine { case tp if tp.classSymbol.isAllOf(JavaEnum) => tp.classSymbol.children.map(_.termRef) // the class of a java enum value is the enum class, so this must follow SingletonType to not loop infinitely - case tp @ AppliedType(Parts(parts), targs) if tp.classSymbol.children.isEmpty => + case Childless(tp @ AppliedType(Parts(parts), targs)) => // It might not obvious that it's OK to apply the type arguments of a parent type to child types. // But this is guarded by `tp.classSymbol.children.isEmpty`, // meaning we'll decompose to the same class, just not the same type. @@ -676,6 +676,12 @@ object SpaceEngine { final class PartsExtractor(val get: List[Type]) extends AnyVal: def isEmpty: Boolean = get == ListOfNoType + object Childless: + def unapply(tp: Type)(using Context): Result = + Result(if tp.classSymbol.children.isEmpty then tp else NoType) + class Result(val get: Type) extends AnyVal: + def isEmpty: Boolean = !get.exists + /** Show friendly type name with current scope in mind * * E.g. 
C.this.B --> B if current owner is C From b56da91c7ad1f1c61c8cf0499db5f5399abc5802 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 20 Jun 2024 16:47:24 +0100 Subject: [PATCH 412/827] Fix a bundle of patmat issues Some of the issues are legitimate --- .../tools/dotc/transform/patmat/Space.scala | 31 ++++++++++++------- tests/warn/i20121.scala | 13 ++++++++ tests/warn/i20122.scala | 17 ++++++++++ tests/warn/i20123.scala | 16 ++++++++++ tests/warn/i20128.scala | 9 ++++++ tests/warn/i20129.scala | 14 +++++++++ tests/warn/i20130.scala | 11 +++++++ tests/warn/i20131.scala | 17 ++++++++++ tests/warn/i20132.alt.scala | 8 +++++ tests/warn/i20132.scala | 8 +++++ tests/warn/i20132.wo.scala | 8 +++++ tests/warn/i5422.scala | 9 ++++++ tests/warn/t11620.scala | 9 ++++++ 13 files changed, 158 insertions(+), 12 deletions(-) create mode 100644 tests/warn/i20121.scala create mode 100644 tests/warn/i20122.scala create mode 100644 tests/warn/i20123.scala create mode 100644 tests/warn/i20128.scala create mode 100644 tests/warn/i20129.scala create mode 100644 tests/warn/i20130.scala create mode 100644 tests/warn/i20131.scala create mode 100644 tests/warn/i20132.alt.scala create mode 100644 tests/warn/i20132.scala create mode 100644 tests/warn/i20132.wo.scala create mode 100644 tests/warn/i5422.scala create mode 100644 tests/warn/t11620.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 7b33c0398f00..a4bd62622d8a 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -528,8 +528,7 @@ object SpaceEngine { // force type inference to infer a narrower type: could be singleton // see tests/patmat/i4227.scala mt.paramInfos(0) <:< scrutineeTp - instantiateSelected(mt, tvars) - isFullyDefined(mt, ForceDegree.all) + maximizeType(mt.paramInfos(0), Spans.NoSpan) mt } @@ -543,7 +542,7 @@ object SpaceEngine { // Case unapplySeq: // 1. return the type `List[T]` where `T` is the element type of the unapplySeq return type `Seq[T]` - val resTp = ctx.typeAssigner.safeSubstMethodParams(mt, scrutineeTp :: Nil).finalResultType + val resTp = wildApprox(ctx.typeAssigner.safeSubstMethodParams(mt, scrutineeTp :: Nil).finalResultType) val sig = if (resTp.isRef(defn.BooleanClass)) @@ -564,20 +563,14 @@ object SpaceEngine { if (arity > 0) productSelectorTypes(resTp, unappSym.srcPos) else { - val getTp = resTp.select(nme.get).finalResultType match - case tp: TermRef if !tp.isOverloaded => - // Like widenTermRefExpr, except not recursively. - // For example, in i17184 widen Option[foo.type]#get - // to Option[foo.type] instead of Option[Int]. - tp.underlying.widenExpr - case tp => tp + val getTp = extractorMemberType(resTp, nme.get, unappSym.srcPos) if (argLen == 1) getTp :: Nil else productSelectorTypes(getTp, unappSym.srcPos) } } } - sig.map(_.annotatedToRepeated) + sig.map { case tp: WildcardType => tp.bounds.hi case tp => tp } } /** Whether the extractor covers the given type */ @@ -623,7 +616,21 @@ object SpaceEngine { // For instance, from i15029, `decompose((X | Y).Field[T]) = [X.Field[T], Y.Field[T]]`. parts.map(tp.derivedAppliedType(_, targs)) - case tp if tp.isDecomposableToChildren => + case tpOriginal if tpOriginal.isDecomposableToChildren => + // isDecomposableToChildren uses .classSymbol.is(Sealed) + // But that classSymbol could be from an AppliedType + // where the type constructor is a non-class type + // E.g. 
t11620 where `?1.AA[X]` returns as "sealed" + // but using that we're not going to infer A1[X] and A2[X] + // but end up with A1[] and A2[]. + // So we widen (like AppliedType superType does) away + // non-class type constructors. + def getAppliedClass(tp: Type): Type = tp match + case tp @ AppliedType(_: HKTypeLambda, _) => tp + case tp @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => tp + case tp @ AppliedType(tycon: TypeProxy, _) => getAppliedClass(tycon.superType.applyIfParameterized(tp.args)) + case tp => tp + val tp = getAppliedClass(tpOriginal) def getChildren(sym: Symbol): List[Symbol] = sym.children.flatMap { child => if child eq sym then List(sym) // i3145: sealed trait Baz, val x = new Baz {}, Baz.children returns Baz... diff --git a/tests/warn/i20121.scala b/tests/warn/i20121.scala new file mode 100644 index 000000000000..ce8e3e4d74f6 --- /dev/null +++ b/tests/warn/i20121.scala @@ -0,0 +1,13 @@ +sealed trait T_A[A, B] +type X = T_A[Byte, Byte] + +case class CC_B[A](a: A) extends T_A[A, X] + +val v_a: T_A[X, X] = CC_B(null) +val v_b = v_a match + case CC_B(_) => 0 // warn: unreachable + case _ => 1 + // for CC_B[A] to match T_A[X, X] + // A := X + // so require X, aka T_A[Byte, Byte] + // which isn't instantiable, outside of null diff --git a/tests/warn/i20122.scala b/tests/warn/i20122.scala new file mode 100644 index 000000000000..50da42a5926c --- /dev/null +++ b/tests/warn/i20122.scala @@ -0,0 +1,17 @@ +sealed trait T_B[C, D] + +case class CC_A() +case class CC_B[A, C](a: A) extends T_B[C, CC_A] +case class CC_C[C, D](a: T_B[C, D]) extends T_B[Int, CC_A] +case class CC_E(a: CC_C[Char, Byte]) + +val v_a: T_B[Int, CC_A] = CC_B(CC_E(CC_C(null))) +val v_b = v_a match + case CC_B(CC_E(CC_C(_))) => 0 // warn: unreachable + case _ => 1 + // for CC_B[A, C] to match T_B[C, CC_A] + // C <: Int, ok + // A <: CC_E, ok + // but you need a CC_C[Char, Byte] + // which requires a T_B[Char, Byte] + // which isn't instantiable, outside of null diff --git a/tests/warn/i20123.scala b/tests/warn/i20123.scala new file mode 100644 index 000000000000..32de903210b2 --- /dev/null +++ b/tests/warn/i20123.scala @@ -0,0 +1,16 @@ +sealed trait T_A[A, B] +sealed trait T_B[C] + +case class CC_D[A, C]() extends T_A[A, C] +case class CC_E() extends T_B[Nothing] +case class CC_G[A, C](c: C) extends T_A[A, C] + +val v_a: T_A[Boolean, T_B[Boolean]] = CC_G(null) +val v_b = v_a match { + case CC_D() => 0 + case CC_G(_) => 1 // warn: unreachable + // for CC_G[A, C] to match T_A[Boolean, T_B[Boolean]] + // A := Boolean, which is ok + // C := T_B[Boolean], + // which isn't instantiable, outside of null +} diff --git a/tests/warn/i20128.scala b/tests/warn/i20128.scala new file mode 100644 index 000000000000..f09b323c6ca0 --- /dev/null +++ b/tests/warn/i20128.scala @@ -0,0 +1,9 @@ +sealed trait T_A[A] +case class CC_B[A](a: T_A[A]) extends T_A[Byte] +case class CC_E[A](b: T_A[A]) extends T_A[Byte] + +val v_a: T_A[Byte] = CC_E(CC_B(null)) +val v_b: Int = v_a match { // warn: not exhaustive + case CC_E(CC_E(_)) => 0 + case CC_B(_) => 1 +} diff --git a/tests/warn/i20129.scala b/tests/warn/i20129.scala new file mode 100644 index 000000000000..de0f9af76718 --- /dev/null +++ b/tests/warn/i20129.scala @@ -0,0 +1,14 @@ +sealed trait T_A[A] +case class CC_B[A](a: T_A[A], c: T_A[A]) extends T_A[Char] +case class CC_C[A]() extends T_A[A] +case class CC_G() extends T_A[Char] + +val v_a: T_A[Char] = CC_B(CC_G(), CC_C()) +val v_b: Int = v_a match { // warn: not exhaustive + case CC_C() => 0 + case CC_G() => 1 + case 
CC_B(CC_B(_, _), CC_C()) => 2 + case CC_B(CC_C(), CC_C()) => 3 + case CC_B(_, CC_G()) => 4 + case CC_B(_, CC_B(_, _)) => 5 +} diff --git a/tests/warn/i20130.scala b/tests/warn/i20130.scala new file mode 100644 index 000000000000..571959c2b388 --- /dev/null +++ b/tests/warn/i20130.scala @@ -0,0 +1,11 @@ +sealed trait T_A[B] +sealed trait T_B[C] +case class CC_B[C]() extends T_A[T_B[C]] +case class CC_C[B, C](c: T_A[B], d: T_B[C]) extends T_B[C] +case class CC_E[C]() extends T_B[C] + +val v_a: T_B[Int] = CC_C(null, CC_E()) +val v_b: Int = v_a match { // warn: not exhaustive + case CC_C(_, CC_C(_, _)) => 0 + case CC_E() => 5 +} diff --git a/tests/warn/i20131.scala b/tests/warn/i20131.scala new file mode 100644 index 000000000000..662c2896dc9a --- /dev/null +++ b/tests/warn/i20131.scala @@ -0,0 +1,17 @@ +sealed trait Foo +case class Foo1() extends Foo +case class Foo2[A, B]() extends Foo + +sealed trait Bar[A, B] +case class Bar1[A, C, D](a: Bar[C, D]) extends Bar[A, Bar[C, D]] +case class Bar2[ C, D](b: Bar[C, D], c: Foo) extends Bar[Bar1[Int, Byte, Int], Bar[C, D]] + +class Test: + def m1(bar: Bar[Bar1[Int, Byte, Int], Bar[Char, Char]]): Int = bar match + case Bar1(_) => 0 + case Bar2(_, Foo2()) => 1 + def t1 = m1(Bar2(null, Foo1())) + // for Bar2[C, D] to match the scrutinee + // C := Char and D := Char + // which requires a Bar[Char, Char] + // which isn't instantiable, outside of null diff --git a/tests/warn/i20132.alt.scala b/tests/warn/i20132.alt.scala new file mode 100644 index 000000000000..2d45367c61b8 --- /dev/null +++ b/tests/warn/i20132.alt.scala @@ -0,0 +1,8 @@ +sealed trait Foo[A] +case class Bar[C](x: Foo[C]) extends Foo[C] +case class End[B]() extends Foo[B] +class Test: + def m1[M](foo: Foo[M]): Int = foo match // warn: not exhaustive + case End() => 0 + case Bar(End()) => 1 + def t1 = m1[Int](Bar[Int](Bar[Int](End[Int]()))) diff --git a/tests/warn/i20132.scala b/tests/warn/i20132.scala new file mode 100644 index 000000000000..a5f40278234a --- /dev/null +++ b/tests/warn/i20132.scala @@ -0,0 +1,8 @@ +sealed trait Foo[A] +case class Bar[C](x: Foo[C]) extends Foo[Int] +case class End[B]() extends Foo[B] +class Test: + def m1[M](foo: Foo[M]): Int = foo match // warn: not exhaustive + case End() => 0 + case Bar(End()) => 1 + def t1 = m1[Int](Bar[Int](Bar[Int](End[Int]()))) diff --git a/tests/warn/i20132.wo.scala b/tests/warn/i20132.wo.scala new file mode 100644 index 000000000000..a6945758ae8d --- /dev/null +++ b/tests/warn/i20132.wo.scala @@ -0,0 +1,8 @@ +sealed trait Foo[A] +case class Bar[C](x: Foo[C]) extends Foo[Int] +case class End[B]() extends Foo[B] +class Test: + def m1[M](foo: Foo[M]): Int = foo match + case End() => 0 + case Bar(_) => 1 + def t1 = m1[Int](Bar[Int](Bar[Int](End[Int]()))) diff --git a/tests/warn/i5422.scala b/tests/warn/i5422.scala new file mode 100644 index 000000000000..bc124382d7d3 --- /dev/null +++ b/tests/warn/i5422.scala @@ -0,0 +1,9 @@ +sealed trait Foo[A[_]] + +case class Bar[C[_], X](x: C[X]) extends Foo[C] +case class End[B[_]]() extends Foo[B] + +class Test: + def foo[M[_]](foo: Foo[M]): Int = foo match + case End() => 0 + case Bar(_) => 1 diff --git a/tests/warn/t11620.scala b/tests/warn/t11620.scala new file mode 100644 index 000000000000..2d87d4c1a2c6 --- /dev/null +++ b/tests/warn/t11620.scala @@ -0,0 +1,9 @@ +sealed trait A[+T0] +case class A1[+T1](t1: T1) extends A[T1] +case class A2[+T2](t2: T2) extends A[T2] +sealed trait B[+T3] { type AA[+U] <: A[U] ; def a: AA[T3] } +object B { def unapply[T4](b: B[T4]): Some[b.AA[T4]] = Some(b.a) } 
+class Test: + def m1[X](b: B[X]): X = b match + case B(A1(v1)) => v1 + case B(A2(v2)) => v2 From 7183bb2714ac85125a38637950cd4266436113d6 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 4 Jul 2024 12:09:33 +0100 Subject: [PATCH 413/827] Detail why not baseType --- .../dotty/tools/dotc/transform/patmat/Space.scala | 8 ++++++++ tests/warn/i15893.min.scala | 13 +++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 tests/warn/i15893.min.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index a4bd62622d8a..d4aafa91676f 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -625,6 +625,14 @@ object SpaceEngine { // but end up with A1[] and A2[]. // So we widen (like AppliedType superType does) away // non-class type constructors. + // + // Can't use `tpOriginal.baseType(cls)` because it causes + // i15893 to return exhaustivity warnings, because instead of: + // <== refineUsingParent(N, class Succ, []) = Succ[] + // <== isSub(Succ[] <:< Succ[Succ[]]) = true + // we get + // <== refineUsingParent(NatT, class Succ, []) = Succ[NatT] + // <== isSub(Succ[NatT] <:< Succ[Succ[]]) = false def getAppliedClass(tp: Type): Type = tp match case tp @ AppliedType(_: HKTypeLambda, _) => tp case tp @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => tp diff --git a/tests/warn/i15893.min.scala b/tests/warn/i15893.min.scala new file mode 100644 index 000000000000..755dda5cbcda --- /dev/null +++ b/tests/warn/i15893.min.scala @@ -0,0 +1,13 @@ +sealed trait NatT +case class Zero() extends NatT +case class Succ[+N <: NatT](n: N) extends NatT + +type Mod2[N <: NatT] <: NatT = N match + case Zero => Zero + case Succ[Zero] => Succ[Zero] + case Succ[Succ[predPredN]] => Mod2[predPredN] + +def dependentlyTypedMod2[N <: NatT](n: N): Mod2[N] = n match + case Zero(): Zero => Zero() // warn + case Succ(Zero()): Succ[Zero] => Succ(Zero()) // warn + case Succ(Succ(predPredN)): Succ[Succ[?]] => dependentlyTypedMod2(predPredN) // warn From 8df91fa50500b1f5b87e78499a3988591c104f12 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 4 Jul 2024 18:01:56 +0100 Subject: [PATCH 414/827] Strip null in exhaustivityCheckable --- .../tools/dotc/transform/patmat/Space.scala | 10 ++++++---- tests/warn/i20132.stream-Tuple2.scala | 18 ++++++++++++++++++ 2 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 tests/warn/i20132.stream-Tuple2.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index d4aafa91676f..39bf0d9bfc2a 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -3,7 +3,9 @@ package dotc package transform package patmat -import core.*, Constants.*, Contexts.*, Decorators.*, Flags.*, Names.*, NameOps.*, StdNames.*, Symbols.*, Types.* +import core.* +import Constants.*, Contexts.*, Decorators.*, Flags.*, NullOpsDecorator.*, Symbols.*, Types.* +import Names.*, NameOps.*, StdNames.* import ast.*, tpd.* import config.Printers.* import printing.{ Printer, * }, Texts.* @@ -793,12 +795,12 @@ object SpaceEngine { doShow(s) } - private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = { + private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = trace(i"exhaustivityCheckable($sel ${sel.className})") { val seen = 
collection.mutable.Set.empty[Symbol] // Possible to check everything, but be compatible with scalac by default - def isCheckable(tp: Type): Boolean = - val tpw = tp.widen.dealias + def isCheckable(tp: Type): Boolean = trace(i"isCheckable($tp ${tp.className})"): + val tpw = tp.widen.dealias.stripNull() val classSym = tpw.classSymbol classSym.is(Sealed) && !tpw.isLargeGenericTuple || // exclude large generic tuples from exhaustivity // requires an unknown number of changes to make work diff --git a/tests/warn/i20132.stream-Tuple2.scala b/tests/warn/i20132.stream-Tuple2.scala new file mode 100644 index 000000000000..b7cf58f8f930 --- /dev/null +++ b/tests/warn/i20132.stream-Tuple2.scala @@ -0,0 +1,18 @@ +//> using options -Yexplicit-nulls -Yno-flexible-types + +// Previously failed because the scrutinee under +// unsafeNulls/explicit-nulls/no-flexible-types +// is (String, String) | Null +// Need to strip the null before considering it exhaustivity checkable + +import scala.language.unsafeNulls + +import scala.jdk.CollectionConverters.* + +class Test2: + def t1: Unit = { + val xs = List.empty[(String, String)] + xs.asJava.forEach { case (a, b) => + () + } + } From e160de736a0e223d7b4acdc7b1d4710632c74deb Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 5 Jul 2024 12:24:01 +0100 Subject: [PATCH 415/827] Fix SeqFactoryClass#unapplySeq Previously `defn.ListType.appliedTo(elemTp) <:< pat.tpe` failed when pat.tpe is something like ParamClause, an alias. --- .../src/dotty/tools/dotc/transform/patmat/Space.scala | 2 +- tests/warn/i20132.list-Seq.scala | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 tests/warn/i20132.list-Seq.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 39bf0d9bfc2a..9d60336c02d7 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -352,7 +352,7 @@ object SpaceEngine { val funRef = fun1.tpe.asInstanceOf[TermRef] if (fun.symbol.name == nme.unapplySeq) val (arity, elemTp, resultTp) = unapplySeqInfo(fun.tpe.widen.finalResultType, fun.srcPos) - if (fun.symbol.owner == defn.SeqFactoryClass && defn.ListType.appliedTo(elemTp) <:< pat.tpe) + if fun.symbol.owner == defn.SeqFactoryClass && pat.tpe.hasClassSymbol(defn.ListClass) then // The exhaustivity and reachability logic already handles decomposing sum types (into its subclasses) // and product types (into its components). To get better counter-examples for patterns that are of type // List (or a super-type of list, like LinearSeq) we project them into spaces that use `::` and Nil. diff --git a/tests/warn/i20132.list-Seq.scala b/tests/warn/i20132.list-Seq.scala new file mode 100644 index 000000000000..95d6e962d547 --- /dev/null +++ b/tests/warn/i20132.list-Seq.scala @@ -0,0 +1,10 @@ +class D1 +class D2 + +class Test1: + type Ds = List[D1] | List[D2] + def m1(dss: List[Ds]) = + dss.flatMap: + case Seq(d) => Some(1) + case Seq(head, tail*) => Some(2) + case Seq() => None From 5fd810e6eda0d357b5e3e3117ad9260b237de6d5 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 5 Jul 2024 14:21:05 +0100 Subject: [PATCH 416/827] Strip null on the scrutinee Without that, we end up with either a flexible type or a `| Null`. 
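
A minimal sketch of the scrutinee shape involved (adapted from the `i20132.future-Left` test added below; the exact typing mechanics are illustrative, assuming `-Yexplicit-nulls` with `-Yno-flexible-types` and `unsafeNulls`):

```scala
//> using options -Yexplicit-nulls -Yno-flexible-types

import scala.language.unsafeNulls
import java.util.concurrent.CompletableFuture

def sketch(fut: CompletableFuture[Either[String, List[String]]]) =
  // Under explicit nulls the value matched here picks up a `| Null`
  // (or flexible) wrapper from the Java signature; exhaustivity checking
  // strips that from the selector type before projecting it.
  fut.thenApply {
    case Right(edits) => edits
    case Left(error)  => throw new Exception(error)
  }
```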
--- .../dotty/tools/dotc/transform/patmat/Space.scala | 4 ++-- tests/warn/i20132.future-Left.scala | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 tests/warn/i20132.future-Left.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 9d60336c02d7..804beab83b13 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -836,7 +836,7 @@ object SpaceEngine { /** Return the underlying type of non-module, non-constant, non-enum case singleton types. * Also widen ExprType to its result type, and rewrap any annotation wrappers. * For example, with `val opt = None`, widen `opt.type` to `None.type`. */ - def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp)")(tp match { + def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp ${tp.className})")(tp match { case _: ConstantType => tp case tp: TermRef if tp.symbol.is(Module) => tp case tp: TermRef if tp.symbol.isAllOf(EnumCase) => tp @@ -847,7 +847,7 @@ object SpaceEngine { }) def checkExhaustivity(m: Match)(using Context): Unit = trace(i"checkExhaustivity($m)") { - val selTyp = toUnderlying(m.selector.tpe).dealias + val selTyp = toUnderlying(m.selector.tpe.stripNull()).dealias val targetSpace = trace(i"targetSpace($selTyp)")(project(selTyp)) val patternSpace = Or(m.cases.foldLeft(List.empty[Space]) { (acc, x) => diff --git a/tests/warn/i20132.future-Left.scala b/tests/warn/i20132.future-Left.scala new file mode 100644 index 000000000000..a25718eadb6b --- /dev/null +++ b/tests/warn/i20132.future-Left.scala @@ -0,0 +1,13 @@ +//> using options -Yexplicit-nulls -Yno-flexible-types + +import scala.language.unsafeNulls + +import java.util.concurrent.CompletableFuture +import scala.jdk.CollectionConverters._ + +class Test1: + def m1 = + val fut: CompletableFuture[Either[String, List[String]]] = ??? 
+ fut.thenApply: + case Right(edits: List[String]) => edits.asJava + case Left(error: String) => throw new Exception(error) From 4dd8e8ae3b6adae09a5c8a8bd80f9c022c4264e8 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 8 Jul 2024 11:40:29 +0100 Subject: [PATCH 417/827] Maximise once more --- .../src/dotty/tools/dotc/transform/patmat/Space.scala | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 804beab83b13..42983597e37a 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -524,6 +524,7 @@ object SpaceEngine { val mt: MethodType = unapp.widen match { case mt: MethodType => mt case pt: PolyType => + val locked = ctx.typerState.ownedVars val tvars = constrained(pt) val mt = pt.instantiate(tvars).asInstanceOf[MethodType] scrutineeTp <:< mt.paramInfos(0) @@ -531,6 +532,14 @@ object SpaceEngine { // see tests/patmat/i4227.scala mt.paramInfos(0) <:< scrutineeTp maximizeType(mt.paramInfos(0), Spans.NoSpan) + if !(ctx.typerState.ownedVars -- locked).isEmpty then + // constraining can create type vars out of wildcard types + // (in legalBound, by using a LevelAvoidMap) + // maximise will only do one pass at maximising the type vars in the target type + // which means we can maximise to types that include other type vars + // this fails TreeChecker's "non-empty constraint at end of $fusedPhase" check + // e.g. run-macros/string-context-implicits + maximizeType(mt.paramInfos(0), Spans.NoSpan) mt } From 5b425ee40d51e1c7e5c36745c83102f04f13af42 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 5 Aug 2024 15:42:27 +0100 Subject: [PATCH 418/827] Detail the second-pass maximizeType in Space.signature --- compiler/src/dotty/tools/dotc/transform/patmat/Space.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 42983597e37a..f7ec95e21c90 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -539,6 +539,10 @@ object SpaceEngine { // which means we can maximise to types that include other type vars // this fails TreeChecker's "non-empty constraint at end of $fusedPhase" check // e.g. run-macros/string-context-implicits + // I can't prove that a second call won't also create type vars, + // but I'd rather have an unassigned new-new type var, than an infinite loop. + // After all, there's nothing strictly "wrong" with unassigned type vars, + // it just fails TreeChecker's linting. 
maximizeType(mt.paramInfos(0), Spans.NoSpan) mt } From ab240f1d5b12e242a79b8791e020a89b1a877c7f Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 5 Aug 2024 16:19:18 +0100 Subject: [PATCH 419/827] Only strip under unsafeNulls --- .../dotty/tools/dotc/transform/patmat/Space.scala | 7 +++++-- tests/warn/i20132.stream-Tuple2.safeNulls.check | 8 ++++++++ .../warn/i20132.stream-Tuple2.safeNulls.fixed.scala | 12 ++++++++++++ tests/warn/i20132.stream-Tuple2.safeNulls.scala | 11 +++++++++++ 4 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 tests/warn/i20132.stream-Tuple2.safeNulls.check create mode 100644 tests/warn/i20132.stream-Tuple2.safeNulls.fixed.scala create mode 100644 tests/warn/i20132.stream-Tuple2.safeNulls.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index f7ec95e21c90..20b0099d82e2 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -808,12 +808,15 @@ object SpaceEngine { doShow(s) } + extension (self: Type) private def stripUnsafeNulls()(using Context): Type = + if Nullables.unsafeNullsEnabled then self.stripNull() else self + private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = trace(i"exhaustivityCheckable($sel ${sel.className})") { val seen = collection.mutable.Set.empty[Symbol] // Possible to check everything, but be compatible with scalac by default def isCheckable(tp: Type): Boolean = trace(i"isCheckable($tp ${tp.className})"): - val tpw = tp.widen.dealias.stripNull() + val tpw = tp.widen.dealias.stripUnsafeNulls() val classSym = tpw.classSymbol classSym.is(Sealed) && !tpw.isLargeGenericTuple || // exclude large generic tuples from exhaustivity // requires an unknown number of changes to make work @@ -860,7 +863,7 @@ object SpaceEngine { }) def checkExhaustivity(m: Match)(using Context): Unit = trace(i"checkExhaustivity($m)") { - val selTyp = toUnderlying(m.selector.tpe.stripNull()).dealias + val selTyp = toUnderlying(m.selector.tpe.stripUnsafeNulls()).dealias val targetSpace = trace(i"targetSpace($selTyp)")(project(selTyp)) val patternSpace = Or(m.cases.foldLeft(List.empty[Space]) { (acc, x) => diff --git a/tests/warn/i20132.stream-Tuple2.safeNulls.check b/tests/warn/i20132.stream-Tuple2.safeNulls.check new file mode 100644 index 000000000000..e444ef8c3340 --- /dev/null +++ b/tests/warn/i20132.stream-Tuple2.safeNulls.check @@ -0,0 +1,8 @@ +-- [E029] Pattern Match Exhaustivity Warning: tests/warn/i20132.stream-Tuple2.safeNulls.scala:8:24 --------------------- +8 | xs.asJava.forEach { case (a, b) => // warn + | ^ + | match may not be exhaustive. 
+ | + | It would fail on pattern case: _: Null + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i20132.stream-Tuple2.safeNulls.fixed.scala b/tests/warn/i20132.stream-Tuple2.safeNulls.fixed.scala new file mode 100644 index 000000000000..817d8ce06cee --- /dev/null +++ b/tests/warn/i20132.stream-Tuple2.safeNulls.fixed.scala @@ -0,0 +1,12 @@ +//> using options -Yexplicit-nulls -Yno-flexible-types + +import scala.jdk.CollectionConverters.* + +class Test2: + def t1: Unit = { + val xs = List.empty[(String, String)] + xs.asJava.forEach { + case (a, b) => () + case null => () + } + } diff --git a/tests/warn/i20132.stream-Tuple2.safeNulls.scala b/tests/warn/i20132.stream-Tuple2.safeNulls.scala new file mode 100644 index 000000000000..2d4a2318039e --- /dev/null +++ b/tests/warn/i20132.stream-Tuple2.safeNulls.scala @@ -0,0 +1,11 @@ +//> using options -Yexplicit-nulls -Yno-flexible-types + +import scala.jdk.CollectionConverters.* + +class Test2: + def t1: Unit = { + val xs = List.empty[(String, String)] + xs.asJava.forEach { case (a, b) => // warn + () + } + } From ffac87d3e9429cbe43e9363cc779c92547534cc1 Mon Sep 17 00:00:00 2001 From: Aleksander Rainko Date: Mon, 5 Aug 2024 21:13:40 +0200 Subject: [PATCH 420/827] add a test for the new error message --- tests/neg/i15987.check | 5 +++++ tests/neg/{i15987/DerivedIssue.scala => i15987.scala} | 2 -- tests/neg/i15987/DerivedIssue.check | 4 ---- 3 files changed, 5 insertions(+), 6 deletions(-) create mode 100644 tests/neg/i15987.check rename tests/neg/{i15987/DerivedIssue.scala => i15987.scala} (93%) delete mode 100644 tests/neg/i15987/DerivedIssue.check diff --git a/tests/neg/i15987.check b/tests/neg/i15987.check new file mode 100644 index 000000000000..b62c8cac160b --- /dev/null +++ b/tests/neg/i15987.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i15987.scala:26:40 --------------------------------------------------------------------------------- +26 |case class Person(name: String) derives ShowWithExplicit, // error + | ^ + | derived instance ShowWithExplicit[Person] failed to generate: + | method `derived` from object ShowWithExplicit takes explicit term parameters diff --git a/tests/neg/i15987/DerivedIssue.scala b/tests/neg/i15987.scala similarity index 93% rename from tests/neg/i15987/DerivedIssue.scala rename to tests/neg/i15987.scala index 3da8b7765607..743f5f7ccda0 100644 --- a/tests/neg/i15987/DerivedIssue.scala +++ b/tests/neg/i15987.scala @@ -1,5 +1,3 @@ -import scala.language.experimental.clauseInterleaving - trait ShowWithExplicit[A] object ShowWithExplicit: diff --git a/tests/neg/i15987/DerivedIssue.check b/tests/neg/i15987/DerivedIssue.check deleted file mode 100644 index f1254c1c6f57..000000000000 --- a/tests/neg/i15987/DerivedIssue.check +++ /dev/null @@ -1,4 +0,0 @@ --- Error: tests/neg/7722.scala:2:40 ------------------------------------------------------------------------------------ -2 | @scala.annotation.targetName("E") def this() = this(3) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | @targetName annotation may not be used on a constructor From fd458478c669ba4dd64a06349fb2dcd374ff1617 Mon Sep 17 00:00:00 2001 From: kenji yoshida <6b656e6a69@gmail.com> Date: Tue, 6 Aug 2024 16:13:35 +0900 Subject: [PATCH 421/827] fix typo (#21324) --- compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala | 2 +- compiler/src/dotty/tools/dotc/CompilationUnit.scala | 2 +- compiler/src/dotty/tools/dotc/Compiler.scala | 2 +- .../src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala | 4 
++-- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 4 ++-- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 4 ++-- compiler/src/dotty/tools/dotc/core/Annotations.scala | 2 +- compiler/src/dotty/tools/dotc/core/Contexts.scala | 2 +- compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala | 2 +- compiler/src/dotty/tools/dotc/parsing/Scanners.scala | 2 +- compiler/src/dotty/tools/dotc/transform/CtxLazy.scala | 2 +- .../src/dotty/tools/dotc/transform/ElimErasedValueType.scala | 2 +- compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala | 2 +- compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala | 4 ++-- compiler/src/dotty/tools/dotc/transform/Pickler.scala | 2 +- compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala | 2 +- compiler/src/dotty/tools/dotc/transform/SelectStatic.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Synthesizer.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Typer.scala | 2 +- compiler/src/dotty/tools/io/FileWriters.scala | 2 +- docs/_docs/contributing/architecture/phases.md | 2 +- docs/_docs/internals/overall-structure.md | 2 +- docs/_docs/reference/experimental/runtimeChecked.md | 2 +- .../dotty/tools/pc/tests/completion/CompletionArgSuite.scala | 4 ++-- scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala | 2 +- .../interpreter/TreeInterpreter.scala | 2 +- tests/pos-with-compiler-cc/dotc/CompilationUnit.scala | 2 +- tests/pos-with-compiler-cc/dotc/Compiler.scala | 2 +- tests/pos-with-compiler-cc/dotc/config/Config.scala | 2 +- tests/pos-with-compiler-cc/dotc/core/Contexts.scala | 2 +- .../pos-with-compiler-cc/dotc/core/tasty/CommentPickler.scala | 2 +- tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala | 2 +- tests/pos-with-compiler-cc/dotc/transform/CtxLazy.scala | 2 +- .../dotc/transform/ElimErasedValueType.scala | 2 +- tests/pos-with-compiler-cc/dotc/transform/ExpandSAMs.scala | 2 +- tests/pos-with-compiler-cc/dotc/transform/ExplicitOuter.scala | 4 ++-- .../pos-with-compiler-cc/dotc/transform/ReifiedReflect.scala | 2 +- tests/pos-with-compiler-cc/dotc/transform/SelectStatic.scala | 2 +- tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala | 2 +- tests/pos-with-compiler-cc/dotc/typer/Typer.scala | 2 +- tests/warn/i15503c.scala | 2 +- 41 files changed, 47 insertions(+), 47 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala index 36e95c788086..e2730c1e84ab 100644 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -129,7 +129,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { if (file.isInstanceOf[JarArchive]) { val jarCompressionLevel = compilerSettings.jarCompressionLevel // Writing to non-empty JAR might be an undefined behaviour, e.g. 
in case if other files where - // created using `AbstractFile.bufferedOutputStream`instead of JarWritter + // created using `AbstractFile.bufferedOutputStream`instead of JarWriter val jarFile = file.underlyingSource.getOrElse{ throw new IllegalStateException("No underlying source for jar") } diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index a9e5dbacc938..0975c94e916a 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -87,7 +87,7 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn */ val depRecorder: sbt.DependencyRecorder = sbt.DependencyRecorder() - /** Suspends the compilation unit by thowing a SuspendException + /** Suspends the compilation unit by throwing a SuspendException * and recording the suspended compilation unit */ def suspend(hint: => String)(using Context): Nothing = diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index ffd3d27f7c99..9b130e7d7804 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -110,7 +110,7 @@ class Compiler { new LetOverApply, // Lift blocks from receivers of applications new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. - List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types + List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types new PureStats, // Remove pure stats from blocks new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala index b302a2463a4e..6f0723bf8f35 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala @@ -32,7 +32,7 @@ abstract class TreeMapWithTrackedStats extends TreeMapWithImplicits: case _ => tree end updateTracked - /** Process a list of trees and give the priority to trakced trees */ + /** Process a list of trees and give the priority to tracked trees */ private final def withUpdatedTrackedTrees(stats: List[Tree])(using Context) = val trackedTrees = TreeMapWithTrackedStats.trackedTrees stats.mapConserve: @@ -67,7 +67,7 @@ end TreeMapWithTrackedStats object TreeMapWithTrackedStats: private val TrackedTrees = new Property.Key[mutable.Map[Symbol, tpd.MemberDef]] - /** Fetch the tracked trees in the cuurent context */ + /** Fetch the tracked trees in the current context */ private def trackedTrees(using Context): mutable.Map[Symbol, MemberDef] = ctx.property(TrackedTrees).get diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 1f19641e3b08..5680df476f8d 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -21,7 +21,7 @@ private val Captures: Key[CaptureSet] = Key() object ccConfig: - /** If true, allow mappping capture set variables under captureChecking with maps that are neither + /** If true, allow 
mapping capture set variables under captureChecking with maps that are neither * bijective nor idempotent. We currently do now know how to do this correctly in all * cases, though. */ @@ -35,7 +35,7 @@ object ccConfig: /** If enabled, use a special path in recheckClosure for closures * that are eta expansions. This can improve some error messages but - * currently leads to unsoundess for handlng reach capabilities. + * currently leads to unsoundess for handling reach capabilities. * TODO: The unsoundness needs followin up. */ inline val handleEtaExpansionsSpecially = false diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 9f6cb278f012..6fa63c21edaa 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -39,7 +39,7 @@ object CheckCaptures: /** A class describing environments. * @param owner the current owner * @param kind the environment's kind - * @param captured the caputure set containing all references to tracked free variables outside of boxes + * @param captured the capture set containing all references to tracked free variables outside of boxes * @param outer0 the next enclosing environment */ case class Env( @@ -509,7 +509,7 @@ class CheckCaptures extends Recheck, SymTransformer: override def recheckApply(tree: Apply, pt: Type)(using Context): Type = val meth = tree.fun.symbol - // Unsafe box/unbox handlng, only for versions < 3.3 + // Unsafe box/unbox handling, only for versions < 3.3 def mapArgUsing(f: Type => Type) = val arg :: Nil = tree.args: @unchecked val argType0 = f(recheckStart(arg, pt)) diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index a5ef4c26eed1..b4cdeba4600b 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -43,7 +43,7 @@ object Annotations { def argumentConstantString(i: Int)(using Context): Option[String] = for (case Constant(s: String) <- argumentConstant(i)) yield s - /** The tree evaluaton is in progress. */ + /** The tree evaluation is in progress. */ def isEvaluating: Boolean = false /** The tree evaluation has finished. */ diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 79a0b279aefe..388720e7f3f4 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -265,7 +265,7 @@ object Contexts { /** SourceFile with given path, memoized */ def getSource(path: String): SourceFile = getSource(path.toTermName) - /** AbstraFile with given path name, memoized */ + /** AbstractFile with given path name, memoized */ def getFile(name: TermName): AbstractFile = base.files.get(name) match case Some(file) => file diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala index 10df2a437af6..39293b947326 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala @@ -29,7 +29,7 @@ object CommentPickler: def traverse(x: Any): Unit = x match case x: untpd.Tree @unchecked => x match - case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d. + case x: tpd.MemberDef @unchecked => // at this point all MemberDefs are t(y)p(e)d. 
for comment <- docString(x) do pickleComment(addrOfTree(x), comment) case _ => val limit = x.productArity diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 831d31d6fa6e..c6d07f005fbd 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -684,7 +684,7 @@ object Scanners { if !r.isOutermost && closingRegionTokens.contains(token) && !(token == CASE && r.prefix == MATCH) - && next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.sala + && next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.scala => insert(OUTDENT, offset) case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala index 89161cc8c013..58040c4ef89f 100644 --- a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala +++ b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala @@ -11,7 +11,7 @@ import scala.compiletime.uninitialized * with a different context. * * A typical use case is a lazy val in a phase object which exists once per root context where - * the expression intiializing the lazy val depends only on the root context, but not any changes afterwards. + * the expression initializing the lazy val depends only on the root context, but not any changes afterwards. */ class CtxLazy[T](expr: Context ?=> T) { private var myValue: T = uninitialized diff --git a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala index 0b0906148ba1..2deb50956537 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala @@ -13,7 +13,7 @@ import NameKinds.SuperAccessorName object ElimErasedValueType { val name: String = "elimErasedValueType" - val description: String = "expand erased value types to their underlying implmementation types" + val description: String = "expand erased value types to their underlying implementation types" def elimEVT(tp: Type)(using Context): Type = tp match { case ErasedValueType(_, underlying) => diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index d0e90566f333..cdbef792dfa9 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -94,7 +94,7 @@ class ExpandSAMs extends MiniPhase: * } * ``` * - * is expanded to an anomymous class: + * is expanded to an anonymous class: * * ``` * val x: PartialFunction[A, B] = { diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index 15dfda845389..0db1ddc5750c 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -101,7 +101,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => val parentCls = parent.tpe.classSymbol.asClass parent match // if we are in a regular class and first parent is also a regular class, - // make sure we have a contructor + // make sure we have a constructor case parent: TypeTree if !cls.is(Trait) && !parentCls.is(Trait) && !defn.NotRuntimeClasses.contains(parentCls) => New(parent.tpe, Nil).withSpan(impl.span) @@ -454,7 +454,7 @@ object ExplicitOuter { val enclClass = 
ctx.owner.lexicallyEnclosingClass.asClass val outerAcc = atPhaseNoLater(lambdaLiftPhase) { // lambdalift mangles local class names, which means we cannot - // reliably find outer acessors anymore + // reliably find outer accessors anymore tree match case tree: This if tree.symbol == enclClass && !enclClass.is(Trait) => outerParamAccessor(enclClass) diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 6c3dcc669877..dd24f38990df 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -44,7 +44,7 @@ object Pickler { */ inline val ParallelPickling = true - /**A holder for syncronization points and reports when writing TASTy asynchronously. + /**A holder for synchronization points and reports when writing TASTy asynchronously. * The callbacks should only be called once. */ class AsyncTastyHolder private ( diff --git a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala index 90c5ac85167c..f1603db0e5a0 100644 --- a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala @@ -51,7 +51,7 @@ trait ReifiedReflect: .select(defn.Quotes_reflect_TypeApply_apply) .appliedTo(fn, argTrees) - /** Create tree for `quotes.reflect.Assing(, )` */ + /** Create tree for `quotes.reflect.Assign(, )` */ def Assign(lhs: Tree, rhs: Tree)(using Context) = self.select(defn.Quotes_reflect_Assign) .select(defn.Quotes_reflect_Assign_apply) diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala index 6dc718ef526b..36a40658ffa5 100644 --- a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala @@ -15,7 +15,7 @@ import dotty.tools.dotc.transform.MegaPhase.* * Otherwise, the backend needs to be aware that some qualifiers need to be * dropped. * - * A tranformation similar to what this phase does seems to be performed by + * A transformation similar to what this phase does seems to be performed by * flatten in nsc. * * The side effects of the qualifier of a dropped `Select` is normally diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 7f6be8f89314..5ef5b1a420ee 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -455,7 +455,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): MirrorSource.reduce(mirroredType) match case Right(msrc) => msrc match case MirrorSource.Singleton(_, tref) => - val singleton = tref.termSymbol // prefer alias name over the orignal name + val singleton = tref.termSymbol // prefer alias name over the original name val singletonPath = tpd.singleton(tref).withSpan(span) if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object. 
val mirrorType = formal.constrained_& { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 32f04c13a3d6..947d1fcbfa73 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1282,7 +1282,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * For example, both `@Annot(5)` and `@Annot({5, 6}) are viable calls of the constructor * of annotation defined as `@interface Annot { int[] value() }` * We assume that calling `typedNamedArg` in context of Java implies that we are dealing - * with annotation contructor, as named arguments are not allowed anywhere else in Java. + * with annotation constructor, as named arguments are not allowed anywhere else in Java. * Under explicit nulls, the pt could be nullable. We need to strip `Null` type first. */ val arg1 = pt.stripNull() match { diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index b6338082c696..5fdf43cfe8e3 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -226,7 +226,7 @@ object FileWriters { if (file.isInstanceOf[JarArchive]) { val jarCompressionLevel = ctx.settings.jarCompressionLevel // Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where - // created using `AbstractFile.bufferedOutputStream`instead of JarWritter + // created using `AbstractFile.bufferedOutputStream`instead of JarWriter val jarFile = file.underlyingSource.getOrElse{ throw new IllegalStateException("No underlying source for jar") } diff --git a/docs/_docs/contributing/architecture/phases.md b/docs/_docs/contributing/architecture/phases.md index 8e63de04dadb..1421667922df 100644 --- a/docs/_docs/contributing/architecture/phases.md +++ b/docs/_docs/contributing/architecture/phases.md @@ -63,7 +63,7 @@ Finally are [staging], which ensures that quotes conform to the trees to embedded TASTy strings. ### `transformPhases` -These phases are concerned with tranformation into lower-level forms +These phases are concerned with transformation into lower-level forms suitable for the runtime system, with two sub-groupings: - High-level transformations: All phases from [firstTransform] to [erasure]. Most of these phases transform syntax trees, expanding high-level constructs diff --git a/docs/_docs/internals/overall-structure.md b/docs/_docs/internals/overall-structure.md index a25c287e16c9..6dbe387a7cfb 100644 --- a/docs/_docs/internals/overall-structure.md +++ b/docs/_docs/internals/overall-structure.md @@ -160,7 +160,7 @@ phases. The current list of phases is specified in class [Compiler] as follows: new LetOverApply, // Lift blocks from receivers of applications new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. 
- List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types + List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types new PureStats, // Remove pure stats from blocks new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` diff --git a/docs/_docs/reference/experimental/runtimeChecked.md b/docs/_docs/reference/experimental/runtimeChecked.md index bb0272ea8a3e..71fac3ad8728 100644 --- a/docs/_docs/reference/experimental/runtimeChecked.md +++ b/docs/_docs/reference/experimental/runtimeChecked.md @@ -124,7 +124,7 @@ As an escape hatch in 3.2 we recommended to use a type ascription of `: @uncheck |which may result in a MatchError at runtime. ``` -However, `: @unchecked` is syntactically awkward, and is also a misnomer - in fact in this case the the pattern _is_ fully checked, but the necessary checks occur at runtime. The `runtimeChecked` method is intended to replace `@unchecked` for this purpose. +However, `: @unchecked` is syntactically awkward, and is also a misnomer - in fact in this case the pattern _is_ fully checked, but the necessary checks occur at runtime. The `runtimeChecked` method is intended to replace `@unchecked` for this purpose. The `@unchecked` annotation is still retained for silencing warnings on unsound type tests. diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala index f4bfc806dbb3..210a28f6a7a1 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala @@ -583,7 +583,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin ) - @Test def `contructor-param` = + @Test def `constructor-param` = check( """|class Foo (xxx: Int) | @@ -595,7 +595,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin ) - @Test def `contructor-param2` = + @Test def `constructor-param2` = check( """|class Foo () | diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala index 471d338522f0..a5e32c7332bd 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala @@ -48,7 +48,7 @@ trait BasicSupport: "scala.transient", "scala.volatile", "scala.annotation.experimental", - "scala.annotation.contructorOnly", + "scala.annotation.constructorOnly", "scala.annotation.static", "scala.annotation.targetName", "scala.annotation.threadUnsafe", diff --git a/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala b/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala index a76379e22313..7d43463cd569 100644 --- a/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala +++ b/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala @@ -146,7 +146,7 @@ abstract class TreeInterpreter[Q <: Quotes & Singleton](using val q: Q) { } case Assign(lhs, rhs) => - log("", tree)(localValue(lhs.symbol).update(eval(rhs))) + log("", tree)(localValue(lhs.symbol).update(eval(rhs))) case If(cond, thenp, elsep) => log("interpretIf", tree)(interpretIf(cond, thenp, elsep)) case While(cond, body) 
=> log("interpretWhile", tree)(interpretWhile(cond, body)) diff --git a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala index f70bda947129..ad51305d5858 100644 --- a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala +++ b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala @@ -66,7 +66,7 @@ class CompilationUnit protected (val source: SourceFile) { /** Can this compilation unit be suspended */ def isSuspendable: Boolean = true - /** Suspends the compilation unit by thowing a SuspendException + /** Suspends the compilation unit by throwing a SuspendException * and recording the suspended compilation unit */ def suspend()(using Context): Nothing = diff --git a/tests/pos-with-compiler-cc/dotc/Compiler.scala b/tests/pos-with-compiler-cc/dotc/Compiler.scala index b121a47781e1..c8c95647b5e4 100644 --- a/tests/pos-with-compiler-cc/dotc/Compiler.scala +++ b/tests/pos-with-compiler-cc/dotc/Compiler.scala @@ -107,7 +107,7 @@ class Compiler { new LetOverApply, // Lift blocks from receivers of applications new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. - List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types + List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types new PureStats, // Remove pure stats from blocks new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference diff --git a/tests/pos-with-compiler-cc/dotc/config/Config.scala b/tests/pos-with-compiler-cc/dotc/config/Config.scala index cbd50429492e..02d075c8853d 100644 --- a/tests/pos-with-compiler-cc/dotc/config/Config.scala +++ b/tests/pos-with-compiler-cc/dotc/config/Config.scala @@ -248,7 +248,7 @@ object Config { */ inline val printCaptureSetsAsPrefix = true - /** If true, allow mappping capture set variables under captureChecking with maps that are neither + /** If true, allow mapping capture set variables under captureChecking with maps that are neither * bijective nor idempotent. We currently do now know how to do this correctly in all * cases, though. 
*/ diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala index 37fde2d7b604..a2389a28e941 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala @@ -252,7 +252,7 @@ object Contexts { /** SourceFile with given path, memoized */ def getSource(path: String): SourceFile = getSource(path.toTermName) - /** AbstraFile with given path name, memoized */ + /** AbstractFile with given path name, memoized */ def getFile(name: TermName): AbstractFile = base.files.get(name) match case Some(file) => file diff --git a/tests/pos-with-compiler-cc/dotc/core/tasty/CommentPickler.scala b/tests/pos-with-compiler-cc/dotc/core/tasty/CommentPickler.scala index df3e4df497f8..a49e3b066c62 100644 --- a/tests/pos-with-compiler-cc/dotc/core/tasty/CommentPickler.scala +++ b/tests/pos-with-compiler-cc/dotc/core/tasty/CommentPickler.scala @@ -27,7 +27,7 @@ class CommentPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Addr, docStr private def traverse(x: Any): Unit = x match case x: untpd.Tree @unchecked => x match - case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d. + case x: tpd.MemberDef @unchecked => // at this point all MemberDefs are t(y)p(e)d. for comment <- docString(x) do pickleComment(addrOfTree(x), comment) case _ => val limit = x.productArity diff --git a/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala b/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala index 737a37b2d4ce..71540df26539 100644 --- a/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala +++ b/tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala @@ -667,7 +667,7 @@ object Scanners { if !r.isOutermost && closingRegionTokens.contains(token) && !(token == CASE && r.prefix == MATCH) - && next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.sala + && next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.scala => insert(OUTDENT, offset) case _ => diff --git a/tests/pos-with-compiler-cc/dotc/transform/CtxLazy.scala b/tests/pos-with-compiler-cc/dotc/transform/CtxLazy.scala index 808cf928ecc2..e3eb74cbfe9c 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/CtxLazy.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/CtxLazy.scala @@ -9,7 +9,7 @@ import core.Contexts._ * with a different context. * * A typical use case is a lazy val in a phase object which exists once per root context where - * the expression intiializing the lazy val depends only on the root context, but not any changes afterwards. + * the expression initializing the lazy val depends only on the root context, but not any changes afterwards. 
*/ class CtxLazy[T](expr: Context ?=> T) { private var myValue: T = _ diff --git a/tests/pos-with-compiler-cc/dotc/transform/ElimErasedValueType.scala b/tests/pos-with-compiler-cc/dotc/transform/ElimErasedValueType.scala index 2643aa147734..f4310adb6f3d 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/ElimErasedValueType.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/ElimErasedValueType.scala @@ -13,7 +13,7 @@ import NameKinds.SuperAccessorName object ElimErasedValueType { val name: String = "elimErasedValueType" - val description: String = "expand erased value types to their underlying implmementation types" + val description: String = "expand erased value types to their underlying implementation types" def elimEVT(tp: Type)(using Context): Type = tp match { case ErasedValueType(_, underlying) => diff --git a/tests/pos-with-compiler-cc/dotc/transform/ExpandSAMs.scala b/tests/pos-with-compiler-cc/dotc/transform/ExpandSAMs.scala index 0552fe31f8a2..6bbc1cdfdf37 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/ExpandSAMs.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/ExpandSAMs.scala @@ -88,7 +88,7 @@ class ExpandSAMs extends MiniPhase: * } * ``` * - * is expanded to an anomymous class: + * is expanded to an anonymous class: * * ``` * val x: PartialFunction[A, B] = { diff --git a/tests/pos-with-compiler-cc/dotc/transform/ExplicitOuter.scala b/tests/pos-with-compiler-cc/dotc/transform/ExplicitOuter.scala index 539002590e6a..2eda974e0ef2 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/ExplicitOuter.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/ExplicitOuter.scala @@ -100,7 +100,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => val parentCls = parent.tpe.classSymbol.asClass parent match // if we are in a regular class and first parent is also a regular class, - // make sure we have a contructor + // make sure we have a constructor case parent: TypeTree if !cls.is(Trait) && !parentCls.is(Trait) && !defn.NotRuntimeClasses.contains(parentCls) => New(parent.tpe, Nil).withSpan(impl.span) @@ -459,7 +459,7 @@ object ExplicitOuter { val enclClass = ctx.owner.lexicallyEnclosingClass.asClass val outerAcc = atPhaseNoLater(lambdaLiftPhase) { // lambdalift mangles local class names, which means we cannot - // reliably find outer acessors anymore + // reliably find outer accessors anymore tree match case tree: This if tree.symbol == enclClass && !enclClass.is(Trait) => outerParamAccessor(enclClass) diff --git a/tests/pos-with-compiler-cc/dotc/transform/ReifiedReflect.scala b/tests/pos-with-compiler-cc/dotc/transform/ReifiedReflect.scala index e462f82b1dad..0882247df8a6 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/ReifiedReflect.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/ReifiedReflect.scala @@ -52,7 +52,7 @@ trait ReifiedReflect: .select(defn.Quotes_reflect_TypeApply_apply) .appliedTo(fn, argTrees) - /** Create tree for `quotes.reflect.Assing(, )` */ + /** Create tree for `quotes.reflect.Assign(, )` */ def Assign(lhs: Tree, rhs: Tree)(using Context) = self.select(defn.Quotes_reflect_Assign) .select(defn.Quotes_reflect_Assign_apply) diff --git a/tests/pos-with-compiler-cc/dotc/transform/SelectStatic.scala b/tests/pos-with-compiler-cc/dotc/transform/SelectStatic.scala index 1df9809c2f62..bffa743fdb88 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/SelectStatic.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/SelectStatic.scala @@ -15,7 +15,7 @@ import dotty.tools.dotc.transform.SymUtils._ * Otherwise, the 
backend needs to be aware that some qualifiers need to be * dropped. * - * A tranformation similar to what this phase does seems to be performed by + * A transformation similar to what this phase does seems to be performed by * flatten in nsc. * * The side effects of the qualifier of a dropped `Select` is normally diff --git a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala index d1a017d67f20..e253a9b6356e 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala @@ -431,7 +431,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): MirrorSource.reduce(mirroredType) match case Right(msrc) => msrc match case MirrorSource.Singleton(_, tref) => - val singleton = tref.termSymbol // prefer alias name over the orignal name + val singleton = tref.termSymbol // prefer alias name over the original name val singletonPath = pathFor(tref).withSpan(span) if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object. val mirrorType = formal.constrained_& { diff --git a/tests/pos-with-compiler-cc/dotc/typer/Typer.scala b/tests/pos-with-compiler-cc/dotc/typer/Typer.scala index 0baae1730f4a..0b3f7d75f6e2 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Typer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Typer.scala @@ -959,7 +959,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * For example, both `@Annot(5)` and `@Annot({5, 6}) are viable calls of the constructor * of annotation defined as `@interface Annot { int[] value() }` * We assume that calling `typedNamedArg` in context of Java implies that we are dealing - * with annotation contructor, as named arguments are not allowed anywhere else in Java. + * with annotation constructor, as named arguments are not allowed anywhere else in Java. * Under explicit nulls, the pt could be nullable. We need to strip `Null` type first. */ val arg1 = pt.stripNull match { diff --git a/tests/warn/i15503c.scala b/tests/warn/i15503c.scala index e70df10f3140..a813329da89b 100644 --- a/tests/warn/i15503c.scala +++ b/tests/warn/i15503c.scala @@ -32,7 +32,7 @@ class A: def z = g // OK var w = 2 // OK -package foo.test.contructors: +package foo.test.constructors: case class A private (x:Int) // OK class B private (val x: Int) // OK class C private (private val x: Int) // warn From 312d33f705fa8ce76cf80598ae172b2b9bf6b7d2 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 5 Aug 2024 15:24:16 +0200 Subject: [PATCH 422/827] Allow JLine to fall back to a dumb terminal Set the `dumb` JLine option to `null` instead of `false` when it is not forced. This allows JLine to fall back to a dumb terminal. Also adapt `CoursierScalaTests` accordingly: test that the `scala` command executes commands fed through standard input correctly. 
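
For illustration, a minimal sketch of the resulting logic (a paraphrase of the JLineTerminal.scala change below, using the same JLine 3 `TerminalBuilder` calls it uses; only the forced-dumb case sets the option, otherwise it is left at its default of `null` so JLine can still fall back to a dumb terminal instead of failing early):

    import org.jline.terminal.TerminalBuilder

    val builder = TerminalBuilder.builder()
    if System.getenv("TERM") == "dumb" then
      builder.dumb(true) // force a dumb terminal when TERM=dumb
    // otherwise: leave the `dumb` option unset, so building the terminal
    // may still fall back to a dumb terminal when no system terminal exists
    val terminal = builder.build()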
--- .../src/dotty/tools/repl/JLineTerminal.scala | 13 ++++++--- .../tools/coursier/CoursierScalaTests.scala | 28 +++++++++++-------- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala index 294f0a331ec2..e4ac1626525e 100644 --- a/compiler/src/dotty/tools/repl/JLineTerminal.scala +++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala @@ -21,11 +21,16 @@ class JLineTerminal extends java.io.Closeable { // Logger.getLogger("org.jline").setLevel(Level.FINEST) private val terminal = - TerminalBuilder.builder() - .dumb(dumbTerminal) // fail early if not able to create a terminal - .build() + var builder = TerminalBuilder.builder() + if System.getenv("TERM") == "dumb" then + // Force dumb terminal if `TERM` is `"dumb"`. + // Note: the default value for the `dumb` option is `null`, which allows + // JLine to fall back to a dumb terminal. This is different than `true` or + // `false` and can't be set using the `dumb` setter. + // This option is used at https://github.com/jline/jline3/blob/894b5e72cde28a551079402add4caea7f5527806/terminal/src/main/java/org/jline/terminal/TerminalBuilder.java#L528. + builder.dumb(true) + builder.build() private val history = new DefaultHistory - def dumbTerminal = Option(System.getenv("TERM")) == Some("dumb") private def blue(str: String)(using Context) = if (ctx.settings.color.value != "never") Console.BLUE + str + Console.RESET diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index 115803d79dc1..86b22009d15a 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -75,8 +75,8 @@ class CoursierScalaTests: version() def emptyArgsEqualsRepl() = - val output = CoursierScalaTests.csScalaCmd() - assertTrue(output.mkString("\n").contains("Unable to create a terminal")) // Scala attempted to create REPL so we can assume it is working + val output = CoursierScalaTests.csScalaCmdWithStdin(Seq.empty, Some("println(\"Hello World\")\n:quit")) + assertTrue(output.mkString("\n").contains("Hello World")) emptyArgsEqualsRepl() def run() = @@ -132,8 +132,8 @@ class CoursierScalaTests: compileFilesToJarAndRun() def replWithArgs() = - val output = CoursierScalaTests.csScalaCmd("-source", "3.0-migration") - assertTrue(output.mkString("\n").contains("Unable to create a terminal")) // Scala attempted to create REPL so we can assume it is working + val output = CoursierScalaTests.csScalaCmdWithStdin(Seq("-source", "3.0-migration"), Some("println(\"Hello World\")\n:quit")) + assertTrue(output.mkString("\n").contains("Hello World")) replWithArgs() def argumentFile() = @@ -148,25 +148,31 @@ class CoursierScalaTests: object CoursierScalaTests: - def execCmd(command: String, options: String*): (Int, List[String]) = + private def execCmd(command: String, options: Seq[String] = Seq.empty, stdin: Option[String] = None): (Int, List[String]) = val cmd = (command :: options.toList).toSeq.mkString(" ") val out = new ListBuffer[String] - val code = cmd.!(ProcessLogger(out += _, out += _)) + val process = stdin match + case Some(input) => Process(cmd) #< new java.io.ByteArrayInputStream(input.getBytes) + case None => Process(cmd) + val code = process.!(ProcessLogger(out += _, out += _)) (code, out.toList) def csScalaCmd(options: String*): List[String] = - 
csCmd("dotty.tools.MainGenericRunner", options*) + csScalaCmdWithStdin(options, None) + + def csScalaCmdWithStdin(options: Seq[String], stdin: Option[String]): List[String] = + csCmd("dotty.tools.MainGenericRunner", options, stdin) def csScalaCompilerCmd(options: String*): List[String] = - csCmd("dotty.tools.dotc.Main", options*) + csCmd("dotty.tools.dotc.Main", options) - private def csCmd(entry: String, options: String*): List[String] = + private def csCmd(entry: String, options: Seq[String], stdin: Option[String] = None): List[String] = val (jOpts, args) = options.partition(_.startsWith("-J")) val newOptions = args match case Nil => args case _ => "--" +: args val newJOpts = jOpts.map(s => s"--java-opt ${s.stripPrefix("-J")}").mkString(" ") - execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true" --property "scala.use_legacy_launcher=true"""" +: newOptions)*)._2 + execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true" --property "scala.use_legacy_launcher=true"""" +: newOptions), stdin)._2 /** Get coursier script */ @BeforeClass def setup(): Unit = @@ -177,7 +183,7 @@ object CoursierScalaTests: case other => fail(s"Unsupported OS for coursier launcher: $other") def runAndCheckCmd(cmd: String, options: String*): Unit = - val (code, out) = execCmd(cmd, options*) + val (code, out) = execCmd(cmd, options) if code != 0 then fail(s"Failed to run $cmd ${options.mkString(" ")}, exit code: $code, output: ${out.mkString("\n")}") From c0b6b0f9572ea35d52961881da53a92bd57206c7 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 5 Aug 2024 14:36:46 +0200 Subject: [PATCH 423/827] Require named arguments for java defined annotations --- .../tools/dotc/reporting/ErrorMessageID.scala | 1 + .../dotty/tools/dotc/reporting/messages.scala | 22 ++++++++++ .../src/dotty/tools/dotc/typer/Checking.scala | 20 +++++++++ .../src/dotty/tools/dotc/typer/Namer.scala | 23 ++++++---- tests/neg/i20554-a.check | 42 +++++++++++++++++++ tests/neg/i20554-a/Annotation.java | 4 ++ tests/neg/i20554-a/Test.scala | 4 ++ tests/neg/i20554-b.check | 21 ++++++++++ tests/neg/i20554-b/SimpleAnnotation.java | 4 ++ tests/neg/i20554-b/Test.scala | 4 ++ tests/pos/i20554-a/Annotation.java | 5 +++ tests/pos/i20554-a/Test.scala | 3 ++ tests/pos/i20554-b/SimpleAnnotation.java | 9 ++++ tests/pos/i20554-b/Test.scala | 3 ++ tests/pos/i20554-c.scala | 5 +++ 15 files changed, 161 insertions(+), 9 deletions(-) create mode 100644 tests/neg/i20554-a.check create mode 100644 tests/neg/i20554-a/Annotation.java create mode 100644 tests/neg/i20554-a/Test.scala create mode 100644 tests/neg/i20554-b.check create mode 100644 tests/neg/i20554-b/SimpleAnnotation.java create mode 100644 tests/neg/i20554-b/Test.scala create mode 100644 tests/pos/i20554-a/Annotation.java create mode 100644 tests/pos/i20554-a/Test.scala create mode 100644 tests/pos/i20554-b/SimpleAnnotation.java create mode 100644 tests/pos/i20554-b/Test.scala create mode 100644 tests/pos/i20554-c.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index e3613e3f783a..cb5e8a7b314c 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -214,6 +214,7 @@ enum ErrorMessageID(val isActive: Boolean 
= true) extends java.lang.Enum[ErrorMe case UnusedSymbolID // errorNumber: 198 case TailrecNestedCallID //errorNumber: 199 case FinalLocalDefID // errorNumber: 200 + case NonNamedArgumentInJavaAnnotationID // errorNumber: 201 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 1d906130d4e4..f112b6fb5aa7 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3288,3 +3288,25 @@ object UnusedSymbol { def privateMembers(using Context): UnusedSymbol = new UnusedSymbol(i"unused private member") def patVars(using Context): UnusedSymbol = new UnusedSymbol(i"unused pattern variable") } + +class NonNamedArgumentInJavaAnnotation(using Context) extends SyntaxMsg(NonNamedArgumentInJavaAnnotationID): + + override protected def msg(using Context): String = + "Named arguments are required for Java defined annotations" + + override protected def explain(using Context): String = + i"""Starting from Scala 3.6.0, named arguments are required for Java defined annotations. + |Java defined annotations don't have an exact constructor representation + |and we previously relied on the order of the fields to create one. + |One possible issue with this representation is the reordering of the fields. + |Lets take the following example: + | + | public @interface Annotation { + | int a() default 41; + | int b() default 42; + | } + | + |Reordering the fields is binary-compatible but it might affect the meaning of @Annotation(1) + """ + +end NonNamedArgumentInJavaAnnotation diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 421f00e61584..aeda38cc7646 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -883,6 +883,26 @@ object Checking { templ.parents.find(_.tpe.derivesFrom(defn.PolyFunctionClass)) match case Some(parent) => report.error(s"`PolyFunction` marker trait is reserved for compiler generated refinements", parent.srcPos) case None => + + /** check that parameters of a java defined annotations are all named arguments if we have more than one parameter */ + def checkNamedArgumentForJavaAnnotation(annot: untpd.Tree, sym: ClassSymbol)(using Context): untpd.Tree = + assert(sym.is(JavaDefined)) + + def annotationHasValueField: Boolean = + sym.info.decls.exists(_.name == nme.value) + + annot match + case untpd.Apply(fun, List(param)) if !param.isInstanceOf[untpd.NamedArg] && annotationHasValueField => + untpd.cpy.Apply(annot)(fun, List(untpd.cpy.NamedArg(param)(nme.value, param))) + case untpd.Apply(_, params) => + for + param <- params + if !param.isInstanceOf[untpd.NamedArg] + do report.error(NonNamedArgumentInJavaAnnotation(), param) + annot + case _ => annot + end checkNamedArgumentForJavaAnnotation + } trait Checking { diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 2089e3f14be7..3844380f8952 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -868,15 +868,20 @@ class Namer { typer: Typer => protected def addAnnotations(sym: Symbol): Unit = original match { case original: untpd.MemberDef => lazy val annotCtx = annotContext(original, sym) - for (annotTree <- original.mods.annotations) { - val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) - if (cls 
eq sym) - report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) - else { - val ann = Annotation.deferred(cls)(typedAheadExpr(annotTree)(using annotCtx)) - sym.addAnnotation(ann) - } - } + original.setMods: + original.mods.withAnnotations : + original.mods.annotations.mapConserve: annotTree => + val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) + if (cls eq sym) + report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) + annotTree + else + val ann = + if cls.is(JavaDefined) then Checking.checkNamedArgumentForJavaAnnotation(annotTree, cls.asClass) + else annotTree + val ann1 = Annotation.deferred(cls)(typedAheadExpr(ann)(using annotCtx)) + sym.addAnnotation(ann1) + ann case _ => } diff --git a/tests/neg/i20554-a.check b/tests/neg/i20554-a.check new file mode 100644 index 000000000000..5cfa4e3faaad --- /dev/null +++ b/tests/neg/i20554-a.check @@ -0,0 +1,42 @@ +-- [E201] Syntax Error: tests/neg/i20554-a/Test.scala:3:12 ------------------------------------------------------------- +3 |@Annotation(3, 4) // error // error : Java defined annotation should be called with named arguments + | ^ + | Named arguments are required for Java defined annotations + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. + | One possible issue with this representation is the reordering of the fields. + | Lets take the following example: + | + | public @interface Annotation { + | int a() default 41; + | int b() default 42; + | } + | + | Reordering the fields is binary-compatible but it might affect the meaning of @Annotation(1) + | + --------------------------------------------------------------------------------------------------------------------- +-- [E201] Syntax Error: tests/neg/i20554-a/Test.scala:3:15 ------------------------------------------------------------- +3 |@Annotation(3, 4) // error // error : Java defined annotation should be called with named arguments + | ^ + | Named arguments are required for Java defined annotations + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. + | One possible issue with this representation is the reordering of the fields. 
+ | Lets take the following example: + | + | public @interface Annotation { + | int a() default 41; + | int b() default 42; + | } + | + | Reordering the fields is binary-compatible but it might affect the meaning of @Annotation(1) + | + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i20554-a/Annotation.java b/tests/neg/i20554-a/Annotation.java new file mode 100644 index 000000000000..728bbded7a06 --- /dev/null +++ b/tests/neg/i20554-a/Annotation.java @@ -0,0 +1,4 @@ +public @interface Annotation { + int a() default 41; + int b() default 42; +} diff --git a/tests/neg/i20554-a/Test.scala b/tests/neg/i20554-a/Test.scala new file mode 100644 index 000000000000..f0b3ea40b87a --- /dev/null +++ b/tests/neg/i20554-a/Test.scala @@ -0,0 +1,4 @@ +//> using options -explain + +@Annotation(3, 4) // error // error : Java defined annotation should be called with named arguments +class Test \ No newline at end of file diff --git a/tests/neg/i20554-b.check b/tests/neg/i20554-b.check new file mode 100644 index 000000000000..2395554a7485 --- /dev/null +++ b/tests/neg/i20554-b.check @@ -0,0 +1,21 @@ +-- [E201] Syntax Error: tests/neg/i20554-b/Test.scala:3:18 ------------------------------------------------------------- +3 |@SimpleAnnotation(1) // error: the parameters is not named 'value' + | ^ + | Named arguments are required for Java defined annotations + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. + | One possible issue with this representation is the reordering of the fields. 
+ | Lets take the following example: + | + | public @interface Annotation { + | int a() default 41; + | int b() default 42; + | } + | + | Reordering the fields is binary-compatible but it might affect the meaning of @Annotation(1) + | + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i20554-b/SimpleAnnotation.java b/tests/neg/i20554-b/SimpleAnnotation.java new file mode 100644 index 000000000000..65b37a7508d2 --- /dev/null +++ b/tests/neg/i20554-b/SimpleAnnotation.java @@ -0,0 +1,4 @@ + +public @interface SimpleAnnotation { + int a() default 1; +} diff --git a/tests/neg/i20554-b/Test.scala b/tests/neg/i20554-b/Test.scala new file mode 100644 index 000000000000..c6586409aa62 --- /dev/null +++ b/tests/neg/i20554-b/Test.scala @@ -0,0 +1,4 @@ +//> using options -explain + +@SimpleAnnotation(1) // error: the parameters is not named 'value' +class Test \ No newline at end of file diff --git a/tests/pos/i20554-a/Annotation.java b/tests/pos/i20554-a/Annotation.java new file mode 100644 index 000000000000..3f8389517eae --- /dev/null +++ b/tests/pos/i20554-a/Annotation.java @@ -0,0 +1,5 @@ +public @interface Annotation { + int a() default 41; + int b() default 42; + int c() default 43; +} diff --git a/tests/pos/i20554-a/Test.scala b/tests/pos/i20554-a/Test.scala new file mode 100644 index 000000000000..4747f3a06783 --- /dev/null +++ b/tests/pos/i20554-a/Test.scala @@ -0,0 +1,3 @@ + +@Annotation(a = 1, b = 2) +class Test \ No newline at end of file diff --git a/tests/pos/i20554-b/SimpleAnnotation.java b/tests/pos/i20554-b/SimpleAnnotation.java new file mode 100644 index 000000000000..24fc988b6050 --- /dev/null +++ b/tests/pos/i20554-b/SimpleAnnotation.java @@ -0,0 +1,9 @@ + +public @interface SimpleAnnotation { + + int a() default 0; + + int value() default 1; + + int b() default 0; +} diff --git a/tests/pos/i20554-b/Test.scala b/tests/pos/i20554-b/Test.scala new file mode 100644 index 000000000000..c4a442f75fdb --- /dev/null +++ b/tests/pos/i20554-b/Test.scala @@ -0,0 +1,3 @@ + +@SimpleAnnotation(1) // works because of the presence of a field called value +class Test \ No newline at end of file diff --git a/tests/pos/i20554-c.scala b/tests/pos/i20554-c.scala new file mode 100644 index 000000000000..b8a43584a00a --- /dev/null +++ b/tests/pos/i20554-c.scala @@ -0,0 +1,5 @@ + +class MyAnnotation(a: Int, b: Int) extends annotation.StaticAnnotation + +@MyAnnotation(1, 2) // don't require named arguments as it is Scala Defined +class Test \ No newline at end of file From 6ccabeaf14982c6ad3a12b3f34d058f1cffd2418 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 6 Aug 2024 13:30:25 +0200 Subject: [PATCH 424/827] Fix tests to new scheme --- tests/pos-java-interop-separate/i6868/MyScala_2.scala | 2 +- tests/pos/i6151/Test.scala | 2 +- .../ScalaUser_1.scala | 11 +++-------- .../i19951-java-annotations-tasty-compat.check | 1 - .../ScalaUser_2.scala | 10 ++++------ 5 files changed, 9 insertions(+), 17 deletions(-) diff --git a/tests/pos-java-interop-separate/i6868/MyScala_2.scala b/tests/pos-java-interop-separate/i6868/MyScala_2.scala index e0fd84008f39..607eefafa6a3 100644 --- a/tests/pos-java-interop-separate/i6868/MyScala_2.scala +++ b/tests/pos-java-interop-separate/i6868/MyScala_2.scala @@ -1,4 +1,4 @@ -@MyJava_1("MyScala1", typeA = MyJava_1.MyClassTypeA.B) +@MyJava_1(value = "MyScala1", typeA = MyJava_1.MyClassTypeA.B) object MyScala { def a(mj: MyJava_1): Unit = { println("MyJava") diff --git 
a/tests/pos/i6151/Test.scala b/tests/pos/i6151/Test.scala index 314cf5a0ea8f..118e6a72c354 100644 --- a/tests/pos/i6151/Test.scala +++ b/tests/pos/i6151/Test.scala @@ -1,3 +1,3 @@ import Expect.* -@Outcome(ExpectVal) +@Outcome(enm = ExpectVal) class SimpleTest diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat-2/ScalaUser_1.scala b/tests/run-macros/i19951-java-annotations-tasty-compat-2/ScalaUser_1.scala index a14a69eae21b..7601dcf43ed2 100644 --- a/tests/run-macros/i19951-java-annotations-tasty-compat-2/ScalaUser_1.scala +++ b/tests/run-macros/i19951-java-annotations-tasty-compat-2/ScalaUser_1.scala @@ -1,20 +1,15 @@ class ScalaUser { - @JavaAnnot(5) - def f1(): Int = 1 @JavaAnnot(a = 5) def f2(): Int = 1 - @JavaAnnot(5, "foo") + @JavaAnnot(a = 5, b = "foo") def f3(): Int = 1 - @JavaAnnot(5, "foo", 3) - def f4(): Int = 1 - - @JavaAnnot(5, c = 3) + @JavaAnnot(a = 5, c = 3) def f5(): Int = 1 - @JavaAnnot(5, c = 3, b = "foo") + @JavaAnnot(a = 5, c = 3, b = "foo") def f6(): Int = 1 @JavaAnnot(b = "foo", c = 3, a = 5) diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat.check b/tests/run-macros/i19951-java-annotations-tasty-compat.check index c41fcc64c559..60cf38794296 100644 --- a/tests/run-macros/i19951-java-annotations-tasty-compat.check +++ b/tests/run-macros/i19951-java-annotations-tasty-compat.check @@ -1,6 +1,5 @@ ScalaUser: new JavaAnnot(c = _, a = 5, d = _, b = _) -new JavaAnnot(c = _, a = 5, d = _, b = _) new JavaAnnot(c = _, a = 5, d = _, b = "foo") new JavaAnnot(c = 3, a = 5, d = _, b = "foo") new JavaAnnot(c = 3, a = 5, d = _, b = _) diff --git a/tests/run-macros/i19951-java-annotations-tasty-compat/ScalaUser_2.scala b/tests/run-macros/i19951-java-annotations-tasty-compat/ScalaUser_2.scala index a14a69eae21b..421192636dbc 100644 --- a/tests/run-macros/i19951-java-annotations-tasty-compat/ScalaUser_2.scala +++ b/tests/run-macros/i19951-java-annotations-tasty-compat/ScalaUser_2.scala @@ -1,20 +1,18 @@ class ScalaUser { - @JavaAnnot(5) - def f1(): Int = 1 @JavaAnnot(a = 5) def f2(): Int = 1 - @JavaAnnot(5, "foo") + @JavaAnnot(a = 5, b = "foo") def f3(): Int = 1 - @JavaAnnot(5, "foo", 3) + @JavaAnnot(a = 5, b = "foo", c = 3) def f4(): Int = 1 - @JavaAnnot(5, c = 3) + @JavaAnnot(a = 5, c = 3) def f5(): Int = 1 - @JavaAnnot(5, c = 3, b = "foo") + @JavaAnnot(a = 5, c = 3, b = "foo") def f6(): Int = 1 @JavaAnnot(b = "foo", c = 3, a = 5) From a7844ab5d0df194742815faf4ced2a014216cf3a Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 6 Aug 2024 16:59:54 +0100 Subject: [PATCH 425/827] Add a 3.6-migration warning for MT lubbing --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 8 ++++++-- tests/warn/i21258.check | 6 ++++++ tests/warn/i21258.scala | 14 ++++++++++++++ 3 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 tests/warn/i21258.check create mode 100644 tests/warn/i21258.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 947d1fcbfa73..ace8439553fd 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,7 +40,7 @@ import annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature, Feature.{sourceVersion, migrateTo3, modularity} +import config.Feature, Feature.{migrateTo3, modularity, sourceVersion, warnOnMigration} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -2615,7 +2615,11 @@ 
class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if !acc.exists then NoType else if case1.body.tpe.isProvisional then NoType else acc | case1.body.tpe - if lub.exists then TypeTree(lub, inferred = true) + if lub.exists then + if !lub.isAny then + val msg = em"Match type upper bound inferred as $lub, where previously it was defaulted to Any" + warnOnMigration(msg, tree, `3.6`) + TypeTree(lub, inferred = true) else bound1 else bound1 assignType(cpy.MatchTypeTree(tree)(bound2, sel1, cases1), bound2, sel1, cases1) diff --git a/tests/warn/i21258.check b/tests/warn/i21258.check new file mode 100644 index 000000000000..e9edc3606909 --- /dev/null +++ b/tests/warn/i21258.check @@ -0,0 +1,6 @@ +-- Migration Warning: tests/warn/i21258.scala:4:17 --------------------------------------------------------------------- +4 | type MT[X] = X match { // warn + | ^ + | Match type upper bound inferred as String, where previously it was defaulted to Any +5 | case Int => String +6 | } diff --git a/tests/warn/i21258.scala b/tests/warn/i21258.scala new file mode 100644 index 000000000000..60c6f859bc24 --- /dev/null +++ b/tests/warn/i21258.scala @@ -0,0 +1,14 @@ +import scala.language.`3.6-migration` + +object Test { + type MT[X] = X match { // warn + case Int => String + } + + def unboundUnreducibleSig[X](x: X): MT[X] = ??? + + type MT2[X] = X match { // no warning + case Int => String + case String => Any + } +} From 834b4f532831dd0c61e9e75655358da1e488a894 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 6 Aug 2024 19:21:46 +0200 Subject: [PATCH 426/827] Bump scala-cli to 1.4.3 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 05eb164f91f3..e625c399ba54 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -124,7 +124,7 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.4.1" + val scalaCliLauncherVersion = "1.4.3" /** Version of Coursier to download for initializing the local maven repo of Scala command */ val coursierJarVersion = "2.1.10" From 8064536e507680888285ccebba685be31cb5963a Mon Sep 17 00:00:00 2001 From: Kavin Satheeskumar <71559920+KavinSatheeskumar@users.noreply.github.com> Date: Tue, 6 Aug 2024 14:27:50 -0400 Subject: [PATCH 427/827] Fix exception on sequence matching with drop (#21281) When using scalac with the flag Ysafe-init-global to compile the following example, I get the following exception. This PR fixes it and adds a test case to catch things like this in the future. 
Note, I made this pull request in collaboration with Ondrej Lhotak, Enze Xing, Fengyun Liu and David Hua) The example ```Scala object Matcher { val vararg_arr = Array(0, 1, 2, 3) val vararg_lst = List(vararg_arr*) val vararg_splice = vararg_lst match case List(0, 1, xs*) => 1 // binds xs to Seq(2, 3) case List(1, _*) => 0 // wildcard pattern case _ => 2 println(vararg_splice) } ``` The exception ``` Exception in thread "main" java.lang.AssertionError: NoDenotation.owner at dotty.tools.dotc.core.SymDenotations$NoDenotation$.owner(SymDenotations.scala:2623) at dotty.tools.dotc.transform.init.Objects.call(Objects.scala:660) at dotty.tools.dotc.transform.init.Objects.evalSeqPatterns$1(Objects.scala:1494) at dotty.tools.dotc.transform.init.Objects.evalPattern$1(Objects.scala:1406) at dotty.tools.dotc.transform.init.Objects.evalCase$1(Objects.scala:1336) at dotty.tools.dotc.transform.init.Objects.patternMatch$$anonfun$1(Objects.scala:1505) at scala.collection.immutable.List.map(List.scala:247) at dotty.tools.dotc.transform.init.Objects.patternMatch(Objects.scala:1505) at dotty.tools.dotc.transform.init.Objects.cases(Objects.scala:1255) at dotty.tools.dotc.transform.init.Objects.eval$$anonfun$1(Objects.scala:1095) at dotty.tools.dotc.transform.init.Objects$Cache$Data.$anonfun$5(Objects.scala:539) at dotty.tools.dotc.transform.init.Cache.cachedEval(Cache.scala:112) at dotty.tools.dotc.transform.init.Objects$Cache$Data.cachedEval(Objects.scala:538) at dotty.tools.dotc.transform.init.Objects.eval(Objects.scala:1095) at dotty.tools.dotc.transform.init.Objects.init$$anonfun$4(Objects.scala:1708) at scala.collection.immutable.List.foreach(List.scala:334) at dotty.tools.dotc.transform.init.Objects.init(Objects.scala:1705) at dotty.tools.dotc.transform.init.Objects$State$.iterate$1(Objects.scala:278) at dotty.tools.dotc.transform.init.Objects$State$.doCheckObject(Objects.scala:293) at dotty.tools.dotc.transform.init.Objects$State$.checkObjectAccess(Objects.scala:320) at dotty.tools.dotc.transform.init.Objects.accessObject(Objects.scala:1059) at dotty.tools.dotc.transform.init.Objects.checkClasses$$anonfun$2(Objects.scala:1072) at scala.collection.IterableOnceOps.foreach(IterableOnce.scala:619) at scala.collection.IterableOnceOps.foreach$(IterableOnce.scala:617) at scala.collection.AbstractIterable.foreach(Iterable.scala:935) at scala.collection.IterableOps$WithFilter.foreach(Iterable.scala:905) at dotty.tools.dotc.transform.init.Objects.checkClasses(Objects.scala:1070) at dotty.tools.dotc.transform.init.Checker.runOn$$anonfun$1(Checker.scala:58) at scala.runtime.function.JProcedure1.apply(JProcedure1.java:15) at scala.runtime.function.JProcedure1.apply(JProcedure1.java:10) at dotty.tools.dotc.core.Phases$Phase.cancellable(Phases.scala:521) at dotty.tools.dotc.transform.init.Checker.runOn(Checker.scala:59) at dotty.tools.dotc.Run.runPhases$1$$anonfun$1(Run.scala:343) at scala.runtime.function.JProcedure1.apply(JProcedure1.java:15) at scala.runtime.function.JProcedure1.apply(JProcedure1.java:10) at scala.collection.ArrayOps$.foreach$extension(ArrayOps.scala:1323) at dotty.tools.dotc.Run.runPhases$1(Run.scala:336) at dotty.tools.dotc.Run.compileUnits$$anonfun$1(Run.scala:384) at dotty.tools.dotc.Run.compileUnits$$anonfun$adapted$1(Run.scala:396) at dotty.tools.dotc.util.Stats$.maybeMonitored(Stats.scala:69) at dotty.tools.dotc.Run.compileUnits(Run.scala:396) at dotty.tools.dotc.Run.compileSources(Run.scala:282) at dotty.tools.dotc.Run.compile(Run.scala:267) at 
dotty.tools.dotc.Driver.doCompile(Driver.scala:37) at dotty.tools.dotc.Driver.process(Driver.scala:201) at dotty.tools.dotc.Driver.process(Driver.scala:169) at dotty.tools.dotc.Driver.process(Driver.scala:181) at dotty.tools.dotc.Driver.main(Driver.scala:211) at dotty.tools.dotc.Main.main(Main.scala) exception occurred while compiling List(../test-case.scala) An unhandled exception was thrown in the compiler. Please file a crash report here: https://github.com/scala/scala3/issues/new/choose For non-enriched exceptions, compile with -Xno-enrich-error-messages. while compiling: during phase: parser mode: Mode(ImplicitsEnabled,ReadPositions) library version: version 2.13.14 compiler version: version 3.6.0-RC1-bin-SNAPSHOT-nonbootstrapped-git-3dfd762 settings: -Ysafe-init-global true -classpath /home/kavin/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.14/scala-library-2.13.14.jar:/home/kavin/Documents/4A/URA2/scala3/library/../out/bootstrap/scala3-library-bootstrapped/scala-3.6.0-RC1-bin-SNAPSHOT-nonbootstrapped/scala3-library_3-3.6.0-RC1-bin-SNAPSHOT.jar -d /home/kavin/Documents/4A/URA2/scala3/compiler/../out/default-last-scalac-out.jar [error] Nonzero exit code returned from runner: 1 [error] (scala3-compiler / Compile / runMain) Nonzero exit code returned from runner: 1 [error] Total time: 17 s, completed Jul 26, 2024, 6:35:43 PM ``` --- .../tools/dotc/transform/init/Objects.scala | 4 +- tests/init-global/pos/match-complete.scala | 118 ++++++++++++++++++ 2 files changed, 120 insertions(+), 2 deletions(-) create mode 100644 tests/init-global/pos/match-complete.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index bfa684eef8b4..1ceb8d4472a3 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -1485,12 +1485,12 @@ class Objects(using Context @constructorOnly): if isWildcardStarArgList(pats) then if pats.size == 1 then // call .toSeq - val toSeqDenot = scrutineeType.member(nme.toSeq).suchThat(_.info.isParameterless) + val toSeqDenot = getMemberMethod(scrutineeType, nme.toSeq, toSeqType(elemType)) val toSeqRes = call(scrutinee, toSeqDenot.symbol, Nil, scrutineeType, superType = NoType, needResolve = true) evalPattern(toSeqRes, pats.head) else // call .drop - val dropDenot = getMemberMethod(scrutineeType, nme.drop, applyType(elemType)) + val dropDenot = getMemberMethod(scrutineeType, nme.drop, dropType(elemType)) val dropRes = call(scrutinee, dropDenot.symbol, ArgInfo(Bottom, summon[Trace], EmptyTree) :: Nil, scrutineeType, superType = NoType, needResolve = true) for pat <- pats.init do evalPattern(applyRes, pat) evalPattern(dropRes, pats.last) diff --git a/tests/init-global/pos/match-complete.scala b/tests/init-global/pos/match-complete.scala new file mode 100644 index 000000000000..eb0e4481f59e --- /dev/null +++ b/tests/init-global/pos/match-complete.scala @@ -0,0 +1,118 @@ +object Matcher { + // Chained Match + val chained_match_xs: List[Any] = List(1, 2, 3) + val chained_match_x = chained_match_xs match { + case Nil => "empty" + case _ => "nonempty" + } match { + case "empty" => 0 + case "nonempty" => 1 + } + println(chained_match_x) + + // Vararg Splices + val vararg_arr = Array(0, 1, 2, 3) + val vararg_lst = List(vararg_arr*) // vararg splice argument + // Throws an exception? 
+ val vararg_splice = vararg_lst match + case List(0, 1, xs*) => 1 // binds xs to Seq(2, 3) + case List(1, _*) => 0 // wildcard pattern + case _ => 2 + println(vararg_splice) + println(vararg_lst) + + // Pattern Definitions + val patter_def_xs: List[Any] = List(1, 2, 3) + val (patter_def_x: Any) :: _ = patter_def_xs : @unchecked + println(patter_def_x) + + val patter_def_pair = (1, true) + val (patter_def_a, patter_def_b) = patter_def_pair + println(patter_def_a) + + val elems: List[(Int, Int)] = List((1, 2), (3, 4), (5, 6)) + + for ((x,y) <- elems) do println(x) + + def main(args: Array[String]) = { + // println(chained_match_x) + println(vararg_splice) + // println(patter_def_x) + // println( + } +} + + +// Patter Matching Using Extractors + +// Option Extractors +case class Person(name: String, age: Int) +object Person { + def unapply(person: Person): Option[(String, Int)] = Some((person.name, person.age)) +} + +object OptionMatcher { + val person = Person("Alice", 25) + + val result = person match { + case Person(name, age) => s"Name: $name, Age: $age" + case _ => "Not a person" + } + println(result) +} + + + +// Boolean Extractors +object Adult { + def unapply(person: Person): Boolean = person.age >= 18 +} + +object BooleanMatcher { + val person = Person("Charlie", 17) + + val adultResult = person match { + case Adult() => s"${person.name} is an adult" + case _ => s"${person.name} is not an adult" + } + + println(adultResult) +} + + + +// Variadic Extractors +// Add cases for exceptions +// +// Adding some warning test cases +// - + +object VariadicExtractor { + // Define an unapply method that takes a List and returns an Option of Seq + def unapplySeq[A](list: List[A]): Option[Seq[A]] = Some(list) +} + +object PatternMatchExample extends App { + def describeList(list: List[Int]): String = list match { + case VariadicExtractor(1, 2, rest @ _*) => + s"Starts with 1, 2 followed by: ${rest.mkString(", ")}" + case VariadicExtractor(1, rest @ _*) => + s"Starts with 1 followed by: ${rest.mkString(", ")}" + case VariadicExtractor(first, second, rest @ _*) => + s"Starts with $first, $second followed by: ${rest.mkString(", ")}" + case VariadicExtractor(single) => + s"Only one element: $single" + case VariadicExtractor() => + "Empty list" + case _ => + "Unknown pattern" + } + + // Test cases + println(describeList(List(1, 2, 3, 4, 5))) // Output: Starts with 1, 2 followed by: 3, 4, 5 + println(describeList(List(1, 3, 4, 5))) // Output: Starts with 1 followed by: 3, 4, 5 + println(describeList(List(2, 3, 4, 5))) // Output: Starts with 2, 3 followed by: 4, 5 + println(describeList(List(1))) // Output: Only one element: 1 + println(describeList(List())) // Output: Empty list +} + From 35b5f7cccbef0dfacc8c48e2fbd6c4f864c0bb7e Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Aug 2024 11:28:18 +0200 Subject: [PATCH 428/827] A left-biased variant for implicit/given pairs We now use a left-biased scheme, as follows. From 3.6 on: - A given x: X is better than a given or implicit y: Y if y can be instantiated/widened to X. - An implicit x: X is better than a given or implicit y: Y if y can be instantiated to a supertype of X. - Use owner score for givens as a tie breaker if after all other tests we still have an ambiguity. This is not transitive, so we need a separate scheme to work around that. Other change: - Drop special handling of NotGiven in prioritization. The previous logic pretended to do so, but was ineffective. 
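For illustration, the effect of the first rule on plain givens (a hypothetical snippet in the spirit of tests/run/given-triangle.scala, not one of the tests added here; the stated outcome is what the new scheme is expected to pick under -source 3.7):

```scala
class A
class B extends A

given a: A = A()
given b: B = B()

@main def demo =
  // Under the new scheme a given is better if the other candidate's type
  // can be widened to its own, so the more general `a: A` is selected here.
  // A pair of old-style `implicit val`s keeps the previous behavior and
  // still prefers the more specific instance.
  val x = summon[A]
  println(x)
```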
--- community-build/community-projects/PPrint | 2 +- .../tools/dotc/printing/Formatting.scala | 17 ++--- .../dotty/tools/dotc/reporting/messages.scala | 2 +- .../dotty/tools/dotc/typer/Applications.scala | 65 +++++++++++------ .../dotty/tools/dotc/typer/Implicits.scala | 39 ++++++++-- .../tools/dotc/StringFormatterTest.scala | 1 + tests/neg/given-triangle.check | 8 ++ tests/neg/i21212.check | 4 + tests/neg/i21212.scala | 11 +++ tests/neg/i21303/JavaEnum.java | 1 + tests/neg/i21303/Test.scala | 33 +++++++++ tests/neg/i2974.scala | 16 ++++ tests/neg/scala-uri.check | 14 ++++ tests/neg/scala-uri.scala | 30 ++++++++ tests/pos/given-priority.scala | 24 ++++++ tests/pos/i21212.scala | 11 --- tests/pos/i21303/JavaEnum.java | 1 + tests/pos/i21303/Test.scala | 32 ++++++++ tests/pos/i21303a/JavaEnum.java | 1 + tests/pos/i21303a/Test.scala | 35 +++++++++ tests/pos/i21320.scala | 73 +++++++++++++++++++ tests/pos/i2974.scala | 3 +- tests/pos/scala-uri.scala | 22 ++++++ tests/pos/slick-migration-api-example.scala | 23 ++++++ tests/warn/i21036a.check | 6 +- tests/warn/i21036b.check | 6 +- 26 files changed, 424 insertions(+), 56 deletions(-) create mode 100644 tests/neg/i21212.check create mode 100644 tests/neg/i21212.scala create mode 100644 tests/neg/i21303/JavaEnum.java create mode 100644 tests/neg/i21303/Test.scala create mode 100644 tests/neg/i2974.scala create mode 100644 tests/neg/scala-uri.check create mode 100644 tests/neg/scala-uri.scala create mode 100644 tests/pos/given-priority.scala create mode 100644 tests/pos/i21303/JavaEnum.java create mode 100644 tests/pos/i21303/Test.scala create mode 100644 tests/pos/i21303a/JavaEnum.java create mode 100644 tests/pos/i21303a/Test.scala create mode 100644 tests/pos/i21320.scala create mode 100644 tests/pos/scala-uri.scala create mode 100644 tests/pos/slick-migration-api-example.scala diff --git a/community-build/community-projects/PPrint b/community-build/community-projects/PPrint index 34a777f687bc..2203dc6081f5 160000 --- a/community-build/community-projects/PPrint +++ b/community-build/community-projects/PPrint @@ -1 +1 @@ -Subproject commit 34a777f687bc851953e682f99edcae9d2875babc +Subproject commit 2203dc6081f5e8fa89f552b155724b0a8fdcec03 diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 6f1c32beb822..43cac17e6318 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -2,8 +2,6 @@ package dotty.tools package dotc package printing -import scala.language.unsafeNulls - import scala.collection.mutable import core.* @@ -52,7 +50,11 @@ object Formatting { object ShowAny extends Show[Any]: def show(x: Any): Shown = x - class ShowImplicits3: + class ShowImplicits4: + given [X: Show]: Show[X | Null] with + def show(x: X | Null) = if x == null then "null" else CtxShow(toStr(x.nn)) + + class ShowImplicits3 extends ShowImplicits4: given Show[Product] = ShowAny class ShowImplicits2 extends ShowImplicits3: @@ -77,15 +79,10 @@ object Formatting { given [K: Show, V: Show]: Show[Map[K, V]] with def show(x: Map[K, V]) = CtxShow(x.map((k, v) => s"${toStr(k)} => ${toStr(v)}")) - end given given [H: Show, T <: Tuple: Show]: Show[H *: T] with def show(x: H *: T) = CtxShow(toStr(x.head) *: toShown(x.tail).asInstanceOf[Tuple]) - end given - - given [X: Show]: Show[X | Null] with - def show(x: X | Null) = if x == null then "null" else CtxShow(toStr(x.nn)) given Show[FlagSet] with def show(x: FlagSet) = 
x.flagsString @@ -148,8 +145,8 @@ object Formatting { private def treatArg(arg: Shown, suffix: String)(using Context): (String, String) = arg.runCtxShow match { case arg: Seq[?] if suffix.indexOf('%') == 0 && suffix.indexOf('%', 1) != -1 => val end = suffix.indexOf('%', 1) - val sep = StringContext.processEscapes(suffix.substring(1, end)) - (arg.mkString(sep), suffix.substring(end + 1)) + val sep = StringContext.processEscapes(suffix.substring(1, end).nn) + (arg.mkString(sep), suffix.substring(end + 1).nn) case arg: Seq[?] => (arg.map(showArg).mkString("[", ", ", "]"), suffix) case arg => diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index f112b6fb5aa7..38b49e63c685 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -2988,7 +2988,7 @@ class MissingImplicitArgument( /** Default error message for non-nested ambiguous implicits. */ def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = - s"Ambiguous given instances: ${ambi.explanation}${location("of")}" + s"Ambiguous given instances: ${ambi.explanation}${location("of")}${ambi.priorityChangeWarningNote}" /** Default error messages for non-ambiguous implicits, or nested ambiguous * implicits. diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 42765cd6c0bf..d063854038a1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1762,6 +1762,17 @@ trait Applications extends Compatibility { else if sym2.is(Module) then compareOwner(sym1, cls2) else 0 + enum CompareScheme: + case Old // Normal specificity test for overloading resolution (where `preferGeneral` is false) + // and in mode Scala3-migration when we compare with the old Scala 2 rules. + + case Intermediate // Intermediate rules: better means specialize, but map all type arguments downwards + // These are enabled for 3.0-3.4, or if OldImplicitResolution + // is specified, and also for all comparisons between old-style implicits, + + case New // New rules: better means generalize, givens (and extensions) always beat implicits + end CompareScheme + /** Compare two alternatives of an overloaded call or an implicit search. * * @param alt1, alt2 Non-overloaded references indicating the two choices @@ -1788,6 +1799,15 @@ trait Applications extends Compatibility { */ def compare(alt1: TermRef, alt2: TermRef, preferGeneral: Boolean = false)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { record("resolveOverloaded.compare") + val scheme = + val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) + if !preferGeneral || Feature.migrateTo3 && oldResolution then + CompareScheme.Old + else if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) + || oldResolution + || alt1.symbol.is(Implicit) && alt2.symbol.is(Implicit) + then CompareScheme.Intermediate + else CompareScheme.New /** Is alternative `alt1` with type `tp1` as good as alternative * `alt2` with type `tp2` ? 
@@ -1830,15 +1850,15 @@ trait Applications extends Compatibility { isAsGood(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) } case _ => // (3) - def compareValues(tp1: Type, tp2: Type)(using Context) = - isAsGoodValueType(tp1, tp2, alt1.symbol.is(Implicit), alt2.symbol.is(Implicit)) + def compareValues(tp2: Type)(using Context) = + isAsGoodValueType(tp1, tp2, alt1.symbol.is(Implicit)) tp2 match case tp2: MethodType => true // (3a) case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a) case tp2: PolyType => // (3b) - explore(compareValues(tp1, instantiateWithTypeVars(tp2))) + explore(compareValues(instantiateWithTypeVars(tp2))) case _ => // 3b) - compareValues(tp1, tp2) + compareValues(tp2) } /** Test whether value type `tp1` is as good as value type `tp2`. @@ -1876,9 +1896,8 @@ trait Applications extends Compatibility { * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. */ - def isAsGoodValueType(tp1: Type, tp2: Type, alt1IsImplicit: Boolean, alt2IsImplicit: Boolean)(using Context): Boolean = - val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) - if !preferGeneral || Feature.migrateTo3 && oldResolution then + def isAsGoodValueType(tp1: Type, tp2: Type, alt1IsImplicit: Boolean)(using Context): Boolean = + if scheme == CompareScheme.Old then // Normal specificity test for overloading resolution (where `preferGeneral` is false) // and in mode Scala3-migration when we compare with the old Scala 2 rules. isCompatible(tp1, tp2) @@ -1892,13 +1911,7 @@ trait Applications extends Compatibility { val tp1p = prepare(tp1) val tp2p = prepare(tp2) - if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) - || oldResolution - || alt1IsImplicit && alt2IsImplicit - then - // Intermediate rules: better means specialize, but map all type arguments downwards - // These are enabled for 3.0-3.5, and for all comparisons between old-style implicits, - // and in 3.5 and 3.6-migration when we compare with previous rules. + if scheme == CompareScheme.Intermediate || alt1IsImplicit then val flip = new TypeMap: def apply(t: Type) = t match case t @ AppliedType(tycon, args) => @@ -1909,9 +1922,7 @@ trait Applications extends Compatibility { case _ => mapOver(t) (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) else - // New rules: better means generalize, givens (and extensions) always beat implicits - if alt1IsImplicit != alt2IsImplicit then alt2IsImplicit - else (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) + (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) end isAsGoodValueType /** Widen the result type of synthetic given methods from the implementation class to the @@ -1982,13 +1993,19 @@ trait Applications extends Compatibility { // alternatives are the same after following ExprTypes, pick one of them // (prefer the one that is not a method, but that's arbitrary). if alt1.widenExpr =:= alt2 then -1 else 1 - else ownerScore match - case 1 => if winsType1 || !winsType2 then 1 else 0 - case -1 => if winsType2 || !winsType1 then -1 else 0 - case 0 => - if winsType1 != winsType2 then if winsType1 then 1 else -1 - else if alt1.symbol == alt2.symbol then comparePrefixes - else 0 + else + // For new implicit resolution, take ownerscore as more significant than type resolution + // Reason: People use owner hierarchies to explicitly prioritize, we should not + // break that by changing implicit priority of types. 
+ def drawOrOwner = + if scheme == CompareScheme.New then ownerScore else 0 + ownerScore match + case 1 => if winsType1 || !winsType2 then 1 else drawOrOwner + case -1 => if winsType2 || !winsType1 then -1 else drawOrOwner + case 0 => + if winsType1 != winsType2 then if winsType1 then 1 else -1 + else if alt1.symbol == alt2.symbol then comparePrefixes + else 0 end compareWithTypes if alt1.symbol.is(ConstructorProxy) && !alt2.symbol.is(ConstructorProxy) then -1 diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index dac0c0e78448..32a95ae501f0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -549,6 +549,11 @@ object Implicits: /** An ambiguous implicits failure */ class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree, val nested: Boolean = false) extends SearchFailureType: + private[Implicits] var priorityChangeWarnings: List[Message] = Nil + + def priorityChangeWarningNote(using Context): String = + priorityChangeWarnings.map(msg => s"\n\nNote: $msg").mkString + def msg(using Context): Message = var str1 = err.refStr(alt1.ref) var str2 = err.refStr(alt2.ref) @@ -1330,7 +1335,7 @@ trait Implicits: if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level else - var cmp = comp(using searchContext()) + val cmp = comp(using searchContext()) val sv = Feature.sourceVersion if isWarnPriorityChangeVersion(sv) then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) @@ -1345,13 +1350,21 @@ trait Implicits: case _ => "none - it's ambiguous" if sv.stable == SourceVersion.`3.5` then warn( - em"""Given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} will change + em"""Given search preference for $pt between alternatives + | ${alt1.ref} + |and + | ${alt2.ref} + |will change. |Current choice : ${choice(prev)} |New choice from Scala 3.6: ${choice(cmp)}""") prev else warn( - em"""Change in given search preference for $pt between alternatives ${alt1.ref} and ${alt2.ref} + em"""Given search preference for $pt between alternatives + | ${alt1.ref} + |and + | ${alt2.ref} + |has changed. |Previous choice : ${choice(prev)} |New choice from Scala 3.6: ${choice(cmp)}""") cmp @@ -1610,9 +1623,23 @@ trait Implicits: throw ex val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) - for (critical, msg) <- priorityChangeWarnings do - if result.found.exists(critical.contains(_)) then - report.warning(msg, srcPos) + + // Issue all priority change warnings that can affect the result + val shownWarnings = priorityChangeWarnings.toList.collect: + case (critical, msg) if result.found.exists(critical.contains(_)) => + msg + result match + case result: SearchFailure => + result.reason match + case ambi: AmbiguousImplicits => + // Make warnings part of error message because otherwise they are suppressed when + // the error is emitted. 
+ ambi.priorityChangeWarnings = shownWarnings + case _ => + case _ => + for msg <- shownWarnings do + report.warning(msg, srcPos) + result end searchImplicit diff --git a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala index 4dfc08cc7e9b..b0ff8b8fc03e 100644 --- a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala +++ b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala @@ -23,6 +23,7 @@ class StringFormatterTest extends AbstractStringFormatterTest: @Test def flagsTup = check("(,final)", i"${(JavaStatic, Final)}") @Test def seqOfTup2 = check("(final,given), (private,lazy)", i"${Seq((Final, Given), (Private, Lazy))}%, %") @Test def seqOfTup3 = check("(Foo,given, (right is approximated))", i"${Seq((Foo, Given, TypeComparer.ApproxState.None.addHigh))}%, %") + @Test def tupleNull = check("(1,null)", i"${(1, null: String | Null)}") class StorePrinter extends Printer: var string: String = "" diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index f548df0078de..73d5aea12dc4 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -2,3 +2,11 @@ 15 |@main def Test = f // error | ^ |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f + | + |Note: Given search preference for A between alternatives + | (given_A : A) + |and + | (given_B : B) + |will change. + |Current choice : the second alternative + |New choice from Scala 3.6: the first alternative diff --git a/tests/neg/i21212.check b/tests/neg/i21212.check new file mode 100644 index 000000000000..5d9fe7728cbc --- /dev/null +++ b/tests/neg/i21212.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/i21212.scala:8:52 ---------------------------------------------------------------------- +8 | def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous + | ^ + |Ambiguous given instances: both parameter b2 and parameter a2 match type Minimization.A of parameter x of method summon in object Predef diff --git a/tests/neg/i21212.scala b/tests/neg/i21212.scala new file mode 100644 index 000000000000..4cb3741b2d65 --- /dev/null +++ b/tests/neg/i21212.scala @@ -0,0 +1,11 @@ + +object Minimization: + + trait A + trait B extends A + + def test1(using a1: A)(using b1: B) = summon[A] // picks (most general) a1 + def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous + def test3(implicit a3: A, b3: B) = summon[A] // picks (most specific) b3 + +end Minimization diff --git a/tests/neg/i21303/JavaEnum.java b/tests/neg/i21303/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/neg/i21303/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/neg/i21303/Test.scala b/tests/neg/i21303/Test.scala new file mode 100644 index 000000000000..fa8058140067 --- /dev/null +++ b/tests/neg/i21303/Test.scala @@ -0,0 +1,33 @@ +//> using options -source 3.6-migration +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSNamedType[E] = ??? 
+} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? + } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = summon[TSType[JavaEnum]] // error \ No newline at end of file diff --git a/tests/neg/i2974.scala b/tests/neg/i2974.scala new file mode 100644 index 000000000000..0bff2da1f3ba --- /dev/null +++ b/tests/neg/i2974.scala @@ -0,0 +1,16 @@ + +trait Foo[-T] +trait Bar[-T] extends Foo[T] + +object Test { + + locally: + implicit val fa: Foo[Int] = ??? + implicit val ba: Bar[Int] = ??? + summon[Foo[Int]] // ok + + locally: + implicit val fa: Foo[Int] = ??? + implicit val ba: Bar[Any] = ??? + summon[Foo[Int]] // error: ambiguous +} diff --git a/tests/neg/scala-uri.check b/tests/neg/scala-uri.check new file mode 100644 index 000000000000..91bcd7ab6a6c --- /dev/null +++ b/tests/neg/scala-uri.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg/scala-uri.scala:30:59 ------------------------------------------------------------------ +30 |@main def Test = summon[QueryKeyValue[(String, None.type)]] // error + | ^ + |No best given instance of type QueryKeyValue[(String, None.type)] was found for parameter x of method summon in object Predef. + |I found: + | + | QueryKeyValue.tuple2QueryKeyValue[String, None.type](QueryKey.stringQueryKey, + | QueryValue.optionQueryValue[A]( + | /* ambiguous: both given instance stringQueryValue in trait QueryValueInstances1 and given instance noneQueryValue in trait QueryValueInstances1 match type QueryValue[A] */ + | summon[QueryValue[A]] + | ) + | ) + | + |But both given instance stringQueryValue in trait QueryValueInstances1 and given instance noneQueryValue in trait QueryValueInstances1 match type QueryValue[A]. diff --git a/tests/neg/scala-uri.scala b/tests/neg/scala-uri.scala new file mode 100644 index 000000000000..3820f8cf5613 --- /dev/null +++ b/tests/neg/scala-uri.scala @@ -0,0 +1,30 @@ +import scala.language.implicitConversions + +trait QueryKey[A] +object QueryKey extends QueryKeyInstances +sealed trait QueryKeyInstances: + given stringQueryKey: QueryKey[String] = ??? + +trait QueryValue[-A] +object QueryValue extends QueryValueInstances +sealed trait QueryValueInstances1: + given stringQueryValue: QueryValue[String] = ??? + given noneQueryValue: QueryValue[None.type] = ??? + // The noneQueryValue makes no sense at this priority. Since QueryValue + // is contravariant, QueryValue[None.type] is always better than QueryValue[Option[A]] + // no matter whether it's old or new resolution. So taking both owner and type + // score into account, it's always a draw. With the new disambiguation, we prefer + // the optionQueryValue[A], which gives an ambiguity down the road, because we don't + // know what the wrapped type A is. Previously, we preferred QueryValue[None.type] + // because it is unconditional. The solution is to put QueryValue[None.type] in the + // same trait as QueryValue[Option[A]], as is shown in pos/scala-uri.scala. + +sealed trait QueryValueInstances extends QueryValueInstances1: + given optionQueryValue[A: QueryValue]: QueryValue[Option[A]] = ??? + +trait QueryKeyValue[A] +object QueryKeyValue: + given tuple2QueryKeyValue[K: QueryKey, V: QueryValue]: QueryKeyValue[(K, V)] = ??? 
+ + +@main def Test = summon[QueryKeyValue[(String, None.type)]] // error diff --git a/tests/pos/given-priority.scala b/tests/pos/given-priority.scala new file mode 100644 index 000000000000..048e063eff35 --- /dev/null +++ b/tests/pos/given-priority.scala @@ -0,0 +1,24 @@ +/* These tests show various mechanisms available for implicit prioritization. + */ +import language.`3.6` + +class A // The type for which we infer terms below +class B extends A + +/* First, two schemes that require a pre-planned architecture for how and + * where given instances are defined. + * + * Traditional scheme: prioritize with location in class hierarchy + */ +class LowPriorityImplicits: + given g1: A() + +object NormalImplicits extends LowPriorityImplicits: + given g2: B() + +def test1 = + import NormalImplicits.given + val x = summon[A] + val _: B = x + val y = summon[B] + val _: B = y diff --git a/tests/pos/i21212.scala b/tests/pos/i21212.scala index 2116beb72012..1a1f2e35819a 100644 --- a/tests/pos/i21212.scala +++ b/tests/pos/i21212.scala @@ -20,14 +20,3 @@ class UsingArguments[F[_]](using Temporal[F])(using err: MonadError[F, Throwable val bool: F[Boolean] = ??? def works = toFunctorOps(bool).map(_ => ()) // warns under -source:3.5 - -object Minimization: - - trait A - trait B extends A - - def test1(using a1: A)(using b1: B) = summon[A] // picks (most general) a1 - def test2(using a2: A)(implicit b2: B) = summon[A] // picks (most general) a2, was ambiguous - def test3(implicit a3: A, b3: B) = summon[A] // picks (most specific) b3 - -end Minimization diff --git a/tests/pos/i21303/JavaEnum.java b/tests/pos/i21303/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/pos/i21303/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/pos/i21303/Test.scala b/tests/pos/i21303/Test.scala new file mode 100644 index 000000000000..fe3efa6e38f3 --- /dev/null +++ b/tests/pos/i21303/Test.scala @@ -0,0 +1,32 @@ +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSType[E] = ??? +} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? 
+ } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = summon[TSType[JavaEnum]] \ No newline at end of file diff --git a/tests/pos/i21303a/JavaEnum.java b/tests/pos/i21303a/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/pos/i21303a/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/pos/i21303a/Test.scala b/tests/pos/i21303a/Test.scala new file mode 100644 index 000000000000..83a598b5f17f --- /dev/null +++ b/tests/pos/i21303a/Test.scala @@ -0,0 +1,35 @@ +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSType[E] = ??? + given javaEnumTSNamedType[E <: java.lang.Enum[E]: ClassTag]: TSNamedType[E] = ??? +} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? + } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = + summon[TSType[JavaEnum]] + summon[TSNamedType[JavaEnum]] diff --git a/tests/pos/i21320.scala b/tests/pos/i21320.scala new file mode 100644 index 000000000000..0a7e0d1941d1 --- /dev/null +++ b/tests/pos/i21320.scala @@ -0,0 +1,73 @@ +import scala.deriving.* +import scala.compiletime.* + +trait ConfigMonoid[T]: + def zero: T + def orElse(main: T, defaults: T): T + +object ConfigMonoid: + given option[T]: ConfigMonoid[Option[T]] = ??? 
+ + inline def zeroTuple[C <: Tuple]: Tuple = + inline erasedValue[C] match + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => + summonInline[ConfigMonoid[t]].zero *: zeroTuple[ts] + + inline def valueTuple[C <: Tuple, T](index: Int, main: T, defaults: T): Tuple = + inline erasedValue[C] match + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => + def get(v: T) = v.asInstanceOf[Product].productElement(index).asInstanceOf[t] + summonInline[ConfigMonoid[t]].orElse(get(main), get(defaults)) *: valueTuple[ts, T]( + index + 1, + main, + defaults + ) + + inline given derive[T](using m: Mirror.ProductOf[T]): ConfigMonoid[T] = + new ConfigMonoid[T]: + def zero: T = m.fromProduct(zeroTuple[m.MirroredElemTypes]) + def orElse(main: T, defaults: T): T = m.fromProduct(valueTuple[m.MirroredElemTypes, T](0, main, defaults)) + + + +final case class PublishOptions( + v1: Option[String] = None, + v2: Option[String] = None, + v3: Option[String] = None, + v4: Option[String] = None, + v5: Option[String] = None, + v6: Option[String] = None, + v7: Option[String] = None, + v8: Option[String] = None, + v9: Option[String] = None, + ci: PublishContextualOptions = PublishContextualOptions(), +) +object PublishOptions: + implicit val monoid: ConfigMonoid[PublishOptions] = ConfigMonoid.derive + +final case class PublishContextualOptions( + v1: Option[String] = None, + v2: Option[String] = None, + v3: Option[String] = None, + v4: Option[String] = None, + v5: Option[String] = None, + v6: Option[String] = None, + v7: Option[String] = None, + v8: Option[String] = None, + v9: Option[String] = None, + v10: Option[String] = None, + v11: Option[String] = None, + v12: Option[String] = None, + v13: Option[String] = None, + v14: Option[String] = None, + v15: Option[String] = None, + v16: Option[String] = None, + v17: Option[String] = None, + v18: Option[String] = None, + v19: Option[String] = None, + v20: Option[String] = None +) +object PublishContextualOptions: + given monoid: ConfigMonoid[PublishContextualOptions] = ConfigMonoid.derive \ No newline at end of file diff --git a/tests/pos/i2974.scala b/tests/pos/i2974.scala index 75c6a24a41bb..8f1c2e2d6d2f 100644 --- a/tests/pos/i2974.scala +++ b/tests/pos/i2974.scala @@ -7,6 +7,7 @@ object Test { implicit val ba: Bar[Int] = ??? def test: Unit = { - implicitly[Foo[Int]] + val x = summon[Foo[Int]] + val _: Bar[Int] = x } } diff --git a/tests/pos/scala-uri.scala b/tests/pos/scala-uri.scala new file mode 100644 index 000000000000..75ea2fc70d8a --- /dev/null +++ b/tests/pos/scala-uri.scala @@ -0,0 +1,22 @@ +// This works for implicit/implicit pairs but not for givens, see neg version. +import scala.language.implicitConversions + +trait QueryKey[A] +object QueryKey extends QueryKeyInstances +sealed trait QueryKeyInstances: + implicit val stringQueryKey: QueryKey[String] = ??? + +trait QueryValue[-A] +object QueryValue extends QueryValueInstances +sealed trait QueryValueInstances1: + implicit final val stringQueryValue: QueryValue[String] = ??? + implicit final val noneQueryValue: QueryValue[None.type] = ??? + +sealed trait QueryValueInstances extends QueryValueInstances1: + implicit final def optionQueryValue[A: QueryValue]: QueryValue[Option[A]] = ??? + +trait QueryKeyValue[A] +object QueryKeyValue: + implicit def tuple2QueryKeyValue[K: QueryKey, V: QueryValue]: QueryKeyValue[(K, V)] = ??? 
+ +@main def Test = summon[QueryKeyValue[(String, None.type)]] diff --git a/tests/pos/slick-migration-api-example.scala b/tests/pos/slick-migration-api-example.scala new file mode 100644 index 000000000000..3b6f1b4a82f4 --- /dev/null +++ b/tests/pos/slick-migration-api-example.scala @@ -0,0 +1,23 @@ +trait Migration +object Migration: + implicit class MigrationConcat[M <: Migration](m: M): + def &[N <: Migration, O](n: N)(implicit ccm: CanConcatMigrations[M, N, O]): O = ??? + +trait ReversibleMigration extends Migration +trait MigrationSeq extends Migration +trait ReversibleMigrationSeq extends MigrationSeq with ReversibleMigration + +trait ToReversible[-A <: Migration] +object ToReversible: + implicit val reversible: ToReversible[ReversibleMigration] = ??? +class CanConcatMigrations[-A, -B, +C] +trait CanConcatMigrationsLow: + implicit def default[A <: Migration, B <: Migration]: CanConcatMigrations[A, B, MigrationSeq] = ??? +object CanConcatMigrations extends CanConcatMigrationsLow: + implicit def reversible[A <: Migration, B <: Migration](implicit reverseA: ToReversible[A], + reverseB: ToReversible[B]): CanConcatMigrations[A, B, ReversibleMigrationSeq] = ??? + +@main def Test = + val rm: ReversibleMigration = ??? + val rms = rm & rm & rm + summon[rms.type <:< ReversibleMigrationSeq] // error Cannot prove that (rms : slick.migration.api.MigrationSeq) <:< slick.migration.api.ReversibleMigrationSeq. \ No newline at end of file diff --git a/tests/warn/i21036a.check b/tests/warn/i21036a.check index 673c01374ef3..876a81ad8a83 100644 --- a/tests/warn/i21036a.check +++ b/tests/warn/i21036a.check @@ -1,6 +1,10 @@ -- Warning: tests/warn/i21036a.scala:7:17 ------------------------------------------------------------------------------ 7 |val y = summon[A] // warn | ^ - | Given search preference for A between alternatives (b : B) and (a : A) will change + | Given search preference for A between alternatives + | (b : B) + | and + | (a : A) + | will change. | Current choice : the first alternative | New choice from Scala 3.6: the second alternative diff --git a/tests/warn/i21036b.check b/tests/warn/i21036b.check index ff7fdfd7a87c..11bb38727d77 100644 --- a/tests/warn/i21036b.check +++ b/tests/warn/i21036b.check @@ -1,6 +1,10 @@ -- Warning: tests/warn/i21036b.scala:7:17 ------------------------------------------------------------------------------ 7 |val y = summon[A] // warn | ^ - | Change in given search preference for A between alternatives (b : B) and (a : A) + | Given search preference for A between alternatives + | (b : B) + | and + | (a : A) + | has changed. | Previous choice : the first alternative | New choice from Scala 3.6: the second alternative From 7c4bd676744afe67b297fd32b66b4bb40cec78a2 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Aug 2024 13:48:25 +0200 Subject: [PATCH 429/827] Compensate loss of transitivity We only have transitivity between givens or between implicits. To cope with that - We tank first all implicits, giving a best implicit search result. - Then we rank all givens startign with the implicit result. If there is a given that is better than the best implicit, the best given will be chosen. Otherwise we will stick with the best implicit. 
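A rough sketch of that two-pass ranking (hypothetical helper names, not the compiler's actual API; the real change is in `searchImplicit` below, which partitions the sorted candidates on the `Implicit` flag and seeds the second pass with the implicit result):

```scala
// Pseudocode for the mixed implicit/given case. `rank` stands in for the
// compiler's ranking of sorted candidates against a previously found result.
def rankMixed[Cand](sorted: List[Cand],
                    isImplicit: Cand => Boolean,
                    rank: (List[Cand], Option[Cand]) => Option[Cand]): Option[Cand] =
  val (implicits, givens) = sorted.partition(isImplicit)
  val bestImplicit = rank(implicits, None) // pass 1: best old-style implicit
  rank(givens, bestImplicit)               // pass 2: a better given can still take over
```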
--- .../src/dotty/tools/dotc/typer/Implicits.scala | 18 +++++++++++++++--- tests/pos/given-owner-disambiguate.scala | 13 +++++++++++++ 2 files changed, 28 insertions(+), 3 deletions(-) create mode 100644 tests/pos/given-owner-disambiguate.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 32a95ae501f0..7aa88e1e582a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1381,8 +1381,6 @@ trait Implicits: def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2, disambiguate = true) - assert(diff <= 0 || isWarnPriorityChangeVersion(Feature.sourceVersion)) - // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens else if diff == 0 && alt2.isExtension then @@ -1622,7 +1620,21 @@ trait Implicits: validateOrdering(ord) throw ex - val result = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) + val sorted = sort(eligible) + val result = sorted match + case first :: rest => + val firstIsImplicit = first.ref.symbol.is(Implicit) + if rest.exists(_.ref.symbol.is(Implicit) != firstIsImplicit) then + // Mixture of implicits and givens + // Rank implicits first, then, if there is a given that it better than the best implicit(s) + // switch over to givens. + val (sortedImplicits, sortedGivens) = sorted.partition(_.ref.symbol.is(Implicit)) + val implicitResult = rank(sortedImplicits, NoMatchingImplicitsFailure, Nil) + rank(sortedGivens, implicitResult, Nil) + else + rank(sorted, NoMatchingImplicitsFailure, Nil) + case _ => + NoMatchingImplicitsFailure // Issue all priority change warnings that can affect the result val shownWarnings = priorityChangeWarnings.toList.collect: diff --git a/tests/pos/given-owner-disambiguate.scala b/tests/pos/given-owner-disambiguate.scala new file mode 100644 index 000000000000..f0a44ecc441a --- /dev/null +++ b/tests/pos/given-owner-disambiguate.scala @@ -0,0 +1,13 @@ +class General +class Specific extends General + +class LowPriority: + given a:General() + +object NormalPriority extends LowPriority: + given b:Specific() + +def run = + import NormalPriority.given + val x = summon[General] + val _: Specific = x // <- b was picked \ No newline at end of file From f62e14169656dde9c1abcc587b70f163d81bb442 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 5 Aug 2024 18:14:37 +0200 Subject: [PATCH 430/827] Delay priority change until 3.7 Warnings from 3.6, change in 3.7. This is one version later than originally planned. 
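Concretely, per the updated comparison logic in this patch: -source 3.6 keeps the old choice and warns, -source 3.7-migration takes the new choice and warns, and plain 3.7 takes the new choice without the warning. For example (a condensed variant of tests/warn/i21036b.scala below):

```scala
//> using options -source 3.7-migration
trait A
trait B extends A
given b: B = ???
given a: A = ???
val y = summon[A] // warns: previous choice was `b`, new choice from Scala 3.7 is the more general `a`
```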
--- .../dotty/tools/dotc/typer/Applications.scala | 14 +++++++------- .../src/dotty/tools/dotc/typer/Implicits.scala | 18 +++++++++--------- tests/neg/ctx-bounds-priority.scala | 2 +- tests/neg/given-triangle.check | 2 +- tests/neg/given-triangle.scala | 2 +- tests/neg/i15264.scala | 2 +- tests/neg/i21212.scala | 2 +- tests/neg/i21303/Test.scala | 2 +- tests/run/given-triangle.scala | 2 +- tests/run/implicit-specifity.scala | 2 +- tests/run/implied-priority.scala | 2 +- tests/warn/i20420.scala | 2 +- tests/warn/i21036a.check | 2 +- tests/warn/i21036a.scala | 2 +- tests/warn/i21036b.check | 2 +- tests/warn/i21036b.scala | 2 +- 16 files changed, 30 insertions(+), 30 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index d063854038a1..7cb0dbcdc833 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1767,7 +1767,7 @@ trait Applications extends Compatibility { // and in mode Scala3-migration when we compare with the old Scala 2 rules. case Intermediate // Intermediate rules: better means specialize, but map all type arguments downwards - // These are enabled for 3.0-3.4, or if OldImplicitResolution + // These are enabled for 3.0-3.5, or if OldImplicitResolution // is specified, and also for all comparisons between old-style implicits, case New // New rules: better means generalize, givens (and extensions) always beat implicits @@ -1803,7 +1803,7 @@ trait Applications extends Compatibility { val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) if !preferGeneral || Feature.migrateTo3 && oldResolution then CompareScheme.Old - else if Feature.sourceVersion.isAtMost(SourceVersion.`3.4`) + else if Feature.sourceVersion.isAtMost(SourceVersion.`3.5`) || oldResolution || alt1.symbol.is(Implicit) && alt2.symbol.is(Implicit) then CompareScheme.Intermediate @@ -1869,7 +1869,7 @@ trait Applications extends Compatibility { * available in 3.0-migration if mode `Mode.OldImplicitResolution` is turned on as well. * It is used to highlight differences between Scala 2 and 3 behavior. * - * - In Scala 3.0-3.5, the behavior is as follows: `T <:p U` iff there is an implicit conversion + * - In Scala 3.0-3.6, the behavior is as follows: `T <:p U` iff there is an implicit conversion * from `T` to `U`, or * * flip(T) <: flip(U) @@ -1884,14 +1884,14 @@ trait Applications extends Compatibility { * of parameters are not affected. So `T <: U` would imply `Set[Cmp[U]] <:p Set[Cmp[T]]`, * as usual, because `Set` is non-variant. * - * - From Scala 3.6, `T <:p U` means `T <: U` or `T` convertible to `U` + * - From Scala 3.7, `T <:p U` means `T <: U` or `T` convertible to `U` * for overloading resolution (when `preferGeneral is false), and the opposite relation * `U <: T` or `U convertible to `T` for implicit disambiguation between givens - * (when `preferGeneral` is true). For old-style implicit values, the 3.4 behavior is kept. + * (when `preferGeneral` is true). For old-style implicit values, the 3.5 behavior is kept. * If one of the alternatives is an implicit and the other is a given (or an extension), the implicit loses. * - * - In Scala 3.5 and Scala 3.6-migration, we issue a warning if the result under - * Scala 3.6 differ wrt to the old behavior up to 3.5. + * - In Scala 3.6 and Scala 3.7-migration, we issue a warning if the result under + * Scala 3.7 differs wrt to the old behavior up to 3.6. 
* * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 7aa88e1e582a..ebdca078d345 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1305,13 +1305,13 @@ trait Implicits: /** Search a list of eligible implicit references */ private def searchImplicit(eligible: List[Candidate], contextual: Boolean): SearchResult = - // A map that associates a priority change warning (between -source 3.4 and 3.6) + // A map that associates a priority change warning (between -source 3.6 and 3.7) // with the candidate refs mentioned in the warning. We report the associated // message if one of the critical candidates is part of the result of the implicit search. val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() def isWarnPriorityChangeVersion(sv: SourceVersion): Boolean = - sv.stable == SourceVersion.`3.5` || sv == SourceVersion.`3.6-migration` + sv.stable == SourceVersion.`3.6` || sv == SourceVersion.`3.7-migration` /** Compare `alt1` with `alt2` to determine which one should be chosen. * @@ -1319,12 +1319,12 @@ trait Implicits: * a number < 0 if `alt2` is preferred over `alt1` * 0 if neither alternative is preferred over the other * The behavior depends on the source version - * before 3.5: compare with preferGeneral = false - * 3.5: compare twice with preferGeneral = false and true, warning if result is different, + * before 3.6: compare with preferGeneral = false + * 3.6: compare twice with preferGeneral = false and true, warning if result is different, * return old result with preferGeneral = false - * 3.6-migration: compare twice with preferGeneral = false and true, warning if result is different, + * 3.7-migration: compare twice with preferGeneral = false and true, warning if result is different, * return new result with preferGeneral = true - * 3.6 and higher: compare with preferGeneral = true + * 3.7 and higher: compare with preferGeneral = true * * @param disambiguate The call is used to disambiguate two successes, not for ranking. * When ranking, we are always filtering out either > 0 or <= 0 results. @@ -1348,7 +1348,7 @@ trait Implicits: case -1 => "the second alternative" case 1 => "the first alternative" case _ => "none - it's ambiguous" - if sv.stable == SourceVersion.`3.5` then + if sv.stable == SourceVersion.`3.6` then warn( em"""Given search preference for $pt between alternatives | ${alt1.ref} @@ -1356,7 +1356,7 @@ trait Implicits: | ${alt2.ref} |will change. |Current choice : ${choice(prev)} - |New choice from Scala 3.6: ${choice(cmp)}""") + |New choice from Scala 3.7: ${choice(cmp)}""") prev else warn( @@ -1366,7 +1366,7 @@ trait Implicits: | ${alt2.ref} |has changed. 
|Previous choice : ${choice(prev)} - |New choice from Scala 3.6: ${choice(cmp)}""") + |New choice from Scala 3.7: ${choice(cmp)}""") cmp else cmp max prev // When ranking, we keep the better of cmp and prev, which ends up retaining a candidate diff --git a/tests/neg/ctx-bounds-priority.scala b/tests/neg/ctx-bounds-priority.scala index 6594642d67c3..023a3273d586 100644 --- a/tests/neg/ctx-bounds-priority.scala +++ b/tests/neg/ctx-bounds-priority.scala @@ -1,4 +1,4 @@ -//> using options -source 3.6 +//> using options -source 3.7 trait Eq[A] trait Order[A] extends Eq[A]: def toOrdering: Ordering[A] diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index 73d5aea12dc4..147c54270afb 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -9,4 +9,4 @@ | (given_B : B) |will change. |Current choice : the second alternative - |New choice from Scala 3.6: the first alternative + |New choice from Scala 3.7: the first alternative diff --git a/tests/neg/given-triangle.scala b/tests/neg/given-triangle.scala index 16aca7c44dee..4842c5314f51 100644 --- a/tests/neg/given-triangle.scala +++ b/tests/neg/given-triangle.scala @@ -1,4 +1,4 @@ -//> using options -source 3.5 +//> using options -source 3.6 class A class B extends A class C extends A diff --git a/tests/neg/i15264.scala b/tests/neg/i15264.scala index 825e74701f73..d690eccf23f3 100644 --- a/tests/neg/i15264.scala +++ b/tests/neg/i15264.scala @@ -1,4 +1,4 @@ -import language.`3.6` +import language.`3.7` object priority: // lower number = higher priority class Prio0 extends Prio1 diff --git a/tests/neg/i21212.scala b/tests/neg/i21212.scala index 4cb3741b2d65..99e4c44f9489 100644 --- a/tests/neg/i21212.scala +++ b/tests/neg/i21212.scala @@ -1,4 +1,4 @@ - +//> using options -source 3.7 object Minimization: trait A diff --git a/tests/neg/i21303/Test.scala b/tests/neg/i21303/Test.scala index fa8058140067..25d43dac344e 100644 --- a/tests/neg/i21303/Test.scala +++ b/tests/neg/i21303/Test.scala @@ -1,4 +1,4 @@ -//> using options -source 3.6-migration +//> using options -source 3.7-migration import scala.deriving.Mirror import scala.compiletime.* import scala.reflect.ClassTag diff --git a/tests/run/given-triangle.scala b/tests/run/given-triangle.scala index 0b483e87f28c..66339f44e43c 100644 --- a/tests/run/given-triangle.scala +++ b/tests/run/given-triangle.scala @@ -1,4 +1,4 @@ -import language.`3.6` +import language.`3.7` class A class B extends A diff --git a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index da90110c9866..9e59cf5f1869 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -1,4 +1,4 @@ -import language.`3.6` +import language.`3.7` case class Show[T](val i: Int) object Show { diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index 15f6a40a27ef..a9380e117875 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -1,6 +1,6 @@ /* These tests show various mechanisms available for implicit prioritization. 
*/ -import language.`3.6` +import language.`3.7` class E[T](val str: String) // The type for which we infer terms below diff --git a/tests/warn/i20420.scala b/tests/warn/i20420.scala index d28270509f91..4c7585e32f48 100644 --- a/tests/warn/i20420.scala +++ b/tests/warn/i20420.scala @@ -1,4 +1,4 @@ -//> using options -source 3.5-migration +//> using options -source 3.6-migration final class StrictEqual[V] final class Less[V] diff --git a/tests/warn/i21036a.check b/tests/warn/i21036a.check index 876a81ad8a83..63d611a6e246 100644 --- a/tests/warn/i21036a.check +++ b/tests/warn/i21036a.check @@ -7,4 +7,4 @@ | (a : A) | will change. | Current choice : the first alternative - | New choice from Scala 3.6: the second alternative + | New choice from Scala 3.7: the second alternative diff --git a/tests/warn/i21036a.scala b/tests/warn/i21036a.scala index ab97429852d6..b7aba27ca95e 100644 --- a/tests/warn/i21036a.scala +++ b/tests/warn/i21036a.scala @@ -1,4 +1,4 @@ -//> using options -source 3.5 +//> using options -source 3.6 trait A trait B extends A given b: B = ??? diff --git a/tests/warn/i21036b.check b/tests/warn/i21036b.check index 11bb38727d77..dfa19a0e9bb1 100644 --- a/tests/warn/i21036b.check +++ b/tests/warn/i21036b.check @@ -7,4 +7,4 @@ | (a : A) | has changed. | Previous choice : the first alternative - | New choice from Scala 3.6: the second alternative + | New choice from Scala 3.7: the second alternative diff --git a/tests/warn/i21036b.scala b/tests/warn/i21036b.scala index 16dd72266613..c440f5d3c06d 100644 --- a/tests/warn/i21036b.scala +++ b/tests/warn/i21036b.scala @@ -1,4 +1,4 @@ -//> using options -source 3.6-migration +//> using options -source 3.7-migration trait A trait B extends A given b: B = ??? From 87f5ca00cabb04361c4270f6c98f1ea30b21a1e9 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 6 Aug 2024 19:57:01 +0200 Subject: [PATCH 431/827] Fix ranking logic --- .../src/dotty/tools/dotc/ast/Desugar.scala | 2 +- .../dotty/tools/dotc/typer/Implicits.scala | 31 +++++++--- tests/pos/i15264.scala | 1 + tests/warn/i15264.scala | 56 +++++++++++++++++++ 4 files changed, 80 insertions(+), 10 deletions(-) create mode 100644 tests/warn/i15264.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 4574dd72ffa8..026b8a409d3d 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -483,7 +483,7 @@ object desugar { params.map: param => val normFlags = param.mods.flags &~ GivenOrImplicit | (mparam.mods.flags & (GivenOrImplicit)) param.withMods(param.mods.withFlags(normFlags)) - .showing(i"ADAPTED PARAM $result ${result.mods.flags} for ${meth.name}") + .showing(i"adapted param $result ${result.mods.flags} for ${meth.name}", Printers.desugar) else params (normParams ++ mparams) :: Nil case mparams :: mparamss1 => diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index ebdca078d345..b9f225f6a42a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1369,8 +1369,13 @@ trait Implicits: |New choice from Scala 3.7: ${choice(cmp)}""") cmp else cmp max prev - // When ranking, we keep the better of cmp and prev, which ends up retaining a candidate - // if it is retained in either version. + // When ranking, alt1 is always the new candidate and alt2 is the + // solution found previously. 
We keep the candidate if the outcome is 0 + // (ambiguous) or 1 (first wins). Or, when ranking in healImplicit we keep the + // candidate only if the outcome is 1. In both cases, keeping the better + // of `cmp` and `prev` means we keep candidates that could match + // in either scheme. This means that subsequent disambiguation + // comparisons will record a warning if cmp != prev. else cmp end compareAlternatives @@ -1416,7 +1421,15 @@ trait Implicits: if diff < 0 then alt2 else if diff > 0 then alt1 else SearchFailure(new AmbiguousImplicits(alt1, alt2, pt, argument), span) - case _: SearchFailure => alt2 + case fail: SearchFailure => + fail.reason match + case ambi: AmbiguousImplicits => + if compareAlternatives(ambi.alt1, alt2) < 0 && + compareAlternatives(ambi.alt2, alt2) < 0 + then alt2 + else alt1 + case _ => + alt2 /** Try to find a best matching implicit term among all the candidates in `pending`. * @param pending The list of candidates that remain to be tested @@ -1621,7 +1634,7 @@ trait Implicits: throw ex val sorted = sort(eligible) - val result = sorted match + val res = sorted match case first :: rest => val firstIsImplicit = first.ref.symbol.is(Implicit) if rest.exists(_.ref.symbol.is(Implicit) != firstIsImplicit) then @@ -1638,11 +1651,11 @@ trait Implicits: // Issue all priority change warnings that can affect the result val shownWarnings = priorityChangeWarnings.toList.collect: - case (critical, msg) if result.found.exists(critical.contains(_)) => + case (critical, msg) if res.found.exists(critical.contains(_)) => msg - result match - case result: SearchFailure => - result.reason match + res match + case res: SearchFailure => + res.reason match case ambi: AmbiguousImplicits => // Make warnings part of error message because otherwise they are suppressed when // the error is emitted. 
@@ -1652,7 +1665,7 @@ trait Implicits: for msg <- shownWarnings do report.warning(msg, srcPos) - result + res end searchImplicit def isUnderSpecifiedArgument(tp: Type): Boolean = diff --git a/tests/pos/i15264.scala b/tests/pos/i15264.scala index 5be8436c12ba..18ca92df6cb1 100644 --- a/tests/pos/i15264.scala +++ b/tests/pos/i15264.scala @@ -1,3 +1,4 @@ +import language.`3.7` object priority: // lower number = higher priority class Prio0 extends Prio1 diff --git a/tests/warn/i15264.scala b/tests/warn/i15264.scala new file mode 100644 index 000000000000..9435c6364c08 --- /dev/null +++ b/tests/warn/i15264.scala @@ -0,0 +1,56 @@ +// Note: No check file for this test since the precise warning messages are non-deterministic +import language.`3.7-migration` +object priority: + // lower number = higher priority + class Prio0 extends Prio1 + object Prio0 { given Prio0() } + + class Prio1 extends Prio2 + object Prio1 { given Prio1() } + + class Prio2 + object Prio2 { given Prio2() } + +object repro: + // analogous to cats Eq, Hash, Order: + class A[V] + class B[V] extends A[V] + class C[V] extends A[V] + + class Q[V] + + object context: + // prios work here, which is cool + given[V](using priority.Prio0): C[V] = new C[V] + given[V](using priority.Prio1): B[V] = new B[V] + given[V](using priority.Prio2): A[V] = new A[V] + + object exports: + // so will these exports + export context.given + + // if you import these don't import from 'context' above + object qcontext: + // base defs, like what you would get from cats + given ga: A[Int] = new B[Int] // added so that we don't get an ambiguity in test2 + given gb: B[Int] = new B[Int] + given gc: C[Int] = new C[Int] + + // these seem like they should work but don't + given gcq[V](using p0: priority.Prio0)(using c: C[V]): C[Q[V]] = new C[Q[V]] + given gbq[V](using p1: priority.Prio1)(using b: B[V]): B[Q[V]] = new B[Q[V]] + given gaq[V](using p2: priority.Prio2)(using a: A[V]): A[Q[V]] = new A[Q[V]] + +object test1: + import repro.* + import repro.exports.given + + // these will work + val a = summon[A[Int]] // warn + + +object test2: + import repro.* + import repro.qcontext.given + + val a = summon[A[Q[Int]]] // warn From 0d50a30628e0ce83a63cf40ec48afadda6ecd56a Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Tue, 6 Aug 2024 18:07:14 +0200 Subject: [PATCH 432/827] Report only non-overridden unimplemented members Previously, when a concrete class A had unimplemented members that are overridden, all overrides would be reported as unimplemented in the error message. This would produce error messages that are not accurate, and that suggest stubs that are not correct. This patch fixes the issue by reporting in the error message only the unimplemented members that are not overridden by other unimplemented members. 
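As an illustration, a minimal sketch of the behavior change (it follows the shape of the i21335 test added below, so the names Foo, Bar1 and Z1 come from that test):

```scala
trait Foo:
  def bar(): Foo

trait Bar1 extends Foo:
  override def bar(): Bar1   // overrides Foo#bar with a narrower result type

// Z1 still lacks an implementation of bar. Previously the error listed both
// Foo#bar and Bar1#bar as unimplemented; with this patch only the
// non-overridden member, override def bar(): Bar1, is reported.
class Z1 extends Bar1
```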
Fixes #21335 --- compiler/src/dotty/tools/dotc/typer/RefChecks.scala | 12 +++++++++++- tests/neg/i21335.check | 8 ++++++++ tests/neg/i21335.scala | 12 ++++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 tests/neg/i21335.check create mode 100644 tests/neg/i21335.scala diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 2601bfb42074..5f7504fa072f 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -698,6 +698,15 @@ object RefChecks { && withMode(Mode.IgnoreCaptures)(mbrDenot.matchesLoosely(impl, alwaysCompareTypes = true))) .exists + /** Filter out symbols from `syms` that are overridden by a symbol appearing later in the list. * Symbols that are not overridden are kept. */ + def lastOverrides(syms: List[Symbol]): List[Symbol] = + val deduplicated = + syms.foldLeft(List.empty[Symbol]): + case (acc, sym) if acc.exists(s => isOverridingPair(s, sym, clazz.thisType)) => acc + case (acc, sym) => sym :: acc + deduplicated.reverse + /** The term symbols in this class and its baseclasses that are * abstract in this class. We can't use memberNames for that since * a concrete member might have the same signature as an abstract @@ -720,7 +729,8 @@ object RefChecks { val missingMethods = grouped.toList flatMap { case (name, syms) => - syms.filterConserve(!_.isSetter) + lastOverrides(syms) + .filterConserve(!_.isSetter) .distinctBy(_.signature) // Avoid duplication for similar definitions (#19731) } diff --git a/tests/neg/i21335.check b/tests/neg/i21335.check new file mode 100644 index 000000000000..a7ee092eec0e --- /dev/null +++ b/tests/neg/i21335.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i21335.scala:7:6 ----------------------------------------------------------------------------------- +7 |class Z1 extends Bar1 // error + | ^ + | class Z1 needs to be abstract, since override def bar(): Bar1 in trait Bar1 is not defined +-- Error: tests/neg/i21335.scala:12:6 ---------------------------------------------------------------------------------- +12 |class Z2 extends Bar2 // error + | ^ + | class Z2 needs to be abstract, since def bar(): Bar2 in trait Bar2 is not defined diff --git a/tests/neg/i21335.scala b/tests/neg/i21335.scala new file mode 100644 index 000000000000..270765c80535 --- /dev/null +++ b/tests/neg/i21335.scala @@ -0,0 +1,12 @@ +trait Foo: + def bar(): Foo + +trait Bar1 extends Foo: + override def bar(): Bar1 + +class Z1 extends Bar1 // error + +trait Bar2 extends Foo: + def bar(): Bar2 + +class Z2 extends Bar2 // error From f5f390ef575a3f5101960144125697db9b7e66ce Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 6 Aug 2024 20:05:59 +0200 Subject: [PATCH 433/827] Make priority change warning messages stable Make the wording of a priority change warning message stable under different orders of eligibles. We now always report the previously chosen alternative first and the new one second. Note: We can still get ambiguities by flagging different pairs of alternatives depending on initial order.
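For context, a rough sketch of a program that triggers the warning whose wording is stabilized here (it mirrors the existing i21036a test; the -source 3.6 setting is assumed):

```scala
//> using options -source 3.6
trait A
trait B extends A

given b: B = ???
given a: A = ???

// The old rules pick the more specific given (b); the Scala 3.7 rules prefer
// the more general one (a). The warning now always prints the previously
// chosen alternative first and the new choice second, independently of the
// order in which the two candidates were encountered during the search.
val x = summon[A]   // warn: given search preference for A will change
```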
--- .../dotty/tools/dotc/typer/Implicits.scala | 66 +++++++++---------- tests/neg/given-triangle.check | 8 +-- 2 files changed, 36 insertions(+), 38 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index b9f225f6a42a..a4fa989cc85c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1310,9 +1310,6 @@ trait Implicits: // message if one of the critical candidates is part of the result of the implicit search. val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() - def isWarnPriorityChangeVersion(sv: SourceVersion): Boolean = - sv.stable == SourceVersion.`3.6` || sv == SourceVersion.`3.7-migration` - /** Compare `alt1` with `alt2` to determine which one should be chosen. * * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1337,37 +1334,38 @@ trait Implicits: else val cmp = comp(using searchContext()) val sv = Feature.sourceVersion - if isWarnPriorityChangeVersion(sv) then + val isLastOldVersion = sv.stable == SourceVersion.`3.6` + val isMigratingVersion = sv == SourceVersion.`3.7-migration` + if isLastOldVersion || isMigratingVersion then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if disambiguate && cmp != prev then - def warn(msg: Message) = - val critical = alt1.ref :: alt2.ref :: Nil - priorityChangeWarnings += ((critical, msg)) - implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}, $disambiguate") - def choice(c: Int) = c match - case -1 => "the second alternative" - case 1 => "the first alternative" - case _ => "none - it's ambiguous" - if sv.stable == SourceVersion.`3.6` then - warn( - em"""Given search preference for $pt between alternatives - | ${alt1.ref} - |and - | ${alt2.ref} - |will change. - |Current choice : ${choice(prev)} - |New choice from Scala 3.7: ${choice(cmp)}""") - prev - else - warn( - em"""Given search preference for $pt between alternatives - | ${alt1.ref} - |and - | ${alt2.ref} - |has changed. - |Previous choice : ${choice(prev)} - |New choice from Scala 3.7: ${choice(cmp)}""") - cmp + implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}") + val (loser, winner) = + prev match + case 1 => (alt1, alt2) + case -1 => (alt2, alt1) + case 0 => + cmp match + case 1 => (alt2, alt1) + case -1 => (alt1, alt2) + def choice(nth: String, c: Int) = + if c == 0 then "none - it's ambiguous" + else s"the $nth alternative" + val (change, whichChoice) = + if isLastOldVersion + then ("will change", "Current choice ") + else ("has changed", "Previous choice") + val msg = + em"""Given search preference for $pt between alternatives + | ${loser.ref} + |and + | ${winner.ref} + |$change. + |$whichChoice : ${choice("first", prev)} + |New choice from Scala 3.7: ${choice("second", cmp)}""" + val critical = alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += ((critical, msg)) + if isLastOldVersion then prev else cmp else cmp max prev // When ranking, alt1 is always the new candidate and alt2 is the // solution found previously. 
We keep the candidate if the outcome is 0 @@ -1424,8 +1422,8 @@ trait Implicits: case fail: SearchFailure => fail.reason match case ambi: AmbiguousImplicits => - if compareAlternatives(ambi.alt1, alt2) < 0 && - compareAlternatives(ambi.alt2, alt2) < 0 + if compareAlternatives(ambi.alt1, alt2, disambiguate = true) < 0 + && compareAlternatives(ambi.alt2, alt2, disambiguate = true) < 0 then alt2 else alt1 case _ => diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index 147c54270afb..f366c18e78f0 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -4,9 +4,9 @@ |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f | |Note: Given search preference for A between alternatives - | (given_A : A) - |and | (given_B : B) + |and + | (given_A : A) |will change. - |Current choice : the second alternative - |New choice from Scala 3.7: the first alternative + |Current choice : the first alternative + |New choice from Scala 3.7: the second alternative From 54344a199791ed81c52fca10be32b39633176d62 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 8 Jun 2024 21:11:49 -0700 Subject: [PATCH 434/827] CheckUnused checks type param annotations --- .../tools/dotc/transform/CheckUnused.scala | 11 +++----- tests/warn/i20536.scala | 27 +++++++++++++++++++ 2 files changed, 31 insertions(+), 7 deletions(-) create mode 100644 tests/warn/i20536.scala diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index d396d60c096a..337e41cf92de 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -135,25 +135,22 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke } override def prepareForDefDef(tree: tpd.DefDef)(using Context): Context = - unusedDataApply{ ud => + unusedDataApply: ud => if !tree.symbol.is(Private) then tree.termParamss.flatten.foreach { p => ud.addIgnoredParam(p.symbol) } - import ud.registerTrivial - tree.registerTrivial + ud.registerTrivial(tree) traverseAnnotations(tree.symbol) ud.registerDef(tree) ud.addIgnoredUsage(tree.symbol) - } override def prepareForTypeDef(tree: tpd.TypeDef)(using Context): Context = - unusedDataApply{ ud => + unusedDataApply: ud => + traverseAnnotations(tree.symbol) if !tree.symbol.is(Param) then // Ignore type parameter (as Scala 2) - traverseAnnotations(tree.symbol) ud.registerDef(tree) ud.addIgnoredUsage(tree.symbol) - } override def prepareForBind(tree: tpd.Bind)(using Context): Context = traverseAnnotations(tree.symbol) diff --git a/tests/warn/i20536.scala b/tests/warn/i20536.scala new file mode 100644 index 000000000000..8f28c367e68d --- /dev/null +++ b/tests/warn/i20536.scala @@ -0,0 +1,27 @@ +//> using options -Wunused:all +object Anns { + final class S extends annotation.StaticAnnotation +} + +object Main { + locally { + import Anns.* + class C[@S A] + C().toString + } + locally { + import Anns.S as T + class C[@T A] + C().toString + } + locally { + import scala.specialized as T + class C[@T A] + C().toString + } + locally { + import scala.specialized as T // warn + class C[A] + C().toString + } +} From 0361991d2e978dc8c34ea8086d42d1696ccc3195 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 6 Aug 2024 23:42:07 +0200 Subject: [PATCH 435/827] Fix `healAmbiguous` to `compareAlternatives` with `disambiguate = true` On the final result, compared with 
all the ambiguous candidates we are trying to recover from. We should still use `disambiguate = false` when filtering the `pending` candidates for the purpose of warnings, as in the other cases. Before the changes, it was possible for an ambiguous SearchFailure to be healed by a candidate which was considered better (possibly only) under a prioritization scheme different from the current one. As an optimization, we can avoid redoing compareAlternatives in versions which could have only used the new prioritization scheme to begin with. Also restores behaviour avoiding false positive warnings. Specifically, in cases where we could report a change in prioritization, despite having not yet done `tryImplicit` on the alternative, i.e. it was only compared as part of an early filtering See #21045 for related changes --- .../dotty/tools/dotc/typer/Implicits.scala | 49 ++++++++++--------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index a4fa989cc85c..5e9575903895 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1310,6 +1310,10 @@ trait Implicits: // message if one of the critical candidates is part of the result of the implicit search. val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() + val sv = Feature.sourceVersion + val isLastOldVersion = sv.stable == SourceVersion.`3.6` + val isWarnPriorityChangeVersion = isLastOldVersion || sv == SourceVersion.`3.7-migration` + /** Compare `alt1` with `alt2` to determine which one should be chosen. * * @return a number > 0 if `alt1` is preferred over `alt2` @@ -1333,10 +1337,7 @@ trait Implicits: else if alt1.level != alt2.level then alt1.level - alt2.level else val cmp = comp(using searchContext()) - val sv = Feature.sourceVersion - val isLastOldVersion = sv.stable == SourceVersion.`3.6` - val isMigratingVersion = sv == SourceVersion.`3.7-migration` - if isLastOldVersion || isMigratingVersion then + if isWarnPriorityChangeVersion then val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) if disambiguate && cmp != prev then implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}") @@ -1419,15 +1420,7 @@ trait Implicits: if diff < 0 then alt2 else if diff > 0 then alt1 else SearchFailure(new AmbiguousImplicits(alt1, alt2, pt, argument), span) - case fail: SearchFailure => - fail.reason match - case ambi: AmbiguousImplicits => - if compareAlternatives(ambi.alt1, alt2, disambiguate = true) < 0 - && compareAlternatives(ambi.alt2, alt2, disambiguate = true) < 0 - then alt2 - else alt1 - case _ => - alt2 + case _: SearchFailure => alt2 /** Try to find a best matching implicit term among all the candidates in `pending`. * @param pending The list of candidates that remain to be tested @@ -1451,12 +1444,27 @@ trait Implicits: pending match { case cand :: remaining => /** To recover from an ambiguous implicit failure, we need to find a pending - * candidate that is strictly better than the failed candidate(s). + * candidate that is strictly better than the failed `ambiguous` candidate(s). * If no such candidate is found, we propagate the ambiguity. 
*/ - def healAmbiguous(fail: SearchFailure, betterThanFailed: Candidate => Boolean) = - val newPending = remaining.filter(betterThanFailed) - rank(newPending, fail, Nil).recoverWith(_ => fail) + def healAmbiguous(fail: SearchFailure, ambiguous: List[RefAndLevel]) = + def betterThanAmbiguous(newCand: RefAndLevel, disambiguate: Boolean): Boolean = + ambiguous.forall(compareAlternatives(newCand, _, disambiguate) > 0) + + inline def betterByCurrentScheme(newCand: RefAndLevel): Boolean = + if isWarnPriorityChangeVersion then + // newCand may have only been kept in pending because it was better in the other priotization scheme. + // If that candidate produces a SearchSuccess, disambiguate will return it as the found SearchResult. + // We must now recheck it was really better than the ambigous candidates we are recovering from, + // under the rules of the current scheme, which are applied when disambiguate = true. + betterThanAmbiguous(newCand, disambiguate = true) + else true + + val newPending = remaining.filter(betterThanAmbiguous(_, disambiguate = false)) + rank(newPending, fail, Nil) match + case found: SearchSuccess if betterByCurrentScheme(found) => found + case _ => fail + end healAmbiguous negateIfNot(tryImplicit(cand, contextual)) match { case fail: SearchFailure => @@ -1471,8 +1479,7 @@ trait Implicits: else // The ambiguity happened in a nested search: to recover we // need a candidate better than `cand` - healAmbiguous(fail, newCand => - compareAlternatives(newCand, cand) > 0) + healAmbiguous(fail, cand :: Nil) else // keep only warnings that don't involve the failed candidate reference priorityChangeWarnings.filterInPlace: (critical, _) => @@ -1491,9 +1498,7 @@ trait Implicits: // The ambiguity happened in the current search: to recover we // need a candidate better than the two ambiguous alternatives. 
val ambi = fail.reason.asInstanceOf[AmbiguousImplicits] - healAmbiguous(fail, newCand => - compareAlternatives(newCand, ambi.alt1) > 0 && - compareAlternatives(newCand, ambi.alt2) > 0) + healAmbiguous(fail, ambi.alt1 :: ambi.alt2 :: Nil) } } case nil => From cb079b9b052335f1a92f9307aa984e212f26d511 Mon Sep 17 00:00:00 2001 From: kasiaMarek Date: Wed, 7 Aug 2024 10:09:16 +0200 Subject: [PATCH 436/827] improvement: sorting workspace members with same name by frequency --- .../tools/pc/ScalaPresentationCompiler.scala | 11 ++++- .../pc/completions/CompletionProvider.scala | 7 +++- .../tools/pc/completions/Completions.scala | 40 ++++++++++++++----- .../dotty/tools/pc/base/BasePCSuite.scala | 3 ++ .../completion/CompletionContextSuite.scala | 27 +++++++++++++ 5 files changed, 73 insertions(+), 15 deletions(-) create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionContextSuite.scala diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index ad8ac02ec811..a8ab7af0d147 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -48,7 +48,8 @@ case class ScalaPresentationCompiler( sh: Option[ScheduledExecutorService] = None, config: PresentationCompilerConfig = PresentationCompilerConfigImpl(), folderPath: Option[Path] = None, - reportsLevel: ReportLevel = ReportLevel.Info + reportsLevel: ReportLevel = ReportLevel.Info, + completionItemPriority: CompletionItemPriority = (_: String) => 0, ) extends PresentationCompiler: def this() = this("", None, Nil, Nil) @@ -63,6 +64,11 @@ case class ScalaPresentationCompiler( .map(StdReportContext(_, _ => buildTargetName, reportsLevel)) .getOrElse(EmptyReportContext) + override def withCompletionItemPriority( + priority: CompletionItemPriority + ): PresentationCompiler = + copy(completionItemPriority = priority) + override def withBuildTargetName(buildTargetName: String) = copy(buildTargetName = Some(buildTargetName)) @@ -142,7 +148,8 @@ case class ScalaPresentationCompiler( params, config, buildTargetIdentifier, - folderPath + folderPath, + completionItemPriority ).completions() } diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 9cd98de33141..4d45595dac8d 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -32,6 +32,7 @@ import org.eclipse.lsp4j.InsertTextFormat import org.eclipse.lsp4j.InsertTextMode import org.eclipse.lsp4j.Range as LspRange import org.eclipse.lsp4j.TextEdit +import scala.meta.pc.CompletionItemPriority class CompletionProvider( search: SymbolSearch, @@ -39,7 +40,8 @@ class CompletionProvider( params: OffsetParams, config: PresentationCompilerConfig, buildTargetIdentifier: String, - folderPath: Option[Path] + folderPath: Option[Path], + referenceCounter: CompletionItemPriority )(using reports: ReportContext): def completions(): CompletionList = val uri = params.uri().nn @@ -86,7 +88,8 @@ class CompletionProvider( folderPath, autoImportsGen, unit.comments, - driver.settings + driver.settings, + referenceCounter ).completions() val items = completions.zipWithIndex.map { case (item, idx) => diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index d043a2cfddbf..b0441c18df9a 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -50,7 +50,8 @@ class Completions( workspace: Option[Path], autoImports: AutoImportsGenerator, comments: List[Comment], - options: List[String] + options: List[String], + completionItemPriority: CompletionItemPriority )(using ReportContext): given context: Context = ctx @@ -909,6 +910,20 @@ class Completions( else 0 end compareLocalSymbols + private def workspaceMemberPriority(symbol: Symbol): Int = + completionItemPriority + .workspaceMemberPriority( + SemanticdbSymbols.symbolName(symbol), + ) + + def compareFrequency(o1: CompletionValue, o2: CompletionValue): Int = + (o1, o2) match + case (w1: CompletionValue.Workspace, w2: CompletionValue.Workspace) => + workspaceMemberPriority(w1.symbol) + .compareTo(workspaceMemberPriority(w2.symbol)) + case _ => 0 + end compareFrequency + def compareByRelevance(o1: CompletionValue, o2: CompletionValue): Int = Integer.compare( computeRelevancePenalty(o1, application), @@ -1018,17 +1033,20 @@ class Completions( ) if byIdentifier != 0 then byIdentifier else - val byOwner = - s1.owner.fullName.toString - .compareTo(s2.owner.fullName.toString) - if byOwner != 0 then byOwner + val byFrequency = compareFrequency(o1, o2) + if byFrequency != 0 then byFrequency else - val byParamCount = Integer.compare( - s1.paramSymss.flatten.size, - s2.paramSymss.flatten.size - ) - if byParamCount != 0 then byParamCount - else s1.detailString.compareTo(s2.detailString) + val byOwner = + s1.owner.fullName.toString + .compareTo(s2.owner.fullName.toString) + if byOwner != 0 then byOwner + else + val byParamCount = Integer.compare( + s1.paramSymss.flatten.size, + s2.paramSymss.flatten.size + ) + if byParamCount != 0 then byParamCount + else s1.detailString.compareTo(s2.detailString) end if end if end if diff --git a/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala index a1fec0af3e8f..1158e433e732 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala @@ -22,6 +22,7 @@ import dotty.tools.pc.utils._ import org.eclipse.lsp4j.MarkupContent import org.eclipse.lsp4j.jsonrpc.messages.Either as JEither import org.junit.runner.RunWith +import scala.meta.pc.CompletionItemPriority object TestResources: val scalaLibrary = BuildInfo.ideTestsDependencyClasspath.map(_.toPath).toSeq @@ -30,6 +31,7 @@ object TestResources: @RunWith(classOf[ReusableClassRunner]) abstract class BasePCSuite extends PcAssertions: + val completionItemPriority: CompletionItemPriority = (_: String) => 0 private val isDebug = ManagementFactory.getRuntimeMXBean.getInputArguments.toString.contains("-agentlib:jdwp") val tmp = Files.createTempDirectory("stable-pc-tests") @@ -53,6 +55,7 @@ abstract class BasePCSuite extends PcAssertions: .withExecutorService(executorService) .withScheduledExecutorService(executorService) .withSearch(search) + .withCompletionItemPriority(completionItemPriority) .newInstance("", myclasspath.asJava, scalacOpts.asJava) protected def config: PresentationCompilerConfig = diff --git 
a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionContextSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionContextSuite.scala new file mode 100644 index 000000000000..5314a61ab599 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionContextSuite.scala @@ -0,0 +1,27 @@ +package dotty.tools.pc.tests.completion + +import dotty.tools.pc.base.BaseCompletionSuite +import scala.meta.pc.CompletionItemPriority +import org.junit.Test + +class CompletionContextSuite extends BaseCompletionSuite: + override val completionItemPriority: CompletionItemPriority = { + case "scala/concurrent/Future." => -1 + case _ => 0 + } + // scala.concurrent.Future should be ranked higher than java.util.concurrent.Future + val futureCompletionResult: List[String] = + List("Future - scala.concurrent", "Future - java.util.concurrent") + + @Test + def `context` = + check( + """package fut + |object A { + | Futur@@ + |}""".stripMargin, + """Future - scala.concurrent + |Future - java.util.concurrent + |""".stripMargin, + filter = futureCompletionResult.contains + ) From 6c86910a155a82c9621a10883e9d75d8a2ae3890 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 7 Aug 2024 11:10:53 +0100 Subject: [PATCH 437/827] Add a -3.6-migration warning for opaque select changes --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 17 ++++++++++++++++- tests/warn/i21239.Frac.check | 8 ++++++++ tests/warn/i21239.Frac.scala | 15 +++++++++++++++ 3 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 tests/warn/i21239.Frac.check create mode 100644 tests/warn/i21239.Frac.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index c518de7dbbfe..0102aea221fc 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -767,7 +767,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val qual1 = qual.cast(liftedTp) val tree1 = cpy.Select(tree0)(qual1, selName) val rawType1 = selectionType(tree1, qual1) - tryType(tree1, qual1, rawType1) + val adapted = tryType(tree1, qual1, rawType1) + if !adapted.isEmpty && sourceVersion == `3.6-migration` then + val adaptedOld = tryExt(tree, qual) + if !adaptedOld.isEmpty then + val symOld = adaptedOld.symbol + val underlying = liftedTp match + case tp: TypeProxy => i" ${tp.translucentSuperType}" + case _ => "" + report.migrationWarning( + em"""Previously this selected the extension ${symOld}${symOld.showExtendedLocation} + |Now it selects $selName on the opaque type's underlying type$underlying + | + |You can change this back by selecting $adaptedOld + |Or by defining the extension method outside of the opaque type's scope. 
+ |""", tree0) + adapted else EmptyTree // Otherwise, try to expand a named tuple selection diff --git a/tests/warn/i21239.Frac.check b/tests/warn/i21239.Frac.check new file mode 100644 index 000000000000..3c2868479f42 --- /dev/null +++ b/tests/warn/i21239.Frac.check @@ -0,0 +1,8 @@ +-- Migration Warning: tests/warn/i21239.Frac.scala:14:8 ---------------------------------------------------------------- +14 | f + Frac.wrap(((-g.numerator).toLong << 32) | (g.unwrap & 0xFFFFFFFFL)) // warn + | ^^^ + | Previously this selected the extension method + in object Frac + | Now it selects + on the opaque type's underlying type Long + | + | You can change this back by selecting kse.maths.Frac.+(f) + | Or by defining the extension method outside of the opaque type's scope. diff --git a/tests/warn/i21239.Frac.scala b/tests/warn/i21239.Frac.scala new file mode 100644 index 000000000000..b09dbfd6ecad --- /dev/null +++ b/tests/warn/i21239.Frac.scala @@ -0,0 +1,15 @@ +package kse.maths + +import scala.language.`3.6-migration` + +opaque type Frac = Long +object Frac { + inline def wrap(f: Long): kse.maths.Frac = f + extension (f: Frac) + inline def unwrap: Long = f + inline def numerator: Int = ((f: Long) >>> 32).toInt + extension (f: kse.maths.Frac) + def +(g: Frac): kse.maths.Frac = f // eliding domain-specific addition logic + def -(g: Frac): kse.maths.Frac = + f + Frac.wrap(((-g.numerator).toLong << 32) | (g.unwrap & 0xFFFFFFFFL)) // warn +} From e98b50341ec09627a507f26ca01c3d2819e4cf5d Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 30 Jul 2024 19:16:48 +0200 Subject: [PATCH 438/827] Fix setup of CapSet arguments. These arguments tell the whole truth; they cannot possibly be decorated with another capture set. So we should not add a capture set variable. --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 4 +++- .../src/dotty/tools/dotc/cc/CapturingType.scala | 1 + compiler/src/dotty/tools/dotc/cc/Setup.scala | 11 +++++++---- tests/pos/polycap.scala | 14 ++++++++++++++ 4 files changed, 25 insertions(+), 5 deletions(-) create mode 100644 tests/pos/polycap.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 5680df476f8d..a2d2d2cf358c 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -264,7 +264,9 @@ extension (tp: Type) def boxed(using Context): Type = tp.dealias match case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => tp.annot match - case ann: CaptureAnnotation => AnnotatedType(parent, ann.boxedAnnot) + case ann: CaptureAnnotation => + assert(!parent.derivesFrom(defn.Caps_CapSet)) + AnnotatedType(parent, ann.boxedAnnot) case ann => tp case tp: RealTypeBounds => tp.derivedTypeBounds(tp.lo.boxed, tp.hi.boxed) diff --git a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala index bb79e52f1060..9f9b923b2c88 100644 --- a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala +++ b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala @@ -33,6 +33,7 @@ object CapturingType: * boxing status is the same or if A is boxed. 
*/ def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type = + assert(!boxed || !parent.derivesFrom(defn.Caps_CapSet)) if refs.isAlwaysEmpty && !refs.keepAlways then parent else parent match case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed => diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index c048edfb2102..25d50052f107 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -134,9 +134,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: private def box(tp: Type)(using Context): Type = def recur(tp: Type): Type = tp.dealiasKeepAnnotsAndOpaques match case tp @ CapturingType(parent, refs) => - if tp.isBoxed then tp else tp.boxed + if tp.isBoxed || parent.derivesFrom(defn.Caps_CapSet) then tp + else tp.boxed case tp @ AnnotatedType(parent, ann) => - if ann.symbol.isRetains + if ann.symbol.isRetains && !parent.derivesFrom(defn.Caps_CapSet) then CapturingType(parent, ann.tree.toCaptureSet, boxed = true) else tp.derivedAnnotatedType(box(parent), ann) case tp1 @ AppliedType(tycon, args) if defn.isNonRefinedFunction(tp1) => @@ -605,8 +606,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: !refs.isEmpty case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol - if sym.isClass then !sym.isPureClass - else instanceCanBeImpure(tp.superType) + if sym.isClass + then !sym.isPureClass + else !tp.derivesFrom(defn.Caps_CapSet) // CapSet arguments don't get other capture set variables added + && instanceCanBeImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => instanceCanBeImpure(tp.underlying) case tp: AndType => diff --git a/tests/pos/polycap.scala b/tests/pos/polycap.scala new file mode 100644 index 000000000000..684f46454595 --- /dev/null +++ b/tests/pos/polycap.scala @@ -0,0 +1,14 @@ +import language.experimental.captureChecking + +class Source[+T, Cap^] + +def completed[T, Cap^](result: T): Source[T, Cap] = + //val fut = new Source[T, Cap]() + val fut2 = new Source[T, Cap]() + fut2: Source[T, Cap] + + + + + + From 02b1b6d3481342d753b0f678e5383d81faba2c8d Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 4 Aug 2024 11:16:58 +0200 Subject: [PATCH 439/827] Implement caps.Contains --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 24 ++++++++++++++++++- .../dotty/tools/dotc/core/Definitions.scala | 6 +++-- library/src/scala/caps.scala | 12 +++++++++- tests/neg-custom-args/captures/i21313.check | 11 +++++++++ tests/neg-custom-args/captures/i21313.scala | 15 ++++++++++++ tests/pos-custom-args/captures/i21313.scala | 11 +++++++++ 6 files changed, 75 insertions(+), 4 deletions(-) create mode 100644 tests/neg-custom-args/captures/i21313.check create mode 100644 tests/neg-custom-args/captures/i21313.scala create mode 100644 tests/pos-custom-args/captures/i21313.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 6fa63c21edaa..5af57c31e05f 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -674,7 +674,29 @@ class CheckCaptures extends Recheck, SymTransformer: i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) - handleCall(meth, tree, () => Existential.toCap(super.recheckTypeApply(tree, pt))) + val res = handleCall(meth, tree, () => 
Existential.toCap(super.recheckTypeApply(tree, pt))) + if meth == defn.Caps_containsImpl then checkContains(tree) + res + end recheckTypeApply + + /** Faced with a tree of form `caps.contansImpl[CS, r.type]`, check that `R` is a tracked + * capability and assert that `{r} <:CS`. + */ + def checkContains(tree: TypeApply)(using Context): Unit = + tree.fun.knownType.widen match + case fntpe: PolyType => + tree.args match + case csArg :: refArg :: Nil => + val cs = csArg.knownType.captureSet + val ref = refArg.knownType + capt.println(i"check contains $cs , $ref") + ref match + case ref: CaptureRef if ref.isTracked => + checkElem(ref, cs, tree.srcPos) + case _ => + report.error(em"$refArg is not a tracked capability", refArg.srcPos) + case _ => + case _ => override def recheckBlock(tree: Block, pt: Type)(using Context): Type = inNestedLevel(super.recheckBlock(tree, pt)) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index fda12a5488ce..1d2f2b05feb4 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -993,15 +993,17 @@ class Definitions { @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") @tu lazy val Caps_Capability: TypeSymbol = CapsModule.requiredType("Capability") - @tu lazy val Caps_CapSet = requiredClass("scala.caps.CapSet") + @tu lazy val Caps_CapSet: ClassSymbol = requiredClass("scala.caps.CapSet") @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") @tu lazy val Caps_capsOf: TermSymbol = CapsModule.requiredMethod("capsOf") - @tu lazy val Caps_Exists = requiredClass("scala.caps.Exists") + @tu lazy val Caps_Exists: ClassSymbol = requiredClass("scala.caps.Exists") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") + @tu lazy val Caps_ContainsTrait: TypeSymbol = CapsModule.requiredType("Capability") + @tu lazy val Caps_containsImpl: TermSymbol = CapsModule.requiredMethod("containsImpl") @tu lazy val PureClass: Symbol = requiredClass("scala.Pure") diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 1416a7b35f83..9700ed62738d 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -1,6 +1,6 @@ package scala -import annotation.{experimental, compileTimeOnly} +import annotation.{experimental, compileTimeOnly, retainsCap} @experimental object caps: @@ -19,6 +19,16 @@ import annotation.{experimental, compileTimeOnly} /** Carrier trait for capture set type parameters */ trait CapSet extends Any + /** A type constraint expressing that the capture set `C` needs to contain + * the capability `R` + */ + sealed trait Contains[C <: CapSet @retainsCap, R <: Singleton] + + /** The only implementation of `Contains`. The constraint that `{R} <: C` is + * added separately by the capture checker. + */ + given containsImpl[C <: CapSet @retainsCap, R <: Singleton]: Contains[C, R]() + @compileTimeOnly("Should be be used only internally by the Scala compiler") def capsOf[CS]: Any = ??? 
diff --git a/tests/neg-custom-args/captures/i21313.check b/tests/neg-custom-args/captures/i21313.check new file mode 100644 index 000000000000..37b944a97d68 --- /dev/null +++ b/tests/neg-custom-args/captures/i21313.check @@ -0,0 +1,11 @@ +-- Error: tests/neg-custom-args/captures/i21313.scala:6:27 ------------------------------------------------------------- +6 |def foo(x: Async) = x.await(???) // error + | ^ + | (x : Async) is not a tracked capability +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i21313.scala:15:12 --------------------------------------- +15 | ac1.await(src2) // error + | ^^^^ + | Found: (src2 : Source[Int, caps.CapSet^{ac2}]^?) + | Required: Source[Int, caps.CapSet^{ac1}]^ + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i21313.scala b/tests/neg-custom-args/captures/i21313.scala new file mode 100644 index 000000000000..01bedb10aefd --- /dev/null +++ b/tests/neg-custom-args/captures/i21313.scala @@ -0,0 +1,15 @@ +import caps.CapSet + +trait Async: + def await[T, Cap^](using caps.Contains[Cap, this.type])(src: Source[T, Cap]^): T + +def foo(x: Async) = x.await(???) // error + +trait Source[+T, Cap^]: + final def await(using ac: Async^{Cap^}) = ac.await[T, Cap](this) // Contains[Cap, ac] is assured because {ac} <: Cap. + +def test(using ac1: Async^, ac2: Async^, x: String) = + val src1 = new Source[Int, CapSet^{ac1}] {} + ac1.await(src1) // ok + val src2 = new Source[Int, CapSet^{ac2}] {} + ac1.await(src2) // error diff --git a/tests/pos-custom-args/captures/i21313.scala b/tests/pos-custom-args/captures/i21313.scala new file mode 100644 index 000000000000..2fda6c0c0e45 --- /dev/null +++ b/tests/pos-custom-args/captures/i21313.scala @@ -0,0 +1,11 @@ +import caps.CapSet + +trait Async: + def await[T, Cap^](using caps.Contains[Cap, this.type])(src: Source[T, Cap]^): T + +trait Source[+T, Cap^]: + final def await(using ac: Async^{Cap^}) = ac.await[T, Cap](this) // Contains[Cap, ac] is assured because {ac} <: Cap. + +def test(using ac1: Async^, ac2: Async^, x: String) = + val src1 = new Source[Int, CapSet^{ac1}] {} + ac1.await(src1) From e0e3695cf1908cb03019bfa8d1656bc781b28930 Mon Sep 17 00:00:00 2001 From: kasiaMarek Date: Wed, 7 Aug 2024 12:06:27 +0200 Subject: [PATCH 440/827] test: don't suggest completions for param names in definition --- .../tools/pc/tests/completion/CompletionSuite.scala | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 437fe606932b..2282e3e5346d 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -2042,3 +2042,16 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin, includeCompletionKind = true ) + + @Test def `def-arg` = + check( + """|package a + |object W { + | val aaaaaa = 1 + |} + |object O { + | def foo(aa@@) + |} + |""".stripMargin, + "" + ) From 618bbc52bf2a3407a2e0ff2788a9df1265df035d Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 7 Aug 2024 19:25:06 +0200 Subject: [PATCH 441/827] Handle local type parameters in markFree These need to be handled like reach capabilities. 
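For reference, a condensed sketch of the pattern that is now rejected (it has the same shape as the i21347 test added below; C^ is a capture-set type parameter):

```scala
import language.experimental.captureChecking

// C is local to runOps, so the capabilities it stands for must not escape the
// method; with this change such a type parameter is approximated like a reach
// capability and the leak below is reported.
def runOps[C^](ops: List[() ->{C^} Unit]): Unit =
  ops.foreach: op =>   // error: local reach capability C leaks into runOps
    op()
```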
Fixes #21347 --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 30 ++++++++++--------- tests/neg-custom-args/captures/i21347.check | 15 ++++++++++ tests/neg-custom-args/captures/i21347.scala | 12 ++++++++ 3 files changed, 43 insertions(+), 14 deletions(-) create mode 100644 tests/neg-custom-args/captures/i21347.check create mode 100644 tests/neg-custom-args/captures/i21347.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 5af57c31e05f..dbf01915122d 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -388,23 +388,25 @@ class CheckCaptures extends Recheck, SymTransformer: // should be included. val included = cs.filter: c => c.stripReach match - case ref: TermRef => - //if c.isReach then println(i"REACH $c in ${env.owner}") - //assert(!env.owner.isAnonymousFunction) + case ref: NamedType => val refSym = ref.symbol val refOwner = refSym.owner val isVisible = isVisibleFromEnv(refOwner) - if !isVisible && c.isReach && refSym.is(Param) && refOwner == env.owner then - if refSym.hasAnnotation(defn.UnboxAnnot) then - capt.println(i"exempt: $ref in $refOwner") - else - // Reach capabilities that go out of scope have to be approximated - // by their underlying capture set, which cannot be universal. - // Reach capabilities of @unboxed parameters are exempted. - val cs = CaptureSet.ofInfo(c) - cs.disallowRootCapability: () => - report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) - checkSubset(cs, env.captured, pos, provenance(env)) + if !isVisible + && (c.isReach || ref.isType) + && refSym.is(Param) + && refOwner == env.owner + then + if refSym.hasAnnotation(defn.UnboxAnnot) then + capt.println(i"exempt: $ref in $refOwner") + else + // Reach capabilities that go out of scope have to be approximated + // by their underlying capture set, which cannot be universal. + // Reach capabilities of @unboxed parameters are exempted. 
+ val cs = CaptureSet.ofInfo(c) + cs.disallowRootCapability: () => + report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) + checkSubset(cs, env.captured, pos, provenance(env)) isVisible case ref: ThisType => isVisibleFromEnv(ref.cls) case _ => false diff --git a/tests/neg-custom-args/captures/i21347.check b/tests/neg-custom-args/captures/i21347.check new file mode 100644 index 000000000000..c680a54d3efc --- /dev/null +++ b/tests/neg-custom-args/captures/i21347.check @@ -0,0 +1,15 @@ +-- Error: tests/neg-custom-args/captures/i21347.scala:4:15 ------------------------------------------------------------- +4 | ops.foreach: op => // error + | ^ + | Local reach capability C leaks into capture scope of method runOps +5 | op() +-- Error: tests/neg-custom-args/captures/i21347.scala:8:14 ------------------------------------------------------------- +8 | () => runOps(f :: Nil) // error + | ^^^^^^^^^^^^^^^^ + | reference (caps.cap : caps.Capability) is not included in the allowed capture set {} + | of an enclosing function literal with expected type () -> Unit +-- Error: tests/neg-custom-args/captures/i21347.scala:11:15 ------------------------------------------------------------ +11 | ops.foreach: op => // error + | ^ + | Local reach capability ops* leaks into capture scope of method runOpsAlt +12 | op() diff --git a/tests/neg-custom-args/captures/i21347.scala b/tests/neg-custom-args/captures/i21347.scala new file mode 100644 index 000000000000..41887be6a78a --- /dev/null +++ b/tests/neg-custom-args/captures/i21347.scala @@ -0,0 +1,12 @@ +import language.experimental.captureChecking + +def runOps[C^](ops: List[() ->{C^} Unit]): Unit = + ops.foreach: op => // error + op() + +def boom(f: () => Unit): () -> Unit = + () => runOps(f :: Nil) // error + +def runOpsAlt(ops: List[() => Unit]): Unit = + ops.foreach: op => // error + op() \ No newline at end of file From 8584a9ab516b8e79bcaee41b02b68d22233efe0c Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 8 Aug 2024 11:00:34 +0200 Subject: [PATCH 442/827] Fixes for cleanup retains scheme - Cleanup all inferred types, not just result types of ValDefs and DefDefs. - To compensate, map overriding ValDefs and DefDefs to have declared result types. - Make type trees generated for varargs inferred. --- compiler/src/dotty/tools/dotc/ast/Trees.scala | 2 + .../tools/dotc/core/tasty/TreePickler.scala | 4 +- .../tools/dotc/transform/PostTyper.scala | 39 +++++++++---------- .../dotty/tools/dotc/typer/Applications.scala | 2 +- .../dotty/tools/dotc/typer/TypeAssigner.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 4 +- library/src/scala/caps.scala | 3 ++ .../captures/cc-poly-varargs.scala | 20 ++++++++++ 8 files changed, 50 insertions(+), 26 deletions(-) create mode 100644 tests/pos-custom-args/captures/cc-poly-varargs.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 942fd6c9b0c7..4c7ca396117e 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -778,6 +778,7 @@ object Trees { override def isEmpty: Boolean = !hasType override def toString: String = s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" + def isInferred = false } /** Tree that replaces a level 1 splices in pickled (level 0) quotes. 
@@ -800,6 +801,7 @@ object Trees { */ class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T]: type ThisTree[+T <: Untyped] <: InferredTypeTree[T] + override def isInferred = true /** ref.type */ case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index eeeaaaf72bf1..6659348fb5de 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -806,10 +806,10 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { report.error(ex.toMessage, tree.srcPos.focus) pickleErrorType() case ex: AssertionError => - println(i"error when pickling tree $tree") + println(i"error when pickling tree $tree of class ${tree.getClass}") throw ex case ex: MatchError => - println(i"error when pickling tree $tree") + println(i"error when pickling tree $tree of class ${tree.getClass}") throw ex } } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index c6ad1bb860e8..0feee53ca50f 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -303,20 +303,19 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if !tree.symbol.is(Package) then tree else errorTree(tree, em"${tree.symbol} cannot be used as a type") - // Cleans up retains annotations in inferred type trees. This is needed because - // during the typer, it is infeasible to correctly infer the capture sets in most - // cases, resulting ill-formed capture sets that could crash the pickler later on. - // See #20035. - private def cleanupRetainsAnnot(symbol: Symbol, tpt: Tree)(using Context): Tree = + /** Make result types of ValDefs and DefDefs that override some other definitions + * declared types rather than InferredTypes. This is necessary since we otherwise + * clean retains annotations from such types. But for an overriding symbol the + * retains annotations come from the explicitly declared parent types, so should + * be kept. + */ + private def makeOverrideTypeDeclared(symbol: Symbol, tpt: Tree)(using Context): Tree = tpt match case tpt: InferredTypeTree - if !symbol.allOverriddenSymbols.hasNext => - // if there are overridden symbols, the annotation comes from an explicit type of the overridden symbol - // and should be retained. 
- val tm = new CleanupRetains - val tpe1 = tm(tpt.tpe) - tpt.withType(tpe1) - case _ => tpt + if symbol.allOverriddenSymbols.hasNext => + TypeTree(tpt.tpe, inferred = false).withSpan(tpt.span).withAttachmentsFrom(tpt) + case _ => + tpt override def transform(tree: Tree)(using Context): Tree = try tree match { @@ -432,7 +431,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => registerIfHasMacroAnnotations(tree) checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) - val tree1 = cpy.ValDef(tree)(tpt = cleanupRetainsAnnot(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) + val tree1 = cpy.ValDef(tree)(tpt = makeOverrideTypeDeclared(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) if tree1.removeAttachment(desugar.UntupledParam).isDefined then checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) @@ -441,7 +440,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) annotateContextResults(tree) - val tree1 = cpy.DefDef(tree)(tpt = cleanupRetainsAnnot(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) + val tree1 = cpy.DefDef(tree)(tpt = makeOverrideTypeDeclared(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) processValOrDefDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef])) case tree: TypeDef => registerIfHasMacroAnnotations(tree) @@ -524,12 +523,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => report.error(em"type ${alias.tpe} outside bounds $bounds", tree.srcPos) super.transform(tree) case tree: TypeTree => - tree.withType( - tree.tpe match { - case AnnotatedType(tpe, annot) => AnnotatedType(tpe, transformAnnot(annot)) - case tpe => tpe - } - ) + val tpe = if tree.isInferred then CleanupRetains()(tree.tpe) else tree.tpe + tree.withType: + tpe match + case AnnotatedType(parent, annot) => + AnnotatedType(parent, transformAnnot(annot)) // TODO: Also map annotations embedded in type? 
+ case _ => tpe case Typed(Ident(nme.WILDCARD), _) => withMode(Mode.Pattern)(super.transform(tree)) // The added mode signals that bounds in a pattern need not diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 42765cd6c0bf..2ad50ac272f7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -883,7 +883,7 @@ trait Applications extends Compatibility { def makeVarArg(n: Int, elemFormal: Type): Unit = { val args = typedArgBuf.takeRight(n).toList typedArgBuf.dropRightInPlace(n) - val elemtpt = TypeTree(elemFormal) + val elemtpt = TypeTree(elemFormal, inferred = true) typedArgBuf += seqToRepeated(SeqLiteral(args, elemtpt)) } diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 2be81a4222cd..fd16f0de5f3a 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -51,7 +51,7 @@ trait TypeAssigner { else sym.info private def toRepeated(tree: Tree, from: ClassSymbol)(using Context): Tree = - Typed(tree, TypeTree(tree.tpe.widen.translateToRepeated(from))) + Typed(tree, TypeTree(tree.tpe.widen.translateToRepeated(from), inferred = true)) def seqToRepeated(tree: Tree)(using Context): Tree = toRepeated(tree, defn.SeqClass) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 947d1fcbfa73..ea828268997b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1457,7 +1457,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is redundant case _ => val target = pt.simplified - val targetTpt = InferredTypeTree().withType(target) + val targetTpt = TypeTree(target, inferred = true) if tree.tpe <:< target then Typed(tree, targetTpt) else // This case should not normally arise. It currently does arise in test cases @@ -2092,7 +2092,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // TODO: move the check above to patternMatcher phase val uncheckedTpe = AnnotatedType(sel.tpe.widen, Annotation(defn.UncheckedAnnot, tree.selector.span)) tpd.cpy.Match(result)( - selector = tpd.Typed(sel, new tpd.InferredTypeTree().withType(uncheckedTpe)), + selector = tpd.Typed(sel, tpd.TypeTree(uncheckedTpe, inferred = true)), cases = result.cases ) case _ => diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 9700ed62738d..9911ef920116 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -29,6 +29,9 @@ import annotation.{experimental, compileTimeOnly, retainsCap} */ given containsImpl[C <: CapSet @retainsCap, R <: Singleton]: Contains[C, R]() + /** A wrapper indicating a type variable in a capture argument list of a + * @retains annotation. E.g. `^{x, Y^}` is represented as `@retains(x, capsOf[Y])`. + */ @compileTimeOnly("Should be be used only internally by the Scala compiler") def capsOf[CS]: Any = ??? 
diff --git a/tests/pos-custom-args/captures/cc-poly-varargs.scala b/tests/pos-custom-args/captures/cc-poly-varargs.scala new file mode 100644 index 000000000000..ac76c47d6dd5 --- /dev/null +++ b/tests/pos-custom-args/captures/cc-poly-varargs.scala @@ -0,0 +1,20 @@ +trait Cancellable + +abstract class Source[+T, Cap^] + +extension[T, Cap^](src: Source[T, Cap]^) + def transformValuesWith[U](f: (T -> U)^{Cap^}): Source[U, Cap]^{src, f} = ??? + +def race[T, Cap^](sources: Source[T, Cap]^{Cap^}*): Source[T, Cap]^{Cap^} = ??? + +def either[T1, T2, Cap^](src1: Source[T1, Cap]^{Cap^}, src2: Source[T2, Cap]^{Cap^}): Source[Either[T1, T2], Cap]^{Cap^} = + val left = src1.transformValuesWith(Left(_)) + val right = src2.transformValuesWith(Right(_)) + race(left, right) + + + + + + + From a8cc13318f74669a168580adbbf6aa204ba74bc3 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 8 Aug 2024 11:10:41 +0200 Subject: [PATCH 443/827] Drop redundant exact distinction in Setup This is now redundant since result types of overriding ValDefs and DefDefs are now mapped to TypeTrees in PostTyper. --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 10 ++++------ .../src/dotty/tools/dotc/printing/RefinedPrinter.scala | 4 ++-- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 25d50052f107..91671d7d7776 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -330,10 +330,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: end transformExplicitType /** Transform type of type tree, and remember the transformed type as the type the tree */ - private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = + private def transformTT(tree: TypeTree, boxed: Boolean)(using Context): Unit = if !tree.hasRememberedType then val transformed = - if tree.isInstanceOf[InferredTypeTree] && !exact + if tree.isInferred then transformInferredType(tree.tpe) else transformExplicitType(tree.tpe, tptToCheck = Some(tree)) tree.rememberType(if boxed then box(transformed) else transformed) @@ -398,8 +398,6 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: && !ccConfig.useSealed && !sym.hasAnnotation(defn.UncheckedCapturesAnnot), // types of mutable variables are boxed in pre 3.3 code - exact = sym.allOverriddenSymbols.hasNext, - // types of symbols that override a parent don't get a capture set TODO drop ) catch case ex: IllegalCaptureRef => capt.println(i"fail while transforming result type $tpt of $sym") @@ -442,7 +440,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: traverse(fn) if !defn.isTypeTestOrCast(fn.symbol) then for case arg: TypeTree <- args do - transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + transformTT(arg, boxed = true) // type arguments in type applications are boxed case tree: TypeDef if tree.symbol.isClass => val sym = tree.symbol @@ -465,7 +463,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def postProcess(tree: Tree)(using Context): Unit = tree match case tree: TypeTree => - transformTT(tree, boxed = false, exact = false) + transformTT(tree, boxed = false) case tree: ValOrDefDef => val sym = tree.symbol diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 7f83a16d39a9..ea729e9549d5 100644 --- 
a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -560,9 +560,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else keywordText("{{") ~ keywordText("/* inlined from ") ~ toText(call) ~ keywordText(" */") ~ bodyText ~ keywordText("}}") case tpt: untpd.DerivedTypeTree => "" - case TypeTree() => + case tree: TypeTree => typeText(toText(tree.typeOpt)) - ~ Str("(inf)").provided(tree.isInstanceOf[InferredTypeTree] && printDebug) + ~ Str("(inf)").provided(tree.isInferred && printDebug) case SingletonTypeTree(ref) => toTextLocal(ref) ~ "." ~ keywordStr("type") case RefinedTypeTree(tpt, refines) => From a28bc0f394fe1f9ca5426f87aba2b007c74a026a Mon Sep 17 00:00:00 2001 From: kasiaMarek Date: Wed, 7 Aug 2024 13:39:18 +0200 Subject: [PATCH 444/827] fix: disambiguate workspace completions for vals --- .../pc/completions/CompletionValue.scala | 17 ++-- .../tools/pc/completions/Completions.scala | 2 +- .../pc/tests/completion/CompletionSuite.scala | 81 +++++++++++++++++++ .../completion/CompletionWorkspaceSuite.scala | 4 +- 4 files changed, 96 insertions(+), 8 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala index 98cceae149d3..90b285bffb3a 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala @@ -101,13 +101,13 @@ object CompletionValue: )(using Context): String = if symbol.isConstructor then s"${snippetAffix.toPrefix}${label}${description(printer)}" else if symbol.is(Method) then s"${label}${description(printer)}" - else if symbol.is(Mutable) then s"$label: ${description(printer)}" + else if symbol.is(Mutable) then s"$label${description(printer)}" else if symbol.is(Package) || symbol.is(Module) || symbol.isClass then s"${labelWithSuffix(printer)}${description(printer)}" else if symbol.isType then labelWithSuffix(printer) else if symbol.isTerm && symbol.info.typeSymbol.is(Module) then s"${label}${description(printer)}" - else s"$label: ${description(printer)}" + else s"$label${description(printer)}" protected def labelWithSuffix(printer: ShortenedTypePrinter)(using Context): String = if snippetAffix.addLabelSnippet @@ -119,7 +119,10 @@ object CompletionValue: else label override def description(printer: ShortenedTypePrinter)(using Context): String = - printer.completionSymbol(denotation) + def info = denotation.info.widenTermRefExpr + val isVal = !(symbol.is(Module) || symbol.is(Method) || symbol.isType || info.typeSymbol.is(Module)) + val prefix = if isVal then ": " else "" + prefix ++ printer.completionSymbol(denotation) end Symbolic @@ -178,9 +181,10 @@ object CompletionValue: override def completionItemDataKind: Integer = CompletionSource.WorkspaceKind.ordinal override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = + def isMethodOrValue = !(symbol.isType || symbol.is(Module)) if symbol.isConstructor || symbol.name == nme.apply then s"${snippetAffix.toPrefix}${label}${description(printer)} - ${printer.fullNameString(importSymbol.effectiveOwner)}" - else if symbol.is(Method) then + else if isMethodOrValue then s"${labelWithSuffix(printer)} - ${printer.fullNameString(symbol.effectiveOwner)}" else if symbol.is(Package) || symbol.is(Module) || symbol.isClass then s"${labelWithSuffix(printer)} 
-${description(printer)}" @@ -199,7 +203,7 @@ object CompletionValue: CompletionItemKind.Method override def completionItemDataKind: Integer = CompletionSource.ImplicitClassKind.ordinal override def description(printer: ShortenedTypePrinter)(using Context): String = - s"${printer.completionSymbol(denotation)} (implicit)" + s"${super.description(printer)} (implicit)" /** * CompletionValue for extension methods via SymbolSearch @@ -339,6 +343,9 @@ object CompletionValue: override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = label + + override def description(printer: ShortenedTypePrinter)(using Context): String = + printer.completionSymbol(denotation) end CaseKeyword case class Document(label: String, doc: String, description: String) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index b0441c18df9a..3bebaa76a309 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -914,7 +914,7 @@ class Completions( completionItemPriority .workspaceMemberPriority( SemanticdbSymbols.symbolName(symbol), - ) + ).nn def compareFrequency(o1: CompletionValue, o2: CompletionValue): Int = (o1, o2) match diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 2282e3e5346d..47e4cabb76f4 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -2055,3 +2055,84 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin, "" ) + + @Test def conflict = + check( + """|package a + |object O { + | val foofoo: Int = 123 + | def method = { + | val foofoo: String = "abc" + | foofoo@@ + | } + |} + |""".stripMargin, + """|foofoo: String + |foofoo - a.O: Int + |""".stripMargin + ) + + @Test def `conflict-2` = + check( + """|package a + |object A { + | val foo = 1 + |} + |object B { + | val foo = 1 + |} + |object O { + | val x: Int = foo@@ + |} + |""".stripMargin, + """|foo - a.A: Int + |foo - a.B: Int + |""".stripMargin + ) + + @Test def `conflict-3` = + check( + """|package a + |object A { + | var foo = 1 + |} + |object B { + | var foo = 1 + |} + |object O { + | val x: Int = foo@@ + |} + |""".stripMargin, + """|foo - a.A: Int + |foo - a.B: Int + |""".stripMargin + ) + + @Test def `conflict-edit-2` = + checkEdit( + """|package a + |object A { + | val foo = 1 + |} + |object B { + | val foo = 1 + |} + |object O { + | val x: Int = foo@@ + |} + |""".stripMargin, + """|package a + | + |import a.A.foo + |object A { + | val foo = 1 + |} + |object B { + | val foo = 1 + |} + |object O { + | val x: Int = foo + |} + |""".stripMargin, + assertSingleItem = false + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala index e5c81e3c044e..488ae0923ea4 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala @@ -767,7 +767,7 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |package b: | def main: Unit = incre@@ 
|""".stripMargin, - """|increment3: Int + """|increment3 - d: Int |increment - a: Int |increment2 - a.c: Int |""".stripMargin @@ -810,7 +810,7 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |} |""".stripMargin, """|fooBar: String - |fooBar: List[Int] + |fooBar - test.A: List[Int] |""".stripMargin, ) From 58f3407acdbe7ef42f331b5d7b34fca7a938fcb7 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 9 Aug 2024 09:44:20 +0200 Subject: [PATCH 445/827] Revert "Compensate loss of transitivity" This reverts commit 7c4bd676 See https://github.com/scala/scala3/pull/21344#issuecomment-2275111405 Fixes #21320 --- .../dotty/tools/dotc/typer/Implicits.scala | 18 +---- tests/pos/{i21320.scala => i21320a.scala} | 0 tests/pos/i21320b.scala | 73 +++++++++++++++++++ 3 files changed, 76 insertions(+), 15 deletions(-) rename tests/pos/{i21320.scala => i21320a.scala} (100%) create mode 100644 tests/pos/i21320b.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5e9575903895..51e468153d1f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1385,6 +1385,8 @@ trait Implicits: def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2, disambiguate = true) + assert(diff <= 0 || isWarnPriorityChangeVersion) + // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens else if diff == 0 && alt2.isExtension then @@ -1636,21 +1638,7 @@ trait Implicits: validateOrdering(ord) throw ex - val sorted = sort(eligible) - val res = sorted match - case first :: rest => - val firstIsImplicit = first.ref.symbol.is(Implicit) - if rest.exists(_.ref.symbol.is(Implicit) != firstIsImplicit) then - // Mixture of implicits and givens - // Rank implicits first, then, if there is a given that it better than the best implicit(s) - // switch over to givens. - val (sortedImplicits, sortedGivens) = sorted.partition(_.ref.symbol.is(Implicit)) - val implicitResult = rank(sortedImplicits, NoMatchingImplicitsFailure, Nil) - rank(sortedGivens, implicitResult, Nil) - else - rank(sorted, NoMatchingImplicitsFailure, Nil) - case _ => - NoMatchingImplicitsFailure + val res = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) // Issue all priority change warnings that can affect the result val shownWarnings = priorityChangeWarnings.toList.collect: diff --git a/tests/pos/i21320.scala b/tests/pos/i21320a.scala similarity index 100% rename from tests/pos/i21320.scala rename to tests/pos/i21320a.scala diff --git a/tests/pos/i21320b.scala b/tests/pos/i21320b.scala new file mode 100644 index 000000000000..6711d3d9d952 --- /dev/null +++ b/tests/pos/i21320b.scala @@ -0,0 +1,73 @@ +import scala.deriving.* +import scala.compiletime.* + +trait ConfigMonoid[T]: + def zero: T + def orElse(main: T, defaults: T): T + +object ConfigMonoid: + given option[T]: ConfigMonoid[Option[T]] = ??? 
+ + inline def zeroTuple[C <: Tuple]: Tuple = + inline erasedValue[C] match + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => + summonInline[ConfigMonoid[t]].zero *: zeroTuple[ts] + + inline def valueTuple[C <: Tuple, T](index: Int, main: T, defaults: T): Tuple = + inline erasedValue[C] match + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => + def get(v: T) = v.asInstanceOf[Product].productElement(index).asInstanceOf[t] + summonInline[ConfigMonoid[t]].orElse(get(main), get(defaults)) *: valueTuple[ts, T]( + index + 1, + main, + defaults + ) + + inline given derive[T](using m: Mirror.ProductOf[T]): ConfigMonoid[T] = + new ConfigMonoid[T]: + def zero: T = m.fromProduct(zeroTuple[m.MirroredElemTypes]) + def orElse(main: T, defaults: T): T = m.fromProduct(valueTuple[m.MirroredElemTypes, T](0, main, defaults)) + + + +final case class PublishOptions( + v1: Option[String] = None, + v2: Option[String] = None, + v3: Option[String] = None, + v4: Option[String] = None, + v5: Option[String] = None, + v6: Option[String] = None, + v7: Option[String] = None, + v8: Option[String] = None, + v9: Option[String] = None, + ci: PublishContextualOptions = PublishContextualOptions(), +) +object PublishOptions: + implicit val monoid: ConfigMonoid[PublishOptions] = ConfigMonoid.derive + +final case class PublishContextualOptions( + v1: Option[String] = None, + v2: Option[String] = None, + v3: Option[String] = None, + v4: Option[String] = None, + v5: Option[String] = None, + v6: Option[String] = None, + v7: Option[String] = None, + v8: Option[String] = None, + v9: Option[String] = None, + v10: Option[String] = None, + v11: Option[String] = None, + v12: Option[String] = None, + v13: Option[String] = None, + v14: Option[String] = None, + v15: Option[String] = None, + v16: Option[String] = None, + v17: Option[String] = None, + v18: Option[String] = None, + v19: Option[String] = None, + v20: Option[String] = None +) +object PublishContextualOptions: + implicit val monoid: ConfigMonoid[PublishContextualOptions] = ConfigMonoid.derive // was crash From e5416835a677f03b4af59acfa6d676b25ca607cb Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 8 Aug 2024 18:17:11 +0100 Subject: [PATCH 446/827] Extract parts of defDefSig (& completeConstructor) --- .../src/dotty/tools/dotc/typer/Namer.scala | 120 ++++++++++-------- 1 file changed, 64 insertions(+), 56 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 3844380f8952..e273db9ed8d5 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1526,13 +1526,7 @@ class Namer { typer: Typer => index(constr) index(rest)(using localCtx) - symbolOfTree(constr).info.stripPoly match // Completes constr symbol as a side effect - case mt: MethodType if cls.is(Case) && mt.isParamDependent => - // See issue #8073 for background - report.error( - em"""Implementation restriction: case classes cannot have dependencies between parameters""", - cls.srcPos) - case _ => + checkCaseClassParamDependencies(symbolOfTree(constr).info, cls) // Completes constr symbol as a side effect tempInfo = denot.asClass.classInfo.integrateOpaqueMembers.asInstanceOf[TempClassInfo] denot.info = savedInfo @@ -1860,31 +1854,6 @@ class Namer { typer: Typer => // Beware: ddef.name need not match sym.name if sym was freshened! 
val isConstructor = sym.name == nme.CONSTRUCTOR - // A map from context-bounded type parameters to associated evidence parameter names - val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() - if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then - for params <- ddef.paramss; case tdef: TypeDef <- params do - for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do - witnessNamesOfParam(tdef) = ws - - /** Is each name in `wnames` defined somewhere in the longest prefix of all `params` - * that have been typed ahead (i.e. that carry the TypedAhead attachment)? - */ - def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = - (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty - - /** Enter and typecheck parameter list. - * Once all witness parameters for a context bound are seen, create a - * context bound companion for it. - */ - def completeParams(params: List[MemberDef])(using Context): Unit = - index(params) - for param <- params do - typedAheadExpr(param) - for (tdef, wnames) <- witnessNamesOfParam do - if wnames.contains(param.name) && allParamsSeen(wnames, params) then - addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) - // The following 3 lines replace what was previously just completeParams(tparams). // But that can cause bad bounds being computed, as witnessed by // tests/pos/paramcycle.scala. The problematic sequence is this: @@ -1914,33 +1883,10 @@ class Namer { typer: Typer => for tparam <- ddef.leadingTypeParams yield typedAheadExpr(tparam).symbol if completedTypeParams.forall(_.isType) then completer.setCompletedTypeParams(completedTypeParams.asInstanceOf[List[TypeSymbol]]) - ddef.trailingParamss.foreach(completeParams) + completeTrailingParamss(ddef, sym) val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) - /** Under x.modularity, we add `tracked` to context bound witnesses - * that have abstract type members - */ - def needsTracked(sym: Symbol, param: ValDef)(using Context) = - !sym.is(Tracked) - && param.hasAttachment(ContextBoundParam) - && sym.info.memberNames(abstractTypeNameFilter).nonEmpty - - /** Under x.modularity, set every context bound evidence parameter of a class to be tracked, - * provided it has a type that has an abstract type member. Reset private and local flags - * so that the parameter becomes a `val`. 
- */ - def setTracked(param: ValDef): Unit = - val sym = symbolOfTree(param) - sym.maybeOwner.maybeOwner.infoOrCompleter match - case info: TempClassInfo if needsTracked(sym, param) => - typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") - for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do - acc.resetFlag(PrivateLocal) - acc.setFlag(Tracked) - sym.setFlag(Tracked) - case _ => - def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) @@ -1966,6 +1912,68 @@ class Namer { typer: Typer => valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) end defDefSig + def completeTrailingParamss(ddef: DefDef, sym: Symbol)(using Context): Unit = + // A map from context-bounded type parameters to associated evidence parameter names + val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() + if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then + for params <- ddef.paramss; case tdef: TypeDef <- params do + for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do + witnessNamesOfParam(tdef) = ws + + /** Is each name in `wnames` defined somewhere in the longest prefix of all `params` + * that have been typed ahead (i.e. that carry the TypedAhead attachment)? + */ + def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = + (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty + + /** Enter and typecheck parameter list. + * Once all witness parameters for a context bound are seen, create a + * context bound companion for it. + */ + def completeParams(params: List[MemberDef])(using Context): Unit = + index(params) + for param <- params do + typedAheadExpr(param) + for (tdef, wnames) <- witnessNamesOfParam do + if wnames.contains(param.name) && allParamsSeen(wnames, params) then + addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) + + ddef.trailingParamss.foreach(completeParams) + end completeTrailingParamss + + /** Checks an implementation restriction on case classes. */ + def checkCaseClassParamDependencies(mt: Type, cls: Symbol)(using Context): Unit = + mt.stripPoly match + case mt: MethodType if cls.is(Case) && mt.isParamDependent => + // See issue #8073 for background + report.error( + em"""Implementation restriction: case classes cannot have dependencies between parameters""", + cls.srcPos) + case _ => + + /** Under x.modularity, we add `tracked` to context bound witnesses + * that have abstract type members + */ + def needsTracked(sym: Symbol, param: ValDef)(using Context) = + !sym.is(Tracked) + && param.hasAttachment(ContextBoundParam) + && sym.info.memberNames(abstractTypeNameFilter).nonEmpty + + /** Under x.modularity, set every context bound evidence parameter of a class to be tracked, + * provided it has a type that has an abstract type member. Reset private and local flags + * so that the parameter becomes a `val`. 
+ */ + def setTracked(param: ValDef)(using Context): Unit = + val sym = symbolOfTree(param) + sym.maybeOwner.maybeOwner.infoOrCompleter match + case info: TempClassInfo if needsTracked(sym, param) => + typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") + for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do + acc.resetFlag(PrivateLocal) + acc.setFlag(Tracked) + sym.setFlag(Tracked) + case _ => + def inferredResultType( mdef: ValOrDefDef, sym: Symbol, From 987235e781b1e832856f878f7dee0f60f6bcd581 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 10 Jun 2024 22:57:09 +0100 Subject: [PATCH 447/827] Avoid forcing ctors & parents which caused cycles --- .../src/dotty/tools/dotc/ast/Desugar.scala | 19 ++++- .../src/dotty/tools/dotc/core/NamerOps.scala | 2 +- .../tools/dotc/core/TypeApplications.scala | 4 +- .../src/dotty/tools/dotc/core/Types.scala | 4 +- .../src/dotty/tools/dotc/typer/Namer.scala | 78 +++++++++++++------ .../src/dotty/tools/dotc/typer/Typer.scala | 6 -- tests/neg/i15177.FakeEnum.min.alt1.scala | 7 ++ tests/neg/i15177.constr-dep.scala | 9 +++ tests/neg/i15177.ub.scala | 13 ++++ tests/pos/i15177.FakeEnum.min.alt2.scala | 7 ++ tests/pos/i15177.FakeEnum.min.alt3.scala | 7 ++ tests/pos/i15177.FakeEnum.min.scala | 4 + tests/pos/i15177.FakeEnum.scala | 21 +++++ tests/pos/i15177.app.scala | 6 ++ tests/pos/i15177.constr-dep.scala | 6 ++ tests/pos/i15177.hk.scala | 5 ++ tests/pos/i15177.hk2.scala | 6 ++ tests/pos/i15177.hylolib.scala | 11 +++ tests/pos/i15177.orig.scala | 7 ++ tests/pos/i15177.scala | 3 + tests/pos/i15177.without.scala | 5 ++ 21 files changed, 198 insertions(+), 32 deletions(-) create mode 100644 tests/neg/i15177.FakeEnum.min.alt1.scala create mode 100644 tests/neg/i15177.constr-dep.scala create mode 100644 tests/neg/i15177.ub.scala create mode 100644 tests/pos/i15177.FakeEnum.min.alt2.scala create mode 100644 tests/pos/i15177.FakeEnum.min.alt3.scala create mode 100644 tests/pos/i15177.FakeEnum.min.scala create mode 100644 tests/pos/i15177.FakeEnum.scala create mode 100644 tests/pos/i15177.app.scala create mode 100644 tests/pos/i15177.constr-dep.scala create mode 100644 tests/pos/i15177.hk.scala create mode 100644 tests/pos/i15177.hk2.scala create mode 100644 tests/pos/i15177.hylolib.scala create mode 100644 tests/pos/i15177.orig.scala create mode 100644 tests/pos/i15177.scala create mode 100644 tests/pos/i15177.without.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 026b8a409d3d..659701b02371 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -93,7 +93,24 @@ object desugar { override def ensureCompletions(using Context): Unit = { def completeConstructor(sym: Symbol) = sym.infoOrCompleter match { - case completer: Namer#ClassCompleter => + case completer: Namer#ClassCompleter if !sym.isCompleting => + // An example, derived from tests/run/t6385.scala + // + // class Test(): + // def t1: Foo = Foo(1) + // final case class Foo(value: Int) + // + // Here's the sequence of events: + // * The symbol for Foo.apply is forced to complete + // * The symbol for the `value` parameter of the apply method is forced to complete + // * Completing that value parameter requires typing its type, which is a DerivedTypeTrees, + // which only types if it has an OriginalSymbol. 
+ // * So if the case class hasn't been completed, we need (at least) its constructor to be completed + // + // Test tests/neg/i9294.scala is an example of why isCompleting is necessary. + // Annotations are added while completing the constructor, + // so the back reference to foo reaches here which re-initiates the constructor completion. + // So we just skip, as completion is already being triggered. completer.completeConstructor(sym) case _ => } diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 07cb9292baa4..363a01665564 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -272,7 +272,7 @@ object NamerOps: * where * * is the CBCompanion type created in Definitions - * withnessRefK is a refence to the K'th witness. + * withnessRefK is a reference to the K'th witness. * * The companion has the same access flags as the original type. */ diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 54636ff4ad58..136384413810 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -267,7 +267,9 @@ class TypeApplications(val self: Type) extends AnyVal { */ def hkResult(using Context): Type = self.dealias match { case self: TypeRef => - if (self.symbol == defn.AnyKindClass) self else self.info.hkResult + if self.symbol == defn.AnyKindClass then self + else if self.symbol.isClass then NoType // avoid forcing symbol if it's a class, not an alias to a HK type lambda + else self.info.hkResult case self: AppliedType => if (self.tycon.typeSymbol.isClass) NoType else self.superType.hkResult case self: HKTypeLambda => self.resultType diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ca4f8687dc3c..8181b3c83acf 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -199,7 +199,9 @@ object Types extends TypeUtils { */ def isRef(sym: Symbol, skipRefined: Boolean = true)(using Context): Boolean = this match { case this1: TypeRef => - this1.info match { // see comment in Namer#TypeDefCompleter#typeSig + // avoid forcing symbol if it's a class, not a type alias (see i15177.FakeEnum.scala) + if this1.symbol.isClass then this1.symbol eq sym + else this1.info match { // see comment in Namer#TypeDefCompleter#typeSig case TypeAlias(tp) => tp.isRef(sym, skipRefined) case _ => this1.symbol eq sym } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index e273db9ed8d5..0a1a70b98bbb 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -822,8 +822,11 @@ class Namer { typer: Typer => if (sym.is(Module)) moduleValSig(sym) else valOrDefDefSig(original, sym, Nil, identity)(using localContext(sym).setNewScope) case original: DefDef => - val typer1 = ctx.typer.newLikeThis(ctx.nestingLevel + 1) - nestedTyper(sym) = typer1 + // For the primary constructor DefDef, it is: + // * indexed as a part of completing the class, with indexConstructor; and + // * typed ahead when completing the constructor + // So we need to make sure to reuse the same local/nested typer. 
+ val typer1 = nestedTyper.getOrElseUpdate(sym, ctx.typer.newLikeThis(ctx.nestingLevel + 1)) typer1.defDefSig(original, sym, this)(using localContext(sym).setTyper(typer1)) case imp: Import => try @@ -833,6 +836,12 @@ class Namer { typer: Typer => typr.println(s"error while completing ${imp.expr}") throw ex + /** Context setup for indexing the constructor. */ + def indexConstructor(constr: DefDef, sym: Symbol): Unit = + val typer1 = ctx.typer.newLikeThis(ctx.nestingLevel + 1) + nestedTyper(sym) = typer1 + typer1.indexConstructor(constr, sym)(using localContext(sym).setTyper(typer1)) + final override def complete(denot: SymDenotation)(using Context): Unit = { if (Config.showCompletions && ctx.typerState != creationContext.typerState) { def levels(c: Context): Int = @@ -993,7 +1002,7 @@ class Namer { typer: Typer => /** If completion of the owner of the to be completed symbol has not yet started, * complete the owner first and check again. This prevents cyclic references - * where we need to copmplete a type parameter that has an owner that is not + * where we need to complete a type parameter that has an owner that is not * yet completed. Test case is pos/i10967.scala. */ override def needsCompletion(symd: SymDenotation)(using Context): Boolean = @@ -1001,7 +1010,11 @@ class Namer { typer: Typer => !owner.exists || owner.is(Touched) || { - owner.ensureCompleted() + // Only complete the owner if it's a type (eg. the class that owns a type parameter) + // This avoids completing primary constructor methods while completing the type of one of its type parameters + // See i15177.scala. + if owner.isType then + owner.ensureCompleted() !symd.isCompleted } @@ -1526,7 +1539,10 @@ class Namer { typer: Typer => index(constr) index(rest)(using localCtx) - checkCaseClassParamDependencies(symbolOfTree(constr).info, cls) // Completes constr symbol as a side effect + val constrSym = symbolOfTree(constr) + constrSym.infoOrCompleter match + case completer: Completer => completer.indexConstructor(constr, constrSym) + case _ => tempInfo = denot.asClass.classInfo.integrateOpaqueMembers.asInstanceOf[TempClassInfo] denot.info = savedInfo @@ -1756,6 +1772,17 @@ class Namer { typer: Typer => val sym = tree.symbol if sym.isConstructor then sym.owner else sym + /** Index the primary constructor of a class, as a part of completing that class. + * This allows the rest of the constructor completion to be deferred, + * which avoids non-cyclic classes failing, e.g. pos/i15177. + */ + def indexConstructor(constr: DefDef, sym: Symbol)(using Context): Unit = + index(constr.leadingTypeParams) + sym.owner.typeParams.foreach(_.ensureCompleted()) + completeTrailingParamss(constr, sym, indexingCtor = true) + if Feature.enabled(modularity) then + constr.termParamss.foreach(_.foreach(setTracked)) + /** The signature of a module valdef. * This will compute the corresponding module class TypeRef immediately * without going through the defined type of the ValDef. This is necessary @@ -1877,13 +1904,13 @@ class Namer { typer: Typer => // 3. Info of CP is computed (to be copied to DP). // 4. CP is completed. // 5. Info of CP is copied to DP and DP is completed. 
- index(ddef.leadingTypeParams) - if (isConstructor) sym.owner.typeParams.foreach(_.ensureCompleted()) + if !sym.isPrimaryConstructor then + index(ddef.leadingTypeParams) val completedTypeParams = for tparam <- ddef.leadingTypeParams yield typedAheadExpr(tparam).symbol if completedTypeParams.forall(_.isType) then completer.setCompletedTypeParams(completedTypeParams.asInstanceOf[List[TypeSymbol]]) - completeTrailingParamss(ddef, sym) + completeTrailingParamss(ddef, sym, indexingCtor = false) val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) @@ -1895,11 +1922,11 @@ class Namer { typer: Typer => wrapMethType(addParamRefinements(restpe, paramSymss)) if isConstructor then - if sym.isPrimaryConstructor && Feature.enabled(modularity) then - ddef.termParamss.foreach(_.foreach(setTracked)) // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) - wrapMethType(effectiveResultType(sym, paramSymss)) + val mt = wrapMethType(effectiveResultType(sym, paramSymss)) + if sym.isPrimaryConstructor then checkCaseClassParamDependencies(mt, sym.owner) + mt else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then // set every context bound evidence parameter of a given companion method // to be tracked, provided it has a type that has an abstract type member. @@ -1912,30 +1939,37 @@ class Namer { typer: Typer => valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) end defDefSig - def completeTrailingParamss(ddef: DefDef, sym: Symbol)(using Context): Unit = + /** Complete the trailing parameters of a DefDef, + * as a part of indexing the primary constructor or + * as a part of completing a DefDef, including the primary constructor. + */ + def completeTrailingParamss(ddef: DefDef, sym: Symbol, indexingCtor: Boolean)(using Context): Unit = // A map from context-bounded type parameters to associated evidence parameter names val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() - if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then + if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) && (indexingCtor || !sym.isPrimaryConstructor) then for params <- ddef.paramss; case tdef: TypeDef <- params do for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do witnessNamesOfParam(tdef) = ws - /** Is each name in `wnames` defined somewhere in the longest prefix of all `params` - * that have been typed ahead (i.e. that carry the TypedAhead attachment)? - */ - def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = - (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty + /** Is each name in `wnames` defined somewhere in the previous parameters? */ + def allParamsSeen(wnames: List[TermName], prevParams: Set[Name]) = + (wnames.toSet[Name] -- prevParams).isEmpty /** Enter and typecheck parameter list. * Once all witness parameters for a context bound are seen, create a * context bound companion for it. 
*/ def completeParams(params: List[MemberDef])(using Context): Unit = - index(params) + if indexingCtor || !sym.isPrimaryConstructor then + index(params) + var prevParams = Set.empty[Name] for param <- params do - typedAheadExpr(param) + if !indexingCtor then + typedAheadExpr(param) + + prevParams += param.name for (tdef, wnames) <- witnessNamesOfParam do - if wnames.contains(param.name) && allParamsSeen(wnames, params) then + if wnames.contains(param.name) && allParamsSeen(wnames, prevParams) then addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) ddef.trailingParamss.foreach(completeParams) @@ -1966,7 +2000,7 @@ class Namer { typer: Typer => def setTracked(param: ValDef)(using Context): Unit = val sym = symbolOfTree(param) sym.maybeOwner.maybeOwner.infoOrCompleter match - case info: TempClassInfo if needsTracked(sym, param) => + case info: ClassInfo if needsTracked(sym, param) => typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do acc.resetFlag(PrivateLocal) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 947d1fcbfa73..d0086210a374 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2516,12 +2516,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer (arg, tparamBounds) else (arg, WildcardType) - if (tpt1.symbol.isClass) - tparam match { - case tparam: Symbol => - tparam.ensureCompleted() // This is needed to get the test `compileParSetSubset` to work - case _ => - } if (desugaredArg.isType) arg match { case untpd.WildcardTypeBoundsTree() diff --git a/tests/neg/i15177.FakeEnum.min.alt1.scala b/tests/neg/i15177.FakeEnum.min.alt1.scala new file mode 100644 index 000000000000..a3e4f7819244 --- /dev/null +++ b/tests/neg/i15177.FakeEnum.min.alt1.scala @@ -0,0 +1,7 @@ +// Like tests/neg/i15177.FakeEnum.min.scala +// But with an actual upper-bound requirement +// Which shouldn't be ignored as a part of overcoming the the cycle +trait Foo +trait X[T <: Foo] { trait Id } +object A extends X[B] // error: Type argument B does not conform to upper bound Foo +class B extends A.Id diff --git a/tests/neg/i15177.constr-dep.scala b/tests/neg/i15177.constr-dep.scala new file mode 100644 index 000000000000..14afb7bb9057 --- /dev/null +++ b/tests/neg/i15177.constr-dep.scala @@ -0,0 +1,9 @@ +// An example of how constructor _type_ parameters +// Which can _not_ be passed to the extends part +// That makes it part of the parent type, +// which has been found to be unsound. 
+class Foo[A] +class Foo1(val x: Int) + extends Foo[ // error: The type of a class parent cannot refer to constructor parameters, but Foo[(Foo1.this.x : Int)] refers to x + x.type + ] diff --git a/tests/neg/i15177.ub.scala b/tests/neg/i15177.ub.scala new file mode 100644 index 000000000000..d504528572ed --- /dev/null +++ b/tests/neg/i15177.ub.scala @@ -0,0 +1,13 @@ +// like tests/pos/i15177.scala +// but with T having an upper bound +// that B doesn't conform to +// just to be sure that not forcing B +// doesn't backdoor an illegal X[B] +class X[T <: C] { + type Id +} +object A + extends X[ // error + B] // error +class B(id: A.Id) +class C diff --git a/tests/pos/i15177.FakeEnum.min.alt2.scala b/tests/pos/i15177.FakeEnum.min.alt2.scala new file mode 100644 index 000000000000..d9a25a9d6c85 --- /dev/null +++ b/tests/pos/i15177.FakeEnum.min.alt2.scala @@ -0,0 +1,7 @@ +// Like tests/neg/i15177.FakeEnum.min.scala +// With an actual upper-bound requirement +// But that is satisfied on class B +trait Foo +trait X[T <: Foo] { trait Id } +object A extends X[B] +class B extends A.Id with Foo diff --git a/tests/pos/i15177.FakeEnum.min.alt3.scala b/tests/pos/i15177.FakeEnum.min.alt3.scala new file mode 100644 index 000000000000..7984059d0d2a --- /dev/null +++ b/tests/pos/i15177.FakeEnum.min.alt3.scala @@ -0,0 +1,7 @@ +// Like tests/neg/i15177.FakeEnum.min.scala +// With an actual upper-bound requirement +// But that is satisfied on trait Id +trait Foo +trait X[T <: Foo] { trait Id extends Foo } +object A extends X[B] +class B extends A.Id diff --git a/tests/pos/i15177.FakeEnum.min.scala b/tests/pos/i15177.FakeEnum.min.scala new file mode 100644 index 000000000000..e4eb8e95cbe4 --- /dev/null +++ b/tests/pos/i15177.FakeEnum.min.scala @@ -0,0 +1,4 @@ +// Minimisation of tests/neg/i15177.FakeEnum.scala +trait X[T] { trait Id } +object A extends X[B] +class B extends A.Id diff --git a/tests/pos/i15177.FakeEnum.scala b/tests/pos/i15177.FakeEnum.scala new file mode 100644 index 000000000000..356d8f774930 --- /dev/null +++ b/tests/pos/i15177.FakeEnum.scala @@ -0,0 +1,21 @@ +// From https://github.com/scala/scala3/issues/15177#issuecomment-1463088400 +trait FakeEnum[A, @specialized(Byte, Short, Int, Long) B] +{ + trait Value { + self: A => + def name: String + def id: B + } +} + +object FakeEnumType + extends FakeEnum[FakeEnumType, Short] +{ + val MEMBER1 = new FakeEnumType((0: Short), "MEMBER1") {} + val MEMBER2 = new FakeEnumType((1: Short), "MEMBER2") {} +} + +sealed abstract +class FakeEnumType(val id: Short, val name: String) + extends FakeEnumType.Value +{} diff --git a/tests/pos/i15177.app.scala b/tests/pos/i15177.app.scala new file mode 100644 index 000000000000..8695953fd426 --- /dev/null +++ b/tests/pos/i15177.app.scala @@ -0,0 +1,6 @@ +// like tests/pos/i15177.scala +// but with an applied type B[D] +class X[T] { type Id } +object A extends X[B[D]] +class B[C](id: A.Id) +class D diff --git a/tests/pos/i15177.constr-dep.scala b/tests/pos/i15177.constr-dep.scala new file mode 100644 index 000000000000..77f96abea35a --- /dev/null +++ b/tests/pos/i15177.constr-dep.scala @@ -0,0 +1,6 @@ +// An example of how constructor _term_ parameters +// Can be passed to the extends part +// But that doesn't mean the parent type, +// it's just the super constructor call. 
+class Bar(val y: Long) +class Bar1(val z: Long) extends Bar(z) diff --git a/tests/pos/i15177.hk.scala b/tests/pos/i15177.hk.scala new file mode 100644 index 000000000000..4530e6c960e2 --- /dev/null +++ b/tests/pos/i15177.hk.scala @@ -0,0 +1,5 @@ +// like tests/pos/i15177.scala +// but with B being higher kinded +class X[T[_]] { type Id } +object A extends X[B] +class B[C](id: A.Id) diff --git a/tests/pos/i15177.hk2.scala b/tests/pos/i15177.hk2.scala new file mode 100644 index 000000000000..9813d92aa261 --- /dev/null +++ b/tests/pos/i15177.hk2.scala @@ -0,0 +1,6 @@ +// like tests/pos/i15177.scala +// but with B being higher kinded +// but without the actual cycle (like .without) +class X[T[_]] { type Id } +class A extends X[B] +class B[C] diff --git a/tests/pos/i15177.hylolib.scala b/tests/pos/i15177.hylolib.scala new file mode 100644 index 000000000000..96cf87680a1c --- /dev/null +++ b/tests/pos/i15177.hylolib.scala @@ -0,0 +1,11 @@ +//> using options -language:experimental.modularity -source future +// A minimisation of pos/hylolib-cb that broke while fixing i15177 +trait Value[Self] +trait Coll[Self]: + type Pos: Value + extension (self: Self) def pos: Pos +extension [Self: Coll](self: Self) def trigger = self.pos +class Slice[Base] +given SliceIsColl[T: Coll as c]: Coll[Slice[T]] with + type Pos = c.Pos + extension (self: Slice[T]) def pos: Pos = ??? diff --git a/tests/pos/i15177.orig.scala b/tests/pos/i15177.orig.scala new file mode 100644 index 000000000000..d9e257e1bb34 --- /dev/null +++ b/tests/pos/i15177.orig.scala @@ -0,0 +1,7 @@ +trait DomainIdProvider[T] { + type Id = List[T] +} +object Country extends DomainIdProvider[Country] +case class Country( + id: Country.Id, +) diff --git a/tests/pos/i15177.scala b/tests/pos/i15177.scala new file mode 100644 index 000000000000..278994961810 --- /dev/null +++ b/tests/pos/i15177.scala @@ -0,0 +1,3 @@ +class X[T] { trait Id } +object A extends X[B] +class B(id: A.Id) diff --git a/tests/pos/i15177.without.scala b/tests/pos/i15177.without.scala new file mode 100644 index 000000000000..4919999f21ed --- /dev/null +++ b/tests/pos/i15177.without.scala @@ -0,0 +1,5 @@ +// like tests/pos/i15177.scala +// but without the actual cycle +class X[T] { trait Id } +class A extends X[B] +class B From 5408a8075be556fa0b38bdf0a16f3cb83112b8ad Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 9 Aug 2024 12:57:13 +0200 Subject: [PATCH 448/827] Add regression tests for #21352 and .. * `neandertech/langoustine` - [#21344 (comment)] (https://github.com/scala/scala3/pull/21344#issuecomment-2274037341) * `tpolcat/doobie` - [#21344 (comment)] (https://github.com/scala/scala3/pull/21344#issuecomment-2274207166) Fixes #21352 --- tests/pos/i21352a/schema.scala | 19 ++++++++++++++ tests/pos/i21352a/schemaDerivation.scala | 26 +++++++++++++++++++ tests/pos/i21352b.scala | 33 ++++++++++++++++++++++++ tests/pos/i21352c.scala | 17 ++++++++++++ 4 files changed, 95 insertions(+) create mode 100644 tests/pos/i21352a/schema.scala create mode 100644 tests/pos/i21352a/schemaDerivation.scala create mode 100644 tests/pos/i21352b.scala create mode 100644 tests/pos/i21352c.scala diff --git a/tests/pos/i21352a/schema.scala b/tests/pos/i21352a/schema.scala new file mode 100644 index 000000000000..486e1bb1ea34 --- /dev/null +++ b/tests/pos/i21352a/schema.scala @@ -0,0 +1,19 @@ +//> using options -source:3.5 + +case class Schema[T](format: String): + def asOption: Schema[Option[T]] = ??? + def name(name: Option[SName]): Schema[T] = ??? + def format(f: String): Schema[T] = ??? 
+ +object Schema extends SchemaCompanionMacros: + implicit def schemaForOption[T: Schema]: Schema[Option[T]] = + implicitly[Schema[T]] + ??? + +trait SchemaCompanionMacros extends SchemaDerivation: + given derivedStringBasedUnionEnumeration[S](using IsUnionOf[String, S]): Schema[S] = + val x: Schema[S] = ??? + x.name(None) + +@main def Test = + case class Foo(x: Int) derives Schema diff --git a/tests/pos/i21352a/schemaDerivation.scala b/tests/pos/i21352a/schemaDerivation.scala new file mode 100644 index 000000000000..d34a59c51a61 --- /dev/null +++ b/tests/pos/i21352a/schemaDerivation.scala @@ -0,0 +1,26 @@ +//> using options -source:3.5 + +import scala.deriving.* +import scala.quoted.* + +trait SName +abstract class CaseClass[Typeclass[_], Type]: + def param: CaseClass.Param[Typeclass, Type] + +object CaseClass: + trait Param[Typeclass[_], Type]: + type PType + def typeclass: Typeclass[PType] + + +sealed trait IsUnionOf[T, A] +object IsUnionOf: + transparent inline given derived[T, A]: IsUnionOf[T, A] = ${ deriveImpl[T, A] } + private def deriveImpl[T, A](using quotes: Quotes): Expr[IsUnionOf[T, A]] = ??? + +trait SchemaDerivation: + inline implicit def derived[T](implicit m: Mirror.Of[T]): Schema[T] = + val ctx: CaseClass[Schema, T] = ??? + val valueSchema = ctx.param.typeclass + val format = valueSchema.format + ??? diff --git a/tests/pos/i21352b.scala b/tests/pos/i21352b.scala new file mode 100644 index 000000000000..6e1dfbd18e3c --- /dev/null +++ b/tests/pos/i21352b.scala @@ -0,0 +1,33 @@ + +object serializer: + trait Reader[T] + trait Writer[T] + // Needs to be implicit val + implicit val UnitReader: Reader[Unit] = ??? + implicit val StringReader: Reader[String] = ??? + // A way to derive instances needs to be available + inline given superTypeReader[T: scala.reflect.ClassTag]: Reader[T] = ??? +import serializer.Reader + +trait Codec[T] +trait Channel[F[_]]: + def notificationStub[In: Codec](): In => F[Unit] +trait Monadic[F[_]] + +sealed abstract class LSPNotification(): + type In + given inputReader: Reader[In] + +class PreparedNotification[X <: LSPNotification](val x: X, val in: x.In): + type In = x.In + +trait Communicate[F[_]]: + def notification[X <: LSPNotification](notif: X, in: notif.In): F[Unit] + +object Communicate: + given codec[T: Reader]: Codec[T] = ??? + + def channel[F[_]: Monadic](channel: Channel[F]) = + new Communicate[F]: + override def notification[X <: LSPNotification](notif: X, in: notif.In): F[Unit] = + channel.notificationStub().apply(in) // was error diff --git a/tests/pos/i21352c.scala b/tests/pos/i21352c.scala new file mode 100644 index 000000000000..22169f3560a8 --- /dev/null +++ b/tests/pos/i21352c.scala @@ -0,0 +1,17 @@ + +trait Text[T] +trait Read[A] +object Read extends ReadImplicits: + implicit val unit: Read[Unit] = ??? +trait ReadImplicits: + import scala.deriving.* + given roe: Read[Option[EmptyTuple]] = ??? + given rou: Read[Option[Unit]] = ??? + given cons1[H, T <: Tuple](using Read[Option[H]], Read[Option[T]]): Read[Option[H *: T]] = ??? + +trait Fragment: + def query[B: Read]: String = ??? + +@main def Test = + val f: Fragment = ??? + f.query // was error From 72a20bdb67155bda53e7418a7f0d95e627d11a6c Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 10 Aug 2024 14:21:04 +0200 Subject: [PATCH 449/827] Improve Contains handling Make use of enclosing Contains assumptions to improve the subsumes logic. 
--- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 18 ++++++++ .../src/dotty/tools/dotc/cc/CaptureRef.scala | 4 ++ .../src/dotty/tools/dotc/cc/CaptureSet.scala | 8 +++- .../dotty/tools/dotc/cc/CheckCaptures.scala | 46 +++++++++++-------- .../dotty/tools/dotc/core/Definitions.scala | 2 +- tests/pos-custom-args/captures/i21313.scala | 11 ++++- 6 files changed, 66 insertions(+), 23 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index a2d2d2cf358c..9b7d2b90ed1a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -713,3 +713,21 @@ extension (self: Type) case _ => self +/** An extractor for a contains argument */ +object ContainsImpl: + def unapply(tree: TypeApply)(using Context): Option[(Tree, Tree)] = + tree.fun.tpe.widen match + case fntpe: PolyType if tree.fun.symbol == defn.Caps_containsImpl => + tree.args match + case csArg :: refArg :: Nil => Some((csArg, refArg)) + case _ => None + case _ => None + +/** An extractor for a contains parameter */ +object ContainsParam: + def unapply(sym: Symbol)(using Context): Option[(TypeRef, CaptureRef)] = + sym.info.dealias match + case AppliedType(tycon, (cs: TypeRef) :: (ref: CaptureRef) :: Nil) + if tycon.typeSymbol == defn.Caps_ContainsTrait + && cs.typeSymbol.isAbstractOrParamType => Some((cs, ref)) + case _ => None diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala index 6578da89bbf8..f00c6869cd80 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -116,8 +116,12 @@ trait CaptureRef extends TypeProxy, ValueType: case x1: SingletonCaptureRef => x1.subsumes(y) case _ => false case x: TermParamRef => subsumesExistentially(x, y) + case x: TypeRef => assumedContainsOf(x).contains(y) case _ => false + def assumedContainsOf(x: TypeRef)(using Context): SimpleIdentitySet[CaptureRef] = + CaptureSet.assumedContains.getOrElse(x, SimpleIdentitySet.empty) + end CaptureRef trait SingletonCaptureRef extends SingletonType, CaptureRef diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 1d09b9dc5f20..25d8e0bc6506 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -16,7 +16,7 @@ import util.{SimpleIdentitySet, Property} import typer.ErrorReporting.Addenda import TypeComparer.subsumesExistentially import util.common.alwaysTrue -import scala.collection.mutable +import scala.collection.{mutable, immutable} import CCState.* /** A class for capture sets. Capture sets can be constants or variables. @@ -1125,6 +1125,12 @@ object CaptureSet: foldOver(cs, t) collect(CaptureSet.empty, tp) + type AssumedContains = immutable.Map[TypeRef, SimpleIdentitySet[CaptureRef]] + val AssumedContains: Property.Key[AssumedContains] = Property.Key() + + def assumedContains(using Context): AssumedContains = + ctx.property(AssumedContains).getOrElse(immutable.Map.empty) + private val ShownVars: Property.Key[mutable.Set[Var]] = Property.Key() /** Perform `op`. 
Under -Ycc-debug, collect and print info about all variables reachable diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index dbf01915122d..51cf362ca667 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -676,29 +676,24 @@ class CheckCaptures extends Recheck, SymTransformer: i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) - val res = handleCall(meth, tree, () => Existential.toCap(super.recheckTypeApply(tree, pt))) - if meth == defn.Caps_containsImpl then checkContains(tree) - res + try handleCall(meth, tree, () => Existential.toCap(super.recheckTypeApply(tree, pt))) + finally checkContains(tree) end recheckTypeApply /** Faced with a tree of form `caps.contansImpl[CS, r.type]`, check that `R` is a tracked * capability and assert that `{r} <:CS`. */ - def checkContains(tree: TypeApply)(using Context): Unit = - tree.fun.knownType.widen match - case fntpe: PolyType => - tree.args match - case csArg :: refArg :: Nil => - val cs = csArg.knownType.captureSet - val ref = refArg.knownType - capt.println(i"check contains $cs , $ref") - ref match - case ref: CaptureRef if ref.isTracked => - checkElem(ref, cs, tree.srcPos) - case _ => - report.error(em"$refArg is not a tracked capability", refArg.srcPos) - case _ => - case _ => + def checkContains(tree: TypeApply)(using Context): Unit = tree match + case ContainsImpl(csArg, refArg) => + val cs = csArg.knownType.captureSet + val ref = refArg.knownType + capt.println(i"check contains $cs , $ref") + ref match + case ref: CaptureRef if ref.isTracked => + checkElem(ref, cs, tree.srcPos) + case _ => + report.error(em"$refArg is not a tracked capability", refArg.srcPos) + case _ => override def recheckBlock(tree: Block, pt: Type)(using Context): Type = inNestedLevel(super.recheckBlock(tree, pt)) @@ -814,15 +809,26 @@ class CheckCaptures extends Recheck, SymTransformer: val localSet = capturedVars(sym) if !localSet.isAlwaysEmpty then curEnv = Env(sym, EnvKind.Regular, localSet, curEnv) + + // ctx with AssumedContains entries for each Contains parameter + val bodyCtx = + var ac = CaptureSet.assumedContains + for paramSyms <- sym.paramSymss do + for case ContainsParam(cs, ref) <- paramSyms do + ac = ac.updated(cs, ac.getOrElse(cs, SimpleIdentitySet.empty) + ref) + if ac.isEmpty then ctx + else ctx.withProperty(CaptureSet.AssumedContains, Some(ac)) + inNestedLevel: // TODO: needed here? - try checkInferredResult(super.recheckDefDef(tree, sym), tree) + try checkInferredResult(super.recheckDefDef(tree, sym)(using bodyCtx), tree) finally if !sym.isAnonymousFunction then // Anonymous functions propagate their type to the enclosing environment // so it is not in general sound to interpolate their types. interpolateVarsIn(tree.tpt) curEnv = saved - + end recheckDefDef + /** If val or def definition with inferred (result) type is visible * in other compilation units, check that the actual inferred type * conforms to the expected type where all inferred capture sets are dropped. 
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 1d2f2b05feb4..8981aa4aa6ac 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1002,7 +1002,7 @@ class Definitions { @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") - @tu lazy val Caps_ContainsTrait: TypeSymbol = CapsModule.requiredType("Capability") + @tu lazy val Caps_ContainsTrait: TypeSymbol = CapsModule.requiredType("Contains") @tu lazy val Caps_containsImpl: TermSymbol = CapsModule.requiredMethod("containsImpl") @tu lazy val PureClass: Symbol = requiredClass("scala.Pure") diff --git a/tests/pos-custom-args/captures/i21313.scala b/tests/pos-custom-args/captures/i21313.scala index 2fda6c0c0e45..b388b6487cb5 100644 --- a/tests/pos-custom-args/captures/i21313.scala +++ b/tests/pos-custom-args/captures/i21313.scala @@ -1,7 +1,16 @@ import caps.CapSet trait Async: - def await[T, Cap^](using caps.Contains[Cap, this.type])(src: Source[T, Cap]^): T + def await[T, Cap^](using caps.Contains[Cap, this.type])(src: Source[T, Cap]^): T = + val x: Async^{this} = ??? + val y: Async^{Cap^} = x + val ac: Async^ = ??? + def f(using caps.Contains[Cap, ac.type]) = + val x2: Async^{this} = ??? + val y2: Async^{Cap^} = x2 + val x3: Async^{ac} = ??? + val y3: Async^{Cap^} = x3 + ??? trait Source[+T, Cap^]: final def await(using ac: Async^{Cap^}) = ac.await[T, Cap](this) // Contains[Cap, ac] is assured because {ac} <: Cap. From b2292a8c67eaf09f8604b3c1e6a1fe5dffc145e3 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 10 Aug 2024 14:21:53 +0200 Subject: [PATCH 450/827] Unrelated test: A Provider variant --- tests/run/Providers.check | 8 ++++++ tests/run/Providers.scala | 52 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+) diff --git a/tests/run/Providers.check b/tests/run/Providers.check index 7b0a9a8b143e..a72c2c1e6fb7 100644 --- a/tests/run/Providers.check +++ b/tests/run/Providers.check @@ -18,3 +18,11 @@ Executing query: insert into subscribers(name, email) values Daniel daniel@Rockt You've just been subscribed to RockTheJVM. Welcome, Martin Acquired connection Executing query: insert into subscribers(name, email) values Martin odersky@gmail.com + +Injected2 +You've just been subscribed to RockTheJVM. Welcome, Daniel +Acquired connection +Executing query: insert into subscribers(name, email) values Daniel daniel@RocktheJVM.com +You've just been subscribed to RockTheJVM. 
Welcome, Martin +Acquired connection +Executing query: insert into subscribers(name, email) values Martin odersky@gmail.com diff --git a/tests/run/Providers.scala b/tests/run/Providers.scala index 3eb4b2df2207..8c5bf20bc02e 100644 --- a/tests/run/Providers.scala +++ b/tests/run/Providers.scala @@ -65,6 +65,8 @@ end Providers Explicit().test() println(s"\nInjected") Injected().test() + println(s"\nInjected2") + Injected2().test() /** Demonstrator for explicit dependency construction */ class Explicit: @@ -173,5 +175,55 @@ class Injected: end explicit end Injected +/** Injected with builders in companion objects */ +class Injected2: + import Providers.* + + case class User(name: String, email: String) + + class UserSubscription(emailService: EmailService, db: UserDatabase): + def subscribe(user: User) = + emailService.email(user) + db.insert(user) + object UserSubscription: + def apply()(using Provider[(EmailService, UserDatabase)]): UserSubscription = + new UserSubscription(provided[EmailService], provided[UserDatabase]) + + class EmailService: + def email(user: User) = + println(s"You've just been subscribed to RockTheJVM. Welcome, ${user.name}") + + class UserDatabase(pool: ConnectionPool): + def insert(user: User) = + pool.get().runQuery(s"insert into subscribers(name, email) values ${user.name} ${user.email}") + object UserDatabase: + def apply()(using Provider[(ConnectionPool)]): UserDatabase = + new UserDatabase(provided[ConnectionPool]) + + class ConnectionPool(n: Int): + def get(): Connection = + println(s"Acquired connection") + Connection() + + class Connection(): + def runQuery(query: String): Unit = + println(s"Executing query: $query") + + def test() = + given Provider[EmailService] = provide(EmailService()) + given Provider[ConnectionPool] = provide(ConnectionPool(10)) + given Provider[UserDatabase] = provide(UserDatabase()) + given Provider[UserSubscription] = provide(UserSubscription()) + + def subscribe(user: User)(using Provider[UserSubscription]) = + val sub = UserSubscription() + sub.subscribe(user) + + subscribe(User("Daniel", "daniel@RocktheJVM.com")) + subscribe(User("Martin", "odersky@gmail.com")) + end test +end Injected2 + + From e7d479f80a29c6ae21d0755047e563963b87ac82 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sat, 10 Aug 2024 12:00:15 -0400 Subject: [PATCH 451/827] Fixes IllegalAccessError with Java package protected class This is a backport of Scala 2.x scala/scala 6023 Fixes 13841 Fixes 13897 **Problem** When compiling `builder.call1().call2()` where both are Java-defined package-protected class through a public subsclass, Scala 3 does not properly cast the receiver to the public class, and results in an IllegalAccessError. **Solution** This backports the casting fix from the Scala 2.x compiler. 
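For illustration, a condensed form of the failing scenario; it mirrors the `tests/run/java-package-protected` files added below (the Java side is summarized in the comments, and the value name `builder` follows the description above rather than the test):

    // Java, in package a: a *package-private* class B<T extends B<T>> defines the
    // fluent setters returning T; only `public class A extends B<A>` is visible
    // from other packages.
    package b

    import a.*

    object C:
      def m: Int =
        val builder = new A()
          .setConnectTimeout(1)  // erased result type is the package-private a.B,
          .setFailedAttempts(1)  // so without a cast back to the public a.A the
        0                        // chained call fails with an IllegalAccessError at run time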
--- .../dotty/tools/dotc/transform/Erasure.scala | 53 ++++++++++--------- tests/run/java-package-protected/A.java | 21 ++++++++ tests/run/java-package-protected/C.scala | 10 ++++ tests/run/java-package-protected/Test.scala | 5 ++ 4 files changed, 64 insertions(+), 25 deletions(-) create mode 100644 tests/run/java-package-protected/A.java create mode 100644 tests/run/java-package-protected/C.scala create mode 100644 tests/run/java-package-protected/Test.scala diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index be00d952566c..7414ca7e69c6 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -36,6 +36,7 @@ import ExplicitOuter.* import core.Mode import util.Property import reporting.* +import scala.annotation.tailrec class Erasure extends Phase with DenotTransformer { @@ -764,7 +765,8 @@ object Erasure { (ctx.owner.enclosingPackageClass eq boundary) } - def recur(qual: Tree): Tree = { + @tailrec + def recur(qual: Tree): Tree = val qualIsPrimitive = qual.tpe.widen.isPrimitiveValueType val symIsPrimitive = sym.owner.isPrimitiveValueClass @@ -773,33 +775,34 @@ object Erasure { inContext(preErasureCtx): tree.qualifier.typeOpt.widen.finalResultType) - if (qualIsPrimitive && !symIsPrimitive || qual.tpe.widenDealias.isErasedValueType) + if qualIsPrimitive && !symIsPrimitive || qual.tpe.widenDealias.isErasedValueType then recur(box(qual)) - else if (!qualIsPrimitive && symIsPrimitive) + else if !qualIsPrimitive && symIsPrimitive then recur(unbox(qual, sym.owner.typeRef)) - else if (sym.owner eq defn.ArrayClass) + else if sym.owner eq defn.ArrayClass then selectArrayMember(qual, originalQual) - else { - val qual1 = adaptIfSuper(qual) - if (qual1.tpe.derivesFrom(sym.owner) || qual1.isInstanceOf[Super]) - select(qual1, sym) - else - val castTarget = // Avoid inaccessible cast targets, see i8661 - if isJvmAccessible(sym.owner) && sym.owner.isType - then - sym.owner.typeRef - else - // If the owner is inaccessible, try going through the qualifier, - // but be careful to not go in an infinite loop in case that doesn't - // work either. - val tp = originalQual - if tp =:= qual1.tpe.widen then - return errorTree(qual1, - em"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") - tp - recur(cast(qual1, castTarget)) - } - } + else + adaptIfSuper(qual) match + case qual1: Super => + select(qual1, sym) + case qual1 if !isJvmAccessible(qual1.tpe.typeSymbol) + || !qual1.tpe.derivesFrom(sym.owner) => + val castTarget = // Avoid inaccessible cast targets, see i8661 + if isJvmAccessible(sym.owner) && sym.owner.isType then + sym.owner.typeRef + else + // If the owner is inaccessible, try going through the qualifier, + // but be careful to not go in an infinite loop in case that doesn't + // work either. + val tp = originalQual + if tp =:= qual1.tpe.widen then + return errorTree(qual1, + em"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") + tp + recur(cast(qual1, castTarget)) + case qual1 => + select(qual1, sym) + end recur checkNotErased(recur(qual1)) } diff --git a/tests/run/java-package-protected/A.java b/tests/run/java-package-protected/A.java new file mode 100644 index 000000000000..66457741f986 --- /dev/null +++ b/tests/run/java-package-protected/A.java @@ -0,0 +1,21 @@ +// filter: unchecked +package a; + +/** This is package protected. 
*/ +class B> { + private int connectTimeout = 10000; + private int failedAttempts = 3; + + public T setConnectTimeout(int connectTimeout) { + this.connectTimeout = connectTimeout; + return (T) this; + } + + public T setFailedAttempts(int failedAttempts) { + this.failedAttempts = failedAttempts; + return (T) this; + } +} + +/** This is public. */ +public class A extends B { } diff --git a/tests/run/java-package-protected/C.scala b/tests/run/java-package-protected/C.scala new file mode 100644 index 000000000000..100b5819ccde --- /dev/null +++ b/tests/run/java-package-protected/C.scala @@ -0,0 +1,10 @@ +package b + +import a.* + +object C: + def m: Int = + val a = new A() + .setConnectTimeout(1) + .setFailedAttempts(1) + 0 diff --git a/tests/run/java-package-protected/Test.scala b/tests/run/java-package-protected/Test.scala new file mode 100644 index 000000000000..a7036a36d4ae --- /dev/null +++ b/tests/run/java-package-protected/Test.scala @@ -0,0 +1,5 @@ +// scalajs: --skip + +object Test extends App: + assert(b.C.m == 0) +end Test From 5915e511cbe1a19e7b67d7393b77bf16abfc22fa Mon Sep 17 00:00:00 2001 From: philippus Date: Tue, 9 Apr 2024 21:26:26 +0200 Subject: [PATCH 452/827] Update asm to patched 9.7 --- compiler/src/dotty/tools/backend/jvm/BackendUtils.scala | 1 + project/Build.scala | 2 +- tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala index 865ee9bf4af9..cb7ed3d54788 100644 --- a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -185,5 +185,6 @@ object BackendUtils { 20 -> asm.Opcodes.V20, 21 -> asm.Opcodes.V21, 22 -> asm.Opcodes.V22, + 23 -> asm.Opcodes.V23 ) } diff --git a/project/Build.scala b/project/Build.scala index fef7a2bcb60b..b50cafbe6a06 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -625,7 +625,7 @@ object Build { // get libraries onboard libraryDependencies ++= Seq( - "org.scala-lang.modules" % "scala-asm" % "9.6.0-scala-1", // used by the backend + "org.scala-lang.modules" % "scala-asm" % "9.7.0-scala-2", // used by the backend Dependencies.compilerInterface, "org.jline" % "jline-reader" % "3.25.1", // used by the REPL "org.jline" % "jline-terminal" % "3.25.1", diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala index 558eb3e0a12b..914146c3c175 100644 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala @@ -17,7 +17,7 @@ class ScalaSettings extends SettingGroup with AllScalaSettings object ScalaSettings: // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` private val minTargetVersion = 8 - private val maxTargetVersion = 22 + private val maxTargetVersion = 23 def supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) From e66860be41f48517a536c57c032018710e070abb Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Mon, 12 Aug 2024 11:30:28 +0200 Subject: [PATCH 453/827] fix: throw an error before assigning ErrorType when expanding constValue --- compiler/src/dotty/tools/dotc/inlines/Inlines.scala | 1 + tests/neg/i21359.scala | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 tests/neg/i21359.scala diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala 
b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index fffe87c3f57a..788ecbfebd4b 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -429,6 +429,7 @@ object Inlines: val constVal = tryConstValue(tpe) if constVal.isEmpty then val msg = NotConstant("cannot take constValue", tpe) + report.error(msg, callTypeArgs.head.srcPos) ref(defn.Predef_undefined).withSpan(callTypeArgs.head.span).withType(ErrorType(msg)) else constVal diff --git a/tests/neg/i21359.scala b/tests/neg/i21359.scala new file mode 100644 index 000000000000..9d588335c0e6 --- /dev/null +++ b/tests/neg/i21359.scala @@ -0,0 +1,6 @@ +import scala.compiletime.constValueTuple +import scala.deriving.Mirror + +case class Hello(a: Int) +val mirror = summon[Mirror.Of[Hello]] +val test = constValueTuple[mirror.MirroredElemTypes] // error From bc8f235f4b8ceb0e9d17426406efef7d62c1f1f2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Aug 2024 13:40:14 +0000 Subject: [PATCH 454/827] Bump VirtusLab/scala-cli-setup from 1.4.1 to 1.4.3 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.4.1 to 1.4.3. - [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.4.1...v1.4.3) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 4eb2474855ce..57a5f105c86e 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.4.1 + - uses: VirtusLab/scala-cli-setup@v1.4.3 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From f0a5da4c82c62b48fea80e7bcc0085513ba27555 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 12 Aug 2024 15:12:21 +0100 Subject: [PATCH 455/827] Bump MiMa check to 3.5.0 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 05eb164f91f3..724010cef536 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -113,7 +113,7 @@ object Build { * - `3.M.0` if `P > 0` * - `3.(M-1).0` if `P = 0` */ - val mimaPreviousDottyVersion = "3.5.0-RC1" + val mimaPreviousDottyVersion = "3.5.0" /** LTS version against which we check binary compatibility. 
* From b0871ee0b1127f9f90e1ff5052b12bcd7696332f Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 12 Aug 2024 15:16:06 +0100 Subject: [PATCH 456/827] Bump reference compiler to 3.5.0 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 724010cef536..99bfd5be88ce 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -90,7 +90,7 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.5.0-RC4" + val referenceVersion = "3.5.0" val baseVersion = "3.6.0" // Will be required by some automation later From 97bbc243b7cfb6ca9b17dde8803fba7c786e9b8c Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 12 Aug 2024 15:53:12 +0200 Subject: [PATCH 457/827] Use default self type more often We now also use cap as the default for the self type's capture set if a base class has an explicit self type, but that type's capture set is universal. This requires fewer self type annotations. --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 6 ++++-- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- scala2-library-cc/src/scala/collection/IndexedSeqView.scala | 5 +---- scala2-library-cc/src/scala/collection/SeqView.scala | 2 -- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index a2d2d2cf358c..15cdeb9bac11 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -528,9 +528,11 @@ extension (cls: ClassSymbol) // and err on the side of impure. && selfType.exists && selfType.captureSet.isAlwaysEmpty - def baseClassHasExplicitSelfType(using Context): Boolean = + def baseClassHasExplicitNonUniversalSelfType(using Context): Boolean = cls.baseClasses.exists: bc => - bc.is(CaptureChecked) && bc.givenSelfType.exists + bc.is(CaptureChecked) + && bc.givenSelfType.exists + && !bc.givenSelfType.captureSet.isUniversal def matchesExplicitRefsInBaseClass(refs: CaptureSet)(using Context): Boolean = cls.baseClasses.tail.exists: bc => diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 25d50052f107..c0b0d53910fd 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -572,7 +572,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: else if cls.isPureClass then // is cls is known to be pure, nothing needs to be added to self type selfInfo - else if !cls.isEffectivelySealed && !cls.baseClassHasExplicitSelfType then + else if !cls.isEffectivelySealed && !cls.baseClassHasExplicitNonUniversalSelfType then // assume {cap} for completely unconstrained self types of publicly extensible classes CapturingType(cinfo.selfType, CaptureSet.universal) else diff --git a/scala2-library-cc/src/scala/collection/IndexedSeqView.scala b/scala2-library-cc/src/scala/collection/IndexedSeqView.scala index 0b6f1bc8e64e..78f8abb8e327 100644 --- a/scala2-library-cc/src/scala/collection/IndexedSeqView.scala +++ b/scala2-library-cc/src/scala/collection/IndexedSeqView.scala @@ -16,13 +16,10 @@ package collection import scala.annotation.nowarn import language.experimental.captureChecking -trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { - self: IndexedSeqViewOps[A, CC, C]^ => -} +trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] /** View defined in terms 
of indexing a range */ trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { - self: IndexedSeqView[A]^ => override def view: IndexedSeqView[A]^{this} = this diff --git a/scala2-library-cc/src/scala/collection/SeqView.scala b/scala2-library-cc/src/scala/collection/SeqView.scala index c7af0077ce1a..292dc61ddaa8 100644 --- a/scala2-library-cc/src/scala/collection/SeqView.scala +++ b/scala2-library-cc/src/scala/collection/SeqView.scala @@ -25,7 +25,6 @@ import scala.annotation.unchecked.uncheckedCaptures * mapping a SeqView with an impure function gives an impure view). */ trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { - self: SeqViewOps[A, CC, C]^ => def length: Int def apply(x: Int): A @@ -75,7 +74,6 @@ trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { } trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] { - self: SeqView[A]^ => override def view: SeqView[A]^{this} = this From 4981f8d7142e9e4f0d331822214e7ef5f46353ba Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 12 Aug 2024 15:53:12 +0200 Subject: [PATCH 458/827] Use default self type more often We now also use cap as the default for the self type's capture set if a base class has an explicit self type, but that type's capture set is universal. This requires fewer self type annotations. --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 6 ++++-- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- scala2-library-cc/src/scala/collection/IndexedSeqView.scala | 5 +---- scala2-library-cc/src/scala/collection/SeqView.scala | 2 -- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 9b7d2b90ed1a..29c6528e36de 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -528,9 +528,11 @@ extension (cls: ClassSymbol) // and err on the side of impure. 
&& selfType.exists && selfType.captureSet.isAlwaysEmpty - def baseClassHasExplicitSelfType(using Context): Boolean = + def baseClassHasExplicitNonUniversalSelfType(using Context): Boolean = cls.baseClasses.exists: bc => - bc.is(CaptureChecked) && bc.givenSelfType.exists + bc.is(CaptureChecked) + && bc.givenSelfType.exists + && !bc.givenSelfType.captureSet.isUniversal def matchesExplicitRefsInBaseClass(refs: CaptureSet)(using Context): Boolean = cls.baseClasses.tail.exists: bc => diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 91671d7d7776..f578d10702e9 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -570,7 +570,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: else if cls.isPureClass then // is cls is known to be pure, nothing needs to be added to self type selfInfo - else if !cls.isEffectivelySealed && !cls.baseClassHasExplicitSelfType then + else if !cls.isEffectivelySealed && !cls.baseClassHasExplicitNonUniversalSelfType then // assume {cap} for completely unconstrained self types of publicly extensible classes CapturingType(cinfo.selfType, CaptureSet.universal) else diff --git a/scala2-library-cc/src/scala/collection/IndexedSeqView.scala b/scala2-library-cc/src/scala/collection/IndexedSeqView.scala index 0b6f1bc8e64e..78f8abb8e327 100644 --- a/scala2-library-cc/src/scala/collection/IndexedSeqView.scala +++ b/scala2-library-cc/src/scala/collection/IndexedSeqView.scala @@ -16,13 +16,10 @@ package collection import scala.annotation.nowarn import language.experimental.captureChecking -trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { - self: IndexedSeqViewOps[A, CC, C]^ => -} +trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] /** View defined in terms of indexing a range */ trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { - self: IndexedSeqView[A]^ => override def view: IndexedSeqView[A]^{this} = this diff --git a/scala2-library-cc/src/scala/collection/SeqView.scala b/scala2-library-cc/src/scala/collection/SeqView.scala index c7af0077ce1a..292dc61ddaa8 100644 --- a/scala2-library-cc/src/scala/collection/SeqView.scala +++ b/scala2-library-cc/src/scala/collection/SeqView.scala @@ -25,7 +25,6 @@ import scala.annotation.unchecked.uncheckedCaptures * mapping a SeqView with an impure function gives an impure view). 
*/ trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { - self: SeqViewOps[A, CC, C]^ => def length: Int def apply(x: Int): A @@ -75,7 +74,6 @@ trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { } trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] { - self: SeqView[A]^ => override def view: SeqView[A]^{this} = this From 5dd0bf7bf252255d7d760c09c2402a249d304eda Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 12 Aug 2024 21:02:00 +0200 Subject: [PATCH 459/827] Expand Capability types T to T^ only if not explicit capture set is given --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 6 +++--- compiler/src/dotty/tools/dotc/cc/Setup.scala | 16 ++++++++++++---- .../src/dotty/tools/dotc/core/Definitions.scala | 3 +++ tests/pos-custom-args/captures/cap-refine.scala | 12 ++++++++++++ 4 files changed, 30 insertions(+), 7 deletions(-) create mode 100644 tests/pos-custom-args/captures/cap-refine.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 51cf362ca667..27a3d6024b65 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -632,7 +632,7 @@ class CheckCaptures extends Recheck, SymTransformer: def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = var refined: Type = core var allCaptures: CaptureSet = - if core.derivesFromCapability then CaptureSet.universal else initCs + if core.derivesFromCapability then defn.universalCSImpliedByCapability else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.isRefiningParamAccessor).symbol if !getter.is(Private) && getter.hasTrackedParts then @@ -809,7 +809,7 @@ class CheckCaptures extends Recheck, SymTransformer: val localSet = capturedVars(sym) if !localSet.isAlwaysEmpty then curEnv = Env(sym, EnvKind.Regular, localSet, curEnv) - + // ctx with AssumedContains entries for each Contains parameter val bodyCtx = var ac = CaptureSet.assumedContains @@ -828,7 +828,7 @@ class CheckCaptures extends Recheck, SymTransformer: interpolateVarsIn(tree.tpt) curEnv = saved end recheckDefDef - + /** If val or def definition with inferred (result) type is visible * in other compilation units, check that the actual inferred type * conforms to the expected type where all inferred capture sets are dropped. 
diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index f578d10702e9..22e7899eeea1 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -299,16 +299,24 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: CapturingType(fntpe, cs, boxed = false) else fntpe + def stripImpliedCaptureSet(tp: Type): Type = tp match + case tp @ CapturingType(parent, refs) + if (refs eq defn.universalCSImpliedByCapability) && !tp.isBoxedCapturing => + parent + case tp @ CapturingType(parent, refs) => tp + case _ => tp + def apply(t: Type) = t match case t @ CapturingType(parent, refs) => - t.derivedCapturingType(this(parent), refs) + t.derivedCapturingType(stripImpliedCaptureSet(this(parent)), refs) case t @ AnnotatedType(parent, ann) => val parent1 = this(parent) if ann.symbol.isRetains then + val parent2 = stripImpliedCaptureSet(parent1) for tpt <- tptToCheck do - checkWellformedLater(parent1, ann.tree, tpt) - CapturingType(parent1, ann.tree.toCaptureSet) + checkWellformedLater(parent2, ann.tree, tpt) + CapturingType(parent2, ann.tree.toCaptureSet) else t.derivedAnnotatedType(parent1, ann) case throwsAlias(res, exc) => @@ -316,7 +324,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case t => // Map references to capability classes C to C^ if t.derivesFromCapability && !t.isSingleton && t.typeSymbol != defn.Caps_Exists - then CapturingType(t, CaptureSet.universal, boxed = false) + then CapturingType(t, defn.universalCSImpliedByCapability, boxed = false) else normalizeCaptures(mapOver(t)) end toCapturing diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 8981aa4aa6ac..f95bb3cea351 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1005,6 +1005,9 @@ class Definitions { @tu lazy val Caps_ContainsTrait: TypeSymbol = CapsModule.requiredType("Contains") @tu lazy val Caps_containsImpl: TermSymbol = CapsModule.requiredMethod("containsImpl") + /** The same as CaptureSet.universal but generated implicitly for references of Capability subtypes */ + @tu lazy val universalCSImpliedByCapability = CaptureSet(captureRoot.termRef) + @tu lazy val PureClass: Symbol = requiredClass("scala.Pure") // Annotation base classes diff --git a/tests/pos-custom-args/captures/cap-refine.scala b/tests/pos-custom-args/captures/cap-refine.scala new file mode 100644 index 000000000000..ed0b4d018b88 --- /dev/null +++ b/tests/pos-custom-args/captures/cap-refine.scala @@ -0,0 +1,12 @@ +//> using options -Werror +import caps.Capability + +trait Buffer[T] extends Capability: + def append(x: T): this.type + +def f(buf: Buffer[Int]) = + val buf1 = buf.append(1).append(2) + val buf2: Buffer[Int]^{buf1} = buf1 + + + From 05966dc70404d52a952aad656b8b6eaa57c2fa18 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 13 Aug 2024 10:55:16 +0100 Subject: [PATCH 460/827] Bound TypeRepr by Matchable in Quotes --- library/src/scala/quoted/Quotes.scala | 2 +- tests/pos/i21282.scala | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21282.scala diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index d048d8d728d5..fad769793bb7 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -2640,7 +2640,7 @@ trait Quotes { self: runtime.QuoteUnpickler & 
runtime.QuoteMatching => // ----- Types ---------------------------------------------------- /** A type, type constructors, type bounds or NoPrefix */ - type TypeRepr + type TypeRepr <: Matchable /** Module object of `type TypeRepr` */ val TypeRepr: TypeReprModule diff --git a/tests/pos/i21282.scala b/tests/pos/i21282.scala new file mode 100644 index 000000000000..a483f3763f89 --- /dev/null +++ b/tests/pos/i21282.scala @@ -0,0 +1,18 @@ +//> using options -Xfatal-warnings -source:future-migration + +import scala.quoted.* + +private def isUnionCanonicalImpl[U: Type](using Quotes): Expr[Unit] = + import quotes.reflect.* + val u = TypeRepr.of[U].dealiasKeepOpaques + + def inner[U: Type](s: Set[TypeRepr], tr: TypeRepr): Set[TypeRepr] = + tr.dealiasKeepOpaques match + case OrType(a, b) => + val ss = inner[U](s, a) + inner[U](ss, b) + case x if s.contains(x) => + report.errorAndAbort(s"Type ${x.show} multiple times (CHECK ALIASES) in union ${u.show}") + case x => s + x + inner(Set.empty, u) + '{ () } From 71db18e98e861772483e3bb369610558b22fd8a4 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 13 Aug 2024 11:15:20 +0100 Subject: [PATCH 461/827] Bump tasty format for 28.6.x --- tasty/src/dotty/tools/tasty/TastyFormat.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index c29ea99bcd8d..8f5f9d57a8a5 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -324,7 +324,7 @@ object TastyFormat { * compatibility, but remains backwards compatible, with all * preceding `MinorVersion`. */ - final val MinorVersion: Int = 5 + final val MinorVersion: Int = 6 /** Natural Number. The `ExperimentalVersion` allows for * experimentation with changes to TASTy without committing From e8f2ef72c59f2a7e79d1b688df582dbe144bc94c Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 13 Aug 2024 14:37:59 +0100 Subject: [PATCH 462/827] Adapt the compat regex to the new version pattern in baseVersion --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 2b54230cdf65..9128a9615219 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -134,7 +134,7 @@ object Build { } val compatMode = { - val VersionRE = """^\d+\.(\d+).(\d+).*""".r + val VersionRE = """^\d+\.(\d+)\.(\d+)""".r baseVersion match { case VersionRE(_, "0") => CompatMode.BinaryCompatible case _ => CompatMode.SourceAndBinaryCompatible From 7990912858d3e2f7da61f50cb8183d55b42d591b Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Tue, 13 Aug 2024 15:26:31 +0100 Subject: [PATCH 463/827] Update LTS MiMa filters --- project/MiMaFilters.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 88e3f2b27a84..a04f4fae91aa 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -65,6 +65,10 @@ object MiMaFilters { ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$betterMatchTypeExtractors$"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.7-migration"), + 
ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.7"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E7$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E7$minusmigration$"), ), ) From 3cfbd3ca1058f627f2b4d095076f19b22c98e358 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 14 Aug 2024 00:21:28 +0200 Subject: [PATCH 464/827] Set reference version to 3.5.1-RC2 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 9128a9615219..54ac84734486 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -90,7 +90,7 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.5.0" + val referenceVersion = "3.5.1-RC2" val baseVersion = "3.6.0" // Will be required by some automation later From e896db28f8d5a19353fa7914d6008edd9bf637aa Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Wed, 3 Jul 2024 15:21:07 +0200 Subject: [PATCH 465/827] feat: infer expected type --- .../dotty/tools/pc/InferExpectedType.scala | 128 ++++++++ .../tools/pc/ScalaPresentationCompiler.scala | 10 + .../pc/utils/InteractiveEnrichments.scala | 3 + .../pc/tests/InferExpectedTypeSuite.scala | 292 ++++++++++++++++++ 4 files changed, 433 insertions(+) create mode 100644 presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala diff --git a/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala b/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala new file mode 100644 index 000000000000..85beb31d5d29 --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala @@ -0,0 +1,128 @@ +package dotty.tools.pc + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.Symbols +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.interactive.Interactive +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.util.SourceFile +import dotty.tools.dotc.util.Spans.Span +import dotty.tools.pc.IndexedContext +import dotty.tools.pc.printer.ShortenedTypePrinter +import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam +import dotty.tools.pc.utils.InteractiveEnrichments.* + +import scala.meta.internal.metals.ReportContext +import scala.meta.pc.OffsetParams +import scala.meta.pc.SymbolSearch + +class InferExpectedType( + search: SymbolSearch, + driver: InteractiveDriver, + params: OffsetParams +)(implicit rc: ReportContext): + val uri = params.uri().nn + val code = params.text().nn + + val sourceFile = SourceFile.virtual(uri, code) + driver.run(uri, sourceFile) + + val ctx = driver.currentCtx + val pos = driver.sourcePosition(params) + + def infer() = + driver.compilationUnits.get(uri) match + case Some(unit) => + val path = + Interactive.pathTo(driver.openedTrees(uri), pos)(using ctx) + val newctx = ctx.fresh.setCompilationUnit(unit) + val tpdPath = + Interactive.pathTo(newctx.compilationUnit.tpdTree, pos.span)(using + newctx + ) + val locatedCtx = + Interactive.contextOfPath(tpdPath)(using newctx) + val indexedCtx = 
IndexedContext(locatedCtx) + val printer = + ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using indexedCtx) + InterCompletionType.inferType(path)(using newctx).map{ + tpe => printer.tpe(tpe) + } + case None => None + +object InterCompletionType: + def inferType(path: List[Tree])(using Context): Option[Type] = + path match + case (lit: Literal) :: Select(Literal(_), _) :: Apply(Select(Literal(_), _), List(Literal(Constant(null)))) :: rest => inferType(rest, lit.span) + case ident :: rest => inferType(rest, ident.span) + case _ => None + + def inferType(path: List[Tree], span: Span)(using Context): Option[Type] = + path match + case Typed(expr, tpt) :: _ if expr.span.contains(span) && !tpt.tpe.isErroneous => Some(tpt.tpe) + case Block(_, expr) :: rest if expr.span.contains(span) => + inferType(rest, span) + case Bind(_, body) :: rest if body.span.contains(span) => inferType(rest, span) + case Alternative(_) :: rest => inferType(rest, span) + case Try(block, _, _) :: rest if block.span.contains(span) => inferType(rest, span) + case CaseDef(_, _, body) :: Try(_, cases, _) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => inferType(rest, span) + case If(cond, _, _) :: rest if !cond.span.contains(span) => inferType(rest, span) + case If(cond, _, _) :: rest if cond.span.contains(span) => Some(Symbols.defn.BooleanType) + case CaseDef(_, _, body) :: Match(_, cases) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => + inferType(rest, span) + case NamedArg(_, arg) :: rest if arg.span.contains(span) => inferType(rest, span) + // x match + // case @@ + case CaseDef(pat, _, _) :: Match(sel, cases) :: rest if pat.span.contains(span) && cases.exists(_.span.contains(span)) && !sel.tpe.isErroneous => + sel.tpe match + case tpe: TermRef => Some(tpe.symbol.info).filterNot(_.isErroneous) + case tpe => Some(tpe) + // List(@@) + case SeqLiteral(_, tpe) :: _ if !tpe.tpe.isErroneous => + Some(tpe.tpe) + // val _: T = @@ + // def _: T = @@ + case (defn: ValOrDefDef) :: rest if !defn.tpt.tpe.isErroneous => Some(defn.tpt.tpe) + // f(@@) + case (app: Apply) :: rest => + val param = + for { + ind <- app.args.zipWithIndex.collectFirst { + case (arg, id) if arg.span.contains(span) => id + } + params <- app.symbol.paramSymss.find(!_.exists(_.isTypeParam)) + param <- params.get(ind) + } yield param.info + param match + // def f[T](a: T): T = ??? 
+ // f[Int](@@) + // val _: Int = f(@@) + case Some(t : TypeRef) if t.symbol.is(Flags.TypeParam) => + for { + (typeParams, args) <- + app match + case Apply(TypeApply(fun, args), _) => + val typeParams = fun.symbol.paramSymss.headOption.filter(_.forall(_.isTypeParam)) + typeParams.map((_, args.map(_.tpe))) + // val f: (j: "a") => Int + // f(@@) + case Apply(Select(v, StdNames.nme.apply), _) => + v.symbol.info match + case AppliedType(des, args) => + Some((des.typeSymbol.typeParams, args)) + case _ => None + case _ => None + ind = typeParams.indexOf(t.symbol) + tpe <- args.get(ind) + if !tpe.isErroneous + } yield tpe + case Some(tpe) => Some(tpe) + case _ => None + case _ => None + diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index a8ab7af0d147..85de8e7d8439 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -30,6 +30,7 @@ import scala.meta.pc.{PcSymbolInformation as IPcSymbolInformation} import dotty.tools.dotc.reporting.StoreReporter import dotty.tools.pc.completions.CompletionProvider +import dotty.tools.pc.InferExpectedType import dotty.tools.pc.completions.OverrideCompletions import dotty.tools.pc.buildinfo.BuildInfo @@ -198,6 +199,15 @@ case class ScalaPresentationCompiler( .asJava } + def inferExpectedType(params: OffsetParams): CompletableFuture[ju.Optional[String]] = + compilerAccess.withInterruptableCompiler(Some(params))( + Optional.empty(), + params.token, + ) { access => + val driver = access.compiler() + new InferExpectedType(search, driver, params).infer().asJava + } + def shutdown(): Unit = compilerAccess.shutdown() diff --git a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala index dd2fb3107c49..8ff11694ff1c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala @@ -412,4 +412,7 @@ object InteractiveEnrichments extends CommonMtagsEnrichments: RefinedType(parent.dealias, name, refinedInfo.deepDealias) case dealised => dealised + extension[T] (list: List[T]) + def get(n: Int): Option[T] = if 0 <= n && n < list.size then Some(list(n)) else None + end InteractiveEnrichments diff --git a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala new file mode 100644 index 000000000000..3c40ee075a4c --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala @@ -0,0 +1,292 @@ +package dotty.tools.pc.tests + +import scala.language.unsafeNulls +import dotty.tools.pc.base.BasePCSuite +import scala.meta.internal.metals.CompilerOffsetParams +import java.nio.file.Paths +import scala.meta.internal.metals.EmptyCancelToken +import dotty.tools.pc.ScalaPresentationCompiler +import scala.meta.internal.mtags.CommonMtagsEnrichments.* + +import org.junit.Test +import org.junit.Ignore + +class InferExpectedTypeSuite extends BasePCSuite: + def check( + original: String, + expectedType: String, + fileName: String = "A.scala" + ): Unit = + presentationCompiler.restart() + val (code, offset) = params(original.replace("@@", "CURSOR@@"), fileName) + val offsetParams = 
CompilerOffsetParams( + Paths.get(fileName).toUri(), + code, + offset, + EmptyCancelToken + ) + presentationCompiler.asInstanceOf[ScalaPresentationCompiler].inferExpectedType(offsetParams).get().asScala match { + case Some(value) => assertNoDiff(value, expectedType) + case None => fail("Empty result.") + } + + @Test def basic = + check( + """|def doo: Double = @@ + |""".stripMargin, + """|Double + |""".stripMargin + ) + + @Test def `basic-param` = + check( + """|def paint(c: Int) = ??? + |val _ = paint(@@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `type-ascription` = + check( + """|def doo = (@@ : Double) + |""".stripMargin, + """|Double + |""".stripMargin + ) + + @Ignore("Not handled correctly.") + @Test def list = + check( + """|val i: List[Int] = List(@@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `list-singleton` = + check( + """|val i: List["foo"] = List("@@") + |""".stripMargin, + """|"foo" + |""".stripMargin + ) + + @Test def option = + check( + """|val i: Option[Int] = Option(@@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + +// some structures + @Test def `with-block` = + check( + """|def c: Double = + | @@ + |""".stripMargin, + """|Double + |""".stripMargin + ) + + @Test def `if-statement` = + check( + """|def c(shouldBeBlue: Boolean): Int = + | if(shouldBeBlue) @@ + | else 2 + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `if-statement-2` = + check( + """|def c(shouldBeBlue: Boolean): Int = + | if(shouldBeBlue) 1 + | else @@ + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `if-statement-3` = + check( + """|def c(shouldBeBlue: Boolean): Int = + | if(@@) 3 + | else 2 + |""".stripMargin, + """|Boolean + |""".stripMargin + ) + + @Test def `try` = + check( + """|val _: Int = + | try { + | @@ + | } catch { + | case _ => + | } + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `try-catch` = + check( + """|val _: Int = + | try { + | } catch { + | case _ => @@ + | } + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `if-condition` = + check( + """|val _ = if @@ then 1 else 2 + |""".stripMargin, + """|Boolean + |""".stripMargin + ) + + @Test def `inline-if` = + check( + """|inline def o: Int = inline if ??? then @@ else ??? + |""".stripMargin, + """|Int + |""".stripMargin + ) + +// pattern matching + + @Test def `pattern-match` = + check( + """|val _ = + | List(1) match + | case @@ + |""".stripMargin, + """|List[Int] + |""".stripMargin + ) + + @Test def bind = + check( + """|val _ = + | List(1) match + | case name @ @@ + |""".stripMargin, + """|List[Int] + |""".stripMargin + ) + + @Test def alternative = + check( + """|val _ = + | List(1) match + | case Nil | @@ + |""".stripMargin, + """|List[Int] + |""".stripMargin + ) + + @Ignore("Unapply is not handled correctly.") + @Test def unapply = + check( + """|val _ = + | List(1) match + | case @@ :: _ => + |""".stripMargin, + """|Int + |""".stripMargin + ) + +// generic functions + + @Test def `any-generic` = + check( + """|val _ : List[Int] = identity(@@) + |""".stripMargin, + """|List[Int] + |""".stripMargin + ) + + @Test def `eq-generic` = + check( + """|def eq[T](a: T, b: T): Boolean = ??? 
+ |val _ = eq(1, @@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Ignore("Generic functions are not handled correctly.") + @Test def flatmap = + check( + """|val _ : List[Int] = List().flatMap(_ => @@) + |""".stripMargin, + """|IterableOnce[Int] + |""".stripMargin + ) + + @Ignore("Generic functions are not handled correctly.") + @Test def `for-comprehension` = + check( + """|val _ : List[Int] = + | for { + | _ <- List("a", "b") + | } yield @@ + |""".stripMargin, + """|Int + |""".stripMargin + ) + +// bounds + @Ignore("Bounds are not handled correctly.") + @Test def any = + check( + """|trait Foo + |def foo[T](a: T): Boolean = ??? + |val _ = foo(@@) + |""".stripMargin, + """|<: Any + |""".stripMargin + ) + + @Ignore("Bounds are not handled correctly.") + @Test def `bounds-1` = + check( + """|trait Foo + |def foo[T <: Foo](a: Foo): Boolean = ??? + |val _ = foo(@@) + |""".stripMargin, + """|<: Foo + |""".stripMargin + ) + + @Ignore("Bounds are not handled correctly.") + @Test def `bounds-2` = + check( + """|trait Foo + |def foo[T :> Foo](a: Foo): Boolean = ??? + |val _ = foo(@@) + |""".stripMargin, + """|:> Foo + |""".stripMargin + ) + + @Ignore("Bounds are not handled correctly.") + @Test def `bounds-3` = + check( + """|trait A + |class B extends A + |class C extends B + |def roo[F >: C <: A](f: F) = ??? + |val kjk = roo(@@) + |""".stripMargin, + """|>: C <: A + |""".stripMargin + ) From da42229a8053bb86ccee3c0723f6105ea0748195 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sat, 17 Aug 2024 14:53:52 +0100 Subject: [PATCH 466/827] Add InferExpectedTypeSuite.map and fixup others --- .../tools/pc/tests/InferExpectedTypeSuite.scala | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala index 3c40ee075a4c..e296f9a70839 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala @@ -26,7 +26,7 @@ class InferExpectedTypeSuite extends BasePCSuite: EmptyCancelToken ) presentationCompiler.asInstanceOf[ScalaPresentationCompiler].inferExpectedType(offsetParams).get().asScala match { - case Some(value) => assertNoDiff(value, expectedType) + case Some(value) => assertNoDiff(expectedType, value) case None => fail("Empty result.") } @@ -232,6 +232,15 @@ class InferExpectedTypeSuite extends BasePCSuite: |""".stripMargin ) + @Ignore("Generic functions are not handled correctly.") + @Test def map = + check( + """|val _ : List[Int] = List().map(_ => @@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + @Ignore("Generic functions are not handled correctly.") @Test def `for-comprehension` = check( @@ -260,7 +269,7 @@ class InferExpectedTypeSuite extends BasePCSuite: @Test def `bounds-1` = check( """|trait Foo - |def foo[T <: Foo](a: Foo): Boolean = ??? + |def foo[T <: Foo](a: T): Boolean = ??? |val _ = foo(@@) |""".stripMargin, """|<: Foo @@ -271,7 +280,7 @@ class InferExpectedTypeSuite extends BasePCSuite: @Test def `bounds-2` = check( """|trait Foo - |def foo[T :> Foo](a: Foo): Boolean = ??? + |def foo[T >: Foo](a: T): Boolean = ??? 
|val _ = foo(@@) |""".stripMargin, """|:> Foo From 0b4e6771bad9f91f93180fd0bcd30ca05764928e Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sat, 17 Aug 2024 15:19:39 +0100 Subject: [PATCH 467/827] Fix InferExpectedTypeSuite.unapply --- .../src/main/dotty/tools/pc/InferExpectedType.scala | 6 ++++++ .../test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala | 1 - 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala b/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala index 85beb31d5d29..260a28392093 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala @@ -11,6 +11,8 @@ import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.Types.Type import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.typer.Applications.UnapplyArgs +import dotty.tools.dotc.util.NoSourcePosition import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.Spans.Span import dotty.tools.pc.IndexedContext @@ -89,6 +91,10 @@ object InterCompletionType: // val _: T = @@ // def _: T = @@ case (defn: ValOrDefDef) :: rest if !defn.tpt.tpe.isErroneous => Some(defn.tpt.tpe) + case UnApply(fun, _, pats) :: _ => + val ind = pats.indexWhere(_.span.contains(span)) + if ind < 0 then None + else Some(UnapplyArgs(fun.tpe.finalResultType, fun, pats, NoSourcePosition).argTypes(ind)) // f(@@) case (app: Apply) :: rest => val param = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala index e296f9a70839..3bc1964735e4 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala @@ -193,7 +193,6 @@ class InferExpectedTypeSuite extends BasePCSuite: |""".stripMargin ) - @Ignore("Unapply is not handled correctly.") @Test def unapply = check( """|val _ = From 0f827c6709034529d060bcc956949bd1123c5bdd Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 14 Aug 2024 14:33:10 +0200 Subject: [PATCH 468/827] [CI] Publish build scans to develocity.scala-lang.org --- .github/workflows/build-msi.yml | 2 ++ .github/workflows/build-sdk.yml | 4 +-- .github/workflows/ci.yaml | 3 ++- .github/workflows/dependency-graph.yml | 2 ++ .github/workflows/language-reference.yaml | 2 ++ .github/workflows/launchers.yml | 3 +++ .github/workflows/scaladoc.yaml | 1 + project/Build.scala | 20 +++++++++++++++ project/GithubEnv.scala | 30 +++++++++++++++++++++++ project/plugins.sbt | 2 ++ 10 files changed, 66 insertions(+), 3 deletions(-) create mode 100644 project/GithubEnv.scala diff --git a/.github/workflows/build-msi.yml b/.github/workflows/build-msi.yml index 8e3ac5eeb78b..8e7cf8a9ef47 100644 --- a/.github/workflows/build-msi.yml +++ b/.github/workflows/build-msi.yml @@ -29,6 +29,8 @@ jobs: cache: 'sbt' - name: Build MSI package run: sbt 'dist-win-x86_64/Windows/packageBin' + env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} - name: Upload MSI Artifact uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/build-sdk.yml b/.github/workflows/build-sdk.yml index 0233403894fb..b2af623d731a 100644 --- a/.github/workflows/build-sdk.yml +++ b/.github/workflows/build-sdk.yml @@ -52,6 +52,8 @@ jobs: mac-aarch64-id : ${{ 
steps.mac-aarch64.outputs.artifact-id }} win-x86_64-id : ${{ steps.win-x86_64.outputs.artifact-id }} win-x86_64-digest: ${{ steps.win-x86_64-digest.outputs.digest }} + env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} steps: - uses: actions/checkout@v4 - uses: actions/setup-java@v4 @@ -112,5 +114,3 @@ jobs: run : | curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -o artifact.zip -L https://api.github.com/repos/scala/scala3/actions/artifacts/${{ steps.win-x86_64.outputs.artifact-id }}/zip echo "digest=$(sha256sum artifact.zip | cut -d " " -f 1)" >> "$GITHUB_OUTPUT" - - \ No newline at end of file diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d4583847c438..0ce994b12e65 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -28,6 +28,7 @@ on: env: DOTTY_CI_RUN: true + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} # In this file, we set `--cpu-shares 4096` on every job. This might seem useless # since it means that every container has the same weight which should be @@ -1013,7 +1014,7 @@ jobs: uses: ./.github/workflows/build-msi.yml if : github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]') # TODO: ADD A JOB THAT DEPENDS ON THIS TO TEST THE MSI - + build-sdk-package: uses: ./.github/workflows/build-sdk.yml with: diff --git a/.github/workflows/dependency-graph.yml b/.github/workflows/dependency-graph.yml index d4be398148c7..35af4fa0526d 100644 --- a/.github/workflows/dependency-graph.yml +++ b/.github/workflows/dependency-graph.yml @@ -10,3 +10,5 @@ jobs: steps: - uses: actions/checkout@v4 - uses: scalacenter/sbt-dependency-submission@v3 + env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} diff --git a/.github/workflows/language-reference.yaml b/.github/workflows/language-reference.yaml index 786785eaa4a2..7f87b4a453ef 100644 --- a/.github/workflows/language-reference.yaml +++ b/.github/workflows/language-reference.yaml @@ -43,6 +43,8 @@ jobs: ./project/scripts/sbt "scaladoc/generateReferenceDocumentation --no-regenerate-expected-links" ./project/scripts/docsLinksStability ./scaladoc/output/reference ./project/scripts/expected-links/reference-expected-links.txt cd .. 
+ env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} - name: Push changes to scala3-reference-docs if: github.event_name == 'push' diff --git a/.github/workflows/launchers.yml b/.github/workflows/launchers.yml index 036b4f2966e8..399dd34cef3b 100644 --- a/.github/workflows/launchers.yml +++ b/.github/workflows/launchers.yml @@ -3,6 +3,9 @@ on: pull_request: workflow_dispatch: +env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} + jobs: linux-x86_64: name: Deploy and Test on Linux x64 architecture diff --git a/.github/workflows/scaladoc.yaml b/.github/workflows/scaladoc.yaml index 98ce94718fe5..4f6f5bbfe2fb 100644 --- a/.github/workflows/scaladoc.yaml +++ b/.github/workflows/scaladoc.yaml @@ -16,6 +16,7 @@ jobs: build: env: AZURE_STORAGE_SAS_TOKEN: ${{ secrets.AZURE_STORAGE_SAS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} runs-on: ubuntu-latest if: "github.event_name == 'merge_group' || ( github.event_name == 'pull_request' diff --git a/project/Build.scala b/project/Build.scala index 54ac84734486..db0949c46b50 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -9,6 +9,7 @@ import sbt.* import complete.DefaultParsers._ import pl.project13.scala.sbt.JmhPlugin import pl.project13.scala.sbt.JmhPlugin.JmhKeys.Jmh +import com.gradle.develocity.agent.sbt.DevelocityPlugin.autoImport._ import com.typesafe.sbt.packager.Keys._ import com.typesafe.sbt.packager.MappingsHelper.directory import com.typesafe.sbt.packager.universal.UniversalPlugin @@ -268,6 +269,25 @@ object Build { // enable verbose exception messages for JUnit (Test / testOptions) += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-s"), + + // Configuration to publish build scans to develocity.scala-lang.org + develocityConfiguration := { + val isInsideCI = insideCI.value + val previousConfig = develocityConfiguration.value + val previousBuildScan = previousConfig.buildScan + previousConfig + .withProjectId(ProjectId("scala3")) + .withServer(previousConfig.server.withUrl(Some(url("https://develocity.scala-lang.org")))) + .withBuildScan( + previousBuildScan + .withPublishing(Publishing.onlyIf(_.authenticated)) + .withBackgroundUpload(!isInsideCI) + .tag(if (isInsideCI) "CI" else "Local") + .withLinks(previousBuildScan.links ++ GithubEnv.develocityLinks) + .withValues(previousBuildScan.values ++ GithubEnv.develocityValues) + .withObfuscation(previousBuildScan.obfuscation.withIpAddresses(_.map(_ => "0.0.0.0"))) + ) + } ) // Settings shared globally (scoped in Global). 
Used in build.sbt diff --git a/project/GithubEnv.scala b/project/GithubEnv.scala new file mode 100644 index 000000000000..a5246f36028c --- /dev/null +++ b/project/GithubEnv.scala @@ -0,0 +1,30 @@ +import scala.util.Properties +import sbt.url +import java.net.URL + + +// https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/variables#default-environment-variables +object GithubEnv { + lazy val repositoryVar: Option[(String, String)] = envVar("GITHUB_REPOSITORY") + lazy val runIdVar: Option[(String, String)] = envVar("GITHUB_RUN_ID") + lazy val shaVar: Option[(String, String)] = envVar("GITHUB_SHA") + lazy val workflowVar: Option[(String, String)] = envVar("GITHUB_WORKFLOW") + + lazy val runUrl: Option[(String, URL)] = + for { + (_, repository) <- repositoryVar + (_, runId) <- runIdVar + } yield "GITHUB_RUN" -> url(s"https://github.com/$repository/actions/runs/$runId") + lazy val treeUrl: Option[(String, URL)] = + for { + (_, repository) <- repositoryVar + (_, sha) <- shaVar + } yield "GITHUB_TREE" -> url(s"https://github.com/$repository/tree/$sha") + + + def develocityValues: Seq[(String, String)] = repositoryVar.toSeq ++ shaVar ++ workflowVar + def develocityLinks: Seq[(String, URL)] = runUrl.toSeq ++ treeUrl + + private def envVar(key: String): Option[(String, String)] = + Properties.envOrNone(key).map(key -> _) +} diff --git a/project/plugins.sbt b/project/plugins.sbt index bb0693ced132..a5944c60633a 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -21,3 +21,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0") addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.10.0") + +addSbtPlugin("com.gradle" % "sbt-develocity" % "1.0.1") From 44bdfedb238a9529e1d1f2eecba6863f76133004 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Thu, 15 Aug 2024 11:53:43 +0200 Subject: [PATCH 469/827] Add develocity badge in README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7410c914a898..2146cda7be23 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ Dotty ===== [![Dotty CI](https://github.com/scala/scala3/workflows/Dotty/badge.svg?branch=main)](https://github.com/scala/scala3/actions?query=branch%3Amain) [![Join the chat at https://discord.com/invite/scala](https://img.shields.io/discord/632150470000902164)](https://discord.com/invite/scala) +[![Revved up by Develocity](https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A)](https://develocity.scala-lang.org) * [Documentation](https://docs.scala-lang.org/scala3/) From 03fc304e5f177ac79b740190702f930fea49623e Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Mon, 19 Aug 2024 20:41:52 +0200 Subject: [PATCH 470/827] Add migration rewrite for non-named arguments in Java annotations --- .../tools/dotc/config/MigrationVersion.scala | 2 ++ .../dotty/tools/dotc/reporting/messages.scala | 2 ++ .../src/dotty/tools/dotc/typer/Checking.scala | 16 ++++++++++++++-- .../test/dotty/tools/dotc/CompilationTests.scala | 1 + tests/neg/i20554-a.check | 2 ++ tests/neg/i20554-b.check | 1 + .../MyAnnotation.java | 8 ++++++++ .../annotation-named-pararamters/test.check | 6 ++++++ .../annotation-named-pararamters/test.scala | 6 ++++++ 9 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 tests/rewrites/annotation-named-pararamters/MyAnnotation.java create mode 100644 tests/rewrites/annotation-named-pararamters/test.check create mode 100644 
tests/rewrites/annotation-named-pararamters/test.scala diff --git a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala index 4dd9d065395b..4a16111e76a5 100644 --- a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala @@ -43,6 +43,8 @@ object MigrationVersion: val WithOperator = MigrationVersion(`3.4`, future) val FunctionUnderscore = MigrationVersion(`3.4`, future) + val NonNamedArgumentInJavaAnnotation = MigrationVersion(`3.6`, `3.6`) + val ImportWildcard = MigrationVersion(future, future) val ImportRename = MigrationVersion(future, future) val ParameterEnclosedByParenthesis = MigrationVersion(future, future) diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 38b49e63c685..91642ca51bc5 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -35,6 +35,7 @@ import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.SourcePosition import scala.jdk.CollectionConverters.* import dotty.tools.dotc.util.SourceFile +import dotty.tools.dotc.config.SourceVersion import DidYouMean.* /** Messages @@ -3293,6 +3294,7 @@ class NonNamedArgumentInJavaAnnotation(using Context) extends SyntaxMsg(NonNamed override protected def msg(using Context): String = "Named arguments are required for Java defined annotations" + + Message.rewriteNotice("This", version = SourceVersion.`3.6-migration`) override protected def explain(using Context): String = i"""Starting from Scala 3.6.0, named arguments are required for Java defined annotations. diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index aeda38cc7646..efcdad2b427f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -891,14 +891,26 @@ object Checking { def annotationHasValueField: Boolean = sym.info.decls.exists(_.name == nme.value) + lazy val annotationFieldNamesByIdx: Map[Int, TermName] = + sym.info.decls.filter: decl => + decl.is(Method) && decl.name != nme.CONSTRUCTOR + .map(_.name.toTermName) + .zipWithIndex + .map(_.swap) + .toMap + annot match case untpd.Apply(fun, List(param)) if !param.isInstanceOf[untpd.NamedArg] && annotationHasValueField => untpd.cpy.Apply(annot)(fun, List(untpd.cpy.NamedArg(param)(nme.value, param))) case untpd.Apply(_, params) => for - param <- params + (param, paramIdx) <- params.zipWithIndex if !param.isInstanceOf[untpd.NamedArg] - do report.error(NonNamedArgumentInJavaAnnotation(), param) + do + report.errorOrMigrationWarning(NonNamedArgumentInJavaAnnotation(), param, MigrationVersion.NonNamedArgumentInJavaAnnotation) + if MigrationVersion.NonNamedArgumentInJavaAnnotation.needsPatch then + annotationFieldNamesByIdx.get(paramIdx).foreach: paramName => + patch(param.span, untpd.cpy.NamedArg(param)(paramName, param).show) annot case _ => annot end checkNamedArgumentForJavaAnnotation diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 2b9ebd2c69d1..dd722403723a 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -76,6 +76,7 @@ class CompilationTests { compileFile("tests/rewrites/i17187.scala", unindentOptions.and("-rewrite")), 
compileFile("tests/rewrites/i17399.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i20002.scala", defaultOptions.and("-indent", "-rewrite")), + compileDir("tests/rewrites/annotation-named-pararamters", defaultOptions.and("-rewrite", "-source:3.6-migration")), ).checkRewrites() } diff --git a/tests/neg/i20554-a.check b/tests/neg/i20554-a.check index 5cfa4e3faaad..b223cba32f77 100644 --- a/tests/neg/i20554-a.check +++ b/tests/neg/i20554-a.check @@ -2,6 +2,7 @@ 3 |@Annotation(3, 4) // error // error : Java defined annotation should be called with named arguments | ^ | Named arguments are required for Java defined annotations + | This can be rewritten automatically under -rewrite -source 3.6-migration. |--------------------------------------------------------------------------------------------------------------------- | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -23,6 +24,7 @@ 3 |@Annotation(3, 4) // error // error : Java defined annotation should be called with named arguments | ^ | Named arguments are required for Java defined annotations + | This can be rewritten automatically under -rewrite -source 3.6-migration. |--------------------------------------------------------------------------------------------------------------------- | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/neg/i20554-b.check b/tests/neg/i20554-b.check index 2395554a7485..5e5119e043fe 100644 --- a/tests/neg/i20554-b.check +++ b/tests/neg/i20554-b.check @@ -2,6 +2,7 @@ 3 |@SimpleAnnotation(1) // error: the parameters is not named 'value' | ^ | Named arguments are required for Java defined annotations + | This can be rewritten automatically under -rewrite -source 3.6-migration. 
|--------------------------------------------------------------------------------------------------------------------- | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/rewrites/annotation-named-pararamters/MyAnnotation.java b/tests/rewrites/annotation-named-pararamters/MyAnnotation.java new file mode 100644 index 000000000000..a7e247c8083c --- /dev/null +++ b/tests/rewrites/annotation-named-pararamters/MyAnnotation.java @@ -0,0 +1,8 @@ +import java.util.concurrent.TimeUnit; + +public @interface MyAnnotation { + public TimeUnit D() default TimeUnit.DAYS; + TimeUnit C() default TimeUnit.DAYS; + String A() default ""; + public String B() default ""; +} \ No newline at end of file diff --git a/tests/rewrites/annotation-named-pararamters/test.check b/tests/rewrites/annotation-named-pararamters/test.check new file mode 100644 index 000000000000..186a4fbf7974 --- /dev/null +++ b/tests/rewrites/annotation-named-pararamters/test.check @@ -0,0 +1,6 @@ +import java.util.concurrent.TimeUnit +@MyAnnotation() class Test1 +@MyAnnotation(D = TimeUnit.DAYS) class Test2 +@MyAnnotation(D = TimeUnit.DAYS, C = TimeUnit.DAYS) class Test3 +@MyAnnotation(D = TimeUnit.DAYS, C = TimeUnit.DAYS, A = "foo") class Test4 +@MyAnnotation(D = TimeUnit.DAYS, C = TimeUnit.DAYS, A = "foo", B = "bar") class Test5 diff --git a/tests/rewrites/annotation-named-pararamters/test.scala b/tests/rewrites/annotation-named-pararamters/test.scala new file mode 100644 index 000000000000..85cf34ab976b --- /dev/null +++ b/tests/rewrites/annotation-named-pararamters/test.scala @@ -0,0 +1,6 @@ +import java.util.concurrent.TimeUnit +@MyAnnotation() class Test1 +@MyAnnotation(TimeUnit.DAYS) class Test2 +@MyAnnotation(TimeUnit.DAYS, TimeUnit.DAYS) class Test3 +@MyAnnotation(TimeUnit.DAYS, TimeUnit.DAYS, "foo") class Test4 +@MyAnnotation(TimeUnit.DAYS, TimeUnit.DAYS, "foo", "bar") class Test5 \ No newline at end of file From a53c8cf4a180fe313b7652b9d119b567ed9f81a1 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Mon, 19 Aug 2024 23:26:46 +0100 Subject: [PATCH 471/827] Refactor MigrationVersion to be an enum --- .../tools/dotc/config/MigrationVersion.scala | 59 ++++++++----------- 1 file changed, 24 insertions(+), 35 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala index 4dd9d065395b..83d750ca7c1e 100644 --- a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala @@ -6,46 +6,35 @@ import SourceVersion.* import Feature.* import core.Contexts.Context -class MigrationVersion( - val warnFrom: SourceVersion, - val errorFrom: SourceVersion): - require(warnFrom.ordinal <= errorFrom.ordinal) - - def needsPatch(using Context): Boolean = - sourceVersion.isMigrating && sourceVersion.isAtLeast(warnFrom) - - def patchFrom: SourceVersion = - warnFrom.prevMigrating - -object MigrationVersion: - - val Scala2to3 = MigrationVersion(`3.0`, `3.0`) - - val OverrideValParameter = MigrationVersion(`3.0`, future) - +enum MigrationVersion(val warnFrom: SourceVersion, val errorFrom: SourceVersion): + case Scala2to3 extends MigrationVersion(`3.0`, `3.0`) + case OverrideValParameter extends MigrationVersion(`3.0`, future) // we tighten for-comprehension without `case` to error in 3.4, // but we keep pat-defs as warnings for now ("@unchecked"), // until we propose an alternative 
way to assert exhaustivity to the typechecker. - val ForComprehensionPatternWithoutCase = MigrationVersion(`3.2`, `3.4`) - val ForComprehensionUncheckedPathDefs = MigrationVersion(`3.2`, future) - - val NonLocalReturns = MigrationVersion(`3.2`, future) - - val AscriptionAfterPattern = MigrationVersion(`3.3`, future) + case ForComprehensionPatternWithoutCase extends MigrationVersion(`3.2`, `3.4`) + case ForComprehensionUncheckedPathDefs extends MigrationVersion(`3.2`, future) + + case NonLocalReturns extends MigrationVersion(`3.2`, future) + case AscriptionAfterPattern extends MigrationVersion(`3.3`, future) + case ExplicitContextBoundArgument extends MigrationVersion(`3.4`, `3.5`) + case AlphanumericInfix extends MigrationVersion(`3.4`, future) + case RemoveThisQualifier extends MigrationVersion(`3.4`, future) + case UninitializedVars extends MigrationVersion(`3.4`, future) + case VarargSpliceAscription extends MigrationVersion(`3.4`, future) + case WildcardType extends MigrationVersion(`3.4`, future) + case WithOperator extends MigrationVersion(`3.4`, future) + case FunctionUnderscore extends MigrationVersion(`3.4`, future) + case ImportWildcard extends MigrationVersion(future, future) + case ImportRename extends MigrationVersion(future, future) + case ParameterEnclosedByParenthesis extends MigrationVersion(future, future) + case XmlLiteral extends MigrationVersion(future, future) - val ExplicitContextBoundArgument = MigrationVersion(`3.4`, `3.5`) + require(warnFrom.ordinal <= errorFrom.ordinal) - val AlphanumericInfix = MigrationVersion(`3.4`, future) - val RemoveThisQualifier = MigrationVersion(`3.4`, future) - val UninitializedVars = MigrationVersion(`3.4`, future) - val VarargSpliceAscription = MigrationVersion(`3.4`, future) - val WildcardType = MigrationVersion(`3.4`, future) - val WithOperator = MigrationVersion(`3.4`, future) - val FunctionUnderscore = MigrationVersion(`3.4`, future) + def needsPatch(using Context): Boolean = + sourceVersion.isMigrating && sourceVersion.isAtLeast(warnFrom) - val ImportWildcard = MigrationVersion(future, future) - val ImportRename = MigrationVersion(future, future) - val ParameterEnclosedByParenthesis = MigrationVersion(future, future) - val XmlLiteral = MigrationVersion(future, future) + def patchFrom: SourceVersion = warnFrom.prevMigrating end MigrationVersion From 90eedc5dbb0059839a61ba1208d07ec90f84d608 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Tue, 20 Aug 2024 12:54:11 +0200 Subject: [PATCH 472/827] Replace usage of printer to create patch with a simple string prefix Co-authored-by: Hamza Remmal --- compiler/src/dotty/tools/dotc/typer/Checking.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index efcdad2b427f..7f5ac955fa12 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -910,7 +910,7 @@ object Checking { report.errorOrMigrationWarning(NonNamedArgumentInJavaAnnotation(), param, MigrationVersion.NonNamedArgumentInJavaAnnotation) if MigrationVersion.NonNamedArgumentInJavaAnnotation.needsPatch then annotationFieldNamesByIdx.get(paramIdx).foreach: paramName => - patch(param.span, untpd.cpy.NamedArg(param)(paramName, param).show) + patch(param.span.startPos, s"$paramName = ") annot case _ => annot end checkNamedArgumentForJavaAnnotation From c1cef24d5b53ab4f5792cd737b6329c1d92dbd71 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 15 
Jul 2024 13:37:14 +0200 Subject: [PATCH 473/827] Enable betterMatchTypeExtractors in >= 3.6 --- .../src/dotty/tools/dotc/config/Feature.scala | 2 -- .../dotty/tools/dotc/core/TypeComparer.scala | 12 +++---- .../runtime/stdLibPatches/language.scala | 1 + tests/neg/mt-deskolemize-2.scala | 2 -- tests/pos/20538.scala | 32 +++++++++++++++++++ tests/pos/20538b.scala | 19 +++++++++++ tests/pos/mt-deskolemize.scala | 2 -- 7 files changed, 57 insertions(+), 13 deletions(-) create mode 100644 tests/pos/20538.scala create mode 100644 tests/pos/20538b.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index fa82f14a81fe..8b9a64924ace 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -133,8 +133,6 @@ object Feature: def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) - def betterMatchTypeExtractorsEnabled(using Context) = enabled(betterMatchTypeExtractors) - def quotedPatternsWithPolymorphicFunctionsEnabled(using Context) = enabled(quotedPatternsWithPolymorphicFunctions) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 0f74ca40843b..646874a7e5c1 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -10,7 +10,7 @@ import TypeOps.refineUsingParent import collection.mutable import util.{Stats, NoSourcePosition, EqHashMap} import config.Config -import config.Feature.{betterMatchTypeExtractorsEnabled, migrateTo3, sourceVersion} +import config.Feature.{migrateTo3, sourceVersion} import config.Printers.{subtyping, gadts, matchTypes, capt, noPrinter} import config.SourceVersion import TypeErasure.{erasedLub, erasedGlb} @@ -3621,10 +3621,8 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { case MatchTypeCasePattern.TypeMemberExtractor(typeMemberName, capture) => /** Try to remove references to `skolem` from a type in accordance with the spec. * - * If `betterMatchTypeExtractorsEnabled` is enabled then references - * to `skolem` occuring are avoided by following aliases and - * singletons, otherwise no attempt made to avoid references to - * `skolem`. + * References to `skolem` occuring are avoided by following aliases and + * singletons. * * If any reference to `skolem` remains in the result type, * `refersToSkolem` is set to true. @@ -3638,7 +3636,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { case `skolem` => refersToSkolem = true tp - case tp: NamedType if betterMatchTypeExtractorsEnabled => + case tp: NamedType => val pre1 = apply(tp.prefix) if refersToSkolem then tp match @@ -3656,7 +3654,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { tp.derivedSelect(pre1) else tp.derivedSelect(pre1) - case tp: LazyRef if betterMatchTypeExtractorsEnabled => + case tp: LazyRef => // By default, TypeMap maps LazyRefs lazily. We need to // force it for `refersToSkolem` to be correctly set. 
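(Editorial aside, a minimal sketch in the spirit of the mt-deskolemize and i20538 tests touched by this patch — trait and alias names here are illustrative, not taken from the patch. With betterMatchTypeExtractors on by default, a type-member extractor in a match type follows aliases and singletons during reduction without the experimental language import:

    trait Expr:
      type Value

    // Extracts the `Value` member of the scrutinee; previously this required
    // `import scala.language.experimental.betterMatchTypeExtractors`.
    type ExtractValue[E <: Expr] = E match
      case Expr { type Value = v } => v
)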
apply(tp.ref) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 3d71c0da1481..547710d55293 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -124,6 +124,7 @@ object language: * @see [[https://github.com/scala/improvement-proposals/pull/84]] */ @compileTimeOnly("`betterMatchTypeExtractors` can only be used at compile time in import statements") + @deprecated("The experimental.betterMatchTypeExtractors language import is no longer needed since the feature is now standard. It now has no effect, including when setting an older source version.", since = "3.6") object betterMatchTypeExtractors /** Experimental support for quote pattern matching with polymorphic functions diff --git a/tests/neg/mt-deskolemize-2.scala b/tests/neg/mt-deskolemize-2.scala index 90d506a42e6f..505e47637ac4 100644 --- a/tests/neg/mt-deskolemize-2.scala +++ b/tests/neg/mt-deskolemize-2.scala @@ -1,5 +1,3 @@ -//> using options -language:experimental.betterMatchTypeExtractors - trait Expr: type Value object Expr: diff --git a/tests/pos/20538.scala b/tests/pos/20538.scala new file mode 100644 index 000000000000..a03bf98f6ac3 --- /dev/null +++ b/tests/pos/20538.scala @@ -0,0 +1,32 @@ +trait Column: + type T + type F[X] + type Q = F[T] + +class varchar extends Column: + type T = String + +trait notnull extends Column: + type F[X] = X + +object Error: + + val firstName = new varchar with notnull + val lastName = new varchar with notnull + + val relation = (firstName, lastName) + + type RelationTypes = Tuple.InverseMap[relation.type, [X] =>> Column { type Q = X }] + + summon[RelationTypes =:= (String, String)] + +object Works: + + object firstName extends varchar with notnull + object lastName extends varchar with notnull + + val relation = (firstName, lastName) + + type RelationTypes = Tuple.InverseMap[relation.type, [X] =>> Column { type Q = X }] + + summon[RelationTypes =:= (String, String)] diff --git a/tests/pos/20538b.scala b/tests/pos/20538b.scala new file mode 100644 index 000000000000..d6d176f8a10d --- /dev/null +++ b/tests/pos/20538b.scala @@ -0,0 +1,19 @@ +trait A: + type T + type U = T + +trait B extends A: + type T = String + +object C extends B + + +type F[X] = A { type U = X } // works when `U` is replaced with `T` + +type InvF[Y] = Y match + case F[x] => x + + +object Test: + summon[InvF[C.type] =:= String] // ok + summon[InvF[B] =:= String] // error: selector B does not uniquely determine parameter x diff --git a/tests/pos/mt-deskolemize.scala b/tests/pos/mt-deskolemize.scala index abd61d9d55e6..34f38289b24d 100644 --- a/tests/pos/mt-deskolemize.scala +++ b/tests/pos/mt-deskolemize.scala @@ -1,5 +1,3 @@ -//> using options -language:experimental.betterMatchTypeExtractors - trait Expr: type Value From 53a40b49cae38de691ef50876a461dd0d8d64b1e Mon Sep 17 00:00:00 2001 From: Jan Chyb <48855024+jchyb@users.noreply.github.com> Date: Tue, 20 Aug 2024 15:18:11 +0200 Subject: [PATCH 474/827] Add regression test for i19675 (#21159) Originally fixed by #19926 Closes #19675 Even though this is a slower sbt scripted test, I think it's worth adding, since it showcases a different issue than what #19926 was fixing, and I do not believe it is reproducible in any way without a scala-2 dependency (so we cannot minimize it into regular compilation test). 
--- .../i19675/UnrelatedDeprecationWarning.scala | 22 +++++++++++++++++++ sbt-test/scala2-compat/i19675/build.sbt | 6 +++++ sbt-test/scala2-compat/i19675/test | 1 + 3 files changed, 29 insertions(+) create mode 100644 sbt-test/scala2-compat/i19675/UnrelatedDeprecationWarning.scala create mode 100644 sbt-test/scala2-compat/i19675/build.sbt create mode 100644 sbt-test/scala2-compat/i19675/test diff --git a/sbt-test/scala2-compat/i19675/UnrelatedDeprecationWarning.scala b/sbt-test/scala2-compat/i19675/UnrelatedDeprecationWarning.scala new file mode 100644 index 000000000000..da7585a5dab7 --- /dev/null +++ b/sbt-test/scala2-compat/i19675/UnrelatedDeprecationWarning.scala @@ -0,0 +1,22 @@ +import com.twitter.finagle.Thrift +import com.twitter.finagle.thrift.ThriftService +import scala.reflect.ClassTag + +class Minim { + trait Foo[A] + + object Foo { + inline def make[A]: Foo[A] = ??? + } + + final class Unrelated() + + object Unrelated { + val foo = Foo.make[Unrelated] + } + + object Main { + def foo[S <: ThriftService](using ClassTag[S]) = + Thrift.client.build[S]("asd") + } +} diff --git a/sbt-test/scala2-compat/i19675/build.sbt b/sbt-test/scala2-compat/i19675/build.sbt new file mode 100644 index 000000000000..819be2d87d58 --- /dev/null +++ b/sbt-test/scala2-compat/i19675/build.sbt @@ -0,0 +1,6 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +scalacOptions ++= Seq("-Wunused:imports", "-deprecation", "-Werror") +libraryDependencies ++= Seq( + "com.twitter" %% "finagle-thrift" % "24.2.0" +).map(_.cross(CrossVersion.for3Use2_13)) diff --git a/sbt-test/scala2-compat/i19675/test b/sbt-test/scala2-compat/i19675/test new file mode 100644 index 000000000000..73a68203f3f1 --- /dev/null +++ b/sbt-test/scala2-compat/i19675/test @@ -0,0 +1 @@ +> compile \ No newline at end of file From a722358a0944e788c8aef15dac0af19e811d4761 Mon Sep 17 00:00:00 2001 From: EnzeXing Date: Tue, 20 Aug 2024 12:52:07 -0400 Subject: [PATCH 475/827] Fix uninitializing fields when evaluating a cached constructor call --- .../src/dotty/tools/dotc/transform/init/Objects.scala | 1 - tests/init-global/pos/cache-constructor.scala | 9 +++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 tests/init-global/pos/cache-constructor.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 1ceb8d4472a3..892cd3ce8b06 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -950,7 +950,6 @@ class Objects(using Context @constructorOnly): val instance = OfClass(klass, outerWidened, ctor, args.map(_.value), envWidened) callConstructor(instance, ctor, args) - instance case ValueSet(values) => values.map(ref => instantiate(ref, klass, ctor, args)).join diff --git a/tests/init-global/pos/cache-constructor.scala b/tests/init-global/pos/cache-constructor.scala new file mode 100644 index 000000000000..f7af30f32516 --- /dev/null +++ b/tests/init-global/pos/cache-constructor.scala @@ -0,0 +1,9 @@ +class Bar: + var f: Int = 0 + +object A: + val b1 = new Bar() + val b2 = new Bar() + val b3 = new Bar() + b3.f = 1 + \ No newline at end of file From 36b49497186b50febf865a556d338d109f9027a6 Mon Sep 17 00:00:00 2001 From: EnzeXing Date: Tue, 20 Aug 2024 14:23:36 -0400 Subject: [PATCH 476/827] Address comment --- compiler/src/dotty/tools/dotc/transform/init/Objects.scala | 5 ++++- tests/init-global/pos/cache-constructor.scala | 1 - 2 files changed, 4 
insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 892cd3ce8b06..1050fbe85ef2 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -908,7 +908,10 @@ class Objects(using Context @constructorOnly): Bottom } - /** Handle new expression `new p.C(args)`. + /** + * Handle new expression `new p.C(args)`. + * The actual instance might be cached without running the constructor. + * See tests/init-global/pos/cache-constructor.scala * * @param outer The value for `p`. * @param klass The symbol of the class `C`. diff --git a/tests/init-global/pos/cache-constructor.scala b/tests/init-global/pos/cache-constructor.scala index f7af30f32516..87769fd5d78a 100644 --- a/tests/init-global/pos/cache-constructor.scala +++ b/tests/init-global/pos/cache-constructor.scala @@ -6,4 +6,3 @@ object A: val b2 = new Bar() val b3 = new Bar() b3.f = 1 - \ No newline at end of file From c8cb131a6fba8ce3a8de39855027a26c24f3f52c Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 21 Aug 2024 11:39:41 +0100 Subject: [PATCH 477/827] Update hamzaremmal/sdkman-release-action action --- .github/workflows/publish-sdkman.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index de12f81426b5..77bbebf3f846 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -46,7 +46,7 @@ jobs: - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: - - uses: hamzaremmal/sdkman-release-action@7e437233a6bd79bc4cb0fa9071b685e94bdfdba6 + - uses: hamzaremmal/sdkman-release-action@978b8cdb5f9c3b83ebdc45e0a1bf97bf17cc6280 with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} From 85cf4ab89229e786a0a4fc2bae49a7b226484195 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 21 Aug 2024 12:10:54 +0100 Subject: [PATCH 478/827] Update hamzaremmal/sdkman-release-action & hamzaremmal/sdkman-default-action action --- .github/workflows/publish-sdkman.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 77bbebf3f846..6f10ac128b6e 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -46,7 +46,7 @@ jobs: - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: - - uses: hamzaremmal/sdkman-release-action@978b8cdb5f9c3b83ebdc45e0a1bf97bf17cc6280 + - uses: hamzaremmal/sdkman-release-action@4cb6c8cf99cfdf0ed5de586d6b38500558737e65 with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} @@ -59,7 +59,7 @@ jobs: runs-on: ubuntu-latest needs: publish steps: - - uses: hamzaremmal/sdkman-default-action@866bc79fc5bd397eeb48f9cedda2f15221c8515d + - uses: hamzaremmal/sdkman-default-action@f312ff69dec7c4f83b060c3df90df7ed19e2d70e with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} From b34df4d8c45bb01df46754b900df276d9beb886b Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sat, 17 Aug 2024 15:57:24 +0100 Subject: [PATCH 479/827] Fix InferExpectedTypeSuite.list with Apply fixes --- .../dotty/tools/dotc/typer/Applications.scala | 10 ++-- .../dotty/tools/dotc/util/Signatures.scala | 2 +- 
.../pc/tests/InferExpectedTypeSuite.scala | 1 - .../signaturehelp/SignatureHelpSuite.scala | 54 ++++++++++++++++++- 4 files changed, 60 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 33ab9f210634..c6d8fd80fd60 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -571,7 +571,7 @@ trait Applications extends Compatibility { fail(TypeMismatch(methType.resultType, resultType, None)) // match all arguments with corresponding formal parameters - matchArgs(orderedArgs, methType.paramInfos, 0) + if success then matchArgs(orderedArgs, methType.paramInfos, 0) case _ => if (methType.isError) ok = false else fail(em"$methString does not take parameters") @@ -666,7 +666,7 @@ trait Applications extends Compatibility { * @param n The position of the first parameter in formals in `methType`. */ def matchArgs(args: List[Arg], formals: List[Type], n: Int): Unit = - if (success) formals match { + formals match { case formal :: formals1 => def checkNoVarArg(arg: Arg) = @@ -878,7 +878,9 @@ trait Applications extends Compatibility { init() def addArg(arg: Tree, formal: Type): Unit = - typedArgBuf += adapt(arg, formal.widenExpr) + val typedArg = adapt(arg, formal.widenExpr) + typedArgBuf += typedArg + ok = ok & !typedArg.tpe.isError def makeVarArg(n: Int, elemFormal: Type): Unit = { val args = typedArgBuf.takeRight(n).toList @@ -943,7 +945,7 @@ trait Applications extends Compatibility { var typedArgs = typedArgBuf.toList def app0 = cpy.Apply(app)(normalizedFun, typedArgs) // needs to be a `def` because typedArgs can change later val app1 = - if (!success || typedArgs.exists(_.tpe.isError)) app0.withType(UnspecifiedErrorType) + if !success then app0.withType(UnspecifiedErrorType) else { if isJavaAnnotConstr(methRef.symbol) then // #19951 Make sure all arguments are NamedArgs for Java annotations diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index ae6bc583bae8..3b45d8f2fa51 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -651,7 +651,7 @@ object Signatures { * * @param err The error message to inspect. * @param params The parameters that were given at the call site. - * @param alreadyCurried Index of paramss we are currently in. + * @param paramssIndex Index of paramss we are currently in. * * @return A pair composed of the index of the best alternative (0 if no alternatives * were found), and the list of alternatives. 
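(Editorial note, an illustration only — the snippets below are not part of the patch. The user-facing scenarios behind the test updates that follow are expected-type inference inside an argument list under construction, and signature help on an otherwise well-typed receiver:

    val xs: List[Int] = List()        // completing inside List(...) should now propose Int
    List[Int](1).lengthCompare(0)     // both lengthCompare overloads stay listed in signature help

With the Applications change above, an argument that fails to typecheck now marks the whole application as erroneous rather than continuing to match the remaining parameters; the @Ignore-d *_TypeMismatch variants added below track the cases where the receiver or expected type is itself in error.)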
diff --git a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala index 3bc1964735e4..94b0f92d0c7f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala @@ -55,7 +55,6 @@ class InferExpectedTypeSuite extends BasePCSuite: |""".stripMargin ) - @Ignore("Not handled correctly.") @Test def list = check( """|val i: List[Int] = List(@@) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala index 2b458ced9683..bd9f8edeef49 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala @@ -2,7 +2,7 @@ package dotty.tools.pc.tests.signaturehelp import dotty.tools.pc.base.BaseSignatureHelpSuite -import org.junit.Test +import org.junit.{ Ignore, Test } class SignatureHelpSuite extends BaseSignatureHelpSuite: @@ -253,6 +253,20 @@ class SignatureHelpSuite extends BaseSignatureHelpSuite: ) @Test def `tparam5` = + check( + """ + |object a { + | List[Int](1).lengthCompare(@@) + |} + """.stripMargin, + """|lengthCompare(len: Int): Int + | ^^^^^^^^ + |lengthCompare(that: Iterable[?]): Int + |""".stripMargin + ) + + @Ignore("See if applyCallInfo can still inform on lengthCompare's sig, even if recv is in error") + @Test def `tparam5_TypeMismatch` = check( """ |object a { @@ -265,6 +279,31 @@ class SignatureHelpSuite extends BaseSignatureHelpSuite: |""".stripMargin ) + @Test def `tparam5_nonvarargs` = + check( + """ + |object a { + | Option[Int](1).getOrElse(@@) + |} + """.stripMargin, + """|getOrElse[B >: Int](default: => B): B + | ^^^^^^^^^^^^^ + |""".stripMargin + ) + + @Ignore("Similar to `tparam5_TypeMismatch`") + @Test def `tparam5_nonvarargs_TypeMismatch` = + check( + """ + |object a { + | Option[String](1).getOrElse(@@) + |} + """.stripMargin, + """|getOrElse[B >: String](default: => B): B + | ^^^^^^^^^^^^^ + |""".stripMargin + ) + @Test def `error1` = check( """ @@ -547,6 +586,19 @@ class SignatureHelpSuite extends BaseSignatureHelpSuite: ) @Test def `last-arg1` = + check( + """ + |object A { + | List[Int](1).map(a => @@) + |} + """.stripMargin, + """|map[B](f: Int => B): List[B] + | ^^^^^^^^^^^ + |""".stripMargin + ) + + @Ignore("Similar to `tparam5_TypeMismatch`") + @Test def `last-arg1_TypeMismatch` = check( """ |object A { From 24051098d61474b68a7b8ef97b3b5caa0ece9678 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sat, 17 Aug 2024 17:07:47 +0100 Subject: [PATCH 480/827] Fix InferExpectedTypeSuite.bounds expectations.. --- .../tools/pc/tests/InferExpectedTypeSuite.scala | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala index 94b0f92d0c7f..1a04c5d8c864 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala @@ -252,40 +252,36 @@ class InferExpectedTypeSuite extends BasePCSuite: ) // bounds - @Ignore("Bounds are not handled correctly.") @Test def any = check( """|trait Foo |def foo[T](a: T): Boolean = ??? 
|val _ = foo(@@) |""".stripMargin, - """|<: Any + """|Any |""".stripMargin ) - @Ignore("Bounds are not handled correctly.") @Test def `bounds-1` = check( """|trait Foo |def foo[T <: Foo](a: T): Boolean = ??? |val _ = foo(@@) |""".stripMargin, - """|<: Foo + """|Foo |""".stripMargin ) - @Ignore("Bounds are not handled correctly.") @Test def `bounds-2` = check( """|trait Foo |def foo[T >: Foo](a: T): Boolean = ??? |val _ = foo(@@) |""".stripMargin, - """|:> Foo - |""".stripMargin + """|Foo + |""".stripMargin // ideally Any (maybe?) ) - @Ignore("Bounds are not handled correctly.") @Test def `bounds-3` = check( """|trait A @@ -294,6 +290,6 @@ class InferExpectedTypeSuite extends BasePCSuite: |def roo[F >: C <: A](f: F) = ??? |val kjk = roo(@@) |""".stripMargin, - """|>: C <: A - |""".stripMargin + """|C + |""".stripMargin // ideally A ) From 5aaea2f6e847be0881f2116cc206e2057d64b524 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sat, 17 Aug 2024 12:01:47 +0100 Subject: [PATCH 481/827] Print failures in boundsViolations, via TypeComparer.explaining TypeComparer.explaining is like TypeComparer.explained, but instead of just returning the trace, returns the result, still allowing the trace to be accessed via .lastTrace, as exemplified by implementing TypeComparer.explained in terms of TypeComparer.explaining. Also add, but leave commented out the call of, a trace.dumpStack, which is like Thread.dumpStack(), but outputing to System.out, like all our tracing does - so the two don't interact when unbuffering onto the terminal. Also, we can do customisations like filtering out stack elements, limiting the stack. --- .../dotty/tools/dotc/core/TypeComparer.scala | 12 ++++++++---- .../src/dotty/tools/dotc/core/TypeOps.scala | 19 +++++++++++++++---- .../dotty/tools/dotc/reporting/trace.scala | 12 ++++++++++++ 3 files changed, 35 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 0f74ca40843b..5aab2e277693 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3268,9 +3268,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** The trace of comparison operations when performing `op` */ def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:", short: Boolean)(using Context): String = - val cmp = explainingTypeComparer(short) - inSubComparer(cmp)(op) - cmp.lastTrace(header) + explaining(cmp => { op(cmp); cmp.lastTrace(header) }, short) + + def explaining[T](op: ExplainingTypeComparer => T, short: Boolean)(using Context): T = + inSubComparer(explainingTypeComparer(short))(op) def reduceMatchWith[T](op: MatchReducer => T)(using Context): T = inSubComparer(matchReducer)(op) @@ -3440,6 +3441,9 @@ object TypeComparer { def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:", short: Boolean = false)(using Context): String = comparing(_.explained(op, header, short)) + def explaining[T](op: ExplainingTypeComparer => T, short: Boolean = false)(using Context): T = + comparing(_.explaining(op, short)) + def reduceMatchWith[T](op: MatchReducer => T)(using Context): T = comparing(_.reduceMatchWith(op)) @@ -3873,7 +3877,7 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa override def recur(tp1: Type, tp2: Type): Boolean = def moreInfo = if Config.verboseExplainSubtype || ctx.settings.verbose.value - then s" 
${tp1.getClass} ${tp2.getClass}" + then s" ${tp1.className} ${tp2.className}" else "" val approx = approxState def approxStr = if short then "" else approx.show diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 8089735bdb0f..0d8801b646ee 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -691,11 +691,22 @@ object TypeOps: val hiBound = instantiate(bounds.hi, skolemizedArgTypes) val loBound = instantiate(bounds.lo, skolemizedArgTypes) - def check(using Context) = { - if (!(lo <:< hiBound)) violations += ((arg, "upper", hiBound)) - if (!(loBound <:< hi)) violations += ((arg, "lower", loBound)) + def check(tp1: Type, tp2: Type, which: String, bound: Type)(using Context) = { + val isSub = TypeComparer.explaining { cmp => + val isSub = cmp.isSubType(tp1, tp2) + if !isSub then + if !ctx.typerState.constraint.domainLambdas.isEmpty then + typr.println(i"${ctx.typerState.constraint}") + if !ctx.gadt.symbols.isEmpty then + typr.println(i"${ctx.gadt}") + typr.println(cmp.lastTrace(i"checkOverlapsBounds($lo, $hi, $arg, $bounds)($which)")) + //trace.dumpStack() + isSub + }//(using ctx.fresh.setSetting(ctx.settings.verbose, true)) // uncomment to enable moreInfo in ExplainingTypeComparer recur + if !isSub then violations += ((arg, which, bound)) } - check(using checkCtx) + check(lo, hiBound, "upper", hiBound)(using checkCtx) + check(loBound, hi, "lower", loBound)(using checkCtx) } def loop(args: List[Tree], boundss: List[TypeBounds]): Unit = args match diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index fbbc3d990969..732e779e9bf7 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -27,6 +27,18 @@ object trace extends TraceSyntax: object log extends TraceSyntax: inline def isEnabled: true = true protected val isForced = false + + def dumpStack(limit: Int = -1): Unit = { + val out = Console.out + val exc = new Exception("Dump Stack") + var stack = exc.getStackTrace + .filter(e => !e.getClassName.startsWith("dotty.tools.dotc.reporting.TraceSyntax")) + .filter(e => !e.getClassName.startsWith("dotty.tools.dotc.reporting.trace")) + if limit >= 0 then + stack = stack.take(limit) + exc.setStackTrace(stack) + exc.printStackTrace(out) + } end trace /** This module is carefully optimized to give zero overhead if Config.tracingEnabled From 43fc10c2b382eb09e3dcac9a4652ddd6a9f4261e Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 21 Aug 2024 13:59:18 +0100 Subject: [PATCH 482/827] Give up on InferExpectedTypeSuite.map/flatMap But keep the extraction of the instDecision logic, and keep the tests cases I used in studying this change. Also simplify and update InstantiateModel. Martin had tweaked my i14218 fix to make it more conservative: in the (NN, UU) case (i.e. no inferred bounds, only a UU upper bound declared) for covariant type parameters, revert back to minimising to Nothing rather than maximising to the declared bound. 
--- .../dotty/tools/dotc/typer/Inferencing.scala | 55 ++++++++++------- .../dotty/tools/dotc/typer/ProtoTypes.scala | 4 +- .../tools/dotc/typer/InstantiateModel.scala | 49 ++++++++------- .../pc/tests/InferExpectedTypeSuite.scala | 15 ++--- tests/pos/i21390.TrieMap.scala | 12 ++++ tests/pos/i21390.zio.scala | 59 +++++++++++++++++++ 6 files changed, 138 insertions(+), 56 deletions(-) create mode 100644 tests/pos/i21390.TrieMap.scala create mode 100644 tests/pos/i21390.zio.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index c41fb2e60ae5..2ebcd96d5bde 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -240,25 +240,12 @@ object Inferencing { && { var fail = false var skip = false - val direction = instDirection(tvar.origin) - if minimizeSelected then - if direction <= 0 && tvar.hasLowerBound then - skip = instantiate(tvar, fromBelow = true) - else if direction >= 0 && tvar.hasUpperBound then - skip = instantiate(tvar, fromBelow = false) - // else hold off instantiating unbounded unconstrained variable - else if direction != 0 then - skip = instantiate(tvar, fromBelow = direction < 0) - else if variance >= 0 && tvar.hasLowerBound then - skip = instantiate(tvar, fromBelow = true) - else if (variance > 0 || variance == 0 && !tvar.hasUpperBound) - && force.ifBottom == IfBottom.ok - then // if variance == 0, prefer upper bound if one is given - skip = instantiate(tvar, fromBelow = true) - else if variance >= 0 && force.ifBottom == IfBottom.fail then - fail = true - else - toMaximize = tvar :: toMaximize + instDecision(tvar, variance, minimizeSelected, force.ifBottom) match + case Decision.Min => skip = instantiate(tvar, fromBelow = true) + case Decision.Max => skip = instantiate(tvar, fromBelow = false) + case Decision.Skip => // hold off instantiating unbounded unconstrained variable + case Decision.Fail => fail = true + case Decision.ToMax => toMaximize ::= tvar !fail && (skip || foldOver(x, tvar)) } case tp => foldOver(x, tp) @@ -452,9 +439,32 @@ object Inferencing { if (!cmp.isSubTypeWhenFrozen(constrained.lo, original.lo)) 1 else 0 val approxAbove = if (!cmp.isSubTypeWhenFrozen(original.hi, constrained.hi)) 1 else 0 + //println(i"instDirection($param) = $approxAbove - $approxBelow original=[$original] constrained=[$constrained]") approxAbove - approxBelow } + /** The instantiation decision for given poly param computed from the constraint. 
*/ + enum Decision { case Min; case Max; case ToMax; case Skip; case Fail } + private def instDecision(tvar: TypeVar, v: Int, minimizeSelected: Boolean, ifBottom: IfBottom)(using Context): Decision = + import Decision.* + val direction = instDirection(tvar.origin) + val dec = if minimizeSelected then + if direction <= 0 && tvar.hasLowerBound then Min + else if direction >= 0 && tvar.hasUpperBound then Max + else Skip + else if direction != 0 then if direction < 0 then Min else Max + else if tvar.hasLowerBound then if v >= 0 then Min else ToMax + else ifBottom match + // What's left are unconstrained tvars with at most a non-Any param upperbound: + // * IfBottom.flip will always maximise to the param upperbound, for all variances + // * IfBottom.fail will fail the IFD check, for covariant or invariant tvars, maximise contravariant tvars + // * IfBottom.ok will minimise to Nothing covariant and unbounded invariant tvars, and max to Any the others + case IfBottom.ok => if v > 0 || v == 0 && !tvar.hasUpperBound then Min else ToMax // prefer upper bound if one is given + case IfBottom.fail => if v >= 0 then Fail else ToMax + case ifBottom_flip => ToMax + //println(i"instDecision($tvar, v=v, minimizedSelected=$minimizeSelected, $ifBottom) dir=$direction = $dec") + dec + /** Following type aliases and stripping refinements and annotations, if one arrives at a * class type reference where the class has a companion module, a reference to * that companion module. Otherwise NoType @@ -651,7 +661,7 @@ trait Inferencing { this: Typer => val ownedVars = state.ownedVars if (ownedVars ne locked) && !ownedVars.isEmpty then - val qualifying = ownedVars -- locked + val qualifying = (ownedVars -- locked).toList if (!qualifying.isEmpty) { typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") val resultAlreadyConstrained = @@ -687,6 +697,10 @@ trait Inferencing { this: Typer => def constraint = state.constraint + trace(i"interpolateTypeVars($tree: ${tree.tpe}, $pt, $qualifying)", typr, (_: Any) => i"$qualifying\n$constraint\n${ctx.gadt}") { + //println(i"$constraint") + //println(i"${ctx.gadt}") + /** Values of this type report type variables to instantiate with variance indication: * +1 variable appears covariantly, can be instantiated from lower bound * -1 variable appears contravariantly, can be instantiated from upper bound @@ -804,6 +818,7 @@ trait Inferencing { this: Typer => end doInstantiate doInstantiate(filterByDeps(toInstantiate)) + } } end if tree diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 5909cda8c428..a69a63d1ceef 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -18,10 +18,11 @@ import config.Printers.typr import Inferencing.* import ErrorReporting.* import util.SourceFile +import util.Spans.{NoSpan, Span} import TypeComparer.necessarySubType +import reporting.* import scala.annotation.internal.sharable -import dotty.tools.dotc.util.Spans.{NoSpan, Span} object ProtoTypes { @@ -83,6 +84,7 @@ object ProtoTypes { * fits the given expected result type. 
*/ def constrainResult(mt: Type, pt: Type)(using Context): Boolean = + trace(i"constrainResult($mt, $pt)", typr): val savedConstraint = ctx.typerState.constraint val res = pt.widenExpr match { case pt: FunProto => diff --git a/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala index b08062913dac..9841fcbafb5b 100644 --- a/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala +++ b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala @@ -4,22 +4,16 @@ package typer // Modelling the decision in IsFullyDefined object InstantiateModel: - enum LB { case NN; case LL; case L1 }; import LB.* - enum UB { case AA; case UU; case U1 }; import UB.* - enum Var { case V; case NotV }; import Var.* - enum MSe { case M; case NotM }; import MSe.* - enum Bot { case Fail; case Ok; case Flip }; import Bot.* - enum Act { case Min; case Max; case ToMax; case Skip; case False }; import Act.* + enum LB { case NN; case LL; case L1 }; import LB.* + enum UB { case AA; case UU; case U1 }; import UB.* + enum Decision { case Min; case Max; case ToMax; case Skip; case Fail }; import Decision.* // NN/AA = Nothing/Any // LL/UU = the original bounds, on the type parameter // L1/U1 = the constrained bounds, on the type variable - // V = variance >= 0 ("non-contravariant") - // MSe = minimisedSelected - // Bot = IfBottom // ToMax = delayed maximisation, via addition to toMaximize // Skip = minimisedSelected "hold off instantiating" - // False = return false + // Fail = IfBottom.fail's bail option // there are 9 combinations: // # | LB | UB | d | // d = direction @@ -34,24 +28,27 @@ object InstantiateModel: // 8 | NN | UU | 0 | T <: UU // 9 | NN | AA | 0 | T - def decide(lb: LB, ub: UB, v: Var, bot: Bot, m: MSe): Act = (lb, ub) match + def instDecision(lb: LB, ub: UB, v: Int, ifBottom: IfBottom, min: Boolean) = (lb, ub) match case (L1, AA) => Min case (L1, UU) => Min case (LL, U1) => Max case (NN, U1) => Max - case (L1, U1) => if m==M || v==V then Min else ToMax - case (LL, UU) => if m==M || v==V then Min else ToMax - case (LL, AA) => if m==M || v==V then Min else ToMax - - case (NN, UU) => bot match - case _ if m==M => Max - //case Ok if v==V => Min // removed, i14218 fix - case Fail if v==V => False - case _ => ToMax - - case (NN, AA) => bot match - case _ if m==M => Skip - case Ok if v==V => Min - case Fail if v==V => False - case _ => ToMax + case (L1, U1) => if min then Min else pickVar(v, Min, Min, ToMax) + case (LL, UU) => if min then Min else pickVar(v, Min, Min, ToMax) + case (LL, AA) => if min then Min else pickVar(v, Min, Min, ToMax) + + case (NN, UU) => ifBottom match + case _ if min => Max + case IfBottom.ok => pickVar(v, Min, ToMax, ToMax) + case IfBottom.fail => pickVar(v, Fail, Fail, ToMax) + case IfBottom.flip => ToMax + + case (NN, AA) => ifBottom match + case _ if min => Skip + case IfBottom.ok => pickVar(v, Min, Min, ToMax) + case IfBottom.fail => pickVar(v, Fail, Fail, ToMax) + case IfBottom.flip => ToMax + + def pickVar[A](v: Int, cov: A, inv: A, con: A) = + if v > 0 then cov else if v == 0 then inv else con diff --git a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala index 1a04c5d8c864..ccdc68ef1cad 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala @@ -221,25 +221,22 @@ class 
InferExpectedTypeSuite extends BasePCSuite: |""".stripMargin ) - @Ignore("Generic functions are not handled correctly.") @Test def flatmap = check( """|val _ : List[Int] = List().flatMap(_ => @@) |""".stripMargin, - """|IterableOnce[Int] - |""".stripMargin + """|IterableOnce[Nothing] + |""".stripMargin // ideally IterableOnce[Int], but can't change interpolateTypeVars ) - @Ignore("Generic functions are not handled correctly.") @Test def map = check( """|val _ : List[Int] = List().map(_ => @@) |""".stripMargin, - """|Int - |""".stripMargin + """|Nothing + |""".stripMargin // ideally Int, but can't change interpolateTypeVars ) - @Ignore("Generic functions are not handled correctly.") @Test def `for-comprehension` = check( """|val _ : List[Int] = @@ -247,8 +244,8 @@ class InferExpectedTypeSuite extends BasePCSuite: | _ <- List("a", "b") | } yield @@ |""".stripMargin, - """|Int - |""".stripMargin + """|Nothing + |""".stripMargin // ideally Int, but can't change interpolateTypeVars ) // bounds diff --git a/tests/pos/i21390.TrieMap.scala b/tests/pos/i21390.TrieMap.scala new file mode 100644 index 000000000000..e49cca353485 --- /dev/null +++ b/tests/pos/i21390.TrieMap.scala @@ -0,0 +1,12 @@ +// Minimised from scala.collection.concurrent.LNode +// Useful as a minimisation of how, +// If we were to change the type interpolation +// to minimise to the inferred "X" type, +// then this is a minimisation of how the (ab)use of +// GADT constraints to handle class type params +// can fail PostTyper, -Ytest-pickler, and probably others. + +import scala.language.experimental.captureChecking + +class Foo[X](xs: List[X]): + def this(a: X, b: X) = this(if (a == b) then a :: Nil else a :: b :: Nil) diff --git a/tests/pos/i21390.zio.scala b/tests/pos/i21390.zio.scala new file mode 100644 index 000000000000..3aece69632b3 --- /dev/null +++ b/tests/pos/i21390.zio.scala @@ -0,0 +1,59 @@ +// A minimisation of a community build failure in PR 21390 +// To see why changing the instantiation direction in interpolateTypeVars +// using the same logic as IsFullyDefined. +class Has[A] +object Has: + class Union[B, C] + object Union: + given HasHasUnion[B0 <: Has[?], C0 <: Has[?]]: Union[B0, C0] = ??? + +class Lay[+D]: + def and1[B1 >: D, C1](that: Lay[C1])(using Has.Union[B1, C1]): Lay[B1 & C1] = ??? + def and2[B2 >: D, C2](that: Lay[C2])(using Has.Union[B2, C2]): Lay[B2 & C2] = ??? + +class J; type X = Has[J] +class K; type Y = Has[K] +class L; type Z = Has[L] + +def t1(x: Lay[X], y: Lay[Y], z: Lay[Z]): Lay[X & Y & Z] = x.and1(y).and2(z) + +/* + +Here's what goes wrong in the tvar instantiation, in method t1: + +1) <== constrainResult(method and1, (using x$2: Union[B1, C1]): Lay[B1 & C1], ?{ and2: ? }) = true +2) ==> Has.Union[B0, C0] <: Has.Union[B1, C1 := Y]? +3) <== Has.Union[B0, C0] <: Has.Union[B1, C1 := Y] = OK + +1) B1 >: X B2 >: B1 & C1 +2) B1 >: X C1 := Y B2 >: B1 & Y B0 <: Has[?] C0 <: Has[?] +3) B1 >: X <: Has[?] C1 := Y B2 >: B1 & Y B0 := B1 C0 := Y + +1) Check that the result of and1 fits the expected .and2 call, inferring any necessary constraints +2) Initiate the check that calling HasHasUnion matches the needed Has.Union[B1, C1] parameter +3) In inferring that the need B0 := B1 and C0 := Y, we end up inferring B0's `<: Has[?]` on B1. + +4a) <== B1.instantiate(fromBelow = true ) = X +4b) <== B1.instantiate(fromBelow = false) = Has[?] 
+5a) <== B2.instantiate(fromBelow = true) = X & Y +5b) <== B2.instantiate(fromBelow = true) = Y +6) <== constrainResult(method and2, (using x$2: Has.Union[B2, C2]): Lay[B2 & C2], Lay[X & Y & Z]) = true + +4a) B2 >: X & Y +4b) B2 >: Y & Has[?] +5a) B2 := X & Y +5b) B2 := Y +6a) B2 >: X & Y C2 <: Z +6b) B2 >: Y C2 <: X & Z + +4) With the extra upper bound constraint, we end up maximising to Has[?] (4b) instead of minimising to X (4a) +5) Which leads to instantiating B2 to just Y (5b) instead of X & Y (5a) +6) Which leads the constraints from the result of and2 to infer X & Z (6b) instead of just Z (6a) + +-- [E007] Type Mismatch Error: tests/pos/i21390.zio.scala:14:73 ------------------------------------ +14 |def t1(x: Lay[X], y: Lay[Y], z: Lay[Z]): Lay[X & Y & Z] = x.and1(y).and2(z) + | ^ + | Found: (z : Lay[Z]) + | Required: Lay[X & Z] + +*/ From c3f45ca82d0d517848b53e91a4d2dffb724cede1 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 21 Aug 2024 15:04:27 +0100 Subject: [PATCH 483/827] Update hamzaremmal/sdkman-default-action action --- .github/workflows/publish-sdkman.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 6f10ac128b6e..d5cbd6c02966 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -59,7 +59,7 @@ jobs: runs-on: ubuntu-latest needs: publish steps: - - uses: hamzaremmal/sdkman-default-action@f312ff69dec7c4f83b060c3df90df7ed19e2d70e + - uses: hamzaremmal/sdkman-default-action@b3f991bd109e40155af1b13a4c6fc8e8ccada65e with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} From 13223a3c2323303343df4b51f7e8a0a28bd89e15 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 23 Aug 2024 13:07:25 +0200 Subject: [PATCH 484/827] Bump scala-cli to 1.5.0 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 54ac84734486..bfbcc04c3e93 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -124,7 +124,7 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.4.3" + val scalaCliLauncherVersion = "1.5.0" /** Version of Coursier to download for initializing the local maven repo of Scala command */ val coursierJarVersion = "2.1.10" From e44dd5921e8ddd6685738599aa9ed32a83c7cf89 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 23 Aug 2024 16:30:29 +0200 Subject: [PATCH 485/827] Introduce libexec folder in the distribution --- dist/bin-native-overrides/cli-common-platform | 3 --- .../cli-common-platform.bat | 3 --- dist/bin/scala | 4 +-- dist/bin/scala.bat | 4 +-- dist/bin/scala_legacy | 2 +- dist/bin/scalac | 2 +- dist/bin/scalac.bat | 2 +- dist/bin/scaladoc | 2 +- dist/bin/scaladoc.bat | 2 +- .../cli-common-platform | 3 +++ .../cli-common-platform.bat | 3 +++ dist/{bin => libexec}/cli-common-platform | 0 dist/{bin => libexec}/cli-common-platform.bat | 0 dist/{bin => libexec}/common | 2 +- dist/{bin => libexec}/common-shared | 0 dist/{bin => libexec}/common.bat | 0 project/Build.scala | 25 ++++++++++--------- project/RepublishPlugin.scala | 20 +++++++-------- 18 files changed, 38 insertions(+), 39 deletions(-) delete mode 100644 dist/bin-native-overrides/cli-common-platform delete mode 100644 dist/bin-native-overrides/cli-common-platform.bat create mode 100644 dist/libexec-native-overrides/cli-common-platform create mode 100644 
dist/libexec-native-overrides/cli-common-platform.bat rename dist/{bin => libexec}/cli-common-platform (100%) rename dist/{bin => libexec}/cli-common-platform.bat (100%) rename dist/{bin => libexec}/common (95%) rename dist/{bin => libexec}/common-shared (100%) rename dist/{bin => libexec}/common.bat (100%) diff --git a/dist/bin-native-overrides/cli-common-platform b/dist/bin-native-overrides/cli-common-platform deleted file mode 100644 index 49803d6282c5..000000000000 --- a/dist/bin-native-overrides/cli-common-platform +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"") diff --git a/dist/bin-native-overrides/cli-common-platform.bat b/dist/bin-native-overrides/cli-common-platform.bat deleted file mode 100644 index 24ab08086007..000000000000 --- a/dist/bin-native-overrides/cli-common-platform.bat +++ /dev/null @@ -1,3 +0,0 @@ -@echo off - -set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" diff --git a/dist/bin/scala b/dist/bin/scala index fa4f4cb25a11..81e11a4dffc2 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -26,8 +26,8 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common-shared" -source "$PROG_HOME/bin/cli-common-platform" +source "$PROG_HOME/libexec/common-shared" +source "$PROG_HOME/libexec/cli-common-platform" SCALA_VERSION="" # iterate through lines in VERSION_SRC diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index 7418909da263..bd5bf0b8dfbe 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -11,7 +11,7 @@ for %%f in ("%~dp0.") do ( @rem get rid of the trailing slash set "_PROG_HOME=!_PROG_HOME:~0,-1!" ) -call "%_PROG_HOME%\bin\common.bat" +call "%_PROG_HOME%\libexec\common.bat" if not %_EXITCODE%==0 goto end @rem ######################################################################### @@ -19,7 +19,7 @@ if not %_EXITCODE%==0 goto end call :setScalaOpts -call "%_PROG_HOME%\bin\cli-common-platform.bat" +call "%_PROG_HOME%\libexec\cli-common-platform.bat" @rem SCALA_CLI_CMD_WIN is an array, set in cli-common-platform.bat. @rem WE NEED TO PASS '--skip-cli-updates' for JVM launchers but we actually don't need it for native launchers diff --git a/dist/bin/scala_legacy b/dist/bin/scala_legacy index 18fc6d874e34..62755801819b 100755 --- a/dist/bin/scala_legacy +++ b/dist/bin/scala_legacy @@ -26,7 +26,7 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" +source "$PROG_HOME/libexec/common" while [[ $# -gt 0 ]]; do case "$1" in diff --git a/dist/bin/scalac b/dist/bin/scalac index a527d9767749..ec91629a87ac 100755 --- a/dist/bin/scalac +++ b/dist/bin/scalac @@ -26,7 +26,7 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" +source "$PROG_HOME/libexec/common" [ -z "$PROG_NAME" ] && PROG_NAME=$CompilerMain diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index e2898bdc2890..038c733f24c8 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -11,7 +11,7 @@ for %%f in ("%~dp0.") do ( @rem get rid of the trailing slash set "_PROG_HOME=!_PROG_HOME:~0,-1!" 
) -call "%_PROG_HOME%\bin\common.bat" +call "%_PROG_HOME%\libexec\common.bat" if not %_EXITCODE%==0 goto end call :args %* diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 0af5a2b55acb..e137176e819f 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -28,7 +28,7 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" +source "$PROG_HOME/libexec/common" default_java_opts="-Xmx768m -Xms768m" withCompiler=true diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index b9e4820b006d..2be13aa628b1 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -11,7 +11,7 @@ for %%f in ("%~dp0.") do ( @rem get rid of the trailing slash set "_PROG_HOME=!_PROG_HOME:~0,-1!" ) -call "%_PROG_HOME%\bin\common.bat" +call "%_PROG_HOME%\libexec\common.bat" if not %_EXITCODE%==0 goto end set _DEFAULT_JAVA_OPTS=-Xmx768m -Xms768m diff --git a/dist/libexec-native-overrides/cli-common-platform b/dist/libexec-native-overrides/cli-common-platform new file mode 100644 index 000000000000..246cbc58d5c7 --- /dev/null +++ b/dist/libexec-native-overrides/cli-common-platform @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +SCALA_CLI_CMD_BASH=("\"$PROG_HOME/libexec/scala-cli\"") diff --git a/dist/libexec-native-overrides/cli-common-platform.bat b/dist/libexec-native-overrides/cli-common-platform.bat new file mode 100644 index 000000000000..239ab40f1f28 --- /dev/null +++ b/dist/libexec-native-overrides/cli-common-platform.bat @@ -0,0 +1,3 @@ +@echo off + +set SCALA_CLI_CMD_WIN="%_PROG_HOME%\libexec\scala-cli.exe" diff --git a/dist/bin/cli-common-platform b/dist/libexec/cli-common-platform similarity index 100% rename from dist/bin/cli-common-platform rename to dist/libexec/cli-common-platform diff --git a/dist/bin/cli-common-platform.bat b/dist/libexec/cli-common-platform.bat similarity index 100% rename from dist/bin/cli-common-platform.bat rename to dist/libexec/cli-common-platform.bat diff --git a/dist/bin/common b/dist/libexec/common similarity index 95% rename from dist/bin/common rename to dist/libexec/common index 2de8bdf9f99a..28b5d66a9ed3 100644 --- a/dist/bin/common +++ b/dist/libexec/common @@ -1,6 +1,6 @@ #!/usr/bin/env bash -source "$PROG_HOME/bin/common-shared" +source "$PROG_HOME/libexec/common-shared" #/*-------------------------------------------------- # * The code below is for Dotty diff --git a/dist/bin/common-shared b/dist/libexec/common-shared similarity index 100% rename from dist/bin/common-shared rename to dist/libexec/common-shared diff --git a/dist/bin/common.bat b/dist/libexec/common.bat similarity index 100% rename from dist/bin/common.bat rename to dist/libexec/common.bat diff --git a/project/Build.scala b/project/Build.scala index 54ac84734486..e6592f67ce3d 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -2137,9 +2137,10 @@ object Build { Universal / packageBin := (Universal / packageBin).dependsOn(republish).value, Universal / packageZipTarball := (Universal / packageZipTarball).dependsOn(republish).value, // ======== - Universal / mappings ++= directory(republishRepo.value / "bin"), + Universal / mappings ++= directory(dist.base / "bin"), Universal / mappings ++= directory(republishRepo.value / "maven2"), Universal / mappings ++= directory(republishRepo.value / "lib"), + Universal / mappings ++= directory(republishRepo.value / "libexec"), Universal / mappings += (republishRepo.value / "VERSION") -> "VERSION", // ======== republishCommandLibs += ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", 
"tasty-core")), @@ -2150,7 +2151,7 @@ object Build { lazy val dist = project.asDist(Bootstrapped) .settings(packageName := "scala3-" + dottyVersion) .settings( - republishBinDir := baseDirectory.value / "bin", + republishLibexecDir := baseDirectory.value / "libexec", republishCoursier += ("coursier.jar" -> s"https://github.com/coursier/coursier/releases/download/v$coursierJarVersion/coursier.jar"), republishLaunchers += @@ -2160,8 +2161,8 @@ object Build { lazy val `dist-mac-x86_64` = project.in(file("dist/mac-x86_64")).asDist(Bootstrapped) .settings(packageName := (dist / packageName).value + "-x86_64-apple-darwin") .settings( - republishBinDir := (dist / republishBinDir).value, - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-apple-darwin.gz") @@ -2170,8 +2171,8 @@ object Build { lazy val `dist-mac-aarch64` = project.in(file("dist/mac-aarch64")).asDist(Bootstrapped) .settings(packageName := (dist / packageName).value + "-aarch64-apple-darwin") .settings( - republishBinDir := (dist / republishBinDir).value, - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-apple-darwin.gz") @@ -2181,8 +2182,8 @@ object Build { .enablePlugins(WindowsPlugin) // TO GENERATE THE `.msi` installer .settings(packageName := (dist / packageName).value + "-x86_64-pc-win32") .settings( - republishBinDir := (dist / republishBinDir).value, - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") @@ -2204,8 +2205,8 @@ object Build { lazy val `dist-linux-x86_64` = project.in(file("dist/linux-x86_64")).asDist(Bootstrapped) .settings(packageName := (dist / packageName).value + "-x86_64-pc-linux") .settings( - republishBinDir := (dist / republishBinDir).value, - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-pc-linux.gz") @@ -2214,8 +2215,8 @@ object Build { lazy val `dist-linux-aarch64` = project.in(file("dist/linux-aarch64")).asDist(Bootstrapped) .settings(packageName := (dist / packageName).value + "-aarch64-pc-linux") .settings( - republishBinDir := (dist 
/ republishBinDir).value, - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-pc-linux.gz") diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index ff469e8fda56..f1faeca3d9cd 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -60,9 +60,9 @@ object RepublishPlugin extends AutoPlugin { val republishFetchCoursier = taskKey[File]("cache the coursier.jar for resolving the local maven repo.") val republishPrepareBin = taskKey[File]("prepare the bin directory, including launchers and scripts.") val republishWriteExtraProps = taskKey[Option[File]]("write extra properties for the launchers.") - val republishBinDir = settingKey[File]("where to find static files for the bin dir.") + val republishLibexecDir = settingKey[File]("where to find static files for the `libexec` dir.") val republishCoursierDir = settingKey[File]("where to download the coursier launcher jar.") - val republishBinOverrides = settingKey[Seq[File]]("files to override those in bin-dir.") + val republishLibexecOverrides = settingKey[Seq[File]]("files to override those in libexec-dir.") val republishCommandLibs = settingKey[Seq[(String, List[String])]]("libraries needed for each command.") val republish = taskKey[File]("cache the dependencies and download launchers for the distribution") val republishPack = taskKey[File]("do the pack command") @@ -405,7 +405,7 @@ object RepublishPlugin extends AutoPlugin { republishCoursierDir := republishRepo.value / "coursier", republishLaunchers := Seq.empty, republishCoursier := Seq.empty, - republishBinOverrides := Seq.empty, + republishLibexecOverrides := Seq.empty, republishExtraProps := Seq.empty, republishCommandLibs := Seq.empty, republishLocalResolved / republishProjectRefs := { @@ -489,16 +489,14 @@ object RepublishPlugin extends AutoPlugin { }, republishPrepareBin := { val baseDir = baseDirectory.value - val srcBin = republishBinDir.value - val overrides = republishBinOverrides.value + val srcLibexec = republishLibexecDir.value + val overrides = republishLibexecOverrides.value val repoDir = republishRepo.value - val targetBin = repoDir / "bin" - IO.copyDirectory(srcBin, targetBin) - overrides.foreach { dir => - IO.copyDirectory(dir, targetBin, overwrite = true) - } - targetBin + val targetLibexec = repoDir / "libexec" + IO.copyDirectory(srcLibexec, targetLibexec) + overrides.foreach(IO.copyDirectory(_, targetLibexec, overwrite = true)) + targetLibexec }, republishWriteExtraProps := { val s = streams.value From e5987d66f01aa65d82423f1ba4109ea4e3c9eacb Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 23 Aug 2024 19:41:07 +0200 Subject: [PATCH 486/827] Check all top-level covariant capture sets in checkNotUniversal Fixes #21401 --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 29 ++++++++++++------- tests/neg-custom-args/captures/i21401.check | 10 +++++++ tests/neg-custom-args/captures/i21401.scala | 19 ++++++++++++ 3 files changed, 47 insertions(+), 11 deletions(-) create mode 100644 tests/neg-custom-args/captures/i21401.check create mode 100644 tests/neg-custom-args/captures/i21401.scala diff --git 
a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 27a3d6024b65..384c6e1f29ef 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -978,21 +978,28 @@ class CheckCaptures extends Recheck, SymTransformer: case _: RefTree | _: Apply | _: TypeApply => tree.symbol.unboxesResult case _: Try => true case _ => false - def checkNotUniversal(tp: Type): Unit = tp.widenDealias match - case wtp @ CapturingType(parent, refs) => - refs.disallowRootCapability { () => - report.error( - em"""The expression's type $wtp is not allowed to capture the root capability `cap`. - |This usually means that a capability persists longer than its allowed lifetime.""", - tree.srcPos) - } - checkNotUniversal(parent) - case _ => + + object checkNotUniversal extends TypeTraverser: + def traverse(tp: Type) = + tp.dealias match + case wtp @ CapturingType(parent, refs) => + if variance > 0 then + refs.disallowRootCapability: () => + def part = if wtp eq tpe.widen then "" else i" in its part $wtp" + report.error( + em"""The expression's type ${tpe.widen} is not allowed to capture the root capability `cap`$part. + |This usually means that a capability persists longer than its allowed lifetime.""", + tree.srcPos) + if !wtp.isBoxed then traverse(parent) + case tp => + traverseChildren(tp) + if !ccConfig.useSealed && !tpe.hasAnnotation(defn.UncheckedCapturesAnnot) && needsUniversalCheck + && tpe.widen.isValueType then - checkNotUniversal(tpe) + checkNotUniversal.traverse(tpe.widen) super.recheckFinish(tpe, tree, pt) end recheckFinish diff --git a/tests/neg-custom-args/captures/i21401.check b/tests/neg-custom-args/captures/i21401.check new file mode 100644 index 000000000000..e9a5fbd4678c --- /dev/null +++ b/tests/neg-custom-args/captures/i21401.check @@ -0,0 +1,10 @@ +-- Error: tests/neg-custom-args/captures/i21401.scala:15:22 ------------------------------------------------------------ +15 | val a = usingIO[IO^](x => x) // error: The expression's type IO^ is not allowed to capture the root capability `cap` + | ^^^^^^^^^^^^^^^^^^^^ + | The expression's type box IO^ is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. +-- Error: tests/neg-custom-args/captures/i21401.scala:16:70 ------------------------------------------------------------ +16 | val leaked: [R, X <: Boxed[IO^] -> R] -> (op: X) -> R = usingIO[Res](mkRes) // error: The expression's type Res is not allowed to capture the root capability `cap` in its part box IO^ + | ^^^^^^^^^^^^^^^^^^^ + | The expression's type Res is not allowed to capture the root capability `cap` in its part box IO^. + | This usually means that a capability persists longer than its allowed lifetime. diff --git a/tests/neg-custom-args/captures/i21401.scala b/tests/neg-custom-args/captures/i21401.scala new file mode 100644 index 000000000000..07d407a79809 --- /dev/null +++ b/tests/neg-custom-args/captures/i21401.scala @@ -0,0 +1,19 @@ +import language.experimental.captureChecking + +trait IO: + def println(s: String): Unit +def usingIO[R](op: IO^ => R): R = ??? 
+ +case class Boxed[+T](unbox: T) + +type Res = [R, X <: Boxed[IO^] -> R] -> (op: X) -> R +def mkRes(x: IO^): Res = + [R, X <: Boxed[IO^] -> R] => (op: X) => + val op1: Boxed[IO^] -> R = op + op1(Boxed[IO^](x)) +def test2() = + val a = usingIO[IO^](x => x) // error: The expression's type IO^ is not allowed to capture the root capability `cap` + val leaked: [R, X <: Boxed[IO^] -> R] -> (op: X) -> R = usingIO[Res](mkRes) // error: The expression's type Res is not allowed to capture the root capability `cap` in its part box IO^ + val x: Boxed[IO^] = leaked[Boxed[IO^], Boxed[IO^] -> Boxed[IO^]](x => x) + val y: IO^{x*} = x.unbox + y.println("boom") From 69530560f6630e1a6912dc6280d79107037792b1 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 20 Aug 2024 13:03:33 -0700 Subject: [PATCH 487/827] Add origin filter to WConf, DeprecationWarning --- compiler/src/dotty/tools/dotc/report.scala | 4 ++-- .../dotty/tools/dotc/reporting/Diagnostic.scala | 3 ++- .../src/dotty/tools/dotc/reporting/WConf.scala | 9 +++++++-- .../tools/dotc/typer/CrossVersionChecks.scala | 4 ++-- .../src/dotty/tools/dotc/typer/RefChecks.scala | 4 +++- .../tools/dotc/config/ScalaSettingsTests.scala | 7 ++++--- tests/warn/deprecated-origin.scala | 15 +++++++++++++++ 7 files changed, 35 insertions(+), 11 deletions(-) create mode 100644 tests/warn/deprecated-origin.scala diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index c77d4eb2fc7e..e24e6be38b2b 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -23,8 +23,8 @@ object report: private def issueWarning(warning: Warning)(using Context): Unit = ctx.reporter.report(warning) - def deprecationWarning(msg: Message, pos: SrcPos)(using Context): Unit = - issueWarning(new DeprecationWarning(msg, pos.sourcePos)) + def deprecationWarning(msg: Message, pos: SrcPos, origin: String = "")(using Context): Unit = + issueWarning(new DeprecationWarning(msg, pos.sourcePos, origin)) def migrationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new MigrationWarning(msg, pos.sourcePos)) diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala index 7a8edb233aee..6a2d88f4e82f 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala @@ -75,7 +75,8 @@ object Diagnostic: class DeprecationWarning( msg: Message, - pos: SourcePosition + pos: SourcePosition, + val origin: String ) extends ConditionalWarning(msg, pos) { def enablingOption(using Context): Setting[Boolean] = ctx.settings.deprecation } diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index 54a6fc14e054..1896e5269d6c 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -19,23 +19,27 @@ enum MessageFilter: case Deprecated => message.isInstanceOf[Diagnostic.DeprecationWarning] case Feature => message.isInstanceOf[Diagnostic.FeatureWarning] case Unchecked => message.isInstanceOf[Diagnostic.UncheckedWarning] + case MessageID(errorId) => message.msg.errorId == errorId case MessagePattern(pattern) => val noHighlight = message.msg.message.replaceAll("\\e\\[[\\d;]*[^\\d;]","") pattern.findFirstIn(noHighlight).nonEmpty - case MessageID(errorId) => message.msg.errorId == errorId case SourcePattern(pattern) => val source = 
message.position.orElse(NoSourcePosition).source() val path = source.jfile() .map(_.toPath.toAbsolutePath.toUri.normalize().getRawPath) .orElse(source.path()) pattern.findFirstIn(path).nonEmpty - + case Origin(pattern) => + message match + case message: Diagnostic.DeprecationWarning => pattern.findFirstIn(message.origin).nonEmpty + case _ => false case None => false case Any, Deprecated, Feature, Unchecked, None case MessagePattern(pattern: Regex) case MessageID(errorId: ErrorMessageID) case SourcePattern(pattern: Regex) + case Origin(pattern: Regex) enum Action: case Error, Warning, Verbose, Info, Silent @@ -96,6 +100,7 @@ object WConf: case _ => Left(s"unknown category: $conf") case "src" => regex(conf).map(SourcePattern.apply) + case "origin" => regex(conf).map(Origin.apply) case _ => Left(s"unknown filter: $filter") case _ => Left(s"unknown filter: $s") diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 5ce1b02733d0..6020431672b9 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -78,7 +78,7 @@ class CrossVersionChecks extends MiniPhase: do val msg = annot.argumentConstantString(0).map(msg => s": $msg").getOrElse("") val since = annot.argumentConstantString(1).map(version => s" (since: $version)").getOrElse("") - report.deprecationWarning(em"inheritance from $psym is deprecated$since$msg", parent.srcPos) + report.deprecationWarning(em"inheritance from $psym is deprecated$since$msg", parent.srcPos, origin=psym.showFullName) } override def transformValDef(tree: ValDef)(using Context): ValDef = @@ -171,7 +171,7 @@ object CrossVersionChecks: def maybeWarn(annotee: Symbol, annot: Annotation) = if !skipWarning(sym) then val message = annot.argumentConstantString(0).filter(!_.isEmpty).map(": " + _).getOrElse("") val since = annot.argumentConstantString(1).filter(!_.isEmpty).map(" since " + _).getOrElse("") - report.deprecationWarning(em"${annotee.showLocated} is deprecated${since}${message}", pos) + report.deprecationWarning(em"${annotee.showLocated} is deprecated${since}${message}", pos, origin=annotee.showFullName) sym.getAnnotation(defn.DeprecatedAnnot) match case Some(annot) => maybeWarn(sym, annot) case _ => diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 5f7504fa072f..0a0356707048 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -484,7 +484,9 @@ object RefChecks { def overrideDeprecation(what: String, member: Symbol, other: Symbol, fix: String): Unit = report.deprecationWarning( em"overriding $what${infoStringWithLocation(other)} is deprecated;\n ${infoString(member)} should be $fix.", - if member.owner == clazz then member.srcPos else clazz.srcPos) + if member.owner == clazz then member.srcPos else clazz.srcPos, + origin = other.showFullName + ) def autoOverride(sym: Symbol) = sym.is(Synthetic) && ( diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index 3dc4f4e4ec5e..a412848eaa98 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -81,7 +81,7 @@ class ScalaSettingsTests: val conf = sets.Wconf.valueIn(proc.sstate) val sut = 
reporting.WConf.fromSettings(conf).getOrElse(???) val msg = "There was a problem!".toMessage - val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition) + val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition, origin="") assertEquals(Action.Silent, sut.action(depr)) val feat = new Diagnostic.FeatureWarning(msg, util.NoSourcePosition) assertEquals(Action.Error, sut.action(feat)) @@ -197,7 +197,7 @@ class ScalaSettingsTests: val proc = sets.processArguments(sumy, processAll = true, skipped = Nil) val conf = sets.Wconf.valueIn(proc.sstate) val msg = "Don't use that!".toMessage - val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition) + val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition, origin="") val sut = reporting.WConf.fromSettings(conf).getOrElse(???) assertEquals(Action.Silent, sut.action(depr)) @@ -293,7 +293,8 @@ class ScalaSettingsTests: util.SourcePosition( source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), span = util.Spans.Span(1L) - ) + ), + origin="", ) ) assertEquals(result, Right(reporting.Action.Error)) diff --git a/tests/warn/deprecated-origin.scala b/tests/warn/deprecated-origin.scala new file mode 100644 index 000000000000..e028515d795c --- /dev/null +++ b/tests/warn/deprecated-origin.scala @@ -0,0 +1,15 @@ +//> using options -deprecation -Wconf:origin=p\.C$:s + +package p: + @deprecated("Old style", since="1.0") + class C + @deprecated("Bad style", since="1.0") + class Crude + +package q: + import annotation.* + import p.* + class D extends C // nowarn - C$ pattern avoids matching Crude + class Oil extends Crude // warn + @nowarn("""origin=p\.Crude""") + class Language extends Crude // nowarn obvs From 3c873cf495412a83d1f0dfc960c343c311db4a5c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 24 Aug 2024 09:39:04 -0700 Subject: [PATCH 488/827] More doc --- .../tools/dotc/config/ScalaSettings.scala | 6 ++- .../dotc/reporting/MessageRendering.scala | 39 +++++++++++-------- tests/warn/deprecated-origin-verbose.check | 14 +++++++ tests/warn/deprecated-origin-verbose.scala | 15 +++++++ 4 files changed, 56 insertions(+), 18 deletions(-) create mode 100644 tests/warn/deprecated-origin-verbose.check create mode 100644 tests/warn/deprecated-origin-verbose.scala diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index d775a4239d1b..72a051ea8154 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -233,7 +233,7 @@ private sealed trait WarningSettings: "patterns", default = List(), descr = - s"""Configure compiler warnings. + raw"""Configure compiler warnings. |Syntax: -Wconf::,:,... |multiple are combined with &, i.e., &...& | @@ -254,6 +254,9 @@ private sealed trait WarningSettings: | - Source location: src=regex | The regex is evaluated against the full source path. | + | - Origin of warning: origin=regex + | The regex must match the full name (`package.Class.method`) of the deprecated entity. + | |In verbose warning mode the compiler prints matching filters for warnings. |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`). 
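
The new `origin` filter plugs into the existing `-Wconf`/`@nowarn` machinery. A minimal end-to-end sketch in the style of the `deprecated-origin` test above; the `demo.OldApi` names are hypothetical placeholders for whatever deprecated entity is being matched:

  //> using options -deprecation -Wconf:origin=demo\.OldApi\.run:s

  package demo:
    object OldApi:
      @deprecated("use NewApi.run instead", since = "1.0")
      def run(): Unit = ()
      @deprecated("use NewApi.stop instead", since = "1.0")
      def stop(): Unit = ()

  package client:
    import scala.annotation.nowarn
    import demo.OldApi

    object Use:
      def a(): Unit = OldApi.run()  // silenced file-wide by the -Wconf origin filter
      def b(): Unit = OldApi.stop() // still warns: origin does not match the filter

      @nowarn("""origin=demo\.OldApi\.stop""")
      def c(): Unit = OldApi.stop() // silenced locally with the same origin syntax

The same `origin=` filter syntax works in both positions, since `@nowarn` arguments are parsed with the `-Wconf` filter grammar (minus the action).
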
@@ -273,6 +276,7 @@ private sealed trait WarningSettings: |Examples: | - change every warning into an error: -Wconf:any:error | - silence deprecations: -Wconf:cat=deprecation:s + | - silence a deprecation: -Wconf:origin=java\.lang\.Thread\.getId:s | - silence warnings in src_managed directory: -Wconf:src=src_managed/.*:s | |Note: on the command-line you might need to quote configurations containing `*` or `&` diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala index 6881235e3dc1..7db5112b6674 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala @@ -15,7 +15,7 @@ import util.{ SourcePosition, NoSourcePosition } import util.Chars.{ LF, CR, FF, SU } import scala.annotation.switch -import scala.collection.mutable +import scala.collection.mutable.StringBuilder trait MessageRendering { import Highlight.* @@ -209,22 +209,27 @@ trait MessageRendering { sb.toString } - private def appendFilterHelp(dia: Diagnostic, sb: mutable.StringBuilder): Unit = - import dia.* + private def appendFilterHelp(dia: Diagnostic, sb: StringBuilder): Unit = + import dia.msg val hasId = msg.errorId.errorNumber >= 0 - val category = dia match { - case _: UncheckedWarning => "unchecked" - case _: DeprecationWarning => "deprecation" - case _: FeatureWarning => "feature" - case _ => "" - } - if (hasId || category.nonEmpty) - sb.append(EOL).append("Matching filters for @nowarn or -Wconf:") - if (hasId) - sb.append(EOL).append(" - id=E").append(msg.errorId.errorNumber) - sb.append(EOL).append(" - name=").append(msg.errorId.productPrefix.stripSuffix("ID")) - if (category.nonEmpty) - sb.append(EOL).append(" - cat=").append(category) + val (category, origin) = dia match + case _: UncheckedWarning => ("unchecked", "") + case w: DeprecationWarning => ("deprecation", w.origin) + case _: FeatureWarning => ("feature", "") + case _ => ("", "") + var entitled = false + def addHelp(what: String)(value: String): Unit = + if !entitled then + sb.append(EOL).append("Matching filters for @nowarn or -Wconf:") + entitled = true + sb.append(EOL).append(" - ").append(what).append(value) + if hasId then + addHelp("id=E")(msg.errorId.errorNumber.toString) + addHelp("name=")(msg.errorId.productPrefix.stripSuffix("ID")) + if category.nonEmpty then + addHelp("cat=")(category) + if origin.nonEmpty then + addHelp("origin=")(origin) /** The whole message rendered from `msg` */ def messageAndPos(dia: Diagnostic)(using Context): String = { @@ -236,7 +241,7 @@ trait MessageRendering { else 0 given Level = Level(level) given Offset = Offset(maxLineNumber.toString.length + 2) - val sb = mutable.StringBuilder() + val sb = StringBuilder() val posString = posStr(pos, msg, diagnosticLevel(dia)) if (posString.nonEmpty) sb.append(posString).append(EOL) if (pos.exists) { diff --git a/tests/warn/deprecated-origin-verbose.check b/tests/warn/deprecated-origin-verbose.check new file mode 100644 index 000000000000..e67efaf8668d --- /dev/null +++ b/tests/warn/deprecated-origin-verbose.check @@ -0,0 +1,14 @@ +-- Deprecation Warning: tests/warn/deprecated-origin-verbose.scala:12:18 ----------------------------------------------- +12 | class D extends C // warn + | ^ + | class C in package p is deprecated since 1.0: Old style +Matching filters for @nowarn or -Wconf: + - cat=deprecation + - origin=p.C +-- Deprecation Warning: tests/warn/deprecated-origin-verbose.scala:13:20 
----------------------------------------------- +13 | class Oil extends Crude // warn + | ^^^^^ + | class Crude in package p is deprecated since 1.0: Bad style +Matching filters for @nowarn or -Wconf: + - cat=deprecation + - origin=p.Crude diff --git a/tests/warn/deprecated-origin-verbose.scala b/tests/warn/deprecated-origin-verbose.scala new file mode 100644 index 000000000000..0a960744bdcf --- /dev/null +++ b/tests/warn/deprecated-origin-verbose.scala @@ -0,0 +1,15 @@ +//> using options -deprecation -Wconf:any:verbose + +package p: + @deprecated("Old style", since="1.0") + class C + @deprecated("Bad style", since="1.0") + class Crude + +package q: + import annotation.* + import p.* + class D extends C // warn + class Oil extends Crude // warn + @nowarn("""origin=p\.Crude""") + class Language extends Crude // nowarn obvs From 929e7eb814f7236f51b383ea1717a04e93bfa6e5 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 24 Aug 2024 14:39:14 -0700 Subject: [PATCH 489/827] Restore help msg for other than language --- compiler/src/dotty/tools/dotc/config/CompilerCommand.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala index 43f3ed63f969..e90bbcc36878 100644 --- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala @@ -9,7 +9,8 @@ abstract class CompilerCommand extends CliCommand: final def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String = settings.allSettings.find(isHelping) match - case Some(s) => availableOptionsMsg(_ == s, showArgFileMsg = false) + case Some(s @ settings.language) => availableOptionsMsg(_ == s, showArgFileMsg = false) + case Some(s) => s.description case _ => if (settings.help.value) usageMessage else if (settings.Vhelp.value) vusageMessage From 345bfc6639d5db556c8511e5a9ead6724fe37e29 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sun, 25 Aug 2024 16:23:33 +0100 Subject: [PATCH 490/827] Avoid import suggestion thread hang if -Ximport-suggestion-timeout <= 1 Without this change, if -Ximport-suggestion-timeout is set to 0 or 1, we would create a Timer thread and never cancel it, making the whole execution thread hang. --- compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 5ab6a4a5fae6..8f8c51e2f566 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -148,9 +148,9 @@ trait ImportSuggestions: * `name` that are applicable to `T`. */ private def importSuggestions(pt: Type)(using Context): (List[TermRef], List[TermRef]) = - val timer = new Timer() val allotted = ctx.run.nn.importSuggestionBudget if allotted <= 1 then return (Nil, Nil) + val timer = new Timer() implicits.println(i"looking for import suggestions, timeout = ${allotted}ms") val start = System.currentTimeMillis() val deadLine = start + allotted From 22ea67733dd01466750bb9e6e4401e3e4385bdc3 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sun, 25 Aug 2024 16:24:39 +0100 Subject: [PATCH 491/827] Implement Show[Seq[Nothing]] Somehow I hit this requirement, and it picked two other instances and calling them ambiguous, because it was looking for a Show[Nothing]. 
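
A rough, self-contained sketch of the failure mode, using a simplified stand-in for the internal `Formatting.Show` type class; the toy instances are hypothetical and only illustrate why an empty sequence, typed as `Seq[Nothing]`, needs its own instance:

  // Simplified model; not the compiler's actual Formatting.Show.
  trait Show[-T]:
    def show(x: T): String

  object Show:
    given Show[Int] with
      def show(x: Int) = x.toString
    given Show[String] with
      def show(x: String) = x
    given [X: Show]: Show[Seq[X]] with
      def show(xs: Seq[X]) = xs.map(summon[Show[X]].show).mkString(", ")

    // Without the instance below, summoning Show[Seq[Nothing]] (e.g. for Seq.empty)
    // goes through the generic Seq instance and needs a Show[Nothing]; because Show
    // is contravariant, both Show[Int] and Show[String] match Show[Nothing], and the
    // search can be reported as ambiguous.
    given Show[Seq[Nothing]] with
      def show(xs: Seq[Nothing]) = ""

  @main def demo(): Unit =
    println(summon[Show[Seq[Nothing]]].show(Seq.empty)) // prints an empty line

With the dedicated given in scope, the more specific `Show[Seq[Nothing]]` is preferred, so the generic `Seq` instance never has to search for a `Show[Nothing]`.
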
--- compiler/src/dotty/tools/dotc/printing/Formatting.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 43cac17e6318..a36e6f48533a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -76,6 +76,9 @@ object Formatting { given [X: Show]: Show[Seq[X]] with def show(x: Seq[X]) = CtxShow(x.map(toStr)) + given Show[Seq[Nothing]] with + def show(x: Seq[Nothing]) = CtxShow(x) + given [K: Show, V: Show]: Show[Map[K, V]] with def show(x: Map[K, V]) = CtxShow(x.map((k, v) => s"${toStr(k)} => ${toStr(v)}")) From 77b6cc0c31147a4cd769a1f28a2c431793d56b7d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 13:42:49 +0000 Subject: [PATCH 492/827] Bump hamzaremmal/sdkman-release-action Bumps [hamzaremmal/sdkman-release-action](https://github.com/hamzaremmal/sdkman-release-action) from 4cb6c8cf99cfdf0ed5de586d6b38500558737e65 to 1f2d4209b4f5a38721d4ae20014ea8e1689d869e. - [Release notes](https://github.com/hamzaremmal/sdkman-release-action/releases) - [Commits](https://github.com/hamzaremmal/sdkman-release-action/compare/4cb6c8cf99cfdf0ed5de586d6b38500558737e65...1f2d4209b4f5a38721d4ae20014ea8e1689d869e) --- updated-dependencies: - dependency-name: hamzaremmal/sdkman-release-action dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- .github/workflows/publish-sdkman.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index d5cbd6c02966..92123546dc4a 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -46,7 +46,7 @@ jobs: - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: - - uses: hamzaremmal/sdkman-release-action@4cb6c8cf99cfdf0ed5de586d6b38500558737e65 + - uses: hamzaremmal/sdkman-release-action@1f2d4209b4f5a38721d4ae20014ea8e1689d869e with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} From a3e48c2a0296d896469663bba3bcd05e7fe5344e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 13:42:53 +0000 Subject: [PATCH 493/827] Bump VirtusLab/scala-cli-setup from 1.4.3 to 1.5.0 Bumps [VirtusLab/scala-cli-setup](https://github.com/virtuslab/scala-cli-setup) from 1.4.3 to 1.5.0. - [Release notes](https://github.com/virtuslab/scala-cli-setup/releases) - [Commits](https://github.com/virtuslab/scala-cli-setup/compare/v1.4.3...v1.5.0) --- updated-dependencies: - dependency-name: VirtusLab/scala-cli-setup dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 57a5f105c86e..5a269d889d7c 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.4.3 + - uses: VirtusLab/scala-cli-setup@v1.5.0 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From f036195971c6c848df9cbe01af5ec5a967757c32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Mon, 26 Aug 2024 16:32:59 +0200 Subject: [PATCH 494/827] SimplePattern errors should now be recovered as wildcard instead of unimplemented expr (#21438) We should not emit more errors that came from our error recovery term trees. Previously, we've recovered those situations with unimplemented expression term added in https://github.com/scala/scala3/pull/19103 and before that it was just a `null` --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 2 +- tests/neg/i5004.scala | 2 +- tests/neg/parser-stability-1.scala | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 969915f4706d..39bdc69111d3 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3198,7 +3198,7 @@ object Parsers { else { val start = in.lastOffset syntaxErrorOrIncomplete(IllegalStartOfSimplePattern(), expectedOffset) - errorTermTree(start) + atSpan(Span(start, in.offset)) { Ident(nme.WILDCARD) } } } diff --git a/tests/neg/i5004.scala b/tests/neg/i5004.scala index 02105104efd1..ba1abe77f5bf 100644 --- a/tests/neg/i5004.scala +++ b/tests/neg/i5004.scala @@ -2,5 +2,5 @@ object i0 { 1 match { def this(): Int // error def this() -} // error +} } diff --git a/tests/neg/parser-stability-1.scala b/tests/neg/parser-stability-1.scala index 661ab87e31e5..560b9cf116e3 100644 --- a/tests/neg/parser-stability-1.scala +++ b/tests/neg/parser-stability-1.scala @@ -1,4 +1,3 @@ object x0 { x1 match // error def this // error -// error \ No newline at end of file From 0b08c981dacf7295980c234ac461e6af1c75b11d Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 26 Aug 2024 20:58:13 +0200 Subject: [PATCH 495/827] Charge also dcs of local reaches to capture set of enclosing method Fixes #21442 --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- tests/neg-custom-args/captures/i21442.check | 8 +++++++ tests/neg-custom-args/captures/i21442.scala | 18 +++++++++++++++ tests/neg-custom-args/captures/reaches.check | 22 +++++++++---------- tests/neg-custom-args/captures/reaches.scala | 2 +- .../captures/unsound-reach.check | 5 +++++ .../captures/unsound-reach.scala | 2 +- 7 files changed, 44 insertions(+), 15 deletions(-) create mode 100644 tests/neg-custom-args/captures/i21442.check create mode 100644 tests/neg-custom-args/captures/i21442.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 384c6e1f29ef..ec134149eb49 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -394,7 +394,7 @@ class CheckCaptures 
extends Recheck, SymTransformer: val isVisible = isVisibleFromEnv(refOwner) if !isVisible && (c.isReach || ref.isType) - && refSym.is(Param) + && (!ccConfig.useSealed || refSym.is(Param)) && refOwner == env.owner then if refSym.hasAnnotation(defn.UnboxAnnot) then diff --git a/tests/neg-custom-args/captures/i21442.check b/tests/neg-custom-args/captures/i21442.check new file mode 100644 index 000000000000..a3bbf65c5988 --- /dev/null +++ b/tests/neg-custom-args/captures/i21442.check @@ -0,0 +1,8 @@ +-- Error: tests/neg-custom-args/captures/i21442.scala:9:13 ------------------------------------------------------------- +9 | val io = x.unbox // error: local reach capability {x*} leaks + | ^^^^^^^ + | Local reach capability x* leaks into capture scope of method foo +-- Error: tests/neg-custom-args/captures/i21442.scala:17:14 ------------------------------------------------------------ +17 | val io = x1.unbox // error + | ^^^^^^^^ + | Local reach capability x1* leaks into capture scope of method bar diff --git a/tests/neg-custom-args/captures/i21442.scala b/tests/neg-custom-args/captures/i21442.scala new file mode 100644 index 000000000000..c9fa7d152fae --- /dev/null +++ b/tests/neg-custom-args/captures/i21442.scala @@ -0,0 +1,18 @@ +import language.experimental.captureChecking +trait IO: + def use(): Unit +case class Boxed[+T](unbox: T) + +// `foo` is a function that unboxes its parameter +// and uses the capability boxed inside the parameter. +def foo(x: Boxed[IO^]): Unit = + val io = x.unbox // error: local reach capability {x*} leaks + io.use() + +// `bar` is a function that does the same thing in a +// slightly different way. +// But, no type error reported. +def bar(x: Boxed[IO^]): Unit = + val x1: Boxed[IO^] = x + val io = x1.unbox // error + io.use() diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index aa45c738dcc5..f00fea09ed8c 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -25,18 +25,10 @@ | ^^^^^^^^^^^^ | The expression's type box () => Unit is not allowed to capture the root capability `cap`. | This usually means that a capability persists longer than its allowed lifetime. 
--- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:53:2 --------------------------------------- -53 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error - | ^ - | Found: box () => Unit - | Required: () => Unit - | - | Note that box () => Unit cannot be box-converted to () => Unit - | since at least one of their capture sets contains the root capability `cap` -54 | usingFile: f => -55 | id(() => f.write()) - | - | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/reaches.scala:55:6 ------------------------------------------------------------ +55 | id(() => f.write()) // error + | ^^^^^^^^^^^^^^^^^^^ + | Local reach capability id* leaks into capture scope of method test -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:62:27 -------------------------------------- 62 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ @@ -52,3 +44,9 @@ 79 | ps.map((x, y) => compose1(x, y)) // error // error | ^ | Local reach capability ps* leaks into capture scope of method mapCompose +-- [E057] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:53:51 -------------------------------------- +53 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error + | ^ + | Type argument () -> Unit does not conform to lower bound () => Unit + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index c2d8001e2a7c..c33ba80a668b 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -52,7 +52,7 @@ class Id[-A, +B >: A](): def test = val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error usingFile: f => - id(() => f.write()) + id(() => f.write()) // error def attack2 = val id: File^ -> File^ = x => x diff --git a/tests/neg-custom-args/captures/unsound-reach.check b/tests/neg-custom-args/captures/unsound-reach.check index f0e4c4deeb41..4a6793d204c5 100644 --- a/tests/neg-custom-args/captures/unsound-reach.check +++ b/tests/neg-custom-args/captures/unsound-reach.check @@ -1,3 +1,8 @@ +-- Error: tests/neg-custom-args/captures/unsound-reach.scala:18:21 ----------------------------------------------------- +18 | boom.use(f): (f1: File^{backdoor*}) => // error + | ^ + | Local reach capability backdoor* leaks into capture scope of method bad +19 | escaped = f1 -- [E164] Declaration Error: tests/neg-custom-args/captures/unsound-reach.scala:10:8 ----------------------------------- 10 | def use(x: File^)(op: File^ => Unit): Unit = op(x) // error, was OK using sealed checking | ^ diff --git a/tests/neg-custom-args/captures/unsound-reach.scala b/tests/neg-custom-args/captures/unsound-reach.scala index 22ed4614b71b..c3c31a7f32ff 100644 --- a/tests/neg-custom-args/captures/unsound-reach.scala +++ b/tests/neg-custom-args/captures/unsound-reach.scala @@ -15,6 +15,6 @@ def bad(): Unit = var escaped: File^{backdoor*} = null withFile("hello.txt"): f => - boom.use(f): (f1: File^{backdoor*}) => // was error before existentials + boom.use(f): (f1: File^{backdoor*}) => // error escaped = f1 From 98a41c2efcdfeb4999429d51356a5d3b643f49b7 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 26 Aug 2024 23:17:32 +0200 Subject: [PATCH 496/827] Fix test --- tests/neg-custom-args/captures/i21401.check | 4 ++++ tests/neg-custom-args/captures/i21401.scala | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git 
a/tests/neg-custom-args/captures/i21401.check b/tests/neg-custom-args/captures/i21401.check index e9a5fbd4678c..e204540358ce 100644 --- a/tests/neg-custom-args/captures/i21401.check +++ b/tests/neg-custom-args/captures/i21401.check @@ -8,3 +8,7 @@ | ^^^^^^^^^^^^^^^^^^^ | The expression's type Res is not allowed to capture the root capability `cap` in its part box IO^. | This usually means that a capability persists longer than its allowed lifetime. +-- Error: tests/neg-custom-args/captures/i21401.scala:18:21 ------------------------------------------------------------ +18 | val y: IO^{x*} = x.unbox // error + | ^^^^^^^ + | Local reach capability x* leaks into capture scope of method test2 diff --git a/tests/neg-custom-args/captures/i21401.scala b/tests/neg-custom-args/captures/i21401.scala index 07d407a79809..8284c601cd5f 100644 --- a/tests/neg-custom-args/captures/i21401.scala +++ b/tests/neg-custom-args/captures/i21401.scala @@ -15,5 +15,5 @@ def test2() = val a = usingIO[IO^](x => x) // error: The expression's type IO^ is not allowed to capture the root capability `cap` val leaked: [R, X <: Boxed[IO^] -> R] -> (op: X) -> R = usingIO[Res](mkRes) // error: The expression's type Res is not allowed to capture the root capability `cap` in its part box IO^ val x: Boxed[IO^] = leaked[Boxed[IO^], Boxed[IO^] -> Boxed[IO^]](x => x) - val y: IO^{x*} = x.unbox + val y: IO^{x*} = x.unbox // error y.println("boom") From da0f714778dd758e09e8232eb5992731bb3d2b78 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Mon, 26 Aug 2024 17:55:47 +0200 Subject: [PATCH 497/827] Update to sbt-develocity 1.1.1 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index a5944c60633a..21d8826b6b24 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -22,4 +22,4 @@ addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0") addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.10.0") -addSbtPlugin("com.gradle" % "sbt-develocity" % "1.0.1") +addSbtPlugin("com.gradle" % "sbt-develocity" % "1.1.1") From 8c358ba530ebcd1016660b7794b7ebf2a1fe1dad Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Mon, 26 Aug 2024 17:55:57 +0200 Subject: [PATCH 498/827] Disable Develocity Build Cache in compilation tests At the moment, the Develocity Build Cache does not work properly with our compilation tests, because the cache key does not take into account the sources that are read in the tests. Keeping the build cache would lead to false cache hits, so it is better to disable it for the time being. --- project/Build.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/project/Build.scala b/project/Build.scala index db0949c46b50..7515eb3c9552 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -908,6 +908,10 @@ object Build { sjsSources } (Set(scalaJSIRSourcesJar)).toSeq }.taskValue, + + // Develocity's Build Cache does not work with our compilation tests + // at the moment. 
+ Test / develocityBuildCacheClient := None, ) def insertClasspathInArgs(args: List[String], cp: String): List[String] = { From 748d4391bb90fcbd852f01562aa74224caa790a4 Mon Sep 17 00:00:00 2001 From: rochala Date: Sun, 30 Jun 2024 23:50:14 +0200 Subject: [PATCH 499/827] Remove artificial CURSOR added for the completions --- .../dotty/tools/dotc/ast/NavigateAST.scala | 11 +++- .../tools/dotc/interactive/Completion.scala | 13 ++-- .../tools/languageserver/CompletionTest.scala | 9 +++ .../dotty/tools/pc/AutoImportsProvider.scala | 2 +- .../tools/pc/completions/CompletionPos.scala | 9 ++- .../pc/completions/CompletionProvider.scala | 64 +++++++++++++------ .../tools/pc/completions/Completions.scala | 20 ++---- .../completions/InterpolatorCompletions.scala | 2 +- .../pc/completions/NamedArgCompletions.scala | 31 ++++----- .../pc/completions/OverrideCompletions.scala | 16 ++++- .../tests/completion/CompletionArgSuite.scala | 62 ++++++++++-------- .../CompletionInterpolatorSuite.scala | 2 +- .../pc/tests/completion/CompletionSuite.scala | 9 +-- .../tests/definition/PcDefinitionSuite.scala | 4 +- .../tools/pc/tests/hover/HoverTermSuite.scala | 6 +- 15 files changed, 159 insertions(+), 101 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index f83f12e1c027..2aeb2f7df067 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -3,6 +3,7 @@ package ast import core.Contexts.* import core.Decorators.* +import core.StdNames import util.Spans.* import Trees.{Closure, MemberDef, DefTree, WithLazyFields} import dotty.tools.dotc.core.Types.AnnotatedType @@ -76,6 +77,8 @@ object NavigateAST { var bestFit: List[Positioned] = path while (it.hasNext) { val path1 = it.next() match { + // FIXME this has to be changed to deterministicaly find recoveed tree + case untpd.Select(qual, name) if name == StdNames.nme.??? => path case p: Positioned if !p.isInstanceOf[Closure[?]] => singlePath(p, path) case m: untpd.Modifiers => childPath(m.productIterator, path) case xs: List[?] => childPath(xs.iterator, path) @@ -84,11 +87,17 @@ object NavigateAST { if ((path1 ne path) && ((bestFit eq path) || bestFit.head.span != path1.head.span && - bestFit.head.span.contains(path1.head.span))) + envelops(bestFit.head.span, path1.head.span))) bestFit = path1 } bestFit } + + def envelops(a: Span, b: Span): Boolean = + !b.exists || a.exists && ( + (a.start < b.start && a.end >= b.end ) || (a.start <= b.start && a.end > b.end) + ) + /* * Annotations trees are located in the Type */ diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 1395d9b80b53..7112caf1cfad 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -121,16 +121,17 @@ object Completion: case _ => "" + def naiveCompletionPrefix(text: String, offset: Int): String = + var i = offset - 1 + while i >= 0 && text(i).isUnicodeIdentifierPart do i -= 1 + i += 1 // move to first character + text.slice(i, offset) + /** * Inspect `path` to determine the completion prefix. Only symbols whose name start with the * returned prefix should be considered. 
*/ def completionPrefix(path: List[untpd.Tree], pos: SourcePosition)(using Context): String = - def fallback: Int = - var i = pos.point - 1 - while i >= 0 && Character.isUnicodeIdentifierPart(pos.source.content()(i)) do i -= 1 - i + 1 - path match case GenericImportSelector(sel) => if sel.isGiven then completionPrefix(sel.bound :: Nil, pos) @@ -148,7 +149,7 @@ object Completion: case (tree: untpd.RefTree) :: _ if tree.name != nme.ERROR => tree.name.toString.take(pos.span.point - tree.span.point) - case _ => pos.source.content.slice(fallback, pos.point).mkString + case _ => naiveCompletionPrefix(pos.source.content().mkString, pos.point) end completionPrefix diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index 8034b4c8d40b..043788dbd0ac 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -1706,6 +1706,15 @@ class CompletionTest { ("getOrElse", Method, "[V1 >: String](key: Int, default: => V1): V1"), )) + @Test def testtest: Unit = + code"""|object M { + | def sel$m1 + |} + |""" + .completion(m1, Set( + ("getOrElse", Method, "[V1 >: String](key: Int, default: => V1): V1"), + )) + @Test def noEnumCompletionInNewContext: Unit = code"""|enum TestEnum: | case TestCase diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala index ded7845ffa4e..0252786c20f6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala @@ -67,7 +67,7 @@ final class AutoImportsProvider( val results = symbols.result.filter(isExactMatch(_, name)) if results.nonEmpty then - val correctedPos = CompletionPos.infer(pos, params, path).toSourcePosition + val correctedPos = CompletionPos.infer(pos, params, path, false).toSourcePosition val mkEdit = path match // if we are in import section just specify full name diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala index ad571ff843c3..6d89cb663b9c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala @@ -22,7 +22,8 @@ case class CompletionPos( identEnd: Int, query: String, originalCursorPosition: SourcePosition, - sourceUri: URI + sourceUri: URI, + withCURSOR: Boolean ): def queryEnd: Int = originalCursorPosition.point def stripSuffixEditRange: l.Range = new l.Range(originalCursorPosition.offsetToPos(queryStart), originalCursorPosition.offsetToPos(identEnd)) @@ -34,17 +35,19 @@ object CompletionPos: def infer( sourcePos: SourcePosition, offsetParams: OffsetParams, - adjustedPath: List[Tree] + adjustedPath: List[Tree], + wasCursorApplied: Boolean )(using Context): CompletionPos = val identEnd = adjustedPath match case (refTree: RefTree) :: _ if refTree.name.toString.contains(Cursor.value) => refTree.span.end - Cursor.value.length + case (refTree: RefTree) :: _ => refTree.span.end case _ => sourcePos.end val query = Completion.completionPrefix(adjustedPath, sourcePos) val start = sourcePos.end - query.length() - CompletionPos(start, identEnd, query.nn, sourcePos, offsetParams.uri.nn) + CompletionPos(start, identEnd, query.nn, 
sourcePos, offsetParams.uri.nn, wasCursorApplied) /** * Infer the indentation by counting the number of spaces in the given line. diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 4d45595dac8d..a04cd82d10b3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -14,9 +14,12 @@ import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Phases -import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.StdNames.nme +import dotty.tools.dotc.core.Flags import dotty.tools.dotc.interactive.Interactive +import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.parsing.Tokens import dotty.tools.dotc.util.SourceFile import dotty.tools.pc.AutoImports.AutoImportEdits import dotty.tools.pc.AutoImports.AutoImportsGenerator @@ -47,23 +50,31 @@ class CompletionProvider( val uri = params.uri().nn val text = params.text().nn - val code = applyCompletionCursor(params) + val (wasCursorApplied, code) = applyCompletionCursor(params) val sourceFile = SourceFile.virtual(uri, code) driver.run(uri, sourceFile) - val ctx = driver.currentCtx + given ctx: Context = driver.currentCtx val pos = driver.sourcePosition(params) val (items, isIncomplete) = driver.compilationUnits.get(uri) match case Some(unit) => - val newctx = ctx.fresh.setCompilationUnit(unit).withPhase(Phases.typerPhase(using ctx)) - val tpdPath = Interactive.pathTo(newctx.compilationUnit.tpdTree, pos.span)(using newctx) - val adjustedPath = Interactive.resolveTypedOrUntypedPath(tpdPath, pos)(using newctx) + val tpdPath0 = Interactive.pathTo(unit.tpdTree, pos.span)(using newctx) + val adjustedPath = Interactive.resolveTypedOrUntypedPath(tpdPath0, pos)(using newctx) + + val tpdPath = tpdPath0 match + // $1$ // FIXME add check for a $1$ name to make sure we only do the below in lifting case + case Select(qual, name) :: tail if qual.symbol.is(Flags.Synthetic) => + qual.symbol.defTree match + case valdef: ValDef => Select(valdef.rhs, name) :: tail + case _ => tpdPath0 + case _ => tpdPath0 + val locatedCtx = Interactive.contextOfPath(tpdPath)(using newctx) val indexedCtx = IndexedContext(locatedCtx) - val completionPos = CompletionPos.infer(pos, params, adjustedPath)(using locatedCtx) + val completionPos = CompletionPos.infer(pos, params, adjustedPath, wasCursorApplied)(using locatedCtx) val autoImportsGen = AutoImports.generator( completionPos.toSourcePosition, @@ -114,6 +125,10 @@ class CompletionProvider( ) end completions + val allKeywords = + val softKeywords = Tokens.softModifierNames + nme.as + nme.derives + nme.extension + nme.throws + nme.using + Tokens.keywords.toList.map(Tokens.tokenString) ++ softKeywords.map(_.toString) + /** * In case if completion comes from empty line like: * {{{ @@ -126,23 +141,30 @@ class CompletionProvider( * Otherwise, completion poisition doesn't point at any tree * because scala parser trim end position to the last statement pos. 
*/ - private def applyCompletionCursor(params: OffsetParams): String = + private def applyCompletionCursor(params: OffsetParams): (Boolean, String) = val text = params.text().nn val offset = params.offset().nn + val query = Completion.naiveCompletionPrefix(text, offset) - val isStartMultilineComment = - val i = params.offset() - i >= 3 && (text.charAt(i - 1) match - case '*' => - text.charAt(i - 2) == '*' && - text.charAt(i - 3) == '/' - case _ => false - ) - if isStartMultilineComment then - // Insert potentially missing `*/` to avoid comment out all codes after the "/**". - text.substring(0, offset).nn + Cursor.value + "*/" + text.substring(offset) + if offset > 0 && text.charAt(offset - 1).isUnicodeIdentifierPart && !allKeywords.contains(query) then + false -> text else - text.substring(0, offset).nn + Cursor.value + text.substring(offset) + val isStartMultilineComment = + + val i = params.offset() + i >= 3 && (text.charAt(i - 1) match + case '*' => + text.charAt(i - 2) == '*' && + text.charAt(i - 3) == '/' + case _ => false + ) + true -> ( + if isStartMultilineComment then + // Insert potentially missing `*/` to avoid comment out all codes after the "/**". + text.substring(0, offset).nn + Cursor.value + "*/" + text.substring(offset) + else + text.substring(0, offset).nn + Cursor.value + text.substring(offset) + ) end applyCompletionCursor private def completionItems( @@ -175,7 +197,7 @@ class CompletionProvider( Select(Apply(Select(Select(_, name), _), _), _), _ ) :: _ => - name == StdNames.nme.StringContext + name == nme.StringContext // "My name is $name" case Literal(Constant(_: String)) :: _ => true diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index 3bebaa76a309..f691939772c6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -67,23 +67,19 @@ class Completions( case _ :: (_: UnApply) :: _ => false case _ => true - private lazy val shouldAddSuffix = shouldAddSnippet && + private lazy val shouldAddSuffix = shouldAddSnippet && (path match /* In case of `method@@()` we should not add snippets and the path * will contain apply as the parent of the current tree. */ - case (fun) :: (appl: GenericApply) :: _ if appl.fun == fun => - false + case (fun) :: (appl: GenericApply) :: _ if appl.fun == fun => false /* In case of `T@@[]` we should not add snippets. */ - case tpe :: (appl: AppliedTypeTree) :: _ if appl.tpt == tpe => - false - case _ :: (withcursor @ Select(fun, name)) :: (appl: GenericApply) :: _ - if appl.fun == withcursor && name.decoded == Cursor.value => - false + case tpe :: (appl: AppliedTypeTree) :: _ if appl.tpt == tpe => false + case sel :: (funSel @ Select(fun, name)) :: (appl: GenericApply) :: _ + if appl.fun == funSel && sel == fun => false case _ => true) - private lazy val isNew: Boolean = Completion.isInNewContext(adjustedPath) def includeSymbol(sym: Symbol)(using Context): Boolean = @@ -521,14 +517,8 @@ class Completions( if tree.selectors.exists(_.renamed.sourcePos.contains(pos)) => (List.empty, true) - // From Scala 3.1.3-RC3 (as far as I know), path contains - // `Literal(Constant(null))` on head for an incomplete program, in this case, just ignore the head. 
- case Literal(Constant(null)) :: tl => - advancedCompletions(tl, completionPos) - case _ => val args = NamedArgCompletions.contribute( - pos, path, adjustedPath, indexedContext, diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala index 2e39c17b24b3..da46e5167834 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala @@ -224,7 +224,7 @@ object InterpolatorCompletions: buildTargetIdentifier: String )(using ctx: Context, reportsContext: ReportContext): List[CompletionValue] = val litStartPos = lit.span.start - val litEndPos = lit.span.end - Cursor.value.length() + val litEndPos = lit.span.end - (if completionPos.withCURSOR then Cursor.value.length else 0) val position = completionPos.originalCursorPosition val span = position.span val nameStart = diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala index 647b151a635b..11b0cd660f42 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala @@ -35,9 +35,8 @@ import scala.annotation.tailrec object NamedArgCompletions: def contribute( - pos: SourcePosition, path: List[Tree], - untypedPath: => List[untpd.Tree], + untypedPath: List[untpd.Tree], indexedContext: IndexedContext, clientSupportsSnippets: Boolean, )(using ctx: Context): List[CompletionValue] = @@ -64,12 +63,13 @@ object NamedArgCompletions: for app <- getApplyForContextFunctionParam(rest) if !app.fun.isInfix - yield contribute( - Some(ident), - app, - indexedContext, - clientSupportsSnippets, - ) + yield + contribute( + Some(ident), + app, + indexedContext, + clientSupportsSnippets, + ) contribution.getOrElse(Nil) case (app: Apply) :: _ => /** @@ -156,10 +156,11 @@ object NamedArgCompletions: case _ => None val matchingMethods = for - (name, indxContext) <- maybeNameAndIndexedContext(method) - potentialMatches <- indxContext.findSymbol(name) - yield potentialMatches.collect { - case m + (name, indexedContext) <- maybeNameAndIndexedContext(method) + potentialMatches <- indexedContext.findSymbol(name) + yield + potentialMatches.collect { + case m if m.is(Flags.Method) && m.vparamss.length >= argss.length && Try(m.isAccessibleFrom(apply.symbol.info)).toOption @@ -179,8 +180,7 @@ object NamedArgCompletions: end fallbackFindMatchingMethods val matchingMethods: List[Symbols.Symbol] = - if method.symbol.paramSymss.nonEmpty - then + if method.symbol.paramSymss.nonEmpty then val allArgsAreSupplied = val vparamss = method.symbol.vparamss vparamss.length == argss.length && vparamss @@ -295,6 +295,7 @@ object NamedArgCompletions: ) } + // FIXME pass query here val prefix = ident .map(_.name.toString) .getOrElse("") @@ -391,7 +392,7 @@ class FuzzyArgMatcher(tparams: List[Symbols.Symbol])(using Context): (expectedArgs.length == actualArgs.length || (!allArgsProvided && expectedArgs.length >= actualArgs.length)) && actualArgs.zipWithIndex.forall { - case (Ident(name), _) if name.endsWith(Cursor.value) => true + case (Ident(name), _) => true case (NamedArg(name, arg), _) => expectedArgs.exists { expected => expected.name == name && (!arg.hasType || arg.typeOpt.unfold diff 
--git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala index 1e310ca0e8ec..28dc4ebe59c9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala @@ -582,7 +582,7 @@ object OverrideCompletions: ) ) // class Main extends Val: - // he@@ + // he@@ case (id: Ident) :: (t: Template) :: (td: TypeDef) :: _ if t.parents.nonEmpty => Some( @@ -595,6 +595,20 @@ object OverrideCompletions: ) ) + // class Main extends Val: + // hello@ // this transforms into this.hello, thus is a Select + case (sel @ Select(th: This, name)) :: (t: Template) :: (td: TypeDef) :: _ + if t.parents.nonEmpty && th.qual.name == td.name => + Some( + ( + td, + None, + sel.sourcePos.start, + false, + Some(name.show), + ) + ) + case _ => None end OverrideExtractor diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala index 210a28f6a7a1..17f21b16d6e8 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala @@ -614,8 +614,9 @@ class CompletionArgSuite extends BaseCompletionSuite: check( s"""|case class Context() | - |def foo(arg1: (Context) ?=> Int, arg2: Int): String = ??? - |val m = foo(ar@@) + |object Main: + | def foo(arg1: (Context) ?=> Int, arg2: Int): String = ??? + | val m = foo(ar@@) |""".stripMargin, """|arg1 = : (Context) ?=> Int |arg2 = : Int @@ -627,8 +628,9 @@ class CompletionArgSuite extends BaseCompletionSuite: check( s"""|case class Context() | - |def foo(arg1: Context ?=> Int, arg2: Context ?=> Int): String = ??? - |val m = foo(arg1 = ???, a@@) + |object Main: + | def foo(arg1: Context ?=> Int, arg2: Context ?=> Int): String = ??? + | val m = foo(arg1 = ???, a@@) |""".stripMargin, """|arg2 = : (Context) ?=> Int |""".stripMargin, @@ -639,8 +641,9 @@ class CompletionArgSuite extends BaseCompletionSuite: check( s"""|case class Context() | - |def foo(arg1: (Boolean, Context) ?=> Int ?=> String, arg2: (Boolean, Context) ?=> Int ?=> String): String = ??? - |val m = foo(arg1 = ???, a@@) + |object Main: + | def foo(arg1: (Boolean, Context) ?=> Int ?=> String, arg2: (Boolean, Context) ?=> Int ?=> String): String = ??? + | val m = foo(arg1 = ???, a@@) |""".stripMargin, """|arg2 = : (Boolean, Context) ?=> (Int) ?=> String |""".stripMargin, @@ -786,10 +789,11 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-with-param` = check( - """|def m(idd : String, abb: Int): Int = ??? - |def m(inn : Int, uuu: Option[Int]): Int = ??? - |def m(inn : Int, aaa: Int): Int = ??? - |def k: Int = m(1, a@@) + """|object Main: + | def m(idd : String, abb: Int): Int = ??? + | def m(inn : Int, uuu: Option[Int]): Int = ??? + | def m(inn : Int, aaa: Int): Int = ??? + | def k: Int = m(1, a@@) |""".stripMargin, """|aaa = : Int |assert(assertion: Boolean): Unit @@ -799,10 +803,11 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-with-named-param` = check( - """|def m(idd : String, abb: Int): Int = ??? - |def m(inn : Int, uuu: Option[Int]): Int = ??? - |def m(inn : Int, aaa: Int): Int = ??? - |def k: Int = m(inn = 1, a@@) + """|object Main: + | def m(idd : String, abb: Int): Int = ??? 
+ | def m(inn : Int, uuu: Option[Int]): Int = ??? + | def m(inn : Int, aaa: Int): Int = ??? + | def k: Int = m(inn = 1, a@@) |""".stripMargin, """|aaa = : Int |assert(assertion: Boolean): Unit @@ -812,7 +817,7 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-generic` = check( - """|object M: + """|object Main: | val g = 3 | val l : List[Int] = List(1,2,3) | def m[T](inn : List[T], yy: Int, aaa: Int, abb: Option[Int]): Int = ??? @@ -899,10 +904,11 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-function-param` = check( - """|def m[T](i: Int)(inn: T => Int, abb: Option[Int]): Int = ??? - |def m[T](i: Int)(inn: T => Int, aaa: Int): Int = ??? - |def m[T](i: Int)(inn: T => String, acc: List[Int]): Int = ??? - |def k = m(1)(inn = identity[Int], a@@) + """|object Main: + | def m[T](i: Int)(inn: T => Int, abb: Option[Int]): Int = ??? + | def m[T](i: Int)(inn: T => Int, aaa: Int): Int = ??? + | def m[T](i: Int)(inn: T => String, acc: List[Int]): Int = ??? + | def k = m(1)(inn = identity[Int], a@@) |""".stripMargin, """|aaa = : Int |abb = : Option[Int] @@ -913,10 +919,11 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-function-param2` = check( - """|def m[T](i: Int)(inn: T => Int, abb: Option[Int]): Int = ??? - |def m[T](i: Int)(inn: T => Int, aaa: Int): Int = ??? - |def m[T](i: String)(inn: T => Int, acc: List[Int]): Int = ??? - |def k = m(1)(inn = identity[Int], a@@) + """|object Main: + | def m[T](i: Int)(inn: T => Int, abb: Option[Int]): Int = ??? + | def m[T](i: Int)(inn: T => Int, aaa: Int): Int = ??? + | def m[T](i: String)(inn: T => Int, acc: List[Int]): Int = ??? + | def k = m(1)(inn = identity[Int], a@@) |""".stripMargin, """|aaa = : Int |abb = : Option[Int] @@ -978,9 +985,10 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-function-param3` = check( - """|def m[T](inn: Int => T, abb: Option[Int]): Int = ??? - |def m[T](inn: String => T, aaa: Int): Int = ??? - |def k = m(identity[Int], a@@) + """|object Main: + | def m[T](inn: Int => T, abb: Option[Int]): Int = ??? + | def m[T](inn: String => T, aaa: Int): Int = ??? 
+ | def k = m(identity[Int], a@@) |""".stripMargin, """|abb = : Option[Int] |""".stripMargin, @@ -1109,7 +1117,7 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `comparison` = check( - """package a + """ |object w { | abstract class T(x: Int) { | def met(x: Int): Unit = { diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala index 08cc1535fd56..50019928a2f3 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala @@ -112,7 +112,7 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: |""".stripMargin.triplequoted, """|object Main { | val myName = "" - | s"$myName $$" + | s"$myName$0 $$" |} |""".stripMargin.triplequoted, filterText = "myName" diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 47e4cabb76f4..1cd26858b934 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -530,8 +530,6 @@ class CompletionSuite extends BaseCompletionSuite: """.stripMargin, """|until(end: Int): Range |until(end: Int, step: Int): Range - |until(end: Long): Exclusive[Long] - |until(end: Long, step: Long): Exclusive[Long] |""".stripMargin, stableOrder = false ) @@ -1606,7 +1604,7 @@ class CompletionSuite extends BaseCompletionSuite: @Test def `multi-export` = check( - """export scala.collection.{AbstractMap, Set@@} + """export scala.collection.{AbstractMap, Se@@} |""".stripMargin, """Set scala.collection |SetOps scala.collection @@ -1619,7 +1617,9 @@ class CompletionSuite extends BaseCompletionSuite: |StrictOptimizedSetOps scala.collection |StrictOptimizedSortedSetOps scala.collection |GenSet = scala.collection.Set[X] - |""".stripMargin + |""".stripMargin, + filter = _.contains("Set") + ) @Test def `multi-imports` = @@ -1638,6 +1638,7 @@ class CompletionSuite extends BaseCompletionSuite: |StrictOptimizedSortedSetOps scala.collection |GenSet = scala.collection.Set[X] |""".stripMargin, + filter = _.contains("Set") ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala index c7c9b9979404..20d56ab94938 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala @@ -28,7 +28,7 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: MockLocation("scala/Predef.Ensuring#ensuring(+2).", "Predef.scala"), MockLocation("scala/Predef.Ensuring#ensuring(+3).", "Predef.scala"), MockLocation("scala/collection/immutable/List#`::`().", "List.scala"), - MockLocation("scala/collection/IterableFactory#apply().", "Factory.scala") + MockLocation("scala/package.List.", "List.scala") ) override def definitions(offsetParams: OffsetParams): List[Location] = @@ -123,7 +123,7 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: check( """| |object Main { - | /*scala/collection/IterableFactory#apply(). Factory.scala*/@@List(1) + | /*scala/package.List. 
List.scala*/@@List(1) |} |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala index 9ae37048caf7..0b992fe98f08 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala @@ -269,9 +269,9 @@ class HoverTermSuite extends BaseHoverSuite: | } yield x |} |""".stripMargin, - """|Option[Int] - |override def headOption: Option[A] - |""".stripMargin.hover + """|```scala + |override def headOption: Option[Int] + |```""".stripMargin.hover ) @Test def `object` = From 7205f20f1dc3f8712a925a780a244f7448cf8226 Mon Sep 17 00:00:00 2001 From: rochala Date: Thu, 4 Jul 2024 21:10:23 +0200 Subject: [PATCH 500/827] Add tests, don't cache when CURSOR is added --- .../dotty/tools/dotc/ast/NavigateAST.scala | 37 ++-- .../dotty/tools/dotc/parsing/Parsers.scala | 2 +- .../tools/languageserver/CompletionTest.scala | 9 - .../tools/languageserver/HoverTest.scala | 2 +- .../dotty/tools/pc/AutoImportsProvider.scala | 1 - ...MetalsDriver.scala => CachingDriver.scala} | 10 +- .../dotty/tools/pc/PcInlayHintsProvider.scala | 1 - .../dotty/tools/pc/Scala3CompilerAccess.scala | 3 +- .../tools/pc/Scala3CompilerWrapper.scala | 7 +- .../tools/pc/ScalaPresentationCompiler.scala | 34 ++-- .../tools/pc/ScriptFirstImportPosition.scala | 1 - .../tools/pc/SignatureHelpProvider.scala | 1 - .../tools/pc/SymbolInformationProvider.scala | 3 +- .../pc/completions/CompletionProvider.scala | 25 ++- .../tools/pc/completions/Completions.scala | 7 +- .../pc/completions/NamedArgCompletions.scala | 2 - .../pc/printer/ShortenedTypePrinter.scala | 2 - .../tools/pc/base/BaseInlayHintsSuite.scala | 4 +- .../tools/pc/base/ReusableClassRunner.scala | 9 +- .../tools/pc/tests/CompilerCachingSuite.scala | 166 ++++++++++++++++++ .../CompletionExtraConstructorSuite.scala | 4 - .../tests/definition/PcDefinitionSuite.scala | 4 +- .../tools/pc/tests/hover/HoverTermSuite.scala | 8 + .../tools/pc/utils/DefSymbolCollector.scala | 2 +- .../tools/pc/utils/MockSymbolSearch.scala | 1 - .../dotty/tools/pc/utils/PcAssertions.scala | 2 - .../dotty/tools/pc/utils/TestInlayHints.scala | 3 +- 27 files changed, 256 insertions(+), 94 deletions(-) rename presentation-compiler/src/main/dotty/tools/pc/{MetalsDriver.scala => CachingDriver.scala} (88%) create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index 2aeb2f7df067..429e0868667c 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -75,24 +75,41 @@ object NavigateAST { def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = { var bestFit: List[Positioned] = path - while (it.hasNext) { - val path1 = it.next() match { - // FIXME this has to be changed to deterministicaly find recoveed tree - case untpd.Select(qual, name) if name == StdNames.nme.??? => path + while (it.hasNext) do + val path1 = it.next() match + case sel: untpd.Select if isTreeFromRecovery(sel) => path case p: Positioned if !p.isInstanceOf[Closure[?]] => singlePath(p, path) case m: untpd.Modifiers => childPath(m.productIterator, path) case xs: List[?] 
=> childPath(xs.iterator, path) case _ => path - } - if ((path1 ne path) && - ((bestFit eq path) || - bestFit.head.span != path1.head.span && - envelops(bestFit.head.span, path1.head.span))) + + if (path1 ne path) && ((bestFit eq path) || isBetterFit(bestFit, path1)) then bestFit = path1 - } + bestFit } + /** + * When choosing better fit we compare spans. If candidate span has starting or ending point inside (exclusive) + * current best fit it is selected as new best fit. This means that same spans are failing the first predicate. + * + * In case when spans start and end at same offsets we prefer non synthethic one. + */ + def isBetterFit(currentBest: List[Positioned], candidate: List[Positioned]): Boolean = + if currentBest.isEmpty && candidate.nonEmpty then true + else if currentBest.nonEmpty && candidate.nonEmpty then + val bestSpan= currentBest.head.span + val candidateSpan = candidate.head.span + + bestSpan != candidateSpan && + envelops(bestSpan, candidateSpan) || + bestSpan.contains(candidateSpan) && bestSpan.isSynthetic && !candidateSpan.isSynthetic + else false + + + def isTreeFromRecovery(p: untpd.Select): Boolean = + p.name == StdNames.nme.??? && p.qualifier.symbol.name == StdNames.nme.Predef && p.span.isSynthetic + def envelops(a: Span, b: Span): Boolean = !b.exists || a.exists && ( (a.start < b.start && a.end >= b.end ) || (a.start <= b.start && a.end > b.end) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 39bdc69111d3..8a173faa3cec 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -406,7 +406,7 @@ object Parsers { false } - def errorTermTree(start: Offset): Tree = atSpan(start, in.offset, in.offset) { unimplementedExpr } + def errorTermTree(start: Offset): Tree = atSpan(Span(start, in.offset)) { unimplementedExpr } private var inFunReturnType = false private def fromWithinReturnType[T](body: => T): T = { diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index 043788dbd0ac..8034b4c8d40b 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -1706,15 +1706,6 @@ class CompletionTest { ("getOrElse", Method, "[V1 >: String](key: Int, default: => V1): V1"), )) - @Test def testtest: Unit = - code"""|object M { - | def sel$m1 - |} - |""" - .completion(m1, Set( - ("getOrElse", Method, "[V1 >: String](key: Int, default: => V1): V1"), - )) - @Test def noEnumCompletionInNewContext: Unit = code"""|enum TestEnum: | case TestCase diff --git a/language-server/test/dotty/tools/languageserver/HoverTest.scala b/language-server/test/dotty/tools/languageserver/HoverTest.scala index a2196f4a71f3..91f72e222432 100644 --- a/language-server/test/dotty/tools/languageserver/HoverTest.scala +++ b/language-server/test/dotty/tools/languageserver/HoverTest.scala @@ -227,7 +227,7 @@ class HoverTest { @Test def enums: Unit = { code"""|package example |enum TestEnum3: - | case ${m1}A${m2} // no tooltip + | case ${m1}A${m2} // no tooltip | |""" .hover(m1 to m2, hoverContent("example.TestEnum3")) diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala index 0252786c20f6..3d4864c73508 100644 --- 
a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala @@ -13,7 +13,6 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile -import dotty.tools.pc.AutoImports.* import dotty.tools.pc.completions.CompletionPos import dotty.tools.pc.utils.InteractiveEnrichments.* diff --git a/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala b/presentation-compiler/src/main/dotty/tools/pc/CachingDriver.scala similarity index 88% rename from presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala rename to presentation-compiler/src/main/dotty/tools/pc/CachingDriver.scala index 819c3f2fc9c9..f5715c2780a9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CachingDriver.scala @@ -10,8 +10,8 @@ import dotty.tools.dotc.util.SourceFile import scala.compiletime.uninitialized /** - * MetalsDriver is a wrapper class that provides a compilation cache for InteractiveDriver. - * MetalsDriver skips running compilation if + * CachingDriver is a wrapper class that provides a compilation cache for InteractiveDriver. + * CachingDriver skips running compilation if * - the target URI of `run` is the same as the previous target URI * - the content didn't change since the last compilation. * @@ -27,9 +27,7 @@ import scala.compiletime.uninitialized * To avoid the complexity related to currentCtx, * we decided to cache only when the target URI only if the same as the previous run. */ -class MetalsDriver( - override val settings: List[String] -) extends InteractiveDriver(settings): +class CachingDriver(override val settings: List[String]) extends InteractiveDriver(settings): @volatile private var lastCompiledURI: URI = uninitialized @@ -55,4 +53,4 @@ class MetalsDriver( lastCompiledURI = uri diags -end MetalsDriver +end CachingDriver diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala index b3f836801460..9c0e6bcfa9d8 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala @@ -14,7 +14,6 @@ import scala.meta.internal.pc.LabelPart.* import scala.meta.pc.InlayHintsParams import scala.meta.pc.SymbolSearch -import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags diff --git a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala index ef5aaf4e5ed0..1443fbcf37cc 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala @@ -8,13 +8,14 @@ import scala.meta.internal.pc.CompilerAccess import scala.meta.pc.PresentationCompilerConfig import dotty.tools.dotc.reporting.StoreReporter +import dotty.tools.dotc.interactive.InteractiveDriver class Scala3CompilerAccess( config: PresentationCompilerConfig, sh: Option[ScheduledExecutorService], newCompiler: () => Scala3CompilerWrapper )(using ec: ExecutionContextExecutor, rc: ReportContext) - extends CompilerAccess[StoreReporter, MetalsDriver]( + extends 
CompilerAccess[StoreReporter, InteractiveDriver]( config, sh, newCompiler, diff --git a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerWrapper.scala b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerWrapper.scala index de4fb282edc9..968c144625a3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerWrapper.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerWrapper.scala @@ -4,11 +4,12 @@ import scala.meta.internal.pc.CompilerWrapper import scala.meta.internal.pc.ReporterAccess import dotty.tools.dotc.reporting.StoreReporter +import dotty.tools.dotc.interactive.InteractiveDriver -class Scala3CompilerWrapper(driver: MetalsDriver) - extends CompilerWrapper[StoreReporter, MetalsDriver]: +class Scala3CompilerWrapper(driver: InteractiveDriver) + extends CompilerWrapper[StoreReporter, InteractiveDriver]: - override def compiler(): MetalsDriver = driver + override def compiler(): InteractiveDriver = driver override def resetReporter(): Unit = val ctx = driver.currentCtx diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index 85de8e7d8439..e6da8b79164f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -33,11 +33,13 @@ import dotty.tools.pc.completions.CompletionProvider import dotty.tools.pc.InferExpectedType import dotty.tools.pc.completions.OverrideCompletions import dotty.tools.pc.buildinfo.BuildInfo +import dotty.tools.pc.SymbolInformationProvider +import dotty.tools.dotc.interactive.InteractiveDriver import org.eclipse.lsp4j.DocumentHighlight import org.eclipse.lsp4j.TextEdit import org.eclipse.lsp4j as l -import dotty.tools.pc.SymbolInformationProvider + case class ScalaPresentationCompiler( buildTargetIdentifier: String = "", @@ -76,14 +78,20 @@ case class ScalaPresentationCompiler( override def withReportsLoggerLevel(level: String): PresentationCompiler = copy(reportsLevel = ReportLevel.fromString(level)) - val compilerAccess: CompilerAccess[StoreReporter, MetalsDriver] = + val compilerAccess: CompilerAccess[StoreReporter, InteractiveDriver] = Scala3CompilerAccess( config, sh, - () => new Scala3CompilerWrapper(newDriver) - )(using - ec - ) + () => new Scala3CompilerWrapper(CachingDriver(driverSettings)) + )(using ec) + + val driverSettings = + val implicitSuggestionTimeout = List("-Ximport-suggestion-timeout", "0") + val defaultFlags = List("-color:never") + val filteredOptions = removeDoubleOptions(options.filterNot(forbiddenOptions)) + + filteredOptions ::: defaultFlags ::: implicitSuggestionTimeout ::: "-classpath" :: classpath + .mkString(File.pathSeparator) :: Nil private def removeDoubleOptions(options: List[String]): List[String] = options match @@ -92,19 +100,6 @@ case class ScalaPresentationCompiler( case head :: tail => head :: removeDoubleOptions(tail) case Nil => options - def newDriver: MetalsDriver = - val implicitSuggestionTimeout = List("-Ximport-suggestion-timeout", "0") - val defaultFlags = List("-color:never") - val filteredOptions = removeDoubleOptions( - options.filterNot(forbiddenOptions) - ) - val settings = - filteredOptions ::: defaultFlags ::: implicitSuggestionTimeout ::: "-classpath" :: classpath - .mkString( - File.pathSeparator - ) :: Nil - new MetalsDriver(settings) - override def semanticTokens( params: VirtualFileParams ): 
CompletableFuture[ju.List[Node]] = @@ -146,6 +141,7 @@ case class ScalaPresentationCompiler( new CompletionProvider( search, driver, + () => InteractiveDriver(driverSettings), params, config, buildTargetIdentifier, diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScriptFirstImportPosition.scala b/presentation-compiler/src/main/dotty/tools/pc/ScriptFirstImportPosition.scala index 2bb8023cee08..5a4c135fdc4c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScriptFirstImportPosition.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScriptFirstImportPosition.scala @@ -1,6 +1,5 @@ package dotty.tools.pc -import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Comments.Comment object ScriptFirstImportPosition: diff --git a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala index edfd9c95fa84..bd16d2ce2aa9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala @@ -1,6 +1,5 @@ package dotty.tools.pc -import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Symbols.* diff --git a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala index da075e21f486..ccda618078b8 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala @@ -7,8 +7,6 @@ import scala.meta.pc.PcSymbolKind import scala.meta.pc.PcSymbolProperty import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Denotations.Denotation -import dotty.tools.dotc.core.Denotations.MultiDenotation import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.StdNames.nme @@ -19,6 +17,7 @@ import dotty.tools.pc.utils.InteractiveEnrichments.allSymbols import dotty.tools.pc.utils.InteractiveEnrichments.stripBackticks import scala.meta.internal.pc.PcSymbolInformation import scala.meta.internal.pc.SymbolInfo +import dotty.tools.dotc.core.Denotations.{Denotation, MultiDenotation} class SymbolInformationProvider(using Context): diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index a04cd82d10b3..78bf15614769 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -16,6 +16,7 @@ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Phases import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.Names.DerivedName import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.InteractiveDriver @@ -39,7 +40,8 @@ import scala.meta.pc.CompletionItemPriority class CompletionProvider( search: SymbolSearch, - driver: InteractiveDriver, + cachingDriver: InteractiveDriver, + freshDriver: () => InteractiveDriver, params: OffsetParams, config: PresentationCompilerConfig, buildTargetIdentifier: String, @@ -52,6 +54,16 @@ class CompletionProvider( val (wasCursorApplied, code) = 
applyCompletionCursor(params) val sourceFile = SourceFile.virtual(uri, code) + + /** Creating a new fresh driver is way slower than reusing existing one, + * but runnig a compilation has side effects that modifies the state of the driver. + * We don't want to affect cachingDriver state with compilation including "CURSOR" suffix. + * + * We could in theory save this fresh driver for reuse, but it is a choice between extra memory usage and speed. + * The scenario in which "CURSOR" is applied (empty query or query equal to any keyword) has a slim chance of happening. + */ + + val driver = if wasCursorApplied then freshDriver() else cachingDriver driver.run(uri, sourceFile) given ctx: Context = driver.currentCtx @@ -63,11 +75,12 @@ class CompletionProvider( val adjustedPath = Interactive.resolveTypedOrUntypedPath(tpdPath0, pos)(using newctx) val tpdPath = tpdPath0 match - // $1$ // FIXME add check for a $1$ name to make sure we only do the below in lifting case - case Select(qual, name) :: tail if qual.symbol.is(Flags.Synthetic) => - qual.symbol.defTree match - case valdef: ValDef => Select(valdef.rhs, name) :: tail - case _ => tpdPath0 + case Select(qual, name) :: tail + // If for any reason we end up in param after lifting, we want to inline the synthetic val + if qual.symbol.is(Flags.Synthetic) && qual.symbol.name.isInstanceOf[DerivedName] => + qual.symbol.defTree match + case valdef: ValDef => Select(valdef.rhs, name) :: tail + case _ => tpdPath0 case _ => tpdPath0 diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index f691939772c6..05dbe1ef5a43 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -5,7 +5,6 @@ import java.nio.file.Path import java.nio.file.Paths import scala.collection.mutable -import scala.meta.internal.metals.Fuzzy import scala.meta.internal.metals.ReportContext import scala.meta.internal.mtags.CoursierComplete import scala.meta.internal.pc.{IdentifierComparator, MemberOrdering, CompletionFuzzy} @@ -27,15 +26,12 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.Completion.Mode -import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.SrcPos import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.buildinfo.BuildInfo import dotty.tools.pc.completions.OverrideCompletions.OverrideExtractor import dotty.tools.pc.utils.InteractiveEnrichments.* -import dotty.tools.dotc.core.Denotations.SingleDenotation -import dotty.tools.dotc.interactive.Interactive class Completions( text: String, @@ -279,7 +275,6 @@ class Completions( val affix = if methodDenot.symbol.isConstructor && existsApply then adjustedPath match case (select @ Select(qual, _)) :: _ => - val start = qual.span.start val insertRange = select.sourcePos.startPos.withEnd(completionPos.queryEnd).toLsp suffix @@ -662,7 +657,7 @@ class Completions( .collect { case symbolic: CompletionValue.Symbolic => symbolic } .groupBy(_.symbol.fullName) // we somehow have to ignore proxy type - val filteredSymbolicCompletions = symbolicCompletionsMap.filter: (name, denots) => + val filteredSymbolicCompletions = symbolicCompletionsMap.filter: (name, _) => lazy val existsTypeWithoutSuffix: Boolean = 
!symbolicCompletionsMap .get(name.toTypeName) .forall(_.forall(sym => sym.snippetAffix.suffixes.nonEmpty)) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala index 11b0cd660f42..8cf66eee5aba 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala @@ -27,7 +27,6 @@ import dotty.tools.dotc.core.Types.TermRef import dotty.tools.dotc.core.Types.Type import dotty.tools.dotc.core.Types.TypeBounds import dotty.tools.dotc.core.Types.WildcardType -import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.IndexedContext import dotty.tools.pc.utils.InteractiveEnrichments.* import scala.annotation.tailrec @@ -295,7 +294,6 @@ object NamedArgCompletions: ) } - // FIXME pass query here val prefix = ident .map(_.name.toString) .getOrElse("") diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala index a0dcb5276253..a738440c585d 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala @@ -24,8 +24,6 @@ import dotty.tools.dotc.printing.RefinedPrinter import dotty.tools.dotc.printing.Texts.Text import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.AutoImports.ImportSel -import dotty.tools.pc.AutoImports.ImportSel.Direct -import dotty.tools.pc.AutoImports.ImportSel.Rename import dotty.tools.pc.IndexedContext import dotty.tools.pc.IndexedContext.Result import dotty.tools.pc.Params diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala index 78635e540c43..7d29e6c4dda9 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala @@ -8,9 +8,7 @@ import scala.meta.internal.metals.CompilerRangeParams import scala.language.unsafeNulls import dotty.tools.pc.utils.TestInlayHints -import dotty.tools.pc.utils.TextEdits -import org.eclipse.lsp4j.TextEdit class BaseInlayHintsSuite extends BasePCSuite { @@ -55,4 +53,4 @@ class BaseInlayHintsSuite extends BasePCSuite { obtained, ) -} \ No newline at end of file +} diff --git a/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala b/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala index 82e697e6e9a1..4999e0ddbc69 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala @@ -13,22 +13,17 @@ class ReusableClassRunner(testClass: Class[BasePCSuite]) testClass.getDeclaredConstructor().newInstance() override def createTest(): AnyRef = instance - override def withBefores( - method: FrameworkMethod, - target: Object, - statement: Statement - ): Statement = - statement override def withAfters( method: FrameworkMethod, target: Object, statement: Statement ): Statement = + val newStatement = super.withAfters(method, target, statement) new Statement(): override def evaluate(): Unit = try - statement.evaluate() + newStatement.evaluate() finally if (isLastTestCase(method)) then instance.clean() diff --git 
a/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala new file mode 100644 index 000000000000..3fecba04fb77 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala @@ -0,0 +1,166 @@ +package dotty.tools.pc.tests + +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.pc.base.BasePCSuite +import dotty.tools.pc.ScalaPresentationCompiler +import org.junit.{Before, Test} + +import scala.language.unsafeNulls +import scala.meta.internal.metals.EmptyCancelToken +import scala.meta.internal.metals.CompilerOffsetParams +import scala.meta.pc.OffsetParams +import scala.concurrent.Future +import scala.concurrent.Await +import scala.meta.pc.VirtualFileParams +import scala.concurrent.duration.* + +import java.util.Collections +import java.nio.file.Paths +import java.util.concurrent.CompletableFuture + + +class CompilerCachingSuite extends BasePCSuite: + + val timeout = 5.seconds + + private def checkCompilationCount(params: VirtualFileParams, expected: Int): Unit = + presentationCompiler match + case pc: ScalaPresentationCompiler => + val compilations= pc.compilerAccess.withNonInterruptableCompiler(Some(params))(-1, EmptyCancelToken) { driver => + driver.compiler().currentCtx.runId + }.get(timeout.length, timeout.unit) + assertEquals(expected, compilations, s"Expected $expected compilations but got $compilations") + case _ => throw IllegalStateException("Presentation compiler should always be of type of ScalaPresentationCompiler") + + private def getContext(params: VirtualFileParams): Context = + presentationCompiler match + case pc: ScalaPresentationCompiler => + pc.compilerAccess.withNonInterruptableCompiler(Some(params))(null, EmptyCancelToken) { driver => + driver.compiler().currentCtx + }.get(timeout.length, timeout.unit) + case _ => throw IllegalStateException("Presentation compiler should always be of type of ScalaPresentationCompiler") + + @Before + def beforeEach: Unit = + presentationCompiler.restart() + + // We want to run art least one compilation, so runId points at 3. 
+ // This will ensure that we use the same driver, not recreate fresh one on each call + val dryRunParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "dryRun", 1, EmptyCancelToken) + checkCompilationCount(dryRunParams, 2) + val freshContext = getContext(dryRunParams) + presentationCompiler.complete(dryRunParams).get(timeout.length, timeout.unit) + checkCompilationCount(dryRunParams, 3) + val dryRunContext = getContext(dryRunParams) + assert(freshContext != dryRunContext) + + + @Test + def `cursor-compilation-does-not-corrupt-cache`: Unit = + + val fakeParamsCursor = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = new", 15, EmptyCancelToken) + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 14, EmptyCancelToken) + + val contextPreCompilation = getContext(fakeParams) + + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextPostFirst = getContext(fakeParams) + assert(contextPreCompilation != contextPostFirst) + checkCompilationCount(fakeParams, 4) + + presentationCompiler.complete(fakeParamsCursor).get(timeout.length, timeout.unit) + val contextPostCursor = getContext(fakeParamsCursor) + assert(contextPreCompilation != contextPostCursor) + assert(contextPostFirst == contextPostCursor) + checkCompilationCount(fakeParamsCursor, 4) + + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextPostSecond = getContext(fakeParams) + assert(contextPreCompilation != contextPostSecond) + assert(contextPostFirst == contextPostCursor) + assert(contextPostCursor == contextPostSecond) + checkCompilationCount(fakeParamsCursor, 4) + + @Test + def `compilation-for-same-snippet-is-cached`: Unit = + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 14, EmptyCancelToken) + + val contextPreCompilation = getContext(fakeParams) + + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextPostFirst = getContext(fakeParams) + assert(contextPreCompilation != contextPostFirst) + checkCompilationCount(fakeParams, 4) + + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextPostSecond = getContext(fakeParams) + assert(contextPreCompilation != contextPostFirst) + assert(contextPostSecond == contextPostFirst) + checkCompilationCount(fakeParams, 4) + + @Test + def `compilation-for-different-snippet-is-not-cached`: Unit = + + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = prin", 16, EmptyCancelToken) + val fakeParams2 = CompilerOffsetParams(Paths.get("Test2.scala").toUri(), "def hello = prin", 16, EmptyCancelToken) + val fakeParams3 = CompilerOffsetParams(Paths.get("Test2.scala").toUri(), "def hello = print", 17, EmptyCancelToken) + + checkCompilationCount(fakeParams, 3) + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + checkCompilationCount(fakeParams, 4) + + presentationCompiler.complete(fakeParams2).get(timeout.length, timeout.unit) + checkCompilationCount(fakeParams2, 5) + + presentationCompiler.complete(fakeParams3).get(timeout.length, timeout.unit) + checkCompilationCount(fakeParams3, 6) + + + private val testFunctions: List[OffsetParams => CompletableFuture[_]] = List( + presentationCompiler.complete(_), + presentationCompiler.convertToNamedArguments(_, Collections.emptyList()), + presentationCompiler.autoImports("a", _, false), + presentationCompiler.definition(_), + presentationCompiler.didChange(_), + 
presentationCompiler.documentHighlight(_), + presentationCompiler.hover(_), + presentationCompiler.implementAbstractMembers(_), + presentationCompiler.insertInferredType(_), + presentationCompiler.semanticTokens(_), + presentationCompiler.prepareRename(_), + presentationCompiler.rename(_, "a"), + presentationCompiler.signatureHelp(_), + presentationCompiler.typeDefinition(_) + ) + + + @Test + def `different-api-calls-reuse-cache`: Unit = + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 13, EmptyCancelToken) + + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextBefore = getContext(fakeParams) + + val differentContexts = testFunctions.map: f => + f(fakeParams).get(timeout.length, timeout.unit) + checkCompilationCount(fakeParams, 4) + getContext(fakeParams) + .toSet + + assert(differentContexts == Set(contextBefore)) + + @Test + def `different-api-calls-reuse-cache-parallel`: Unit = + import scala.jdk.FutureConverters.* + import scala.concurrent.ExecutionContext.Implicits.global + + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 13, EmptyCancelToken) + + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextBefore = getContext(fakeParams) + + val futures = testFunctions.map: f => + f(fakeParams).asScala.map(_ => getContext(fakeParams)) + + val res = Await.result(Future.sequence(futures), timeout).toSet + assert(res == Set(contextBefore)) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala index 010d0b14fa90..6a8759d0a0c9 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala @@ -1,14 +1,10 @@ package dotty.tools.pc.tests.completion -import scala.meta.pc.SymbolDocumentation import scala.language.unsafeNulls import dotty.tools.pc.base.BaseCompletionSuite -import dotty.tools.pc.utils.MockEntries import org.junit.Test -import org.junit.Ignore -import scala.collection.immutable.ListMapBuilder class CompletionExtraConstructorSuite extends BaseCompletionSuite: diff --git a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala index 20d56ab94938..fab21ffdee0a 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala @@ -28,7 +28,7 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: MockLocation("scala/Predef.Ensuring#ensuring(+2).", "Predef.scala"), MockLocation("scala/Predef.Ensuring#ensuring(+3).", "Predef.scala"), MockLocation("scala/collection/immutable/List#`::`().", "List.scala"), - MockLocation("scala/package.List.", "List.scala") + MockLocation("scala/package.List.", "package.scala") ) override def definitions(offsetParams: OffsetParams): List[Location] = @@ -123,7 +123,7 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: check( """| |object Main { - | /*scala/package.List. List.scala*/@@List(1) + | /*scala/package.List. 
package.scala*/@@List(1) |} |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala index 0b992fe98f08..3e7a2549cbe0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala @@ -694,3 +694,11 @@ class HoverTermSuite extends BaseHoverSuite: |""".stripMargin, "extension [T](a: T) def *:[U <: Tuple](b: Wrap[U]): Wrap[T *: U]".hover ) + + @Test def `dont-ignore-???-in-path`: Unit = + check( + """object Obj: + | val x = ?@@?? + |""".stripMargin, + """def ???: Nothing""".stripMargin.hover + ) diff --git a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala index a37801b3c48c..3dabcded4e45 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala @@ -3,7 +3,7 @@ package dotty.tools.pc.utils import scala.meta.pc.VirtualFileParams import dotty.tools.dotc.ast.tpd.* -import dotty.tools.dotc.ast.{Trees, tpd} +import dotty.tools.dotc.ast.Trees import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition diff --git a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala index 9015a39ba9e7..459c41e3c8e5 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala @@ -8,7 +8,6 @@ import scala.jdk.CollectionConverters.* import scala.jdk.OptionConverters.* import scala.meta.internal.metals.{ClasspathSearch, WorkspaceSymbolQuery} import scala.meta.pc.ContentType -import scala.meta.pc.SymbolSearch.Result import scala.meta.pc.{ ParentSymbols, SymbolDocumentation, diff --git a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala index ef15121c6702..af4502d66b4b 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala @@ -4,7 +4,6 @@ import scala.language.unsafeNulls import dotty.tools.pc.completions.CompletionSource import dotty.tools.dotc.util.DiffUtil -import dotty.tools.pc.utils.InteractiveEnrichments.* import org.hamcrest import org.hamcrest.* @@ -127,7 +126,6 @@ trait PcAssertions: def getDetailedMessage(diff: String): String = val lines = diff.linesIterator.toList val sources = completionSources.padTo(lines.size, CompletionSource.Empty) - val maxLength = lines.map(_.length).maxOption.getOrElse(0) var completionIndex = 0 lines.map: line => if line.startsWith(Console.BOLD + Console.RED) || line.startsWith(" ") then diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala index a923b76b955c..b9d3fd411dcc 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala @@ -4,7 +4,6 @@ import scala.collection.mutable.ListBuffer import scala.meta.internal.jdk.CollectionConverters._ import dotty.tools.pc.utils.InteractiveEnrichments.* -import 
dotty.tools.pc.utils.TextEdits import org.eclipse.lsp4j.InlayHint import org.eclipse.lsp4j.TextEdit @@ -67,4 +66,4 @@ object TestInlayHints { def removeInlayHints(text: String): String = text.replaceAll(raw"\/\*(.*?)\*\/", "").nn -} \ No newline at end of file +} From 009fc63b22c08d0946e574f5795b6ef00aaa4a69 Mon Sep 17 00:00:00 2001 From: rochala Date: Tue, 27 Aug 2024 09:56:43 +0200 Subject: [PATCH 501/827] Address review comments --- .../dotty/tools/dotc/ast/NavigateAST.scala | 2 +- .../dotty/tools/pc/AutoImportsProvider.scala | 3 +- .../pc/completions/CompletionProvider.scala | 28 +++++-- .../pc/completions/NamedArgCompletions.scala | 7 +- .../pc/completions/OverrideCompletions.scala | 33 +++++--- .../tools/pc/tests/CompilerCachingSuite.scala | 77 +++++++++---------- 6 files changed, 86 insertions(+), 64 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index 429e0868667c..ed1473d79ad0 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -98,7 +98,7 @@ object NavigateAST { def isBetterFit(currentBest: List[Positioned], candidate: List[Positioned]): Boolean = if currentBest.isEmpty && candidate.nonEmpty then true else if currentBest.nonEmpty && candidate.nonEmpty then - val bestSpan= currentBest.head.span + val bestSpan = currentBest.head.span val candidateSpan = candidate.head.span bestSpan != candidateSpan && diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala index 3d4864c73508..e35556ad11c9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala @@ -66,7 +66,8 @@ final class AutoImportsProvider( val results = symbols.result.filter(isExactMatch(_, name)) if results.nonEmpty then - val correctedPos = CompletionPos.infer(pos, params, path, false).toSourcePosition + val correctedPos = + CompletionPos.infer(pos, params, path, wasCursorApplied = false).toSourcePosition val mkEdit = path match // if we are in import section just specify full name diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 78bf15614769..5578fab412d1 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -38,6 +38,11 @@ import org.eclipse.lsp4j.Range as LspRange import org.eclipse.lsp4j.TextEdit import scala.meta.pc.CompletionItemPriority +object CompletionProvider: + val allKeywords = + val softKeywords = Tokens.softModifierNames + nme.as + nme.derives + nme.extension + nme.throws + nme.using + Tokens.keywords.toList.map(Tokens.tokenString) ++ softKeywords.map(_.toString) + class CompletionProvider( search: SymbolSearch, cachingDriver: InteractiveDriver, @@ -76,7 +81,20 @@ class CompletionProvider( val tpdPath = tpdPath0 match case Select(qual, name) :: tail - // If for any reason we end up in param after lifting, we want to inline the synthetic val + /** If for any reason we end up in param after lifting, we want to inline the synthetic val: + * List(1).iterator.sliding@@ will be transformed into: + * + * 1| val $1$: Iterator[Int] = List.apply[Int]([1 : Int]*).iterator + * 2| { + * 3| def 
$anonfun(size: Int, step: Int): $1$.GroupedIterator[Int] = + * 4| $1$.sliding[Int](size, step) + * 5| closure($anonfun) + * 6| }:((Int, Int) => Iterator[Int]#GroupedIterator[Int]) + * + * With completion being run at line 4 at @@: + * 4| $1$.sliding@@[Int](size, step) + * + */ if qual.symbol.is(Flags.Synthetic) && qual.symbol.name.isInstanceOf[DerivedName] => qual.symbol.defTree match case valdef: ValDef => Select(valdef.rhs, name) :: tail @@ -138,10 +156,6 @@ class CompletionProvider( ) end completions - val allKeywords = - val softKeywords = Tokens.softModifierNames + nme.as + nme.derives + nme.extension + nme.throws + nme.using - Tokens.keywords.toList.map(Tokens.tokenString) ++ softKeywords.map(_.toString) - /** * In case if completion comes from empty line like: * {{{ @@ -159,8 +173,8 @@ class CompletionProvider( val offset = params.offset().nn val query = Completion.naiveCompletionPrefix(text, offset) - if offset > 0 && text.charAt(offset - 1).isUnicodeIdentifierPart && !allKeywords.contains(query) then - false -> text + if offset > 0 && text.charAt(offset - 1).isUnicodeIdentifierPart + && !CompletionProvider.allKeywords.contains(query) then false -> text else val isStartMultilineComment = diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala index 8cf66eee5aba..dd3a910beb4f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala @@ -170,7 +170,7 @@ object NamedArgCompletions: .zipWithIndex .forall { case (pair, index) => FuzzyArgMatcher(m.tparams) - .doMatch(allArgsProvided = index != 0) + .doMatch(allArgsProvided = index != 0, ident) .tupled(pair) } => m @@ -385,12 +385,13 @@ class FuzzyArgMatcher(tparams: List[Symbols.Symbol])(using Context): * We check the args types not the result type. 
*/ def doMatch( - allArgsProvided: Boolean + allArgsProvided: Boolean, + ident: Option[Ident] )(expectedArgs: List[Symbols.Symbol], actualArgs: List[Tree]) = (expectedArgs.length == actualArgs.length || (!allArgsProvided && expectedArgs.length >= actualArgs.length)) && actualArgs.zipWithIndex.forall { - case (Ident(name), _) => true + case (arg: Ident, _) if ident.contains(arg) => true case (NamedArg(name, arg), _) => expectedArgs.exists { expected => expected.name == name && (!arg.hasType || arg.typeOpt.unfold diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala index 28dc4ebe59c9..f5c15ca6df0e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala @@ -530,8 +530,11 @@ object OverrideCompletions: object OverrideExtractor: def unapply(path: List[Tree])(using Context) = path match - // class FooImpl extends Foo: - // def x| + // abstract class Val: + // def hello: Int = 2 + // + // class Main extends Val: + // def h| case (dd: (DefDef | ValDef)) :: (t: Template) :: (td: TypeDef) :: _ if t.parents.nonEmpty => val completing = @@ -547,12 +550,13 @@ object OverrideCompletions: ) ) - // class FooImpl extends Foo: + // abstract class Val: + // def hello: Int = 2 + // + // class Main extends Val: // ov| case (ident: Ident) :: (t: Template) :: (td: TypeDef) :: _ - if t.parents.nonEmpty && "override".startsWith( - ident.name.show.replace(Cursor.value, "") - ) => + if t.parents.nonEmpty && "override".startsWith(ident.name.show.replace(Cursor.value, "")) => Some( ( td, @@ -563,15 +567,13 @@ object OverrideCompletions: ) ) + // abstract class Val: + // def hello: Int = 2 + // // class Main extends Val: // def@@ case (id: Ident) :: (t: Template) :: (td: TypeDef) :: _ - if t.parents.nonEmpty && "def".startsWith( - id.name.decoded.replace( - Cursor.value, - "", - ) - ) => + if t.parents.nonEmpty && "def".startsWith(id.name.decoded.replace(Cursor.value, "")) => Some( ( td, @@ -581,6 +583,10 @@ object OverrideCompletions: None, ) ) + + // abstract class Val: + // def hello: Int = 2 + // // class Main extends Val: // he@@ case (id: Ident) :: (t: Template) :: (td: TypeDef) :: _ @@ -595,6 +601,9 @@ object OverrideCompletions: ) ) + // abstract class Val: + // def hello: Int = 2 + // // class Main extends Val: // hello@ // this transforms into this.hello, thus is a Select case (sel @ Select(th: This, name)) :: (t: Template) :: (td: TypeDef) :: _ diff --git a/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala index 3fecba04fb77..5e13c07b9e5f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala @@ -23,19 +23,19 @@ class CompilerCachingSuite extends BasePCSuite: val timeout = 5.seconds - private def checkCompilationCount(params: VirtualFileParams, expected: Int): Unit = + private def checkCompilationCount(expected: Int): Unit = presentationCompiler match case pc: ScalaPresentationCompiler => - val compilations= pc.compilerAccess.withNonInterruptableCompiler(Some(params))(-1, EmptyCancelToken) { driver => + val compilations = pc.compilerAccess.withNonInterruptableCompiler(None)(-1, EmptyCancelToken) { driver => 
driver.compiler().currentCtx.runId }.get(timeout.length, timeout.unit) assertEquals(expected, compilations, s"Expected $expected compilations but got $compilations") case _ => throw IllegalStateException("Presentation compiler should always be of type of ScalaPresentationCompiler") - private def getContext(params: VirtualFileParams): Context = + private def getContext(): Context = presentationCompiler match case pc: ScalaPresentationCompiler => - pc.compilerAccess.withNonInterruptableCompiler(Some(params))(null, EmptyCancelToken) { driver => + pc.compilerAccess.withNonInterruptableCompiler(None)(null, EmptyCancelToken) { driver => driver.compiler().currentCtx }.get(timeout.length, timeout.unit) case _ => throw IllegalStateException("Presentation compiler should always be of type of ScalaPresentationCompiler") @@ -44,76 +44,73 @@ class CompilerCachingSuite extends BasePCSuite: def beforeEach: Unit = presentationCompiler.restart() - // We want to run art least one compilation, so runId points at 3. + // We want to run at least one compilation, so runId points at 3. // This will ensure that we use the same driver, not recreate fresh one on each call val dryRunParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "dryRun", 1, EmptyCancelToken) - checkCompilationCount(dryRunParams, 2) - val freshContext = getContext(dryRunParams) + checkCompilationCount(2) + val freshContext = getContext() presentationCompiler.complete(dryRunParams).get(timeout.length, timeout.unit) - checkCompilationCount(dryRunParams, 3) - val dryRunContext = getContext(dryRunParams) + checkCompilationCount(3) + val dryRunContext = getContext() assert(freshContext != dryRunContext) @Test def `cursor-compilation-does-not-corrupt-cache`: Unit = + val contextPreCompilation = getContext() - val fakeParamsCursor = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = new", 15, EmptyCancelToken) val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 14, EmptyCancelToken) - - val contextPreCompilation = getContext(fakeParams) - presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) - val contextPostFirst = getContext(fakeParams) + val contextPostFirst = getContext() assert(contextPreCompilation != contextPostFirst) - checkCompilationCount(fakeParams, 4) + checkCompilationCount(4) + val fakeParamsCursor = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = new", 15, EmptyCancelToken) presentationCompiler.complete(fakeParamsCursor).get(timeout.length, timeout.unit) - val contextPostCursor = getContext(fakeParamsCursor) + val contextPostCursor = getContext() assert(contextPreCompilation != contextPostCursor) assert(contextPostFirst == contextPostCursor) - checkCompilationCount(fakeParamsCursor, 4) + checkCompilationCount(4) presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) - val contextPostSecond = getContext(fakeParams) + val contextPostSecond = getContext() assert(contextPreCompilation != contextPostSecond) assert(contextPostFirst == contextPostCursor) assert(contextPostCursor == contextPostSecond) - checkCompilationCount(fakeParamsCursor, 4) + checkCompilationCount(4) @Test def `compilation-for-same-snippet-is-cached`: Unit = - val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 14, EmptyCancelToken) - - val contextPreCompilation = getContext(fakeParams) + val contextPreCompilation = getContext() + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = 
ne", 14, EmptyCancelToken) presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) - val contextPostFirst = getContext(fakeParams) + val contextPostFirst = getContext() assert(contextPreCompilation != contextPostFirst) - checkCompilationCount(fakeParams, 4) + checkCompilationCount(4) presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) - val contextPostSecond = getContext(fakeParams) + val contextPostSecond = getContext() assert(contextPreCompilation != contextPostFirst) assert(contextPostSecond == contextPostFirst) - checkCompilationCount(fakeParams, 4) + checkCompilationCount(4) @Test def `compilation-for-different-snippet-is-not-cached`: Unit = - val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = prin", 16, EmptyCancelToken) - val fakeParams2 = CompilerOffsetParams(Paths.get("Test2.scala").toUri(), "def hello = prin", 16, EmptyCancelToken) - val fakeParams3 = CompilerOffsetParams(Paths.get("Test2.scala").toUri(), "def hello = print", 17, EmptyCancelToken) - checkCompilationCount(fakeParams, 3) + checkCompilationCount(3) + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = prin", 16, EmptyCancelToken) presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) - checkCompilationCount(fakeParams, 4) + checkCompilationCount(4) + val fakeParams2 = CompilerOffsetParams(Paths.get("Test2.scala").toUri(), "def hello = prin", 16, EmptyCancelToken) presentationCompiler.complete(fakeParams2).get(timeout.length, timeout.unit) - checkCompilationCount(fakeParams2, 5) + checkCompilationCount(5) + val fakeParams3 = CompilerOffsetParams(Paths.get("Test2.scala").toUri(), "def hello = print", 17, EmptyCancelToken) presentationCompiler.complete(fakeParams3).get(timeout.length, timeout.unit) - checkCompilationCount(fakeParams3, 6) + checkCompilationCount(6) private val testFunctions: List[OffsetParams => CompletableFuture[_]] = List( @@ -137,14 +134,14 @@ class CompilerCachingSuite extends BasePCSuite: @Test def `different-api-calls-reuse-cache`: Unit = val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 13, EmptyCancelToken) - presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) - val contextBefore = getContext(fakeParams) + + val contextBefore = getContext() val differentContexts = testFunctions.map: f => f(fakeParams).get(timeout.length, timeout.unit) - checkCompilationCount(fakeParams, 4) - getContext(fakeParams) + checkCompilationCount(4) + getContext() .toSet assert(differentContexts == Set(contextBefore)) @@ -155,12 +152,12 @@ class CompilerCachingSuite extends BasePCSuite: import scala.concurrent.ExecutionContext.Implicits.global val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 13, EmptyCancelToken) - presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) - val contextBefore = getContext(fakeParams) + + val contextBefore = getContext() val futures = testFunctions.map: f => - f(fakeParams).asScala.map(_ => getContext(fakeParams)) + f(fakeParams).asScala.map(_ => getContext()) val res = Await.result(Future.sequence(futures), timeout).toSet assert(res == Set(contextBefore)) From 3d08aa1594d5b8b32eae73e6891360f4bb7b3f1a Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 27 Aug 2024 13:07:32 +0200 Subject: [PATCH 502/827] Use natural language for Develocity labels --- project/GithubEnv.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/project/GithubEnv.scala b/project/GithubEnv.scala index a5246f36028c..7e629d53f3a7 100644 --- a/project/GithubEnv.scala +++ b/project/GithubEnv.scala @@ -14,12 +14,12 @@ object GithubEnv { for { (_, repository) <- repositoryVar (_, runId) <- runIdVar - } yield "GITHUB_RUN" -> url(s"https://github.com/$repository/actions/runs/$runId") + } yield "GitHub Run" -> url(s"https://github.com/$repository/actions/runs/$runId") lazy val treeUrl: Option[(String, URL)] = for { (_, repository) <- repositoryVar (_, sha) <- shaVar - } yield "GITHUB_TREE" -> url(s"https://github.com/$repository/tree/$sha") + } yield "GitHub Commit" -> url(s"https://github.com/$repository/tree/$sha") def develocityValues: Seq[(String, String)] = repositoryVar.toSeq ++ shaVar ++ workflowVar From df43fa43085bcb31bfebbaa18bda6e3d2601a4d1 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 28 Aug 2024 09:30:51 +0200 Subject: [PATCH 503/827] Disable build cache (remote and local) --- project/Build.scala | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 7515eb3c9552..3ee4795f7821 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -273,19 +273,25 @@ object Build { // Configuration to publish build scans to develocity.scala-lang.org develocityConfiguration := { val isInsideCI = insideCI.value - val previousConfig = develocityConfiguration.value - val previousBuildScan = previousConfig.buildScan - previousConfig + val config = develocityConfiguration.value + val buildScan = config.buildScan + val buildCache = config.buildCache + config .withProjectId(ProjectId("scala3")) - .withServer(previousConfig.server.withUrl(Some(url("https://develocity.scala-lang.org")))) + .withServer(config.server.withUrl(Some(url("https://develocity.scala-lang.org")))) .withBuildScan( - previousBuildScan + buildScan .withPublishing(Publishing.onlyIf(_.authenticated)) .withBackgroundUpload(!isInsideCI) .tag(if (isInsideCI) "CI" else "Local") - .withLinks(previousBuildScan.links ++ GithubEnv.develocityLinks) - .withValues(previousBuildScan.values ++ GithubEnv.develocityValues) - .withObfuscation(previousBuildScan.obfuscation.withIpAddresses(_.map(_ => "0.0.0.0"))) + .withLinks(buildScan.links ++ GithubEnv.develocityLinks) + .withValues(buildScan.values ++ GithubEnv.develocityValues) + .withObfuscation(buildScan.obfuscation.withIpAddresses(_.map(_ => "0.0.0.0"))) + ) + .withBuildCache( + buildCache + .withLocal(buildCache.local.withEnabled(false)) + .withRemote(buildCache.remote.withEnabled(false)) ) } ) @@ -910,7 +916,7 @@ object Build { }.taskValue, // Develocity's Build Cache does not work with our compilation tests - // at the moment. + // at the moment: it does not take compilation files as inputs. 
Test / develocityBuildCacheClient := None, ) From 42ca6553b9b9a723af3daab6cb9667ce06043231 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 28 Aug 2024 10:57:45 +0200 Subject: [PATCH 504/827] Respect `[skip-ci]` comments in lanuchers and spec tests --- .github/workflows/launchers.yml | 8 ++++++++ .github/workflows/spec.yml | 4 ++++ 2 files changed, 12 insertions(+) diff --git a/.github/workflows/launchers.yml b/.github/workflows/launchers.yml index 036b4f2966e8..d3660440b29c 100644 --- a/.github/workflows/launchers.yml +++ b/.github/workflows/launchers.yml @@ -7,6 +7,8 @@ jobs: linux-x86_64: name: Deploy and Test on Linux x64 architecture runs-on: ubuntu-latest + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') ) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3' ) steps: - uses: actions/checkout@v4 - name: Set up JDK 17 @@ -43,6 +45,8 @@ jobs: mac-x86_64: name: Deploy and Test on Mac x64 architecture runs-on: macos-13 + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') ) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3' ) steps: - uses: actions/checkout@v4 - name: Set up JDK 17 @@ -62,6 +66,8 @@ jobs: mac-aarch64: name: Deploy and Test on Mac ARM64 architecture runs-on: macos-latest + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') ) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3' ) steps: - uses: actions/checkout@v4 - name: Set up JDK 17 @@ -81,6 +87,8 @@ jobs: win-x86_64: name: Deploy and Test on Windows x64 architecture runs-on: windows-latest + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') ) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3' ) steps: - uses: actions/checkout@v4 - name: Set up JDK 17 diff --git a/.github/workflows/spec.yml b/.github/workflows/spec.yml index a639c80bbda9..ab5f2b3d2fe1 100644 --- a/.github/workflows/spec.yml +++ b/.github/workflows/spec.yml @@ -16,6 +16,10 @@ env: jobs: specification: runs-on: ubuntu-latest + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]')) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3') || + github.event_name == 'push' || + github.event_name == 'merge_group' defaults: run: working-directory: ./docs/_spec From 3bd4a7a750310aac5d0df95d63411e5d93956247 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 28 Aug 2024 11:06:30 +0200 Subject: [PATCH 505/827] Exclude build-sdk-package from when skip ci annotation is present --- .github/workflows/ci.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d4583847c438..54916cf31bea 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1013,9 +1013,15 @@ jobs: uses: ./.github/workflows/build-msi.yml if : github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]') # TODO: ADD A JOB THAT DEPENDS ON THIS TO TEST THE MSI - + build-sdk-package: uses: ./.github/workflows/build-sdk.yml + if: + (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]')) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3') || + (github.event_name == 'schedule' && 
github.repository == 'scala/scala3') || + github.event_name == 'push' || + github.event_name == 'merge_group' with: java-version: 8 From b4dfab9591915f55a3d61ada0f1b672b9f8c5c4a Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 28 Aug 2024 12:07:05 +0200 Subject: [PATCH 506/827] Enable test retry selectively Test retry is disabled in all compilation test classes, where each test is responsible for compiling many files separately. --- project/Build.scala | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/project/Build.scala b/project/Build.scala index f390f32533a3..6c6666b9f62b 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -276,6 +276,18 @@ object Build { val config = develocityConfiguration.value val buildScan = config.buildScan val buildCache = config.buildCache + // disable test retry on compilation test classes + val noRetryTestClasses = Set( + "dotty.tools.dotc.BestEffortOptionsTests", + "dotty.tools.dotc.CompilationTests", + "dotty.tools.dotc.FromTastyTests", + "dotty.tools.dotc.IdempotencyTests", + "dotty.tools.dotc.ScalaJSCompilationTests", + "dotty.tools.dotc.TastyBootstrapTests", + "dotty.tools.dotc.coverage.CoverageTests", + "dotty.tools.dotc.transform.PatmatExhaustivityTest", + "dotty.tools.repl.ScriptedTests" + ) config .withProjectId(ProjectId("scala3")) .withServer(config.server.withUrl(Some(url("https://develocity.scala-lang.org")))) @@ -293,6 +305,13 @@ object Build { .withLocal(buildCache.local.withEnabled(false)) .withRemote(buildCache.remote.withEnabled(false)) ) + .withTestRetryConfiguration( + config.testRetryConfiguration + .withFlakyTestPolicy(FlakyTestPolicy.Fail) + .withMaxRetries(1) + .withMaxFailures(10) + .withClassesFilter((className, _) => !noRetryTestClasses.contains(className)) + ) } ) From 2f9f371d5bd5a136eec134d1a2242121efb31c81 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 28 Aug 2024 16:37:05 +0200 Subject: [PATCH 507/827] Fix canComparePredefined(Nothing, T) in explicit nulls --- .../src/dotty/tools/dotc/typer/Synthesizer.scala | 3 ++- tests/explicit-nulls/pos/i21392.scala | 16 ++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 tests/explicit-nulls/pos/i21392.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 5ef5b1a420ee..c935e8d6b3cf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -187,7 +187,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): // val x: String = null.asInstanceOf[String] // if (x == null) {} // error: x is non-nullable // if (x.asInstanceOf[String|Null] == null) {} // ok - cls1 == defn.NullClass && cls1 == cls2 + if cls1 == defn.NullClass || cls2 == defn.NullClass then cls1 == cls2 + else cls1 == defn.NothingClass || cls2 == defn.NothingClass else if cls1 == defn.NullClass then cls1 == cls2 || cls2.derivesFrom(defn.ObjectClass) else if cls2 == defn.NullClass then diff --git a/tests/explicit-nulls/pos/i21392.scala b/tests/explicit-nulls/pos/i21392.scala new file mode 100644 index 000000000000..0266199b7831 --- /dev/null +++ b/tests/explicit-nulls/pos/i21392.scala @@ -0,0 +1,16 @@ +//> using options -language:strictEquality + +import scala.collection.LinearSeq + +def foo[T](a: LinearSeq[T]) = a match + case Nil => -1 + case head +: tail => head + +enum Foo derives CanEqual: + case Bar + case Baz(x: String) + + +def foo(a: Foo) = a match + case Foo.Bar => -1 + case _ => 0 
\ No newline at end of file From a9ac82915b98ce2085e1365a96ee8865fb4b1a65 Mon Sep 17 00:00:00 2001 From: rochala Date: Wed, 28 Aug 2024 17:09:30 +0200 Subject: [PATCH 508/827] Make pathTo handle new pattern recovery mechanism --- compiler/src/dotty/tools/dotc/ast/NavigateAST.scala | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index ed1473d79ad0..e77642a8e2b9 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -77,7 +77,8 @@ object NavigateAST { var bestFit: List[Positioned] = path while (it.hasNext) do val path1 = it.next() match - case sel: untpd.Select if isTreeFromRecovery(sel) => path + case sel: untpd.Select if isRecoveryTree(sel) => path + case sel: untpd.Ident if isPatternRecoveryTree(sel) => path case p: Positioned if !p.isInstanceOf[Closure[?]] => singlePath(p, path) case m: untpd.Modifiers => childPath(m.productIterator, path) case xs: List[?] => childPath(xs.iterator, path) @@ -106,9 +107,12 @@ object NavigateAST { bestSpan.contains(candidateSpan) && bestSpan.isSynthetic && !candidateSpan.isSynthetic else false + def isRecoveryTree(sel: untpd.Select): Boolean = + sel.span.isSynthetic + && (sel.name == StdNames.nme.??? && sel.qualifier.symbol.name == StdNames.nme.Predef) - def isTreeFromRecovery(p: untpd.Select): Boolean = - p.name == StdNames.nme.??? && p.qualifier.symbol.name == StdNames.nme.Predef && p.span.isSynthetic + def isPatternRecoveryTree(ident: untpd.Ident): Boolean = + ident.span.isSynthetic && StdNames.nme.WILDCARD == ident.name def envelops(a: Span, b: Span): Boolean = !b.exists || a.exists && ( From cfe13a4cc894753951ad4b2c268b1ea84880a2ac Mon Sep 17 00:00:00 2001 From: rochala Date: Thu, 2 May 2024 10:53:34 +0200 Subject: [PATCH 509/827] Add regression test for issue 18726 --- .../completion/CompletionRelease11Suite.scala | 26 +++++++++++++++++++ .../completion/CompletionRelease8Suite.scala | 25 ++++++++++++++++++ 2 files changed, 51 insertions(+) create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala create mode 100644 presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala new file mode 100644 index 000000000000..72192bfb5a00 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala @@ -0,0 +1,26 @@ +package dotty.tools.pc.tests.completion + +import dotty.tools.pc.base.BaseCompletionSuite + +import org.junit.Test +import java.nio.file.Path + +class CompletionRelease11Suite extends BaseCompletionSuite: + + override protected def scalacOptions(classpath: Seq[Path]): Seq[String] = + "-release:11" +: super.scalacOptions(classpath) + + @Test def java11Symbols = + check( + """ + |object A { + | "".repea@@ + |}""".stripMargin, + """repeat(x$0: Int): String + |replaceAll(x$0: String, x$1: String): String + |prependedAll[B >: A](prefix: IterableOnce[B]): IndexedSeq[B] + |prependedAll(prefix: String): String + |prependedAll[B >: Char](prefix: IterableOnce[B]): IndexedSeq[B] + |replaceAllLiterally(literal: String, replacement: String): String + |""".stripMargin + ) diff --git 
a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala new file mode 100644 index 000000000000..ff10a28e1265 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala @@ -0,0 +1,25 @@ +package dotty.tools.pc.tests.completion + +import dotty.tools.pc.base.BaseCompletionSuite + +import org.junit.Test +import java.nio.file.Path + +class CompletionRelease8Suite extends BaseCompletionSuite: + + override protected def scalacOptions(classpath: Seq[Path]): Seq[String] = + "-release:8" +: super.scalacOptions(classpath) + + @Test def noJvm11Symbols = + check( + """ + |object A { + | "".repea@@ + |}""".stripMargin, + """replaceAll(x$0: String, x$1: String): String + |prependedAll[B >: A](prefix: IterableOnce[B]): IndexedSeq[B] + |prependedAll(prefix: String): String + |prependedAll[B >: Char](prefix: IterableOnce[B]): IndexedSeq[B] + |replaceAllLiterally(literal: String, replacement: String): String + |""".stripMargin + ) From f5dc97f99d0e1de92a4b1a31f4a4e44be3133ed4 Mon Sep 17 00:00:00 2001 From: rochala Date: Wed, 28 Aug 2024 18:55:29 +0200 Subject: [PATCH 510/827] Make 11 test start only on jvm 11+ --- .../tools/pc/base/ReusableClassRunner.scala | 9 ++------ .../completion/CompletionRelease11Suite.scala | 6 +++++ .../completion/CompletionRelease8Suite.scala | 6 +++++ .../test/dotty/tools/pc/utils/JRE.scala | 22 +++++++++++++++++++ 4 files changed, 36 insertions(+), 7 deletions(-) create mode 100644 presentation-compiler/test/dotty/tools/pc/utils/JRE.scala diff --git a/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala b/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala index 82e697e6e9a1..4999e0ddbc69 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala @@ -13,22 +13,17 @@ class ReusableClassRunner(testClass: Class[BasePCSuite]) testClass.getDeclaredConstructor().newInstance() override def createTest(): AnyRef = instance - override def withBefores( - method: FrameworkMethod, - target: Object, - statement: Statement - ): Statement = - statement override def withAfters( method: FrameworkMethod, target: Object, statement: Statement ): Statement = + val newStatement = super.withAfters(method, target, statement) new Statement(): override def evaluate(): Unit = try - statement.evaluate() + newStatement.evaluate() finally if (isLastTestCase(method)) then instance.clean() diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala index 72192bfb5a00..76015a588387 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala @@ -3,13 +3,19 @@ package dotty.tools.pc.tests.completion import dotty.tools.pc.base.BaseCompletionSuite import org.junit.Test +import org.junit.Before import java.nio.file.Path +import dotty.tools.pc.utils.JRE class CompletionRelease11Suite extends BaseCompletionSuite: override protected def scalacOptions(classpath: Seq[Path]): Seq[String] = "-release:11" +: super.scalacOptions(classpath) + @Before + def beforeMethod(): Unit = + org.junit.Assume.assumeTrue(JRE.getJavaMajorVersion >= 11) + 
@Test def java11Symbols = check( """ diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala index ff10a28e1265..587cd5a53073 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala @@ -3,13 +3,19 @@ package dotty.tools.pc.tests.completion import dotty.tools.pc.base.BaseCompletionSuite import org.junit.Test +import org.junit.Before import java.nio.file.Path +import dotty.tools.pc.utils.JRE class CompletionRelease8Suite extends BaseCompletionSuite: override protected def scalacOptions(classpath: Seq[Path]): Seq[String] = "-release:8" +: super.scalacOptions(classpath) + @Before + def beforeMethod(): Unit = + org.junit.Assume.assumeTrue(JRE.getJavaMajorVersion >= 8) + @Test def noJvm11Symbols = check( """ diff --git a/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala b/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala new file mode 100644 index 000000000000..aefa1633e142 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala @@ -0,0 +1,22 @@ +package dotty.tools.pc.utils + +object JRE: + + def getJavaMajorVersion: Int = + val javaVersion = sys.env.get("java.version").filter(!_.isEmpty()) + + javaVersion match + case Some(version) if version.startsWith("1.8") => 8 + case _ => + scala.util.Try: + val versionMethod = classOf[Runtime].getMethod("version") + versionMethod.nn.setAccessible(true) + val version = versionMethod.nn.invoke(null) + + val majorMethod = version.getClass().getMethod("feature") + majorMethod.nn.setAccessible(true) + val major = majorMethod.nn.invoke(version).asInstanceOf[Int] + major + .getOrElse(8) // Minimal version supported by Scala + + From 85b1c0e740ed72490f15b7fc3f61c3e01bf0ea26 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Fri, 19 Apr 2024 17:03:51 +0200 Subject: [PATCH 511/827] Use `uninitialized` instead of `_` in lazyval benchmarks --- .../benchmarks/lazyvals/ContendedInitialization.scala | 7 ++++--- .../tools/benchmarks/lazyvals/InitializedAccess.scala | 3 ++- .../tools/benchmarks/lazyvals/InitializedAccessAny.scala | 3 ++- .../benchmarks/lazyvals/InitializedAccessGeneric.scala | 3 ++- .../tools/benchmarks/lazyvals/InitializedAccessInt.scala | 3 ++- .../benchmarks/lazyvals/InitializedAccessMultiple.scala | 3 ++- .../benchmarks/lazyvals/InitializedAccessString.scala | 3 ++- 7 files changed, 16 insertions(+), 9 deletions(-) diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala index fb2cedbb7d41..12713b297759 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyHolder import org.openjdk.jmh.infra.Blackhole @@ -16,12 +17,12 @@ import java.util.concurrent.{Executors, ExecutorService} class ContendedInitialization { @Param(Array("2000000", "5000000")) - var size: Int = _ + var size: Int = uninitialized @Param(Array("2", "4", "8")) - var nThreads: Int = _ + var nThreads: Int = uninitialized - var executor: ExecutorService = _ + var 
executor: ExecutorService = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala index d413458d0049..34bd652cbd2d 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccess { - var holder: LazyHolder = _ + var holder: LazyHolder = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala index 8c75f6bb11a2..4e044dcaee52 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyAnyHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessAny { - var holder: LazyAnyHolder = _ + var holder: LazyAnyHolder = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala index a9fecae6281e..4c1a0c6d7417 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyGenericHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessGeneric { - var holder: LazyGenericHolder[String] = _ + var holder: LazyGenericHolder[String] = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala index 2a115ad63496..6ff8622a82e8 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations.* import org.openjdk.jmh.infra.Blackhole import LazyVals.LazyIntHolder @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessInt { - var holder: LazyIntHolder = _ + var holder: LazyIntHolder = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala 
b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala index 4f3c75fd920b..9416bac36c33 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessMultiple { - var holders: Array[LazyHolder] = _ + var holders: Array[LazyHolder] = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala index e6c6cd5eb2e3..af751d782010 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyStringHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessString { - var holder: LazyStringHolder = _ + var holder: LazyStringHolder = uninitialized @Setup def prepare: Unit = { From 07deb553a4f3bd2c72fc746579dc0880c45ccc22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Thu, 29 Aug 2024 10:47:48 +0200 Subject: [PATCH 512/827] Update presentation-compiler/test/dotty/tools/pc/utils/JRE.scala Co-authored-by: Tomasz Godzik --- .../test/dotty/tools/pc/utils/JRE.scala | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala b/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala index aefa1633e142..2f812e1bbf80 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala @@ -3,20 +3,11 @@ package dotty.tools.pc.utils object JRE: def getJavaMajorVersion: Int = - val javaVersion = sys.env.get("java.version").filter(!_.isEmpty()) + val javaVersion = sys.env.get("java.specification.version").filter(!_.isEmpty()) javaVersion match case Some(version) if version.startsWith("1.8") => 8 - case _ => - scala.util.Try: - val versionMethod = classOf[Runtime].getMethod("version") - versionMethod.nn.setAccessible(true) - val version = versionMethod.nn.invoke(null) - - val majorMethod = version.getClass().getMethod("feature") - majorMethod.nn.setAccessible(true) - val major = majorMethod.nn.invoke(version).asInstanceOf[Int] - major - .getOrElse(8) // Minimal version supported by Scala + case Some(version) => version + case None => 8 From ea19290ae8ec0b6856b8066a0d5d15753ea8e933 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Thu, 29 Aug 2024 14:45:53 +0200 Subject: [PATCH 513/827] No need to unbox if expected type is LhsProto --- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index ec134149eb49..b05ab8542137 100644 --- 
a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -969,7 +969,7 @@ class CheckCaptures extends Recheck, SymTransformer: if tree.isTerm then if !ccConfig.useExistentials then checkReachCapsIsolated(res.widen, tree.srcPos) - if !pt.isBoxedCapturing then + if !pt.isBoxedCapturing && pt != LhsProto then markFree(res.boxedCaptureSet, tree.srcPos) res From 76ae6092d405e774fa82f7618f0eac76d8a066bd Mon Sep 17 00:00:00 2001 From: EnzeXing Date: Thu, 29 Aug 2024 15:32:37 -0400 Subject: [PATCH 514/827] analyze object when selecting from package --- .../src/dotty/tools/dotc/transform/init/Objects.scala | 2 +- tests/init-global/warn/cyclic-object.scala | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 tests/init-global/warn/cyclic-object.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 1050fbe85ef2..12a1ba3a0642 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -864,7 +864,7 @@ class Objects(using Context @constructorOnly): Bottom case Bottom => - if field.isStaticObject then ObjectRef(field.moduleClass.asClass) + if field.isStaticObject then accessObject(field.moduleClass.asClass) else Bottom case ValueSet(values) => diff --git a/tests/init-global/warn/cyclic-object.scala b/tests/init-global/warn/cyclic-object.scala new file mode 100644 index 000000000000..e997d3259877 --- /dev/null +++ b/tests/init-global/warn/cyclic-object.scala @@ -0,0 +1,9 @@ +package cyclicObject + +object O1 { // warn + val o = cyclicObject.O2 +} + +object O2 { + val o = cyclicObject.O1 +} From 533ee8aae91501c6a6a721fc79bc7629c16697a1 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Thu, 29 Aug 2024 21:49:36 +0200 Subject: [PATCH 515/827] Widen values in assignment --- .../tools/dotc/transform/init/Objects.scala | 45 +++++++++++-------- 1 file changed, 26 insertions(+), 19 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 1050fbe85ef2..1598e58ad767 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -1226,11 +1226,12 @@ class Objects(using Context @constructorOnly): extendTrace(id) { evalType(prefix, thisV, klass) } val value = eval(rhs, thisV, klass) + val widened = widenEscapedValue(value, rhs) if isLocal then - writeLocal(thisV, lhs.symbol, value) + writeLocal(thisV, lhs.symbol, widened) else - withTrace(trace2) { assign(receiver, lhs.symbol, value, rhs.tpe) } + withTrace(trace2) { assign(receiver, lhs.symbol, widened, rhs.tpe) } case closureDef(ddef) => Fun(ddef, thisV, klass, summon[Env.Data]) @@ -1568,6 +1569,28 @@ class Objects(using Context @constructorOnly): throw new Exception("unexpected type: " + tp + ", Trace:\n" + Trace.show) } + /** Widen the escaped value (a method argument or rhs of an assignment) + * + * The default widening is 1 for most values, 2 for function values. + * User-specified widening annotations are repected. 
+ */ + def widenEscapedValue(value: Value, expr: Tree): Contextual[Value] = + expr.tpe.getAnnotation(defn.InitWidenAnnot) match + case Some(annot) => + annot.argument(0).get match + case arg @ Literal(c: Constants.Constant) => + val height = c.intValue + if height < 0 then + report.warning("The argument should be positive", arg) + value.widen(1) + else + value.widen(c.intValue) + case arg => + report.warning("The argument should be a constant integer value", arg) + value.widen(1) + case _ => + if value.isInstanceOf[Fun] then value.widen(2) else value.widen(1) + /** Evaluate arguments of methods and constructors */ def evalArgs(args: List[Arg], thisV: ThisValue, klass: ClassSymbol): Contextual[List[ArgInfo]] = val argInfos = new mutable.ArrayBuffer[ArgInfo] @@ -1578,23 +1601,7 @@ class Objects(using Context @constructorOnly): else eval(arg.tree, thisV, klass) - val widened = - arg.tree.tpe.getAnnotation(defn.InitWidenAnnot) match - case Some(annot) => - annot.argument(0).get match - case arg @ Literal(c: Constants.Constant) => - val height = c.intValue - if height < 0 then - report.warning("The argument should be positive", arg) - res.widen(1) - else - res.widen(c.intValue) - case arg => - report.warning("The argument should be a constant integer value", arg) - res.widen(1) - case _ => - if res.isInstanceOf[Fun] then res.widen(2) else res.widen(1) - + val widened = widenEscapedValue(res, arg.tree) argInfos += ArgInfo(widened, trace.add(arg.tree), arg.tree) } argInfos.toList From a045b8c3bc287a9788c02d62fbf948d841f7d9db Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Fri, 30 Aug 2024 00:52:28 +0200 Subject: [PATCH 516/827] add test case for #21507 --- tests/pos-custom-args/captures/i21507.scala | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 tests/pos-custom-args/captures/i21507.scala diff --git a/tests/pos-custom-args/captures/i21507.scala b/tests/pos-custom-args/captures/i21507.scala new file mode 100644 index 000000000000..bb80dafb3b45 --- /dev/null +++ b/tests/pos-custom-args/captures/i21507.scala @@ -0,0 +1,10 @@ +import language.experimental.captureChecking + +trait Box[Cap^]: + def store(f: (() -> Unit)^{Cap^}): Unit + +def run[Cap^](f: Box[Cap]^{Cap^} => Unit): Box[Cap]^{Cap^} = + new Box[Cap]: + private var item: () ->{Cap^} Unit = () => () + def store(f: () ->{Cap^} Unit): Unit = + item = f // was error, now ok From 40c083dad6360a80d3b1c151d255a271128deda3 Mon Sep 17 00:00:00 2001 From: Joel Wilsson Date: Thu, 29 Aug 2024 23:02:23 +0200 Subject: [PATCH 517/827] Remove empty argument lists for classes with only context bounds Closes #21418 --- .../dotty/tools/dotc/typer/Migrations.scala | 6 +++++ .../dotty/tools/dotc/CompilationTests.scala | 1 + tests/rewrites/i21418.check | 26 +++++++++++++++++++ tests/rewrites/i21418.scala | 26 +++++++++++++++++++ 4 files changed, 59 insertions(+) create mode 100644 tests/rewrites/i21418.check create mode 100644 tests/rewrites/i21418.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Migrations.scala b/compiler/src/dotty/tools/dotc/typer/Migrations.scala index 8d468fd68bba..d6b95ceb93dc 100644 --- a/compiler/src/dotty/tools/dotc/typer/Migrations.scala +++ b/compiler/src/dotty/tools/dotc/typer/Migrations.scala @@ -113,6 +113,12 @@ trait Migrations: em"""Context bounds will map to context parameters. 
|A `using` clause is needed to pass explicit arguments to them.$rewriteMsg""", tree.srcPos, mversion) + tree match + case Apply(ta @ TypeApply(Select(New(_), _), _), Nil) => + // Remove empty arguments for calls to new that may precede the context bound. + // They are no longer necessary. + patch(Span(ta.span.end, pt.args.head.span.start - 1), "") + case _ => () if mversion.needsPatch && pt.args.nonEmpty then patch(Span(pt.args.head.span.start), "using ") end contextBoundParams diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index dd722403723a..0c5d5764949a 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -77,6 +77,7 @@ class CompilationTests { compileFile("tests/rewrites/i17399.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i20002.scala", defaultOptions.and("-indent", "-rewrite")), compileDir("tests/rewrites/annotation-named-pararamters", defaultOptions.and("-rewrite", "-source:3.6-migration")), + compileFile("tests/rewrites/i21418.scala", unindentOptions.and("-rewrite", "-source:3.5-migration")), ).checkRewrites() } diff --git a/tests/rewrites/i21418.check b/tests/rewrites/i21418.check new file mode 100644 index 000000000000..9ad54a6134df --- /dev/null +++ b/tests/rewrites/i21418.check @@ -0,0 +1,26 @@ +trait Effect[F[_]] +class Countdown[F[_]: Effect] +class Countdown1[F[_]: Effect](howMany: Int) +class Countdown2[F[_]: Effect, F2[_]: Effect] + +def foo[F[_]: Effect]() = + "foo" + +@main def Test = { + val a = new Countdown[Option](using ???) + Countdown[Option](using ???) + val b = Countdown[Option](using ???) + new Countdown[Option](using ???) + val c = Countdown[List](using ???) + new Countdown2[List, Option](using ???, ???) + new Countdown2[List, Option] (using ???, ???) + Countdown2[List, Option](using ???, ???) + Countdown2[List, Option] (using ???, ???) + new Countdown1[Option](10)(using ???) + new Array[Int](10) + new scala.collection.immutable.HashSet[Int] + new scala.collection.immutable.HashSet[Int]() + new scala.collection.immutable.HashSet[Int] () + foo()(using ???) + foo() (using ???) +} diff --git a/tests/rewrites/i21418.scala b/tests/rewrites/i21418.scala new file mode 100644 index 000000000000..88fdb22ea177 --- /dev/null +++ b/tests/rewrites/i21418.scala @@ -0,0 +1,26 @@ +trait Effect[F[_]] +class Countdown[F[_]: Effect] +class Countdown1[F[_]: Effect](howMany: Int) +class Countdown2[F[_]: Effect, F2[_]: Effect] + +def foo[F[_]: Effect]() = + "foo" + +@main def Test = { + val a = new Countdown[Option]()(???) + Countdown[Option]()(???) + val b = Countdown[Option]()(???) + new Countdown[Option] ()(???) + val c = Countdown[List] () (???) + new Countdown2[List, Option] () (???, ???) + new Countdown2[List, Option] (using ???, ???) + Countdown2[List, Option] () (???, ???) + Countdown2[List, Option] (using ???, ???) + new Countdown1[Option](10)(???) + new Array[Int](10) + new scala.collection.immutable.HashSet[Int] + new scala.collection.immutable.HashSet[Int]() + new scala.collection.immutable.HashSet[Int] () + foo()(???) + foo() (???) 
+} From 0dac210e1e158f999dbb220d018b4e16896b9051 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 30 Aug 2024 15:46:56 +0200 Subject: [PATCH 518/827] [scaladoc] fix: Only trim one newline when preprocessing the content of a markdown code snippet --- scaladoc-testcases/docs/_docs/index.md | 7 + .../tools/scaladoc/renderers/Renderer.scala | 131 +++++++++--------- .../snippets/FlexmarkSnippetProcessor.scala | 2 +- 3 files changed, 74 insertions(+), 66 deletions(-) diff --git a/scaladoc-testcases/docs/_docs/index.md b/scaladoc-testcases/docs/_docs/index.md index 42cb5f62dae8..9acac71a63b3 100644 --- a/scaladoc-testcases/docs/_docs/index.md +++ b/scaladoc-testcases/docs/_docs/index.md @@ -13,5 +13,12 @@ class Renderer(using RenderingContext) val renderer: Renderer = Renderer() ``` +```scala + trait Ord: + type Self + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self +``` diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala index 1a43ea8648a8..0f7082fd6f49 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala @@ -30,71 +30,72 @@ abstract class Renderer(rootPackage: Member, val members: Map[DRI, Member], prot val rootApiPage: Option[Page] = Some(memberPage(rootPackage)).filter(_.children.nonEmpty).map(_.withTitle(ctx.args.name)) - val rootDocsPage: Option[Page] = staticSite match - case None => None - case Some(siteContext) => - val rootTemplate = siteContext.staticSiteRoot.rootTemplate - - // Below code is for walking in order the tree and modifing its nodes basing on its neighbours - - // We add dummy guards - val notHidden: Seq[Option[LoadedTemplate]] = None +: siteContext.allTemplates.filterNot(_.hidden).map(Some(_)) :+ None - - // Let's gather the list of maps for each template with its in-order neighbours - val newSettings: List[Map[String, Object]] = notHidden.sliding(size = 3, step = 1).map { - case None :: None :: Nil => - Map.empty - case prev :: mid :: next :: Nil => - def link(sibling: Option[LoadedTemplate]): Option[String] = - def realPath(path: Path) = if Files.isDirectory(path) then Paths.get(path.toString, "index.html") else path - sibling.map { n => - val realMidPath = realPath(mid.get.file.toPath) - val realSiblingPath = realPath(n.file.toPath) - realMidPath.relativize(realSiblingPath).toString.stripPrefix("../") - } - List( - for { - link <- link(prev) - p <- prev - } yield ( - "previous" -> Map( - "title" -> p.templateFile.title.name, - "url" -> link - ) - ), - for { - link <- link(next) - n <- next - } yield ( - "next" -> Map( - "title" -> n.templateFile.title.name, - "url" -> link - ) - ), - ).flatten.toMap - }.toList - - def updateSettings(templates: Seq[LoadedTemplate], additionalSettings: ListBuffer[Map[String, Object]]): List[LoadedTemplate] = - val updatedTemplates = List.newBuilder[LoadedTemplate] - for template <- templates do - val head: Map[String, Object] = - if template.hidden then Map.empty - else additionalSettings.remove(0) - val current: Map[String, Object] = template.templateFile.settings.getOrElse("page", Map.empty).asInstanceOf[Map[String, Object]] - val updatedTemplateFile = template.templateFile.copy(settings = template.templateFile.settings.updated("page", head ++ current)) - updatedTemplates += template.copy( - templateFile = updatedTemplateFile, - children = updateSettings(template.children, additionalSettings) - ) - updatedTemplates.result() - - val 
newTemplates = updateSettings(Seq(rootTemplate), newSettings.to(ListBuffer)) - val templatePages = newTemplates.map(templateToPage(_, siteContext)) - - val newRoot = newTemplates.head - - Some(newRoot).filter(r => r.children.nonEmpty || r.templateFile.rawCode.nonEmpty) - .map(templateToPage(_, siteContext)) + val rootDocsPage: Option[Page] = staticSite match { + case None => None + case Some(siteContext) => + val rootTemplate = siteContext.staticSiteRoot.rootTemplate + + // Below code is for walking in order the tree and modifing its nodes basing on its neighbours + + // We add dummy guards + val notHidden: Seq[Option[LoadedTemplate]] = None +: siteContext.allTemplates.filterNot(_.hidden).map(Some(_)) :+ None + + // Let's gather the list of maps for each template with its in-order neighbours + val newSettings: List[Map[String, Object]] = notHidden.sliding(size = 3, step = 1).map { + case None :: None :: Nil => + Map.empty + case prev :: mid :: next :: Nil => + def link(sibling: Option[LoadedTemplate]): Option[String] = + def realPath(path: Path) = if Files.isDirectory(path) then Paths.get(path.toString, "index.html") else path + sibling.map { n => + val realMidPath = realPath(mid.get.file.toPath) + val realSiblingPath = realPath(n.file.toPath) + realMidPath.relativize(realSiblingPath).toString.stripPrefix("../") + } + List( + for { + link <- link(prev) + p <- prev + } yield ( + "previous" -> Map( + "title" -> p.templateFile.title.name, + "url" -> link + ) + ), + for { + link <- link(next) + n <- next + } yield ( + "next" -> Map( + "title" -> n.templateFile.title.name, + "url" -> link + ) + ), + ).flatten.toMap + }.toList + + def updateSettings(templates: Seq[LoadedTemplate], additionalSettings: ListBuffer[Map[String, Object]]): List[LoadedTemplate] = + val updatedTemplates = List.newBuilder[LoadedTemplate] + for template <- templates do + val head: Map[String, Object] = + if template.hidden then Map.empty + else additionalSettings.remove(0) + val current: Map[String, Object] = template.templateFile.settings.getOrElse("page", Map.empty).asInstanceOf[Map[String, Object]] + val updatedTemplateFile = template.templateFile.copy(settings = template.templateFile.settings.updated("page", head ++ current)) + updatedTemplates += template.copy( + templateFile = updatedTemplateFile, + children = updateSettings(template.children, additionalSettings) + ) + updatedTemplates.result() + + val newTemplates = updateSettings(Seq(rootTemplate), newSettings.to(ListBuffer)) + val templatePages = newTemplates.map(templateToPage(_, siteContext)) + + val newRoot = newTemplates.head + + Some(newRoot).filter(r => r.children.nonEmpty || r.templateFile.rawCode.nonEmpty) + .map(templateToPage(_, siteContext)) + } val redirectPages: Seq[Page] = staticSite.fold(Seq.empty)(siteContext => siteContext.redirectTemplates.map { case (template, driFrom, driTo) => diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala index 33f0e089053a..c92853816d16 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala @@ -65,7 +65,7 @@ object FlexmarkSnippetProcessor: content.add(s, 0) node.setContent(content) - val fullSnippet = Seq(snippetImports, snippet).mkString("\n").trim + val fullSnippet = Seq(snippetImports, snippet).mkString("\n").stripPrefix("\n") val snippetCompilationResult = cf(fullSnippet, lineOffset, 
argOverride) match { case Some(result @ SnippetCompilationResult(wrapped, _, _, messages)) => node.setContentString(fullSnippet) From 3416509a0bc6fa42f5fed33f795c6bcd622ce3e6 Mon Sep 17 00:00:00 2001 From: EnzeXing Date: Sat, 31 Aug 2024 16:16:04 -0400 Subject: [PATCH 519/827] Update init-global-scala2-library-tasty blacklist --- .../neg-init-global-scala2-library-tasty.blacklist | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist index 93e6cd5b4ebc..48fe29ebc6bc 100644 --- a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist +++ b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist @@ -5,3 +5,16 @@ unapplySeq-implicit-arg.scala unapplySeq-implicit-arg2.scala unapplySeq-implicit-arg3.scala ScalaCheck.scala +mutable-read8.scala +TypeCast.scala +global-cycle8.scala +global-cycle6.scala +i12544b.scala +t9360.scala +mutable-array.scala +patmat-unapplySeq2.scala +line-spacing.scala +global-list.scala +t5366.scala +mutable-read7.scala +t9115.scala From 8527a9bb605ad8f656c949a97db14685a7cb0b42 Mon Sep 17 00:00:00 2001 From: EnzeXing <58994529+EnzeXing@users.noreply.github.com> Date: Sat, 31 Aug 2024 21:49:29 -0400 Subject: [PATCH 520/827] Adds special treatment to Predef.classOf (#21523) This is an updated PR of #20945, making the global initialization checker skips the analysis of `Predef.classOf` in scala2-library, since it is a stub method in tasty and is replaced by actual class representations in the backend. This prevents related warnings when running test_scala2_library_tasty. --------- Co-authored-by: EnzeXing --- compiler/src/dotty/tools/dotc/transform/init/Objects.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 1050fbe85ef2..0f96440f343f 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -702,6 +702,9 @@ class Objects(using Context @constructorOnly): val arr = OfArray(State.currentObject, summon[Regions.Data]) Heap.writeJoin(arr.addr, args.map(_.value).join) arr + else if target.equals(defn.Predef_classOf) then + // Predef.classOf is a stub method in tasty and is replaced in backend + Bottom else if target.hasSource then val cls = target.owner.enclosingClass.asClass val ddef = target.defTree.asInstanceOf[DefDef] From 46d7f955acd1d7d85898f361f5019887490bf2e6 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Mon, 2 Sep 2024 10:47:39 +0200 Subject: [PATCH 521/827] Disable publishing failing scans --- project/scripts/cmdTests | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/project/scripts/cmdTests b/project/scripts/cmdTests index 453590084b00..dae553d39ca1 100755 --- a/project/scripts/cmdTests +++ b/project/scripts/cmdTests @@ -55,7 +55,9 @@ cp tests/neg-macros/i6371/A_1.scala $OUT/A.scala cp tests/neg-macros/i6371/B_2.scala $OUT/B.scala "$SBT" "scalac $OUT/A.scala -d $OUT1" rm $OUT/A.scala -"$SBT" "scalac -classpath $OUT1 -d $OUT1 $OUT/B.scala" > "$tmp" 2>&1 || echo "ok" +# this command is expected to fail +# setting -Dscan=false disables publishing scans to develocity.scala-lang.org +"$SBT" "scalac -classpath $OUT1 -d $OUT1 $OUT/B.scala -Dscan=false" > "$tmp" 2>&1 || echo "ok" # cat "$tmp" # for debugging grep -qe "B.scala:2:7" "$tmp" grep -qe "This location contains 
code that was inlined from A.scala:3" "$tmp" From 48825a4f17a869fa2d963db35e2283e70932bb55 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sun, 25 Aug 2024 16:29:35 +0100 Subject: [PATCH 522/827] Consistently use TypeMismatch in TreeChecker --- .../tools/dotc/transform/TreeChecker.scala | 31 +++++++------------ 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index c4e1c7892e8d..a2b403fdae6c 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -432,19 +432,8 @@ object TreeChecker { promote(tree) case _ => val tree1 = super.typedUnadapted(tree, pt, locked) - def isSubType(tp1: Type, tp2: Type) = - (tp1 eq tp2) || // accept NoType / NoType - (tp1 <:< tp2) - def divergenceMsg(tp1: Type, tp2: Type) = - s"""Types differ - |Original type : ${tree.typeOpt.show} - |After checking: ${tree1.tpe.show} - |Original tree : ${tree.show} - |After checking: ${tree1.show} - |Why different : - """.stripMargin + core.TypeComparer.explained(_.isSubType(tp1, tp2)) - if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted - assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) + if tree.hasType then // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted + checkType(tree1.tpe, tree.typeOpt, tree, "typedUnadapted") tree1 checkNoOrphans(res.tpe) phasesToCheck.foreach(_.checkPostCondition(res)) @@ -824,16 +813,20 @@ object TreeChecker { && !isPrimaryConstructorReturn && !pt.isInstanceOf[FunOrPolyProto] then - assert(tree.tpe <:< pt, { - val mismatch = TypeMismatch(tree.tpe, pt, Some(tree)) - i"""|Type Mismatch: - |${mismatch.message} - |tree = $tree ${tree.className}""".stripMargin - }) + checkType(tree.tpe, pt, tree, "adapt") tree } override def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = tree + + private def checkType(tp1: Type, tp2: Type, tree: untpd.Tree, step: String)(using Context) = + // Accept NoType <:< NoType as true + assert((tp1 eq tp2) || (tp1 <:< tp2), { + val mismatch = TypeMismatch(tp1, tp2, None) + i"""|Type Mismatch (while checking $step): + |${mismatch.message}${mismatch.explanation} + |tree = $tree ${tree.className}""".stripMargin + }) } /** Tree checker that can be applied to a local tree. 
*/ From 0b45f44092d9c00f2e3f89d08eabccb42d78ad67 Mon Sep 17 00:00:00 2001 From: rochala Date: Mon, 2 Sep 2024 18:37:34 +0200 Subject: [PATCH 523/827] Fix types --- compiler/src/dotty/tools/dotc/interactive/Completion.scala | 3 +-- presentation-compiler/test/dotty/tools/pc/utils/JRE.scala | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 1395d9b80b53..4b3c6100d71c 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -252,7 +252,7 @@ object Completion: // https://github.com/scalameta/metals/blob/main/mtags/src/main/scala/scala/meta/internal/mtags/KeywordWrapper.scala // https://github.com/com-lihaoyi/Ammonite/blob/73a874173cd337f953a3edc9fb8cb96556638fdd/amm/util/src/main/scala/ammonite/util/Model.scala private def needsBacktick(s: String) = - val chunks = s.split("_", -1).nn + val chunks = s.split("_", -1) val validChunks = chunks.zipWithIndex.forall { case (chunk, index) => chunk.nn.forall(Chars.isIdentifierPart) || @@ -286,7 +286,6 @@ object Completion: if denot.isType then denot.symbol.showFullName else denot.info.widenTermRefExpr.show - def isInNewContext(untpdPath: List[untpd.Tree]): Boolean = untpdPath match case _ :: untpd.New(selectOrIdent: (untpd.Select | untpd.Ident)) :: _ => true diff --git a/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala b/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala index 2f812e1bbf80..d082258c255b 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala @@ -7,7 +7,7 @@ object JRE: javaVersion match case Some(version) if version.startsWith("1.8") => 8 - case Some(version) => version + case Some(version) => version.toInt // it is better to crash during tests than to run incorrect suite case None => 8 From 76498dc5e4cc43e4211922e2b71d4e5f85b7a2b5 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 2 Sep 2024 21:42:20 +0200 Subject: [PATCH 524/827] Address review: Refactor code for clarity --- .../tools/dotc/transform/init/Objects.scala | 42 +++++++++++-------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 1598e58ad767..05d45072528b 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -910,7 +910,7 @@ class Objects(using Context @constructorOnly): /** * Handle new expression `new p.C(args)`. - * The actual instance might be cached without running the constructor. + * The actual instance might be cached without running the constructor. * See tests/init-global/pos/cache-constructor.scala * * @param outer The value for `p`. @@ -1574,22 +1574,30 @@ class Objects(using Context @constructorOnly): * The default widening is 1 for most values, 2 for function values. * User-specified widening annotations are repected. 
*/ - def widenEscapedValue(value: Value, expr: Tree): Contextual[Value] = - expr.tpe.getAnnotation(defn.InitWidenAnnot) match - case Some(annot) => - annot.argument(0).get match - case arg @ Literal(c: Constants.Constant) => - val height = c.intValue - if height < 0 then - report.warning("The argument should be positive", arg) - value.widen(1) - else - value.widen(c.intValue) - case arg => - report.warning("The argument should be a constant integer value", arg) - value.widen(1) - case _ => - if value.isInstanceOf[Fun] then value.widen(2) else value.widen(1) + def widenEscapedValue(value: Value, annotatedTree: Tree): Contextual[Value] = + def parseAnnotation: Option[Int] = + annotatedTree.tpe.getAnnotation(defn.InitWidenAnnot).flatMap: annot => + annot.argument(0).get match + case arg @ Literal(c: Constants.Constant) => + val height = c.intValue + if height < 0 then + report.warning("The argument should be positive", arg) + None + else + Some(height) + case arg => + report.warning("The argument should be a constant integer value", arg) + None + end parseAnnotation + + parseAnnotation match + case Some(i) => + value.widen(i) + + case None => + if value.isInstanceOf[Fun] + then value.widen(2) + else value.widen(1) /** Evaluate arguments of methods and constructors */ def evalArgs(args: List[Arg], thisV: ThisValue, klass: ClassSymbol): Contextual[List[ArgInfo]] = From 0a95e899a4e37347e107ecef3d729346144bef55 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 3 Sep 2024 09:52:29 +0100 Subject: [PATCH 525/827] Drop trace.force in CaptureSet This was showing up in testCompilation runs, locally or in CI. --- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 25d8e0bc6506..1cf587f8f03a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -159,7 +159,7 @@ sealed abstract class CaptureSet extends Showable: */ def accountsFor(x: CaptureRef)(using Context): Boolean = if comparer.isInstanceOf[ExplainingTypeComparer] then // !!! DEBUG - reporting.trace.force(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): + reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): elems.exists(_.subsumes(x)) || !x.isMaxCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK else From 54ae45d8523e76f770089b8453a724e68e25f51b Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 3 Sep 2024 13:17:51 +0100 Subject: [PATCH 526/827] Fixed false positive unreachable local object I think this was fixed in PR 21000, but I didn't check. --- tests/warn/i21218.scala | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 tests/warn/i21218.scala diff --git a/tests/warn/i21218.scala b/tests/warn/i21218.scala new file mode 100644 index 000000000000..29fa957e2e70 --- /dev/null +++ b/tests/warn/i21218.scala @@ -0,0 +1,10 @@ +def Test[U, A](thisElem: A, thatElem: U) = { + case object passedEnd + val any: Seq[Any] = ??? 
+ any.zip(any) + .map { + case (`passedEnd`, r: U @unchecked) => (thisElem, r) + case (l: A @unchecked, `passedEnd`) => (l, thatElem) + case t: (A, U) @unchecked => t // false-positive warning + } +} From 634fcd12976bbc2877c71b8a850253791907d5d7 Mon Sep 17 00:00:00 2001 From: Joel Wilsson Date: Fri, 30 Aug 2024 14:45:51 +0200 Subject: [PATCH 527/827] Re-use attachment in exportForwarders to handle ambiguous overloads exportForwarders can be called more than once for the same expression if there are ambiguous overloads. Just return the already computed ExportForwarders if that happens. Closes #21071 --- .../src/dotty/tools/dotc/typer/Namer.scala | 9 ++++---- tests/neg/i21071.check | 9 ++++++++ tests/neg/i21071.scala | 21 +++++++++++++++++++ 3 files changed, 35 insertions(+), 4 deletions(-) create mode 100644 tests/neg/i21071.check create mode 100644 tests/neg/i21071.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 0a1a70b98bbb..6167db62fbe0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1452,10 +1452,11 @@ class Namer { typer: Typer => forwarders.derivedCons(forwarder2, avoidClashes(forwarders2)) case Nil => forwarders - addForwarders(selectors, Nil) - val forwarders = avoidClashes(buf.toList) - exp.pushAttachment(ExportForwarders, forwarders) - forwarders + exp.getAttachment(ExportForwarders).getOrElse: + addForwarders(selectors, Nil) + val forwarders = avoidClashes(buf.toList) + exp.pushAttachment(ExportForwarders, forwarders) + forwarders end exportForwarders /** Add forwarders as required by the export statements in this class */ diff --git a/tests/neg/i21071.check b/tests/neg/i21071.check new file mode 100644 index 000000000000..b2a3233a31c0 --- /dev/null +++ b/tests/neg/i21071.check @@ -0,0 +1,9 @@ +-- [E051] Reference Error: tests/neg/i21071.scala:9:2 ------------------------------------------------------------------ +9 | foo { // error + | ^^^ + | Ambiguous overload. The overloaded alternatives of method foo in object MySuite with types + | (a: String): Nothing + | (a: List[String]): Nothing + | both match arguments ((??? : => Nothing)) + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21071.scala b/tests/neg/i21071.scala new file mode 100644 index 000000000000..ac222cad7936 --- /dev/null +++ b/tests/neg/i21071.scala @@ -0,0 +1,21 @@ +trait Service { + def method: String +} + +object MySuite { + def foo(a: List[String]) = ??? + def foo(a: String) = ??? + + foo { // error + + new Service { + private val underlying: Service = ??? + private val s = "foo" + + export underlying.* + export s.toLowerCase + } + + ??? + } +} From 10d6b6342db3802a49db5831703dcce986fdd29c Mon Sep 17 00:00:00 2001 From: Lorenzo Gabriele Date: Thu, 5 Sep 2024 19:10:29 +0200 Subject: [PATCH 528/827] Fix typo in NamedTuple.scala --- library/src/scala/NamedTuple.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index 21c4c6840f5c..f237d1d487fe 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -119,7 +119,7 @@ object NamedTuple: NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] /** A type specially treated by the compiler to represent all fields of a - * class argument `T` as a named tuple. Or, if `T` is already a named tyuple, + * class argument `T` as a named tuple. 
Or, if `T` is already a named tuple, * `From[T]` is the same as `T`. */ type From[T] <: AnyNamedTuple From 9f90ad0a6f2057e6c32ae0a030b029f435d37eb3 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 6 Sep 2024 12:11:39 +0100 Subject: [PATCH 529/827] Calm param autotupling for overloads When resolving method overloads, we look to apply the same parameter auto-tupling logic that we have in typedFunctionValue. But we only checked the function was unary without checking whether it was a tuple. So I reused the same precondition. --- .../dotty/tools/dotc/typer/Applications.scala | 30 ++++++++++---- .../src/dotty/tools/dotc/typer/Typer.scala | 11 +---- tests/run/i16108.scala | 41 +++++++++++++++++++ 3 files changed, 64 insertions(+), 18 deletions(-) create mode 100644 tests/run/i16108.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index c6d8fd80fd60..11a95ce23f93 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -2214,19 +2214,26 @@ trait Applications extends Compatibility { case untpd.Function(args: List[untpd.ValDef] @unchecked, body) => // If ref refers to a method whose parameter at index `idx` is a function type, - // the arity of that function, otherise -1. - def paramCount(ref: TermRef) = + // the parameters of that function, otherwise Nil. + // We return Nil for both nilary functions and non-functions, + // because we won't be making tupled functions for nilary functions anyways, + // seeing as there is no Tuple0. + def params(ref: TermRef) = val formals = ref.widen.firstParamTypes if formals.length > idx then formals(idx).dealias match - case defn.FunctionNOf(args, _, _) => args.length - case _ => -1 - else -1 + case defn.FunctionNOf(args, _, _) => args + case _ => Nil + else Nil + + def isCorrectUnaryFunction(alt: TermRef): Boolean = + val formals = params(alt) + formals.length == 1 && ptIsCorrectProduct(formals.head, args) val numArgs = args.length - if numArgs != 1 - && !alts.exists(paramCount(_) == numArgs) - && alts.exists(paramCount(_) == 1) + if numArgs > 1 + && !alts.exists(params(_).lengthIs == numArgs) + && alts.exists(isCorrectUnaryFunction) then desugar.makeTupledFunction(args, body, isGenericTuple = true) // `isGenericTuple = true` is the safe choice here. It means the i'th tuple @@ -2395,6 +2402,13 @@ trait Applications extends Compatibility { } end resolveOverloaded1 + /** Is `formal` a product type which is elementwise compatible with `params`? */ + def ptIsCorrectProduct(formal: Type, params: List[untpd.ValDef])(using Context): Boolean = + isFullyDefined(formal, ForceDegree.flipBottom) + && defn.isProductSubType(formal) + && tupleComponentTypes(formal).corresponds(params): (argType, param) => + param.tpt.isEmpty || argType.widenExpr <:< typedAheadType(param.tpt).tpe + /** The largest suffix of `paramss` that has the same first parameter name as `t`, * plus the number of term parameters in `paramss` that come before that suffix. 
*/ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 1125e09539b6..2b0e0c764398 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1837,19 +1837,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (protoFormals.length == params.length) (protoFormals(i), isDefinedErased(i)) else (errorType(WrongNumberOfParameters(tree, params.length, pt, protoFormals.length), tree.srcPos), false) - /** Is `formal` a product type which is elementwise compatible with `params`? */ - def ptIsCorrectProduct(formal: Type) = - isFullyDefined(formal, ForceDegree.flipBottom) && - defn.isProductSubType(formal) && - tupleComponentTypes(formal).corresponds(params) { - (argType, param) => - param.tpt.isEmpty || argType.widenExpr <:< typedAheadType(param.tpt).tpe - } - var desugared: untpd.Tree = EmptyTree if protoFormals.length == 1 && params.length != 1 then val firstFormal = protoFormals.head.loBound - if ptIsCorrectProduct(firstFormal) then + if ptIsCorrectProduct(firstFormal, params) then val isGenericTuple = firstFormal.derivesFrom(defn.TupleClass) && !defn.isTupleClass(firstFormal.typeSymbol) diff --git a/tests/run/i16108.scala b/tests/run/i16108.scala new file mode 100644 index 000000000000..de7396be8f9f --- /dev/null +++ b/tests/run/i16108.scala @@ -0,0 +1,41 @@ +import scala.language.implicitConversions + +final class Functoid[+R](val function: Product => R) + +object Functoid { + implicit def apply[A, R](function: A => R): Functoid[R] = { + println(s"arity 1") + new Functoid({ case Tuple1(a: A @unchecked) => function(a) }) + } + implicit def apply[A, B, R](function: (A, B) => R): Functoid[R] = { + println("arity 2") + new Functoid({ case (a: A @unchecked, b: B @unchecked) => function(a, b) }) + } +} + +final case class ContainerConfig(image: String, version: Int, cmd: String) + +final class ContainerResource + +object ContainerResource { + implicit final class DockerProviderExtensions(private val self: Functoid[ContainerResource]) extends AnyVal { + def modifyConfig(modify: Functoid[ContainerConfig => ContainerConfig]): Functoid[ContainerConfig => ContainerConfig] = modify + // removing this overload fixes the implicit conversion and returns `arity 2` print + def modifyConfig(modify: ContainerConfig => ContainerConfig): Functoid[ContainerConfig => ContainerConfig] = new Functoid(_ => modify) + } +} + +object Test { + def main(args: Array[String]): Unit = { + val cfg = new Functoid(_ => new ContainerResource) + .modifyConfig { + // applying Functoid.apply explicitly instead of via implicit conversion also avoids untupling +// Functoid { + (image: String, version: Int) => (cfg: ContainerConfig) => cfg.copy(image, version) +// } + } + .function.apply(Tuple2("img", 9)) + .apply(ContainerConfig("a", 0, "b")) + println(cfg) + } +} From 2fbd42ecf7a2d943ce323e9bd9f26fa89f527930 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 6 Sep 2024 12:50:30 +0100 Subject: [PATCH 530/827] Drop if in CaptureSet --- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 1cf587f8f03a..c57ad639783c 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -158,14 +158,9 @@ sealed abstract class CaptureSet extends Showable: * as 
frozen. */ def accountsFor(x: CaptureRef)(using Context): Boolean = - if comparer.isInstanceOf[ExplainingTypeComparer] then // !!! DEBUG - reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): - elems.exists(_.subsumes(x)) - || !x.isMaxCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK - else - reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): - elems.exists(_.subsumes(x)) - || !x.isMaxCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): + elems.exists(_.subsumes(x)) + || !x.isMaxCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK /** A more optimistic version of accountsFor, which does not take variable supersets * of the `x` reference into account. A set might account for `x` if it accounts From c361d7ee941e6cc6a680e630d25fd32fbd184637 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Sat, 7 Sep 2024 16:50:48 +0200 Subject: [PATCH 531/827] Temporary fix to CompletionScalaCliSuite --- .../pc/tests/completion/CompletionScalaCliSuite.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala index 79d35944c84d..360607367db3 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala @@ -59,19 +59,19 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: @Test def `version` = check( - """|//> using lib "io.circe::circe-core_sjs1:0.14.1@@" + """|//> using lib "io.circe::circe-core_sjs1:0.14.10@@" |package A |""".stripMargin, - "0.14.1" + "0.14.10" ) // We don't to add `::` before version if `sjs1` is specified @Test def `version-edit` = checkEdit( - """|//> using lib "io.circe::circe-core_sjs1:0.14.1@@" + """|//> using lib "io.circe::circe-core_sjs1:0.14.10@@" |package A |""".stripMargin, - """|//> using lib "io.circe::circe-core_sjs1:0.14.1" + """|//> using lib "io.circe::circe-core_sjs1:0.14.10" |package A |""".stripMargin, ) From 1b1dd161e279b11cec2093e89e2878eb85b23c3d Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Sun, 8 Sep 2024 00:47:06 +0200 Subject: [PATCH 532/827] Revert "Drop redundant `butNot = Param` clause in isAnchor" This reverts commit 9d88c800ba518b184bb5f63259a782532d1abf96. Closes #21521 The `ClassTypeParamCreationFlags` include both `TypeParam` and `Deferred`. In effect, a class type parameter was incorrectly considered as an anchor. For a failing example, one can try asserting: ```scala || sym.is(Deferred).ensuring(_ == sym.is(Deferred, butNot = Param)) ``` in `ImplicitRunInfo#isAnchor` and a test with `summon[Ordering[Int]]`. In that example, at least, the flags happen to be set by `Scala2Unpickler#readDisambiguatedSymbol` src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala:562. 
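As a further illustration (a hypothetical, minimal sketch, not a test added by this commit), the resolution below is enough to exercise `isAnchor` on a Scala 2 class type parameter, which carries both the `TypeParam` and `Deferred` flags mentioned above:

```scala
// Minimal sketch (assumed reproduction, not part of this patch):
// resolving a standard instance forces implicit scope computation,
// where a class type parameter must not be treated as an anchor.
@main def repro: Unit =
  println(summon[Ordering[Int]])
```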
--- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 51e468153d1f..1e040c085019 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -636,7 +636,7 @@ trait ImplicitRunInfo: private def isAnchor(sym: Symbol) = sym.isClass && !isExcluded(sym) || sym.isOpaqueAlias - || sym.is(Deferred) + || sym.is(Deferred, butNot = Param) || sym.info.isMatchAlias private def computeIScope(rootTp: Type): OfTypeImplicits = From 10497c9a9bafdb36a84250584b0eaa2d7b1b6b07 Mon Sep 17 00:00:00 2001 From: Ondrej Lhotak Date: Wed, 14 Aug 2024 16:27:06 -0400 Subject: [PATCH 533/827] add tracking of NotNullInfo for Match, Case, Try trees (fix #21380) --- .../src/dotty/tools/dotc/typer/Typer.scala | 20 +++++++++++++++---- tests/explicit-nulls/neg/i21380.scala | 19 ++++++++++++++++++ tests/explicit-nulls/neg/i21380b.scala | 8 ++++++++ 3 files changed, 43 insertions(+), 4 deletions(-) create mode 100644 tests/explicit-nulls/neg/i21380.scala create mode 100644 tests/explicit-nulls/neg/i21380b.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 1125e09539b6..71bf3c3bc9a4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2135,14 +2135,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case1 } .asInstanceOf[List[CaseDef]] - assignType(cpy.Match(tree)(sel, cases1), sel, cases1).cast(pt) + var nni = sel.notNullInfo + if(cases1.nonEmpty) nni = nni.seq(cases1.map(_.notNullInfo).reduce(_.alt(_))) + assignType(cpy.Match(tree)(sel, cases1), sel, cases1).cast(pt).withNotNullInfo(nni) } // Overridden in InlineTyper for inline matches def typedMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: Type)(using Context): Tree = { val cases1 = harmonic(harmonize, pt)(typedCases(cases, sel, wideSelType, pt.dropIfProto)) .asInstanceOf[List[CaseDef]] - assignType(cpy.Match(tree)(sel, cases1), sel, cases1) + var nni = sel.notNullInfo + if(cases1.nonEmpty) nni = nni.seq(cases1.map(_.notNullInfo).reduce(_.alt(_))) + assignType(cpy.Match(tree)(sel, cases1), sel, cases1).withNotNullInfo(nni) } def typedCases(cases: List[untpd.CaseDef], sel: Tree, wideSelType0: Type, pt: Type)(using Context): List[CaseDef] = @@ -2216,7 +2220,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer pat1.putAttachment(InferredGadtConstraints, ctx.gadt) if (pt1.isValueType) // insert a cast if body does not conform to expected type if we disregard gadt bounds body1 = body1.ensureConforms(pt1)(using originalCtx) - assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1) + val nni = pat1.notNullInfo.seq( + guard1.notNullInfoIf(false).alt( + guard1.notNullInfoIf(true).seq(body1.notNullInfo) + ) + ) + assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1).withNotNullInfo(nni) } val pat1 = typedPattern(tree.pat, wideSelType)(using gadtCtx) @@ -2327,7 +2336,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer }: @unchecked val finalizer1 = typed(tree.finalizer, defn.UnitType) val cases2 = cases2x.asInstanceOf[List[CaseDef]] - assignType(cpy.Try(tree)(expr2, cases2, finalizer1), expr2, cases2) + val nni = expr2.notNullInfo.retractedInfo.seq( + 
cases2.map(_.notNullInfo.retractedInfo).fold(NotNullInfo.empty)(_.alt(_)) + ).seq(finalizer1.notNullInfo) + assignType(cpy.Try(tree)(expr2, cases2, finalizer1), expr2, cases2).withNotNullInfo(nni) } def typedTry(tree: untpd.ParsedTry, pt: Type)(using Context): Try = diff --git a/tests/explicit-nulls/neg/i21380.scala b/tests/explicit-nulls/neg/i21380.scala new file mode 100644 index 000000000000..685aa09ef818 --- /dev/null +++ b/tests/explicit-nulls/neg/i21380.scala @@ -0,0 +1,19 @@ +@main def test() = { + var x: String | Null = null + if (false) { + x = "" + + } else { + x = "" + } + try { + x = "" + throw new Exception() + } + catch { + case e: Exception => { + x = null + } + } + x.replace("", "") // error +} diff --git a/tests/explicit-nulls/neg/i21380b.scala b/tests/explicit-nulls/neg/i21380b.scala new file mode 100644 index 000000000000..b371dfcd743f --- /dev/null +++ b/tests/explicit-nulls/neg/i21380b.scala @@ -0,0 +1,8 @@ +@main def test() = { + var x: String | Null = null + x = "" + 1 match { + case 1 => x = null + } + x.replace("", "") // error +} From 7674fefae3e9c9c9dffe880c9b2b799049e90a10 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 15 Aug 2024 16:19:35 +0200 Subject: [PATCH 534/827] Fix NotNullInfo for Case and Try; add more tests --- .../src/dotty/tools/dotc/typer/Typer.scala | 35 +++++++++++-------- tests/explicit-nulls/neg/i21380b.scala | 14 +++++--- tests/explicit-nulls/neg/i21380c.scala | 34 ++++++++++++++++++ 3 files changed, 65 insertions(+), 18 deletions(-) create mode 100644 tests/explicit-nulls/neg/i21380c.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 71bf3c3bc9a4..e4932045e91a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2136,7 +2136,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } .asInstanceOf[List[CaseDef]] var nni = sel.notNullInfo - if(cases1.nonEmpty) nni = nni.seq(cases1.map(_.notNullInfo).reduce(_.alt(_))) + if cases1.nonEmpty then nni = nni.seq(cases1.map(_.notNullInfo).reduce(_.alt(_))) assignType(cpy.Match(tree)(sel, cases1), sel, cases1).cast(pt).withNotNullInfo(nni) } @@ -2145,7 +2145,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val cases1 = harmonic(harmonize, pt)(typedCases(cases, sel, wideSelType, pt.dropIfProto)) .asInstanceOf[List[CaseDef]] var nni = sel.notNullInfo - if(cases1.nonEmpty) nni = nni.seq(cases1.map(_.notNullInfo).reduce(_.alt(_))) + if cases1.nonEmpty then nni = nni.seq(cases1.map(_.notNullInfo).reduce(_.alt(_))) assignType(cpy.Match(tree)(sel, cases1), sel, cases1).withNotNullInfo(nni) } @@ -2218,13 +2218,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // will end up taking too much memory. If it does, we should just limit // how much GADT constraints we infer - it's always sound to infer less. 
pat1.putAttachment(InferredGadtConstraints, ctx.gadt) - if (pt1.isValueType) // insert a cast if body does not conform to expected type if we disregard gadt bounds + if pt1.isValueType then // insert a cast if body does not conform to expected type if we disregard gadt bounds body1 = body1.ensureConforms(pt1)(using originalCtx) - val nni = pat1.notNullInfo.seq( - guard1.notNullInfoIf(false).alt( - guard1.notNullInfoIf(true).seq(body1.notNullInfo) - ) - ) + val nni = pat1.notNullInfo + .seq(guard1.notNullInfoIf(false).alt(guard1.notNullInfoIf(true))) + .seq(body1.notNullInfo) assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1).withNotNullInfo(nni) } @@ -2329,16 +2327,25 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer untpd.Block(makeCanThrow(capabilityProof), expr) def typedTry(tree: untpd.Try, pt: Type)(using Context): Try = { + // We want to type check tree.expr first to comput NotNullInfo, but `addCanThrowCapabilities` + // uses the types of patterns in `tree.cases` to determine the capabilities. + // Hence, we create a copy of cases with empty body and type check that first, then type check + // the rest of the tree in order. + val casesEmptyBody1 = tree.cases.mapconserve(cpy.CaseDef(_)(body = EmptyTree)) + val casesEmptyBody2 = typedCases(casesEmptyBody1, EmptyTree, defn.ThrowableType, WildcardType) + val expr2 :: cases2x = harmonic(harmonize, pt) { - val cases1 = typedCases(tree.cases, EmptyTree, defn.ThrowableType, pt.dropIfProto) - val expr1 = typed(addCanThrowCapabilities(tree.expr, cases1), pt.dropIfProto) + val expr1 = typed(addCanThrowCapabilities(tree.expr, casesEmptyBody2), pt.dropIfProto) + val casesCtx = ctx.addNotNullInfo(expr1.notNullInfo.retractedInfo) + val cases1 = typedCases(tree.cases, EmptyTree, defn.ThrowableType, pt.dropIfProto)(using casesCtx) expr1 :: cases1 }: @unchecked - val finalizer1 = typed(tree.finalizer, defn.UnitType) val cases2 = cases2x.asInstanceOf[List[CaseDef]] - val nni = expr2.notNullInfo.retractedInfo.seq( - cases2.map(_.notNullInfo.retractedInfo).fold(NotNullInfo.empty)(_.alt(_)) - ).seq(finalizer1.notNullInfo) + + var nni = expr2.notNullInfo.retractedInfo + if cases2.nonEmpty then nni = nni.seq(cases2.map(_.notNullInfo).reduce(_.alt(_))) + val finalizer1 = typed(tree.finalizer, defn.UnitType)(using ctx.addNotNullInfo(nni)) + nni = nni.seq(finalizer1.notNullInfo) assignType(cpy.Try(tree)(expr2, cases2, finalizer1), expr2, cases2).withNotNullInfo(nni) } diff --git a/tests/explicit-nulls/neg/i21380b.scala b/tests/explicit-nulls/neg/i21380b.scala index b371dfcd743f..55a5fcf5bb60 100644 --- a/tests/explicit-nulls/neg/i21380b.scala +++ b/tests/explicit-nulls/neg/i21380b.scala @@ -1,8 +1,14 @@ -@main def test() = { +def test1 = var x: String | Null = null x = "" - 1 match { + 1 match case 1 => x = null - } + case _ => x = x.trim() // ok x.replace("", "") // error -} + +def test2(i: Int) = + var x: String | Null = null + i match + case 1 => x = "1" + case _ => x = " " + x.replace("", "") // ok \ No newline at end of file diff --git a/tests/explicit-nulls/neg/i21380c.scala b/tests/explicit-nulls/neg/i21380c.scala new file mode 100644 index 000000000000..4fea14f2f124 --- /dev/null +++ b/tests/explicit-nulls/neg/i21380c.scala @@ -0,0 +1,34 @@ +def test1(i: Int): Int = + var x: String | Null = null + if i == 0 then x = "" + else x = "" + try + x = x.replace(" ", "") // ok + throw new Exception() + catch + case e: Exception => + x = x.replaceAll(" ", "") // error + x = null + x.length // error + +def test2: Int = + var x: 
String | Null = null + try throw new Exception() + finally x = "" + x.length // ok + +def test3 = + var x: String | Null = "" + try throw new Exception() + catch case e: Exception => + x = (??? : String | Null) + finally + val l = x.length // error + +def test4: Int = + var x: String | Null = null + try throw new Exception() + catch + case npe: NullPointerException => x = "" + case _ => x = "" + x.length // ok \ No newline at end of file From 7f92b53fa5670c64c8419e680a0b0d85d493e959 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 28 Aug 2024 15:56:12 +0200 Subject: [PATCH 535/827] Fix typing cases --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 8 +++++--- tests/explicit-nulls/neg/i21380b.scala | 9 ++++++++- tests/explicit-nulls/neg/i21380c.scala | 13 ++++++++++++- 3 files changed, 25 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index e4932045e91a..85dc13f7853a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2210,7 +2210,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } val pat1 = indexPattern(tree).transform(pat) val guard1 = typedExpr(tree.guard, defn.BooleanType) - var body1 = ensureNoLocalRefs(typedExpr(tree.body, pt1), pt1, ctx.scope.toList) + var body1 = ensureNoLocalRefs( + typedExpr(tree.body, pt1)(using ctx.addNotNullInfo(guard1.notNullInfoIf(true))), + pt1, ctx.scope.toList) if ctx.gadt.isNarrowing then // Store GADT constraint to later retrieve it (in PostTyper, for now). // GADT constraints are necessary to correctly check bounds of type app, @@ -2221,7 +2223,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if pt1.isValueType then // insert a cast if body does not conform to expected type if we disregard gadt bounds body1 = body1.ensureConforms(pt1)(using originalCtx) val nni = pat1.notNullInfo - .seq(guard1.notNullInfoIf(false).alt(guard1.notNullInfoIf(true))) + .seq(guard1.notNullInfoIf(true)) .seq(body1.notNullInfo) assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1).withNotNullInfo(nni) } @@ -2343,7 +2345,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val cases2 = cases2x.asInstanceOf[List[CaseDef]] var nni = expr2.notNullInfo.retractedInfo - if cases2.nonEmpty then nni = nni.seq(cases2.map(_.notNullInfo).reduce(_.alt(_))) + if cases2.nonEmpty then nni = nni.seq(cases2.map(_.notNullInfo.retractedInfo).reduce(_.alt(_))) val finalizer1 = typed(tree.finalizer, defn.UnitType)(using ctx.addNotNullInfo(nni)) nni = nni.seq(finalizer1.notNullInfo) assignType(cpy.Try(tree)(expr2, cases2, finalizer1), expr2, cases2).withNotNullInfo(nni) diff --git a/tests/explicit-nulls/neg/i21380b.scala b/tests/explicit-nulls/neg/i21380b.scala index 55a5fcf5bb60..83e23053547c 100644 --- a/tests/explicit-nulls/neg/i21380b.scala +++ b/tests/explicit-nulls/neg/i21380b.scala @@ -11,4 +11,11 @@ def test2(i: Int) = i match case 1 => x = "1" case _ => x = " " - x.replace("", "") // ok \ No newline at end of file + x.replace("", "") // ok + +def test3(i: Int) = + var x: String | Null = null + i match + case 1 if x != null => () + case _ => x = " " + x.trim() // ok \ No newline at end of file diff --git a/tests/explicit-nulls/neg/i21380c.scala b/tests/explicit-nulls/neg/i21380c.scala index 4fea14f2f124..f86a5638e4c8 100644 --- a/tests/explicit-nulls/neg/i21380c.scala +++ b/tests/explicit-nulls/neg/i21380c.scala @@ -31,4 +31,15 @@ def test4: Int = catch case 
npe: NullPointerException => x = "" case _ => x = "" - x.length // ok \ No newline at end of file + x.length // error + // Although the catch block here is exhaustive, + // it is possible that the exception is thrown and not caught. + // Therefore, the code after the try block can only rely on the retracted info. + +def test5: Int = + var x: String | Null = null + try + x = "" + throw new Exception() + catch + case npe: NullPointerException => val i: Int = x.length // error \ No newline at end of file From fe0bdad123ea0daef818be993f9b7127c9242482 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Mon, 9 Sep 2024 14:59:25 +0200 Subject: [PATCH 536/827] Put i13864 in the blacklist of best-effort test. --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 16 +++++++++------- .../test/dotc/neg-best-effort-pickling.blacklist | 1 + 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 85dc13f7853a..ce5743f69d0c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2329,14 +2329,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer untpd.Block(makeCanThrow(capabilityProof), expr) def typedTry(tree: untpd.Try, pt: Type)(using Context): Try = { - // We want to type check tree.expr first to comput NotNullInfo, but `addCanThrowCapabilities` - // uses the types of patterns in `tree.cases` to determine the capabilities. - // Hence, we create a copy of cases with empty body and type check that first, then type check - // the rest of the tree in order. - val casesEmptyBody1 = tree.cases.mapconserve(cpy.CaseDef(_)(body = EmptyTree)) - val casesEmptyBody2 = typedCases(casesEmptyBody1, EmptyTree, defn.ThrowableType, WildcardType) - val expr2 :: cases2x = harmonic(harmonize, pt) { + // We want to type check tree.expr first to comput NotNullInfo, but `addCanThrowCapabilities` + // uses the types of patterns in `tree.cases` to determine the capabilities. + // Hence, we create a copy of cases with empty body and type check that first, then type check + // the rest of the tree in order. + // It may seem that invalid references can be created if the type of the pattern contains + // type binds, but this is not a valid `CanThrow` capability (checked by `addCanThrowCapabilities`), + // so it is not a problem. 
+ val casesEmptyBody1 = tree.cases.mapconserve(cpy.CaseDef(_)(body = EmptyTree)) + val casesEmptyBody2 = typedCases(casesEmptyBody1, EmptyTree, defn.ThrowableType, WildcardType) val expr1 = typed(addCanThrowCapabilities(tree.expr, casesEmptyBody2), pt.dropIfProto) val casesCtx = ctx.addNotNullInfo(expr1.notNullInfo.retractedInfo) val cases1 = typedCases(tree.cases, EmptyTree, defn.ThrowableType, pt.dropIfProto)(using casesCtx) diff --git a/compiler/test/dotc/neg-best-effort-pickling.blacklist b/compiler/test/dotc/neg-best-effort-pickling.blacklist index a582f085dd30..99a83a467f08 100644 --- a/compiler/test/dotc/neg-best-effort-pickling.blacklist +++ b/compiler/test/dotc/neg-best-effort-pickling.blacklist @@ -16,6 +16,7 @@ i13780-1.scala i20317a.scala i11226.scala i974.scala +i13864.scala # semantic db generation fails in the first compilation i1642.scala From c7c8bb550078e31bbc6341fab51f2c1c93468d44 Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Mon, 9 Sep 2024 18:28:31 +0200 Subject: [PATCH 537/827] improvement: Make sure Scala CLI tests are less flaky The expected list should be stable and we only check if the expected subset is contained --- .../completion/CompletionScalaCliSuite.scala | 42 +++++++++++++------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala index 360607367db3..b542e4ba84e3 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala @@ -8,7 +8,7 @@ import org.junit.Ignore class CompletionScalaCliSuite extends BaseCompletionSuite: @Test def `simple` = - check( + checkSubset( """|//> using lib "io.cir@@ |package A |""".stripMargin, @@ -30,11 +30,12 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |//> using lib io.circe::circe-core_native0.4 |package A |""".stripMargin, - assertSingleItem = false + assertSingleItem = false, + filter = _.contains("circe-core_native0.4") ) @Test def `version-sort` = - check( + checkSubset( """|//> using dep "com.lihaoyi::pprint:0.7@@" |package A |""".stripMargin, @@ -42,12 +43,12 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |0.7.2 |0.7.1 |0.7.0 - |""".stripMargin, + |""".stripMargin ) @Ignore @Test def `single-colon` = - check( + checkSubset( """|//> using lib "io.circe:circe-core_na@@ |package A |""".stripMargin, @@ -58,7 +59,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: ) @Test def `version` = - check( + checkSubset( """|//> using lib "io.circe::circe-core_sjs1:0.14.10@@" |package A |""".stripMargin, @@ -74,11 +75,12 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: """|//> using lib "io.circe::circe-core_sjs1:0.14.10" |package A |""".stripMargin, + filter = _.endsWith("0.14.10") ) @Ignore @Test def `multiple-libs` = - check( + checkSubset( """|//> using lib "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, @@ -87,7 +89,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: @Ignore @Test def `script` = - check( + checkSubset( scriptWrapper( """|//> using lib "io.circe:circe-core_na@@ | @@ -103,7 +105,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: ) @Test def `closing-quote` = - check( + checkSubset( """|//> using lib "io.circe::circe-core:0.14.0"@@ |package A |""".stripMargin, @@ -111,7 
+113,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: ) @Test def `whitespace` = - check( + checkSubset( """|//> using lib "io.circe::circe-co @@ |package A |""".stripMargin, @@ -130,7 +132,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: ) @Test def `dep` = - check( + checkSubset( """|//> using dep "io.cir@@ |package A |""".stripMargin, @@ -140,13 +142,29 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: @Ignore @Test def `multiple-deps2` = - check( + checkSubset( """|//> using libs "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, "circe-core_native0.4" ) + def checkSubset( + original: String, + expected: String, + filename: String = "A.scala", + enablePackageWrap: Boolean = true + ) = { + val expectedAtLeast = expected.linesIterator.toSet + check( + original, + expected, + filter = expectedAtLeast, + filename = filename, + enablePackageWrap = enablePackageWrap + ) + } + private def scriptWrapper(code: String, filename: String): String = // Vaguely looks like a scala file that ScalaCLI generates // from a sc file. From eb8a3155f289c90591eaa8250a239e7218389985 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Wed, 4 Sep 2024 16:29:22 +0200 Subject: [PATCH 538/827] Ignore best effort settings in repl --- compiler/src/dotty/tools/repl/ReplDriver.scala | 15 ++++++++++++++- .../test/dotty/tools/repl/ReplCompilerTests.scala | 11 +++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index f22523ac6f64..486005658d79 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -87,8 +87,21 @@ class ReplDriver(settings: Array[String], setupRootCtx(this.settings ++ settings, rootCtx) } + private val incompatibleOptions: Seq[String] = Seq( + initCtx.settings.YbestEffort.name, + initCtx.settings.YwithBestEffortTasty.name + ) + private def setupRootCtx(settings: Array[String], rootCtx: Context) = { - setup(settings, rootCtx) match + val incompatible = settings.intersect(incompatibleOptions) + val filteredSettings = + if !incompatible.isEmpty then + inContext(rootCtx) { + out.println(i"Options incompatible with repl will be ignored: ${incompatible.mkString(", ")}") + } + settings.filter(!incompatible.contains(_)) + else settings + setup(filteredSettings, rootCtx) match case Some((files, ictx)) => inContext(ictx) { shouldStart = true if files.nonEmpty then out.println(i"Ignoring spurious arguments: $files%, %") diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index 67e63d0156a5..374f53dbd011 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -456,6 +456,17 @@ class ReplCompilerTests extends ReplTest: assertTrue(last, last.startsWith("val res0: tpolecat.type = null")) assertTrue(last, last.endsWith("""// result of "res0.toString" is null""")) + @Test def `i21431 filter out best effort options`: Unit = + initially: + run(":settings -Ybest-effort -Ywith-best-effort-tasty") + .andThen: + run("0") // check for crash + val last = lines() + println(last) + assertTrue(last(0), last(0) == ("Options incompatible with repl will be ignored: -Ybest-effort, -Ywith-best-effort-tasty")) + assertTrue(last(1), last(1) == ("val res0: Int = 0")) + + object ReplCompilerTests: private val pattern = 
Pattern.compile("\\r[\\n]?|\\n"); From a440a2353b0cf23f822822e05443df586f9cf1c1 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Wed, 4 Sep 2024 16:30:45 +0200 Subject: [PATCH 539/827] Make purpose of the options clearer in the internal docs --- docs/_docs/internals/best-effort-compilation.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/_docs/internals/best-effort-compilation.md b/docs/_docs/internals/best-effort-compilation.md index 2fed951c3fd8..248203883a3c 100644 --- a/docs/_docs/internals/best-effort-compilation.md +++ b/docs/_docs/internals/best-effort-compilation.md @@ -11,6 +11,9 @@ It is composed of two experimental compiler options: * `-Ywith-best-effort-tasty` allows to read Best Effort TASTy files, and if such file is read from the classpath then limits compilation to the frontend phases +IMPORTANT: These options are meant to by used by an IDE and should never be used on the user side, in the project definition. +This is why they are hidden behind a private `-Y` option specifier. + This feature aims to force through to the typer phase regardless of errors, and then serialize tasty-like files obtained from the error trees into the best effort directory (`META-INF/best-effort`) and also serialize semanticdb as normal. From a59a4e6247b6c7f090d9b82e17608fbebc9bdd62 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 10 Sep 2024 15:06:21 +0200 Subject: [PATCH 540/827] Fix -Dscan=false --- project/scripts/cmdTests | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/scripts/cmdTests b/project/scripts/cmdTests index dae553d39ca1..1fdf96d53fdd 100755 --- a/project/scripts/cmdTests +++ b/project/scripts/cmdTests @@ -57,7 +57,7 @@ cp tests/neg-macros/i6371/B_2.scala $OUT/B.scala rm $OUT/A.scala # this command is expected to fail # setting -Dscan=false disables publishing scans to develocity.scala-lang.org -"$SBT" "scalac -classpath $OUT1 -d $OUT1 $OUT/B.scala -Dscan=false" > "$tmp" 2>&1 || echo "ok" +"$SBT" "scalac -classpath $OUT1 -d $OUT1 $OUT/B.scala" -Dscan=false > "$tmp" 2>&1 || echo "ok" # cat "$tmp" # for debugging grep -qe "B.scala:2:7" "$tmp" grep -qe "This location contains code that was inlined from A.scala:3" "$tmp" From 1e6111de81cde91c6862332981c78ac0f9be76f7 Mon Sep 17 00:00:00 2001 From: David Hua Date: Wed, 11 Sep 2024 01:14:54 -0400 Subject: [PATCH 541/827] Fix bug in init checker while compiling scodec-bits community project --- .../tools/dotc/transform/init/Objects.scala | 3 +++ tests/init-global/pos/scodec-bits.scala | 17 +++++++++++++++++ 2 files changed, 20 insertions(+) create mode 100644 tests/init-global/pos/scodec-bits.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 691e67e228ef..52760cf8b6c7 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -523,6 +523,8 @@ class Objects(using Context @constructorOnly): def getHeapData()(using mutable: MutableData): Data = mutable.heap + def setHeap(newHeap: Data)(using mutable: MutableData): Unit = mutable.heap = newHeap + /** Cache used to terminate the check */ object Cache: case class Config(thisV: Value, env: Env.Data, heap: Heap.Data) @@ -538,6 +540,7 @@ class Objects(using Context @constructorOnly): val result = super.cachedEval(config, expr, cacheResult, default = Res(Bottom, Heap.getHeapData())) { expr => Res(fun(expr), Heap.getHeapData()) } + Heap.setHeap(result.heap) result.value end Cache diff 
--git a/tests/init-global/pos/scodec-bits.scala b/tests/init-global/pos/scodec-bits.scala new file mode 100644 index 000000000000..97a4a793a4a6 --- /dev/null +++ b/tests/init-global/pos/scodec-bits.scala @@ -0,0 +1,17 @@ +abstract class A { + def a: Long +} + +object O { + case class B() extends A { + def a = 5L + } + case class C(a2: A) extends A { + var c: Long = a2.a + def a = c + } + def f(a: A): A = C(f(a)) + def g(): A = f(B()) + + val x = g() +} \ No newline at end of file From 6f21759ec5142a70d3dc6b25ecbe6b254bce3745 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 5 Sep 2024 11:09:23 +0200 Subject: [PATCH 542/827] Remove the `-rewrite` option for `typechecks` methods in Quotes --- compiler/src/dotty/tools/dotc/config/Settings.scala | 4 ++++ compiler/src/dotty/tools/dotc/inlines/Inlines.scala | 5 ++++- compiler/test/dotc/pos-test-pickling.blacklist | 3 +++ compiler/test/dotc/run-test-pickling.blacklist | 4 +++- tests/pos/i21415.scala | 8 ++++++++ 5 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i21415.scala diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 7454682fba56..a5fc6a64aa45 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -47,6 +47,10 @@ object Settings: values(idx) = x changed.add(idx) this + + def reinitializedCopy(): SettingsState = + SettingsState(values.toSeq, changed.toSet) + end SettingsState case class ArgsSummary( diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index fffe87c3f57a..c1ab5f99d8d2 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -342,10 +342,13 @@ object Inlines: if Inlines.isInlineable(codeArg1.symbol) then stripTyped(Inlines.inlineCall(codeArg1)) else codeArg1 + // We should not be rewriting tested strings + val noRewriteSettings = ctx.settings.rewrite.updateIn(ctx.settingsState.reinitializedCopy(), None) + ConstFold(underlyingCodeArg).tpe.widenTermRefExpr match { case ConstantType(Constant(code: String)) => val source2 = SourceFile.virtual("tasty-reflect", code) - inContext(ctx.fresh.setNewTyperState().setTyper(new Typer(ctx.nestingLevel + 1)).setSource(source2)) { + inContext(ctx.fresh.setSettings(noRewriteSettings).setNewTyperState().setTyper(new Typer(ctx.nestingLevel + 1)).setSource(source2)) { val tree2 = new Parser(source2).block() if ctx.reporter.allErrors.nonEmpty then ctx.reporter.allErrors.map((ErrorKind.Parser, _)) diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index b68ac7fc3b6e..032b53150e49 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -135,3 +135,6 @@ parsercombinators-new-syntax.scala hylolib-deferred-given hylolib-cb hylolib + +# typecheckErrors method unpickling +i21415.scala diff --git a/compiler/test/dotc/run-test-pickling.blacklist b/compiler/test/dotc/run-test-pickling.blacklist index dacbc63bb520..31304e061bc7 100644 --- a/compiler/test/dotc/run-test-pickling.blacklist +++ b/compiler/test/dotc/run-test-pickling.blacklist @@ -27,7 +27,6 @@ tuple-zip.scala tuples1.scala tuples1a.scala tuples1b.scala -typeCheckErrors.scala typeclass-derivation-doc-example.scala typeclass-derivation1.scala typeclass-derivation2.scala @@ -47,3 +46,6 @@ trait-static-forwarder i17255 
named-tuples-strawman-2.scala +# typecheckErrors method unpickling +typeCheckErrors.scala + diff --git a/tests/pos/i21415.scala b/tests/pos/i21415.scala new file mode 100644 index 000000000000..04afd512b535 --- /dev/null +++ b/tests/pos/i21415.scala @@ -0,0 +1,8 @@ +//> using options -rewrite -source:3.4-migration +import scala.compiletime.testing.typeCheckErrors + +def foo(arg: Int): Unit = ??? + +@main def Test = + typeCheckErrors("Seq.empty[Int].foreach(foo.apply _)") + typeCheckErrors("Seq.empty[Int].foreach(foo.apply _)") From 42981997f37528c01d1658d8213c8212891ee6b0 Mon Sep 17 00:00:00 2001 From: David Hua Date: Wed, 11 Sep 2024 16:09:48 -0400 Subject: [PATCH 543/827] Empty commit to trigger CI From 787fd8ec09700f109da3cc708c32fa6e4aedee81 Mon Sep 17 00:00:00 2001 From: crunchyfrog <49813441+truecrunchyfrog@users.noreply.github.com> Date: Thu, 12 Sep 2024 13:42:13 +0200 Subject: [PATCH 544/827] small language fix replace `my` with `by` --- scala2-library-cc/src/scala/collection/mutable/Builder.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scala2-library-cc/src/scala/collection/mutable/Builder.scala b/scala2-library-cc/src/scala/collection/mutable/Builder.scala index dd57cb75da91..2d5f84c32e92 100644 --- a/scala2-library-cc/src/scala/collection/mutable/Builder.scala +++ b/scala2-library-cc/src/scala/collection/mutable/Builder.scala @@ -80,7 +80,7 @@ trait Builder[-A, +To] extends Growable[A] { } } - /** A builder resulting from this builder my mapping the result using `f`. */ + /** A builder resulting from this builder by mapping the result using `f`. */ def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo]^{this, f} = new Builder[A, NewTo] { def addOne(x: A): this.type = { self += x; this } def clear(): Unit = self.clear() From 3078860c15ef937d1f846a5318aeaccae9904f32 Mon Sep 17 00:00:00 2001 From: rochala Date: Thu, 12 Sep 2024 20:24:38 +0200 Subject: [PATCH 545/827] Fix accidentaly removed .nn call and remove -release from forbidden options --- compiler/src/dotty/tools/dotc/interactive/Completion.scala | 2 +- .../src/main/dotty/tools/pc/ScalaPresentationCompiler.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 4b3c6100d71c..6e86f45237cf 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -252,7 +252,7 @@ object Completion: // https://github.com/scalameta/metals/blob/main/mtags/src/main/scala/scala/meta/internal/mtags/KeywordWrapper.scala // https://github.com/com-lihaoyi/Ammonite/blob/73a874173cd337f953a3edc9fb8cb96556638fdd/amm/util/src/main/scala/ammonite/util/Model.scala private def needsBacktick(s: String) = - val chunks = s.split("_", -1) + val chunks = s.split("_", -1).nn val validChunks = chunks.zipWithIndex.forall { case (chunk, index) => chunk.nn.forall(Chars.isIdentifierPart) || diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index 85de8e7d8439..679fbf000f75 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -58,7 +58,7 @@ case class ScalaPresentationCompiler( val scalaVersion = BuildInfo.scalaVersion private val forbiddenOptions = Set("-print-lines", 
"-print-tasty") - private val forbiddenDoubleOptions = Set("-release") + private val forbiddenDoubleOptions = Set.empty[String] given ReportContext = folderPath From 90d99c36e8e9260b9f81dbe3e4eec1289e17160a Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Fri, 19 Apr 2024 17:17:08 +0200 Subject: [PATCH 546/827] Add AnnotationsMappingBenchmark --- .../AnnotationsMappingBenchmark.scala | 71 +++++++++++++++++++ bench-micro/tests/someAnnotatedTypes.scala | 28 ++++++++ 2 files changed, 99 insertions(+) create mode 100644 bench-micro/src/main/scala/dotty/tools/benchmarks/AnnotationsMappingBenchmark.scala create mode 100644 bench-micro/tests/someAnnotatedTypes.scala diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/AnnotationsMappingBenchmark.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/AnnotationsMappingBenchmark.scala new file mode 100644 index 000000000000..310a1745171f --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/AnnotationsMappingBenchmark.scala @@ -0,0 +1,71 @@ +package dotty.tools.benchmarks + +import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Level, Measurement, Mode as JMHMode, Param, Scope, Setup, State, Warmup} +import java.util.concurrent.TimeUnit.SECONDS + +import dotty.tools.dotc.{Driver, Run, Compiler} +import dotty.tools.dotc.ast.{tpd, TreeTypeMap}, tpd.{Apply, Block, Tree, TreeAccumulator, TypeApply} +import dotty.tools.dotc.core.Annotations.{Annotation, ConcreteAnnotation, EmptyAnnotation} +import dotty.tools.dotc.core.Contexts.{ContextBase, Context, ctx, withMode} +import dotty.tools.dotc.core.Mode +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.core.Symbols.{defn, mapSymbols, Symbol} +import dotty.tools.dotc.core.Types.{AnnotatedType, NoType, SkolemType, TermRef, Type, TypeMap} +import dotty.tools.dotc.parsing.Parser +import dotty.tools.dotc.typer.TyperPhase + +/** Measures the performance of mapping over annotated types. 
+ * + * Run with: scala3-bench-micro / Jmh / run AnnotationsMappingBenchmark + */ +@Fork(value = 4) +@Warmup(iterations = 4, time = 1, timeUnit = SECONDS) +@Measurement(iterations = 4, time = 1, timeUnit = SECONDS) +@BenchmarkMode(Array(JMHMode.Throughput)) +@State(Scope.Thread) +class AnnotationsMappingBenchmark: + var tp: Type = null + var specialIntTp: Type = null + var context: Context = null + var typeFunction: Context ?=> Type => Type = null + var typeMap: TypeMap = null + + @Param(Array("v1", "v2", "v3", "v4")) + var valName: String = null + + @Param(Array("id", "mapInts")) + var typeFunctionName: String = null + + @Setup(Level.Iteration) + def setup(): Unit = + val testPhase = + new Phase: + final override def phaseName = "testPhase" + final override def run(using ctx: Context): Unit = + val pkg = ctx.compilationUnit.tpdTree.symbol + tp = pkg.requiredClass("Test").requiredValueRef(valName).underlying + specialIntTp = pkg.requiredClass("Test").requiredType("SpecialInt").typeRef + context = ctx + + val compiler = + new Compiler: + private final val baseCompiler = new Compiler() + final override def phases = List(List(Parser()), List(TyperPhase()), List(testPhase)) + + val driver = + new Driver: + final override def newCompiler(using Context): Compiler = compiler + + driver.process(Array("-classpath", System.getProperty("BENCH_CLASS_PATH"), "tests/someAnnotatedTypes.scala")) + + typeFunction = + typeFunctionName match + case "id" => tp => tp + case "mapInts" => tp => (if tp frozen_=:= defn.IntType then specialIntTp else tp) + case _ => throw new IllegalArgumentException(s"Unknown type function: $typeFunctionName") + + typeMap = + new TypeMap(using context): + final override def apply(tp: Type): Type = typeFunction(mapOver(tp)) + + @Benchmark def applyTypeMap() = typeMap.apply(tp) diff --git a/bench-micro/tests/someAnnotatedTypes.scala b/bench-micro/tests/someAnnotatedTypes.scala new file mode 100644 index 000000000000..8b12d4f7c2c6 --- /dev/null +++ b/bench-micro/tests/someAnnotatedTypes.scala @@ -0,0 +1,28 @@ +class Test: + class FlagAnnot extends annotation.StaticAnnotation + class StringAnnot(val s: String) extends annotation.StaticAnnotation + class LambdaAnnot(val f: Int => Boolean) extends annotation.StaticAnnotation + + type SpecialInt <: Int + + val v1: Int @FlagAnnot = 42 + + val v2: Int @StringAnnot("hello") = 42 + + val v3: Int @LambdaAnnot(it => it == 42) = 42 + + val v4: Int @LambdaAnnot(it => { + def g(x: Int, y: Int) = x - y + 5 + g(it, 7) * 2 == 80 + }) = 42 + + /*val v5: Int @LambdaAnnot(it => { + class Foo(x: Int): + def xPlus10 = x + 10 + def xPlus20 = x + 20 + def xPlus(y: Int) = x + y + val foo = Foo(it) + foo.xPlus10 - foo.xPlus20 + foo.xPlus(30) == 62 + }) = 42*/ + + def main(args: Array[String]): Unit = ??? 
From 56fe7cfeb065bee24722985a7fba1ff64328a533 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Tue, 7 May 2024 10:24:50 +0200 Subject: [PATCH 547/827] Run printing tests in a separate directory --- compiler/test/dotty/tools/dotc/printing/PrintingTest.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 382c029c86e0..8a80a6978bdb 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -6,7 +6,7 @@ import scala.language.unsafeNulls import vulpix.FileDiff import vulpix.TestConfiguration -import vulpix.TestConfiguration +import vulpix.ParallelTesting import reporting.TestReporter import java.io._ @@ -25,7 +25,9 @@ import java.io.File class PrintingTest { def options(phase: String, flags: List[String]) = - List(s"-Xprint:$phase", "-color:never", "-nowarn", "-classpath", TestConfiguration.basicClasspath) ::: flags + val outDir = ParallelTesting.defaultOutputDir + "printing" + File.pathSeparator + File(outDir).mkdirs() + List(s"-Xprint:$phase", "-color:never", "-nowarn", "-d", outDir, "-classpath", TestConfiguration.basicClasspath) ::: flags private def compileFile(path: JPath, phase: String): Boolean = { val baseFilePath = path.toString.stripSuffix(".scala") From ac76938c1e596efcaffa0f5d64e224bdc8be72f0 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Tue, 7 May 2024 10:24:32 +0200 Subject: [PATCH 548/827] Improve mapping and pickling of annotated types MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `Annotation.mapWith` maps an `Annotation` with a type map `tm`. As an optimization, this function first checks if `tm` would result in any change (by traversing the annotation’s argument trees with a `TreeAccumulator`) before applying `tm` to the whole annotation tree. This optimization had two problems: 1. it didn’t include type parameters, and 2. it used `frozen_=:=` to compare types, which didn’t work as expected with `NoType`. This commit fixes these issues. Additionally, positions of trees that appear only inside `AnnotatedType` were not pickled. This commit also fixes this. 
--- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 10 +++++- .../dotty/tools/dotc/core/Annotations.scala | 11 ++++--- .../dotc/core/tasty/PositionPickler.scala | 4 +++ .../tools/dotc/core/tasty/TreePickler.scala | 7 ++++ .../tools/dotc/quoted/PickledQuotes.scala | 2 +- .../dotty/tools/dotc/transform/Pickler.scala | 2 +- tests/pos/annot-17939b.scala | 10 ++++++ tests/pos/annot-18064.scala | 9 +++++ tests/pos/annot-5789.scala | 10 ++++++ tests/printing/annot-18064.check | 16 +++++++++ tests/printing/annot-18064.scala | 9 +++++ tests/printing/annot-19846b.check | 33 +++++++++++++++++++ tests/printing/annot-19846b.scala | 7 ++++ 13 files changed, 123 insertions(+), 7 deletions(-) create mode 100644 tests/pos/annot-17939b.scala create mode 100644 tests/pos/annot-18064.scala create mode 100644 tests/pos/annot-5789.scala create mode 100644 tests/printing/annot-18064.check create mode 100644 tests/printing/annot-18064.scala create mode 100644 tests/printing/annot-19846b.check create mode 100644 tests/printing/annot-19846b.scala diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 385917f9b368..5b89c9bbacd1 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -141,9 +141,17 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => loop(tree, Nil) /** All term arguments of an application in a single flattened list */ + def allTermArguments(tree: Tree): List[Tree] = unsplice(tree) match { + case Apply(fn, args) => allArguments(fn) ::: args + case TypeApply(fn, args) => allArguments(fn) + case Block(_, expr) => allArguments(expr) + case _ => Nil + } + + /** All type and term arguments of an application in a single flattened list */ def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { case Apply(fn, args) => allArguments(fn) ::: args - case TypeApply(fn, _) => allArguments(fn) + case TypeApply(fn, args) => allArguments(fn) ::: args case Block(_, expr) => allArguments(expr) case _ => Nil } diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index b4cdeba4600b..d6a99b12e3b3 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -30,8 +30,8 @@ object Annotations { def derivedAnnotation(tree: Tree)(using Context): Annotation = if (tree eq this.tree) this else Annotation(tree) - /** All arguments to this annotation in a single flat list */ - def arguments(using Context): List[Tree] = tpd.allArguments(tree) + /** All term arguments of this annotation in a single flat list */ + def arguments(using Context): List[Tree] = tpd.allTermArguments(tree) def argument(i: Int)(using Context): Option[Tree] = { val args = arguments @@ -54,15 +54,18 @@ object Annotations { * type, since ranges cannot be types of trees. */ def mapWith(tm: TypeMap)(using Context) = - val args = arguments + val args = tpd.allArguments(tree) if args.isEmpty then this else + // Checks if `tm` would result in any change by applying it to types + // inside the annotations' arguments and checking if the resulting types + // are different. 
val findDiff = new TreeAccumulator[Type]: def apply(x: Type, tree: Tree)(using Context): Type = if tm.isRange(x) then x else val tp1 = tm(tree.tpe) - foldOver(if tp1 frozen_=:= tree.tpe then x else tp1, tree) + foldOver(if !tp1.exists || (tp1 frozen_=:= tree.tpe) then x else tp1, tree) val diff = findDiff(NoType, args) if tm.isRange(diff) then EmptyAnnotation else if diff.exists then derivedAnnotation(tm.mapOver(tree)) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala index 86076517021a..3d8080e72a29 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala @@ -33,6 +33,7 @@ object PositionPickler: pickler: TastyPickler, addrOfTree: TreeToAddr, treeAnnots: untpd.MemberDef => List[tpd.Tree], + typeAnnots: List[tpd.Tree], relativePathReference: String, source: SourceFile, roots: List[Tree], @@ -136,6 +137,9 @@ object PositionPickler: } for (root <- roots) traverse(root, NoSource) + + for annotTree <- typeAnnots do + traverse(annotTree, NoSource) end picklePositions end PositionPickler diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 6659348fb5de..7fd6444746ce 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -41,6 +41,10 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { */ private val annotTrees = util.EqHashMap[untpd.MemberDef, mutable.ListBuffer[Tree]]() + /** A set of annotation trees appearing in annotated types. + */ + private val annotatedTypeTrees = mutable.ListBuffer[Tree]() + /** A map from member definitions to their doc comments, so that later * parallel comment pickling does not need to access symbols of trees (which * would involve accessing symbols of named types and possibly changing phases @@ -57,6 +61,8 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { val ts = annotTrees.lookup(tree) if ts == null then Nil else ts.toList + def typeAnnots: List[Tree] = annotatedTypeTrees.toList + def docString(tree: untpd.MemberDef): Option[Comment] = Option(docStrings.lookup(tree)) @@ -278,6 +284,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { case tpe: AnnotatedType => writeByte(ANNOTATEDtype) withLength { pickleType(tpe.parent, richTypes); pickleTree(tpe.annot.tree) } + annotatedTypeTrees += tpe.annot.tree case tpe: AndType => writeByte(ANDtype) withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) } diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 6d6e2ff01ad4..67a354919d5b 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -224,7 +224,7 @@ object PickledQuotes { if tree.span.exists then val positionWarnings = new mutable.ListBuffer[Message]() val reference = ctx.settings.sourceroot.value - PositionPickler.picklePositions(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference, + PositionPickler.picklePositions(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, treePkl.typeAnnots, reference, ctx.compilationUnit.source, tree :: Nil, positionWarnings) positionWarnings.foreach(report.warning(_)) diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala 
b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index dd24f38990df..c8c071064ab8 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -322,7 +322,7 @@ class Pickler extends Phase { if tree.span.exists then val reference = ctx.settings.sourceroot.value PositionPickler.picklePositions( - pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference, + pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, treePkl.typeAnnots, reference, unit.source, tree :: Nil, positionWarnings, scratch.positionBuffer, scratch.pickledIndices) diff --git a/tests/pos/annot-17939b.scala b/tests/pos/annot-17939b.scala new file mode 100644 index 000000000000..a48f4690d0b2 --- /dev/null +++ b/tests/pos/annot-17939b.scala @@ -0,0 +1,10 @@ +import scala.annotation.Annotation +class myRefined(f: ? => Boolean) extends Annotation + +def test(axes: Int) = true + +trait Tensor: + def mean(axes: Int): Int @myRefined(_ => test(axes)) + +class TensorImpl() extends Tensor: + def mean(axes: Int) = ??? diff --git a/tests/pos/annot-18064.scala b/tests/pos/annot-18064.scala new file mode 100644 index 000000000000..b6a67ea9ebe7 --- /dev/null +++ b/tests/pos/annot-18064.scala @@ -0,0 +1,9 @@ +//> using options "-Xprint:typer" + +class myAnnot[T]() extends annotation.Annotation + +trait Tensor[T]: + def add: Tensor[T] @myAnnot[T]() + +class TensorImpl[A]() extends Tensor[A]: + def add /* : Tensor[A] @myAnnot[A] */ = this diff --git a/tests/pos/annot-5789.scala b/tests/pos/annot-5789.scala new file mode 100644 index 000000000000..bdf4438c9d5d --- /dev/null +++ b/tests/pos/annot-5789.scala @@ -0,0 +1,10 @@ +class Annot[T] extends scala.annotation.Annotation + +class D[T](val f: Int@Annot[T]) + +object A{ + def main(a:Array[String]) = { + val c = new D[Int](1) + c.f + } +} diff --git a/tests/printing/annot-18064.check b/tests/printing/annot-18064.check new file mode 100644 index 000000000000..d93ddb95afee --- /dev/null +++ b/tests/printing/annot-18064.check @@ -0,0 +1,16 @@ +[[syntax trees at end of typer]] // tests/printing/annot-18064.scala +package { + class myAnnot[T >: Nothing <: Any]() extends annotation.Annotation() { + T + } + trait Tensor[T >: Nothing <: Any]() extends Object { + T + def add: Tensor[Tensor.this.T] @myAnnot[T] + } + class TensorImpl[A >: Nothing <: Any]() extends Object(), Tensor[ + TensorImpl.this.A] { + A + def add: Tensor[A] @myAnnot[A] = this + } +} + diff --git a/tests/printing/annot-18064.scala b/tests/printing/annot-18064.scala new file mode 100644 index 000000000000..b6a67ea9ebe7 --- /dev/null +++ b/tests/printing/annot-18064.scala @@ -0,0 +1,9 @@ +//> using options "-Xprint:typer" + +class myAnnot[T]() extends annotation.Annotation + +trait Tensor[T]: + def add: Tensor[T] @myAnnot[T]() + +class TensorImpl[A]() extends Tensor[A]: + def add /* : Tensor[A] @myAnnot[A] */ = this diff --git a/tests/printing/annot-19846b.check b/tests/printing/annot-19846b.check new file mode 100644 index 000000000000..3f63a46c4286 --- /dev/null +++ b/tests/printing/annot-19846b.check @@ -0,0 +1,33 @@ +[[syntax trees at end of typer]] // tests/printing/annot-19846b.scala +package { + class lambdaAnnot(g: () => Int) extends scala.annotation.Annotation(), + annotation.StaticAnnotation { + private[this] val g: () => Int + } + final lazy module val Test: Test = new Test() + final module class Test() extends Object() { this: Test.type => + val y: Int = ??? 
+ val z: + Int @lambdaAnnot( + { + def $anonfun(): Int = Test.y + closure($anonfun) + } + ) + = f(Test.y) + } + final lazy module val annot-19846b$package: annot-19846b$package = + new annot-19846b$package() + final module class annot-19846b$package() extends Object() { + this: annot-19846b$package.type => + def f(x: Int): + Int @lambdaAnnot( + { + def $anonfun(): Int = x + closure($anonfun) + } + ) + = x + } +} + diff --git a/tests/printing/annot-19846b.scala b/tests/printing/annot-19846b.scala new file mode 100644 index 000000000000..951a3c8116ff --- /dev/null +++ b/tests/printing/annot-19846b.scala @@ -0,0 +1,7 @@ +class lambdaAnnot(g: () => Int) extends annotation.StaticAnnotation + +def f(x: Int): Int @lambdaAnnot(() => x) = x + +object Test: + val y: Int = ??? + val z /* : Int @lambdaAnnot(() => y) */ = f(y) From 02aae433d918495723ae8c102cf2b94f0133631d Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 13 Sep 2024 10:59:17 +0200 Subject: [PATCH 549/827] Treat more closure parameter types as inferred This is necessary for types that contain possibly illegal @retains annotations since those annotations are only removed before pickling for InferredTypes. Fixes #21347 --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 11 +++++++++-- tests/pos-custom-args/captures/i21347.scala | 11 +++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 tests/pos-custom-args/captures/i21347.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ce5743f69d0c..901e27a2f1a1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1903,9 +1903,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if knownFormal then formal0 else errorType(AnonymousFunctionMissingParamType(param, tree, inferredType = formal, expectedType = pt), param.srcPos) ) + val untpdTpt = formal match + case _: WildcardType => + // In this case we have a situation like f(_), where we expand in the end to + // (x: T) => f(x) and `T` is taken from `f`'s declared parameters. In this case + // we treat the type as declared instead of inferred. InferredType is used for + // types that are inferred from the context. + untpd.TypeTree() + case _ => InferredTypeTree() val paramTpt = untpd.TypedSplice( - (if knownFormal then InferredTypeTree() else untpd.TypeTree()) - .withType(paramType.translateFromRepeated(toArray = false)) + untpdTpt.withType(paramType.translateFromRepeated(toArray = false)) .withSpan(param.span.endPos) ) val param0 = cpy.ValDef(param)(tpt = paramTpt) diff --git a/tests/pos-custom-args/captures/i21347.scala b/tests/pos-custom-args/captures/i21347.scala new file mode 100644 index 000000000000..e74c15bff8c1 --- /dev/null +++ b/tests/pos-custom-args/captures/i21347.scala @@ -0,0 +1,11 @@ +//> using scala 3.6.0-RC1-bin-SNAPSHOT + +import language.experimental.captureChecking + +class Box[Cap^] {} + +def run[Cap^](f: Box[Cap]^{Cap^} => Unit): Box[Cap]^{Cap^} = ??? + +def main() = + val b = run(_ => ()) + // val b = run[caps.CapSet](_ => ()) // this compiles \ No newline at end of file From edd40bc50d318a188f8c1767c636526fedbb990e Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 13 Sep 2024 11:05:21 +0200 Subject: [PATCH 550/827] Avoid using ExplainingTypeComparer in regular code The operations of an ExplainingTypeComparer are expensive. So we should only run it when producing an error message. 
--- .../src/dotty/tools/dotc/core/TypeOps.scala | 26 +++++++++---------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 0d8801b646ee..bfda613d0586 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -691,20 +691,18 @@ object TypeOps: val hiBound = instantiate(bounds.hi, skolemizedArgTypes) val loBound = instantiate(bounds.lo, skolemizedArgTypes) - def check(tp1: Type, tp2: Type, which: String, bound: Type)(using Context) = { - val isSub = TypeComparer.explaining { cmp => - val isSub = cmp.isSubType(tp1, tp2) - if !isSub then - if !ctx.typerState.constraint.domainLambdas.isEmpty then - typr.println(i"${ctx.typerState.constraint}") - if !ctx.gadt.symbols.isEmpty then - typr.println(i"${ctx.gadt}") - typr.println(cmp.lastTrace(i"checkOverlapsBounds($lo, $hi, $arg, $bounds)($which)")) - //trace.dumpStack() - isSub - }//(using ctx.fresh.setSetting(ctx.settings.verbose, true)) // uncomment to enable moreInfo in ExplainingTypeComparer recur - if !isSub then violations += ((arg, which, bound)) - } + def check(tp1: Type, tp2: Type, which: String, bound: Type)(using Context) = + val isSub = TypeComparer.isSubType(tp1, tp2) + if !isSub then + // inContext(ctx.fresh.setSetting(ctx.settings.verbose, true)): // uncomment to enable moreInfo in ExplainingTypeComparer + TypeComparer.explaining: cmp => + if !ctx.typerState.constraint.domainLambdas.isEmpty then + typr.println(i"${ctx.typerState.constraint}") + if !ctx.gadt.symbols.isEmpty then + typr.println(i"${ctx.gadt}") + typr.println(cmp.lastTrace(i"checkOverlapsBounds($lo, $hi, $arg, $bounds)($which)")) + violations += ((arg, which, bound)) + check(lo, hiBound, "upper", hiBound)(using checkCtx) check(loBound, hi, "lower", loBound)(using checkCtx) } From 45728afddd3d1f5ffca185a302959f3f39dddc08 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 13 Sep 2024 13:12:01 +0200 Subject: [PATCH 551/827] Embed accountsFor info in regular explain traces --- compiler/src/dotty/tools/dotc/cc/CaptureSet.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index c57ad639783c..44d5e2cf4b88 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -158,9 +158,13 @@ sealed abstract class CaptureSet extends Showable: * as frozen. */ def accountsFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): + def debugInfo(using Context) = i"$this accountsFor $x, which has capture set ${x.captureSetOfInfo}" + def test(using Context) = reporting.trace(debugInfo): elems.exists(_.subsumes(x)) || !x.isMaxCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + comparer match + case comparer: ExplainingTypeComparer => comparer.traceIndented(debugInfo)(test) + case _ => test /** A more optimistic version of accountsFor, which does not take variable supersets * of the `x` reference into account. A set might account for `x` if it accounts From d0ea3b0546bfae0c10e0f97937ff3a17a2de5860 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 13 Sep 2024 20:41:57 +0200 Subject: [PATCH 552/827] Guard against recursive lower bounds in constraints We could get an indirect recursion going through a singleton type before. 
Fixes #21535 --- .../tools/dotc/core/ConstraintHandling.scala | 21 +++++++++++++++---- tests/neg/i21535.check | 11 ++++++++++ tests/neg/i21535.scala | 16 ++++++++++++++ 3 files changed, 44 insertions(+), 4 deletions(-) create mode 100644 tests/neg/i21535.check create mode 100644 tests/neg/i21535.scala diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index e63911a6a883..04d55475ec60 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -295,11 +295,24 @@ trait ConstraintHandling { end legalBound protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = + + // Replace top-level occurrences of `param` in `bound` by `Nothing` + def sanitize(bound: Type): Type = + if bound.stripped eq param then defn.NothingType + else bound match + case bound: AndOrType => + bound.derivedAndOrType(sanitize(bound.tp1), sanitize(bound.tp2)) + case _ => + bound + if !constraint.contains(param) then true - else if !isUpper && param.occursIn(rawBound) then - // We don't allow recursive lower bounds when defining a type, - // so we shouldn't allow them as constraints either. - false + else if !isUpper && param.occursIn(rawBound.widen) then + val rawBound1 = sanitize(rawBound.widenDealias) + if param.occursIn(rawBound1) then + // We don't allow recursive lower bounds when defining a type, + // so we shouldn't allow them as constraints either. + false + else addOneBound(param, rawBound1, isUpper) else // Narrow one of the bounds of type parameter `param` diff --git a/tests/neg/i21535.check b/tests/neg/i21535.check new file mode 100644 index 000000000000..7a24f2196ec8 --- /dev/null +++ b/tests/neg/i21535.check @@ -0,0 +1,11 @@ +-- [E007] Type Mismatch Error: tests/neg/i21535.scala:7:4 -------------------------------------------------------------- +3 | (if (true) then +4 | new A(66) +5 | else +6 | m1() +7 | ).m2(p1 = p); // error + | ^ + | Found: (Int | Short) @uncheckedVariance + | Required: Int & Short + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21535.scala b/tests/neg/i21535.scala new file mode 100644 index 000000000000..f9573f823160 --- /dev/null +++ b/tests/neg/i21535.scala @@ -0,0 +1,16 @@ +def test() = { + val p = 10.toShort + (if (true) then + new A(66) + else + m1() + ).m2(p1 = p); // error + +} + +def m1(): A[Short] = new A(10) + +class A[D](var f: D) { + + def m2(p1: D = f, p2: D = f): Unit = {} +} \ No newline at end of file From 472555dca6866b4ae1fde35f3abe99579689b8eb Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 14 Sep 2024 12:24:11 +0200 Subject: [PATCH 553/827] Survive inaccessible types when computing implicit scope Also: Give a better error message later when encountering a missing type that refers to a private member of a base class. The previous one was misleading since it referred to a potentially missing class file, which is certainly not the case here. 
Fixes #21543 --- compiler/src/dotty/tools/dotc/core/TypeErrors.scala | 2 ++ .../src/dotty/tools/dotc/reporting/messages.scala | 8 ++++---- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 4 ++++ tests/neg/i21543.scala | 13 +++++++++++++ 4 files changed, 23 insertions(+), 4 deletions(-) create mode 100644 tests/neg/i21543.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 11e313c47932..1c9696da67d1 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -73,6 +73,8 @@ class MissingType(val pre: Type, val name: Name)(using Context) extends TypeErro case _ if givenSelf.exists && givenSelf.member(name).exists => i"""$name exists as a member of the self type $givenSelf of $cls |but it cannot be called on a receiver whose type does not extend $cls""" + case _ if pre.baseClasses.exists(_.findMember(name, pre, Private, EmptyFlags).exists) => + i"$name is a private member in a base class" case _ => missingClassFile diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 91642ca51bc5..01eb2acfa4de 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3292,14 +3292,14 @@ object UnusedSymbol { class NonNamedArgumentInJavaAnnotation(using Context) extends SyntaxMsg(NonNamedArgumentInJavaAnnotationID): - override protected def msg(using Context): String = + override protected def msg(using Context): String = "Named arguments are required for Java defined annotations" + Message.rewriteNotice("This", version = SourceVersion.`3.6-migration`) - override protected def explain(using Context): String = + override protected def explain(using Context): String = i"""Starting from Scala 3.6.0, named arguments are required for Java defined annotations. - |Java defined annotations don't have an exact constructor representation - |and we previously relied on the order of the fields to create one. + |Java defined annotations don't have an exact constructor representation + |and we previously relied on the order of the fields to create one. |One possible issue with this representation is the reordering of the fields. |Lets take the following example: | diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 51e468153d1f..def5bb77519f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -821,6 +821,10 @@ trait ImplicitRunInfo: override def stopAt = StopAt.Static private val seen = util.HashSet[Type]() + override def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type = + if lo.exists && hi.exists then super.derivedTypeBounds(tp, lo, hi) + else NoType // Survive inaccessible types, for instance in i21543.scala. 
+ def applyToUnderlying(t: TypeProxy) = if seen.contains(t) then WildcardType diff --git a/tests/neg/i21543.scala b/tests/neg/i21543.scala new file mode 100644 index 000000000000..98de8d3ec939 --- /dev/null +++ b/tests/neg/i21543.scala @@ -0,0 +1,13 @@ +object CompilerCrash { + trait Scope { + private type Event = String + + case class Cmd(events: List[Event]) + } + + new Scope { + val commands = List( + Cmd(List("1", "2")) + ) + } +} \ No newline at end of file From cd9a7c58afaab6d07bab91ebc059d98313a4713d Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 14 Sep 2024 14:22:58 +0200 Subject: [PATCH 554/827] Fix check files --- tests/neg/i20554-a.check | 8 ++++---- tests/neg/i20554-b.check | 4 ++-- tests/neg/i21543.check | 22 ++++++++++++++++++++++ tests/neg/i21543.scala | 2 +- 4 files changed, 29 insertions(+), 7 deletions(-) create mode 100644 tests/neg/i21543.check diff --git a/tests/neg/i20554-a.check b/tests/neg/i20554-a.check index b223cba32f77..ac0890ba133a 100644 --- a/tests/neg/i20554-a.check +++ b/tests/neg/i20554-a.check @@ -7,8 +7,8 @@ | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. - | Java defined annotations don't have an exact constructor representation - | and we previously relied on the order of the fields to create one. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. | One possible issue with this representation is the reordering of the fields. | Lets take the following example: | @@ -29,8 +29,8 @@ | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. - | Java defined annotations don't have an exact constructor representation - | and we previously relied on the order of the fields to create one. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. | One possible issue with this representation is the reordering of the fields. | Lets take the following example: | diff --git a/tests/neg/i20554-b.check b/tests/neg/i20554-b.check index 5e5119e043fe..637b48ee93ef 100644 --- a/tests/neg/i20554-b.check +++ b/tests/neg/i20554-b.check @@ -7,8 +7,8 @@ | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. - | Java defined annotations don't have an exact constructor representation - | and we previously relied on the order of the fields to create one. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. | One possible issue with this representation is the reordering of the fields. 
| Lets take the following example: | diff --git a/tests/neg/i21543.check b/tests/neg/i21543.check new file mode 100644 index 000000000000..9fa9a7779d7a --- /dev/null +++ b/tests/neg/i21543.check @@ -0,0 +1,22 @@ +-- [E007] Type Mismatch Error: tests/neg/i21543.scala:10:15 ------------------------------------------------------------ +10 | Cmd(List("1", "2")) // error // error + | ^^^ + | Found: ("1" : String) + | Required: Event + | + | Note that I could not resolve reference Event. + | Event is a private member in a base class + | + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i21543.scala:10:20 ------------------------------------------------------------ +10 | Cmd(List("1", "2")) // error // error + | ^^^ + | Found: ("2" : String) + | Required: Event + | + | Note that I could not resolve reference Event. + | Event is a private member in a base class + | + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21543.scala b/tests/neg/i21543.scala index 98de8d3ec939..aaadce6d22b4 100644 --- a/tests/neg/i21543.scala +++ b/tests/neg/i21543.scala @@ -7,7 +7,7 @@ object CompilerCrash { new Scope { val commands = List( - Cmd(List("1", "2")) + Cmd(List("1", "2")) // error // error ) } } \ No newline at end of file From d4066d941ad91a7b7babf34418cdcf95fd5022bc Mon Sep 17 00:00:00 2001 From: rochala Date: Sat, 14 Sep 2024 17:45:36 +0200 Subject: [PATCH 555/827] Autoimports should now correctly be inserted for licenses and directive just before first object --- .../src/main/dotty/tools/pc/AutoImports.scala | 9 ++-- .../pc/tests/edit/AutoImportsSuite.scala | 54 +++++++++++++++++++ 2 files changed, 59 insertions(+), 4 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala index 896954c4e1a4..1b44dce8c642 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala @@ -320,13 +320,14 @@ object AutoImports: case _ => None - def skipUsingDirectivesOffset( - firstObjectPos: Int = firstMemberDefinitionStart(tree).getOrElse(0) - ): Int = + def skipUsingDirectivesOffset(firstObjectPos: Int = firstMemberDefinitionStart(tree).getOrElse(0)): Int = val firstObjectLine = pos.source.offsetToLine(firstObjectPos) + comments .takeWhile(comment => - !comment.isDocComment && pos.source.offsetToLine(comment.span.end) + 1 < firstObjectLine + val commentLine = pos.source.offsetToLine(comment.span.end) + val isFirstObjectComment = commentLine + 1 == firstObjectLine && !comment.raw.startsWith("//>") + commentLine < firstObjectLine && !isFirstObjectComment ) .lastOption .fold(0)(_.span.end + 1) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala index e4ef8c0f747d..3bb5bfea7bc0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala @@ -500,3 +500,57 @@ class AutoImportsSuite extends BaseAutoImportsSuite: |object Main{ val obj = ABC } |""".stripMargin ) + + @Test def scalaCliNoEmptyLineAfterDirective = + checkEdit( + """|//> using scala 3.5.0 + |object Main: + | <> + |""".stripMargin, + """|//> using scala 3.5.0 + |import java.nio.file.Files + |object Main: + | Files + |""".stripMargin + ) + + @Test def 
scalaCliNoEmptyLineAfterLicense = + checkEdit( + """|/** + | * Some license text + | */ + | + |object Main: + | <> + |""".stripMargin, + """|/** + | * Some license text + | */ + |import java.nio.file.Files + | + |object Main: + | Files + |""".stripMargin + ) + + @Test def scalaCliNoEmptyLineAfterLicenseWithPackage = + checkEdit( + """|/** + | * Some license text + | */ + |package test + | + |object Main: + | <> + |""".stripMargin, + """|/** + | * Some license text + | */ + |package test + | + |import java.nio.file.Files + | + |object Main: + | Files + |""".stripMargin + ) From 0047389d0429c67abfb6f04c812a889b0b988252 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 14 Sep 2024 18:15:34 +0200 Subject: [PATCH 556/827] Regression test for 21360 --- tests/pos/i21360.scala | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 tests/pos/i21360.scala diff --git a/tests/pos/i21360.scala b/tests/pos/i21360.scala new file mode 100644 index 000000000000..5bc1ac1ac5b0 --- /dev/null +++ b/tests/pos/i21360.scala @@ -0,0 +1,11 @@ +case class Table(owner: Owner.Id) + +case class Owner(owner: Owner.Id) // type Id is not a member of object Playground.Owner + +trait Typed[Tag] { + type Id = String +} + +object Owner extends Typed[Owner] { + //type Id = String +} \ No newline at end of file From 066101a1a748091daa0f1b9fff466fb0276d250b Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 14 Sep 2024 19:48:23 +0200 Subject: [PATCH 557/827] Fix treatment of separately compiled @native methods in FirstTransform We need to use a SymTransformer, fixing the method in the tree is not enough. --- .../src/dotty/tools/dotc/core/Flags.scala | 1 + .../tools/dotc/transform/FirstTransform.scala | 26 +++++++++++-------- tests/pos/i20588/Baz_2.scala | 1 + tests/pos/i20588/Foo_1.scala | 3 +++ 4 files changed, 20 insertions(+), 11 deletions(-) create mode 100644 tests/pos/i20588/Baz_2.scala create mode 100644 tests/pos/i20588/Foo_1.scala diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index b1bf7a266c91..b915373da021 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -569,6 +569,7 @@ object Flags { val ConstructorProxyModule: FlagSet = ConstructorProxy | Module val DefaultParameter: FlagSet = HasDefault | Param // A Scala 2x default parameter val DeferredInline: FlagSet = Deferred | Inline + val DeferredMethod: FlagSet = Deferred | Method val DeferredOrLazy: FlagSet = Deferred | Lazy val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without right-hand sides diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index b5bc43ee762c..c66e6b9471cb 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -14,6 +14,7 @@ import Decorators.* import scala.collection.mutable import DenotTransformers.* import NameOps.* +import SymDenotations.SymDenotation import NameKinds.OuterSelectName import StdNames.* import config.Feature @@ -35,22 +36,26 @@ object FirstTransform { * if (true) A else B ==> A * if (false) A else B ==> B */ -class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => +class FirstTransform extends MiniPhase with SymTransformer { thisPhase => import ast.tpd.* override def phaseName: String = 
FirstTransform.name override def description: String = FirstTransform.description - /** eliminate self symbol in ClassInfo */ - override def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match { - case tp @ ClassInfo(_, _, _, _, self: Symbol) => - tp.derivedClassInfo(selfInfo = self.info) - case _ => - tp - } - - override protected def infoMayChange(sym: Symbol)(using Context): Boolean = sym.isClass + /** eliminate self symbol in ClassInfo, reset Deferred for @native methods */ + override def transformSym(sym: SymDenotation)(using Context): SymDenotation = + if sym.isClass then + sym.info match + case tp @ ClassInfo(_, _, _, _, self: Symbol) => + val info1 = tp.derivedClassInfo(selfInfo = self.info) + sym.copySymDenotation(info = info1).copyCaches(sym, ctx.phase.next) + case _ => + sym + else if sym.isAllOf(DeferredMethod) && sym.hasAnnotation(defn.NativeAnnot) then + sym.copySymDenotation(initFlags = sym.flags &~ Deferred) + else + sym override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { @@ -121,7 +126,6 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => override def transformDefDef(ddef: DefDef)(using Context): Tree = val meth = ddef.symbol.asTerm if meth.hasAnnotation(defn.NativeAnnot) then - meth.resetFlag(Deferred) DefDef(meth, _ => ref(defn.Sys_error.termRef).withSpan(ddef.span) .appliedTo(Literal(Constant(s"native method stub")))) diff --git a/tests/pos/i20588/Baz_2.scala b/tests/pos/i20588/Baz_2.scala new file mode 100644 index 000000000000..7dbb038d38da --- /dev/null +++ b/tests/pos/i20588/Baz_2.scala @@ -0,0 +1 @@ +class Baz extends Foo diff --git a/tests/pos/i20588/Foo_1.scala b/tests/pos/i20588/Foo_1.scala new file mode 100644 index 000000000000..1f99cba7ff1c --- /dev/null +++ b/tests/pos/i20588/Foo_1.scala @@ -0,0 +1,3 @@ +class Foo { + @native def test(): Unit +} From f0b6763327249f4198dcfe081a9183ed777bcdfe Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 15 Sep 2024 18:51:45 +0200 Subject: [PATCH 558/827] Three fixes for SAM type handling The first two fixes concern characterization of SAM types. One condition of a SAM type is that it can be instantiated with an empty argument list. This was implemented incorrectly. First, we missed the case where the SAM type is a trait with a parent class that takes arguments. In this case the SAM type _cannot_ be instantiated with an empty argument list. Second, we missed the case where the SAM type constructor has a single vararg parameter. In this case the SAM type _can_ be instantiated with an empty argument list. The second case was also translated incorrectly which led to illegal bytecodes. 
Fixes #15855 --- compiler/src/dotty/tools/dotc/ast/tpd.scala | 75 ++++++++++++++----- .../src/dotty/tools/dotc/core/Phases.scala | 2 +- .../src/dotty/tools/dotc/core/Types.scala | 19 +++-- .../tools/dotc/transform/ExpandSAMs.scala | 10 ++- tests/neg/i15855.scala | 10 +++ tests/run/i15855.scala | 20 +++++ 6 files changed, 105 insertions(+), 31 deletions(-) create mode 100644 tests/neg/i15855.scala create mode 100644 tests/run/i15855.scala diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 3ce2d1d038dd..d4e585402feb 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -315,24 +315,53 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def TypeDef(sym: TypeSymbol)(using Context): TypeDef = ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym) - def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = { + /** Create a class definition + * @param cls the class symbol of the created class + * @param constr its primary constructor + * @param body the statements in its template + * @param superArgs the arguments to pass to the superclass constructor + * @param adaptVarargs if true, allow matching a vararg superclass constructor + * with a missing argument in superArgs, and synthesize an + * empty repeated parameter in the supercall in this case + */ + def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], + superArgs: List[Tree] = Nil, adaptVarargs: Boolean = false)(using Context): TypeDef = val firstParent :: otherParents = cls.info.parents: @unchecked + + def isApplicable(constr: Symbol): Boolean = + def recur(ctpe: Type): Boolean = ctpe match + case ctpe: PolyType => + recur(ctpe.instantiate(firstParent.argTypes)) + case ctpe: MethodType => + var paramInfos = ctpe.paramInfos + if adaptVarargs && paramInfos.length == superArgs.length + 1 + && atPhaseNoLater(Phases.elimRepeatedPhase)(constr.info.isVarArgsMethod) + then // accept missing argument for varargs parameter + paramInfos = paramInfos.init + superArgs.corresponds(paramInfos)(_.tpe <:< _) + case _ => + false + recur(constr.info) + + def adaptedSuperArgs(ctpe: Type): List[Tree] = ctpe match + case ctpe: PolyType => + adaptedSuperArgs(ctpe.instantiate(firstParent.argTypes)) + case ctpe: MethodType + if ctpe.paramInfos.length == superArgs.length + 1 => + // last argument must be a vararg, otherwise isApplicable would have failed + superArgs :+ + repeated(Nil, TypeTree(ctpe.paramInfos.last.argInfos.head, inferred = true)) + case _ => + superArgs + val superRef = - if (cls.is(Trait)) TypeTree(firstParent) - else { - def isApplicable(ctpe: Type): Boolean = ctpe match { - case ctpe: PolyType => - isApplicable(ctpe.instantiate(firstParent.argTypes)) - case ctpe: MethodType => - (superArgs corresponds ctpe.paramInfos)(_.tpe <:< _) - case _ => - false - } - val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info)) - New(firstParent, constr.symbol.asTerm, superArgs) - } + if cls.is(Trait) then TypeTree(firstParent) + else + val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(isApplicable) + New(firstParent, constr.symbol.asTerm, adaptedSuperArgs(constr.info)) + ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) - } + end ClassDef def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = { val selfType = @@ 
-359,13 +388,18 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * @param parents a non-empty list of class types * @param termForwarders a non-empty list of forwarding definitions specified by their name and the definition they forward to. * @param typeMembers a possibly-empty list of type members specified by their name and their right hand side. + * @param adaptVarargs if true, allow matching a vararg superclass constructor + * with a missing argument in superArgs, and synthesize an + * empty repeated parameter in the supercall in this case * * The class has the same owner as the first function in `termForwarders`. * Its position is the union of all symbols in `termForwarders`. */ - def AnonClass(parents: List[Type], termForwarders: List[(TermName, TermSymbol)], - typeMembers: List[(TypeName, TypeBounds)] = Nil)(using Context): Block = { - AnonClass(termForwarders.head._2.owner, parents, termForwarders.map(_._2.span).reduceLeft(_ union _)) { cls => + def AnonClass(parents: List[Type], + termForwarders: List[(TermName, TermSymbol)], + typeMembers: List[(TypeName, TypeBounds)], + adaptVarargs: Boolean)(using Context): Block = { + AnonClass(termForwarders.head._2.owner, parents, termForwarders.map(_._2.span).reduceLeft(_ union _), adaptVarargs) { cls => def forwarder(name: TermName, fn: TermSymbol) = { val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm for overridden <- fwdMeth.allOverriddenSymbols do @@ -385,6 +419,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * with the specified owner and position. */ def AnonClass(owner: Symbol, parents: List[Type], coord: Coord)(body: ClassSymbol => List[Tree])(using Context): Block = + AnonClass(owner, parents, coord, adaptVarargs = false)(body) + + private def AnonClass(owner: Symbol, parents: List[Type], coord: Coord, adaptVarargs: Boolean)(body: ClassSymbol => List[Tree])(using Context): Block = val parents1 = if (parents.head.classSymbol.is(Trait)) { val head = parents.head.parents.head @@ -393,7 +430,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else parents val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, coord = coord) val constr = newConstructor(cls, Synthetic, Nil, Nil).entered - val cdef = ClassDef(cls, DefDef(constr), body(cls)) + val cdef = ClassDef(cls, DefDef(constr), body(cls), Nil, adaptVarargs) Block(cdef :: Nil, New(cls.typeRef, Nil)) def Import(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 7f925b0fc322..5dff95fc51fb 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -532,7 +532,7 @@ object Phases { def sbtExtractAPIPhase(using Context): Phase = ctx.base.sbtExtractAPIPhase def picklerPhase(using Context): Phase = ctx.base.picklerPhase def inliningPhase(using Context): Phase = ctx.base.inliningPhase - def stagingPhase(using Context): Phase = ctx.base.stagingPhase + def stagingPhase(using Context): Phase = ctx.base.stagingPhase def splicingPhase(using Context): Phase = ctx.base.splicingPhase def firstTransformPhase(using Context): Phase = ctx.base.firstTransformPhase def refchecksPhase(using Context): Phase = ctx.base.refchecksPhase diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 8181b3c83acf..b626464bc428 100644 --- 
a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5945,16 +5945,21 @@ object Types extends TypeUtils { def samClass(tp: Type)(using Context): Symbol = tp match case tp: ClassInfo => def zeroParams(tp: Type): Boolean = tp.stripPoly match - case mt: MethodType => mt.paramInfos.isEmpty && !mt.resultType.isInstanceOf[MethodType] + case mt: MethodType => + val noArgsNeeded = mt.paramInfos match + case Nil => true + case info :: Nil => info.isRepeatedParam + case _ => false + noArgsNeeded && !mt.resultType.isInstanceOf[MethodType] case et: ExprType => true case _ => false - val cls = tp.cls - val validCtor = + def validCtor(cls: Symbol): Boolean = val ctor = cls.primaryConstructor - // `ContextFunctionN` does not have constructors - !ctor.exists || zeroParams(ctor.info) - val isInstantiable = !cls.isOneOf(FinalOrSealed) && (tp.appliedRef <:< tp.selfType) - if validCtor && isInstantiable then tp.cls + (!ctor.exists || zeroParams(ctor.info)) // `ContextFunctionN` does not have constructors + && (!cls.is(Trait) || validCtor(cls.info.parents.head.classSymbol)) + def isInstantiable = + !tp.cls.isOneOf(FinalOrSealed) && (tp.appliedRef <:< tp.selfType) + if validCtor(tp.cls) && isInstantiable then tp.cls else NoSymbol case tp: AppliedType => samClass(tp.superType) diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index cdbef792dfa9..67bf1bebed87 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -69,10 +69,12 @@ class ExpandSAMs extends MiniPhase: val tpe1 = collectAndStripRefinements(tpe) val Seq(samDenot) = tpe1.possibleSamMethods cpy.Block(tree)(stats, - AnonClass(List(tpe1), - List(samDenot.symbol.asTerm.name -> fn.symbol.asTerm), - refinements.toList - ) + transformFollowingDeep: + AnonClass(List(tpe1), + List(samDenot.symbol.asTerm.name -> fn.symbol.asTerm), + refinements.toList, + adaptVarargs = true + ) ) } case _ => diff --git a/tests/neg/i15855.scala b/tests/neg/i15855.scala new file mode 100644 index 000000000000..ba9112032419 --- /dev/null +++ b/tests/neg/i15855.scala @@ -0,0 +1,10 @@ +// crash.scala +import scala.language.implicitConversions + +class MyFunction(args: String) + +trait MyFunction0[+R] extends MyFunction { + def apply(): R +} + +def fromFunction0[R](f: Function0[R]): MyFunction0[R] = () => f() // error diff --git a/tests/run/i15855.scala b/tests/run/i15855.scala new file mode 100644 index 000000000000..880d6d806132 --- /dev/null +++ b/tests/run/i15855.scala @@ -0,0 +1,20 @@ +// crash.scala +import scala.language.implicitConversions + +class MyFunction(args: String*) + +trait MyFunction0[+R] extends MyFunction { + def apply(): R +} + +abstract class MyFunction1[R](args: R*): + def apply(): R + +def fromFunction0[R](f: Function0[R]): MyFunction0[R] = () => f() +def fromFunction1[R](f: Function0[R]): MyFunction1[R] = () => f() + +@main def Test = + val m0: MyFunction0[Int] = fromFunction0(() => 1) + val m1: MyFunction1[Int] = fromFunction1(() => 2) + assert(m0() == 1) + assert(m1() == 2) From 8aa59f834eaf26965ddc9cb16f7392db15445cf9 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 16 Sep 2024 19:29:36 +0200 Subject: [PATCH 559/827] Align SAM test and expansion Fix SAM test to use the same scheme as SAM expansion to determine whether a type needs zero arguments for construction. 
--- compiler/src/dotty/tools/dotc/ast/tpd.scala | 22 ++++------------ .../src/dotty/tools/dotc/core/TypeUtils.scala | 26 +++++++++++++++++++ .../src/dotty/tools/dotc/core/Types.scala | 24 +++++++---------- 3 files changed, 41 insertions(+), 31 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index d4e585402feb..f97baa7f7889 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -328,21 +328,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { superArgs: List[Tree] = Nil, adaptVarargs: Boolean = false)(using Context): TypeDef = val firstParent :: otherParents = cls.info.parents: @unchecked - def isApplicable(constr: Symbol): Boolean = - def recur(ctpe: Type): Boolean = ctpe match - case ctpe: PolyType => - recur(ctpe.instantiate(firstParent.argTypes)) - case ctpe: MethodType => - var paramInfos = ctpe.paramInfos - if adaptVarargs && paramInfos.length == superArgs.length + 1 - && atPhaseNoLater(Phases.elimRepeatedPhase)(constr.info.isVarArgsMethod) - then // accept missing argument for varargs parameter - paramInfos = paramInfos.init - superArgs.corresponds(paramInfos)(_.tpe <:< _) - case _ => - false - recur(constr.info) - def adaptedSuperArgs(ctpe: Type): List[Tree] = ctpe match case ctpe: PolyType => adaptedSuperArgs(ctpe.instantiate(firstParent.argTypes)) @@ -357,8 +342,11 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { val superRef = if cls.is(Trait) then TypeTree(firstParent) else - val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(isApplicable) - New(firstParent, constr.symbol.asTerm, adaptedSuperArgs(constr.info)) + val parentConstr = firstParent.applicableConstructors(superArgs.tpes, adaptVarargs) match + case Nil => assert(false, i"no applicable parent constructor of $firstParent for supercall arguments $superArgs") + case constr :: Nil => constr + case _ => assert(false, i"multiple applicable parent constructors of $firstParent for supercall arguments $superArgs") + New(firstParent, parentConstr.asTerm, adaptedSuperArgs(parentConstr.info)) ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) end ClassDef diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index ca0f0d7e43bd..33743868b2bc 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -8,6 +8,7 @@ import Names.{Name, TermName} import Constants.Constant import Names.Name +import StdNames.nme import config.Feature class TypeUtils: @@ -189,5 +190,30 @@ class TypeUtils: def stripRefinement: Type = self match case self: RefinedOrRecType => self.parent.stripRefinement case seld => self + + /** The constructors of this tyoe that that are applicable to `argTypes`, without needing + * an implicit conversion. + * @param adaptVarargs if true, allow a constructor with just a varargs argument to + * match an empty argument list. 
+ */ + def applicableConstructors(argTypes: List[Type], adaptVarargs: Boolean)(using Context): List[Symbol] = + def isApplicable(constr: Symbol): Boolean = + def recur(ctpe: Type): Boolean = ctpe match + case ctpe: PolyType => + if argTypes.isEmpty then recur(ctpe.resultType) // no need to know instances + else recur(ctpe.instantiate(self.argTypes)) + case ctpe: MethodType => + var paramInfos = ctpe.paramInfos + if adaptVarargs && paramInfos.length == argTypes.length + 1 + && atPhaseNoLater(Phases.elimRepeatedPhase)(constr.info.isVarArgsMethod) + then // accept missing argument for varargs parameter + paramInfos = paramInfos.init + argTypes.corresponds(paramInfos)(_ <:< _) + case _ => + false + recur(constr.info) + + self.decl(nme.CONSTRUCTOR).altsWith(isApplicable).map(_.symbol) + end TypeUtils diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index b626464bc428..12de7f465f91 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -5944,22 +5944,18 @@ object Types extends TypeUtils { def samClass(tp: Type)(using Context): Symbol = tp match case tp: ClassInfo => - def zeroParams(tp: Type): Boolean = tp.stripPoly match - case mt: MethodType => - val noArgsNeeded = mt.paramInfos match - case Nil => true - case info :: Nil => info.isRepeatedParam - case _ => false - noArgsNeeded && !mt.resultType.isInstanceOf[MethodType] - case et: ExprType => true - case _ => false - def validCtor(cls: Symbol): Boolean = - val ctor = cls.primaryConstructor - (!ctor.exists || zeroParams(ctor.info)) // `ContextFunctionN` does not have constructors - && (!cls.is(Trait) || validCtor(cls.info.parents.head.classSymbol)) + val cls = tp.cls + def takesNoArgs(tp: Type) = + !tp.classSymbol.primaryConstructor.exists + // e.g. 
`ContextFunctionN` does not have constructors + || tp.applicableConstructors(Nil, adaptVarargs = true).lengthCompare(1) == 0 + // we require a unique constructor so that SAM expansion is deterministic + val noArgsNeeded: Boolean = + takesNoArgs(tp) + && (!tp.cls.is(Trait) || takesNoArgs(tp.parents.head)) def isInstantiable = !tp.cls.isOneOf(FinalOrSealed) && (tp.appliedRef <:< tp.selfType) - if validCtor(tp.cls) && isInstantiable then tp.cls + if noArgsNeeded && isInstantiable then tp.cls else NoSymbol case tp: AppliedType => samClass(tp.superType) From a795ca412c3d6f5bf0dad1df4f701ef66e14827b Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Tue, 17 Sep 2024 15:16:45 +0200 Subject: [PATCH 560/827] Update workflow to push to scala/dotty.epfl.ch instead of lampepfl/dotty-website --- .github/workflows/ci.yaml | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index dfce9529c0af..309820f8f138 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -687,14 +687,6 @@ jobs: if: "(github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && github.repository == 'scala/scala3'" env: NIGHTLYBUILD: yes - DOTTY_WEBSITE_BOT_TOKEN: ${{ secrets.BOT_TOKEN }} # If you need to change this: - # Generate one at https://github.com/settings/tokens - # Make sure you have the write permissions to the repo: https://github.com/lampepfl/dotty-website - # Currently unused token, no need to deploy anything to docs.scala-lang - # DOCS_SCALALANG_BOT_TOKEN: ${{ secrets.DOCS_SCALALANG_BOT_TOKEN }} # If you need to change this: - # Generate one at https://github.com/settings/tokens - # Make sure you have the write permissions to the repo: https://github.com/scala/docs.scala-lang - steps: - name: Reset existing repo run: | @@ -721,10 +713,10 @@ jobs: - name: Deploy Website to dotty-website uses: peaceiris/actions-gh-pages@v4 with: - personal_token: ${{ env.DOTTY_WEBSITE_BOT_TOKEN }} + personal_token: ${{ secrets.DOTTYBOT_TOKEN }} publish_dir: docs/_site - external_repository: lampepfl/dotty-website - publish_branch: gh-pages + external_repository: scala/dotty.epfl.ch + publish_branch: main publish_release: permissions: From 3af67baa337ac8791326fd7b94a752495bdd0f57 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 17 Sep 2024 17:54:39 +0200 Subject: [PATCH 561/827] Address review comments --- compiler/src/dotty/tools/dotc/core/TypeUtils.scala | 6 +++--- tests/neg/i15855.scala | 11 ++++++++--- tests/run/i15855.scala | 3 --- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index 33743868b2bc..485272fe71c5 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -191,8 +191,8 @@ class TypeUtils: case self: RefinedOrRecType => self.parent.stripRefinement case seld => self - /** The constructors of this tyoe that that are applicable to `argTypes`, without needing - * an implicit conversion. + /** The constructors of this type that are applicable to `argTypes`, without needing + * an implicit conversion. Curried constructors are always excluded. * @param adaptVarargs if true, allow a constructor with just a varargs argument to * match an empty argument list. 
*/ @@ -208,7 +208,7 @@ class TypeUtils: && atPhaseNoLater(Phases.elimRepeatedPhase)(constr.info.isVarArgsMethod) then // accept missing argument for varargs parameter paramInfos = paramInfos.init - argTypes.corresponds(paramInfos)(_ <:< _) + argTypes.corresponds(paramInfos)(_ <:< _) && !ctpe.resultType.isInstanceOf[MethodType] case _ => false recur(constr.info) diff --git a/tests/neg/i15855.scala b/tests/neg/i15855.scala index ba9112032419..c1d316ccae81 100644 --- a/tests/neg/i15855.scala +++ b/tests/neg/i15855.scala @@ -1,6 +1,3 @@ -// crash.scala -import scala.language.implicitConversions - class MyFunction(args: String) trait MyFunction0[+R] extends MyFunction { @@ -8,3 +5,11 @@ trait MyFunction0[+R] extends MyFunction { } def fromFunction0[R](f: Function0[R]): MyFunction0[R] = () => f() // error + +class MyFunctionWithImplicit(implicit args: String) + +trait MyFunction0WithImplicit[+R] extends MyFunctionWithImplicit { + def apply(): R +} + +def fromFunction1[R](f: Function0[R]): MyFunction0WithImplicit[R] = () => f() // error diff --git a/tests/run/i15855.scala b/tests/run/i15855.scala index 880d6d806132..b67bcb11d18a 100644 --- a/tests/run/i15855.scala +++ b/tests/run/i15855.scala @@ -1,6 +1,3 @@ -// crash.scala -import scala.language.implicitConversions - class MyFunction(args: String*) trait MyFunction0[+R] extends MyFunction { From 497768c5205178f3e57cf7500432738316760077 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 18 Sep 2024 09:17:36 +0200 Subject: [PATCH 562/827] Adapt scripts to scala/dotty.epfl.ch not lampepfl/dotty-website --- .github/workflows/ci.yaml | 2 +- project/scripts/genDocs | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 309820f8f138..196952247892 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -710,7 +710,7 @@ jobs: git config --global --add safe.directory /__w/scala3/scala3 ./project/scripts/genDocs -doc-snapshot - - name: Deploy Website to dotty-website + - name: Deploy Website to https://dotty.epfl.ch uses: peaceiris/actions-gh-pages@v4 with: personal_token: ${{ secrets.DOTTYBOT_TOKEN }} diff --git a/project/scripts/genDocs b/project/scripts/genDocs index aa061d59b613..9849dac91722 100755 --- a/project/scripts/genDocs +++ b/project/scripts/genDocs @@ -5,7 +5,7 @@ shopt -s extglob # needed for rm everything but x echo "Working directory: $PWD" GENDOC_EXTRA_ARGS=$@ -GIT_HEAD=$(git rev-parse HEAD) # save current head for commit message in gh-pages +GIT_HEAD=$(git rev-parse HEAD) # save current head for commit message in scala/dotty.epfl.ch PREVIOUS_SNAPSHOTS_DIR="$PWD/../prev_snapshots" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)" SITE_OUT_DIR="$PWD/docs/_site" @@ -16,9 +16,9 @@ if [ -d "$PREVIOUS_SNAPSHOTS_DIR" ]; then fi mkdir -pv "$PREVIOUS_SNAPSHOTS_DIR" -git remote add doc-remote "https://github.com/lampepfl/dotty-website.git" -git fetch doc-remote gh-pages -git checkout gh-pages +git remote add doc-remote "https://github.com/scala/dotty.epfl.ch.git" +git fetch doc-remote main +git checkout doc-remote/main (cp -vr [03].*/ "$PREVIOUS_SNAPSHOTS_DIR"; true) # Don't fail if no `3.*` found to copy git checkout "$GIT_HEAD" From b1235b95d42506804d5ba17dcabcb52653163e95 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 18 Sep 2024 14:04:19 +0200 Subject: [PATCH 563/827] Harden skip in Scanner I sometimes see a rogue java process at 100% even after I closed down sbt and vscode. 
With jstack I got the following stack trace: java.lang.Thread.State: RUNNABLE at dotty.tools.dotc.parsing.Scanners$Scanner.handleNewLine(Scanners.scala:613) at dotty.tools.dotc.parsing.Scanners$Scanner.nextToken(Scanners.scala:396) at dotty.tools.dotc.parsing.Scanners$Scanner.skip(Scanners.scala:312) at dotty.tools.dotc.parsing.Parsers$Parser.skip(Parsers.scala:280) at dotty.tools.dotc.parsing.Parsers$Parser.recur$2(Parsers.scala:376) at dotty.tools.dotc.parsing.Parsers$Parser.statSepOrEnd(Parsers.scala:380) It could be that the loop in skip gives two alternate offsets that would not bump the progress counter. I changed the loop so that it catches more looping conditions. --- compiler/src/dotty/tools/dotc/parsing/Scanners.scala | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 494f56f601cf..2dc0a1a8d805 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -306,11 +306,15 @@ object Scanners { println(s"\nSTART SKIP AT ${sourcePos().line + 1}, $this in $currentRegion") var noProgress = 0 // Defensive measure to ensure we always get out of the following while loop - // even if source file is weirly formatted (i.e. we never reach EOF + // even if source file is weirly formatted (i.e. we never reach EOF) + var prevOffset = offset while !atStop && noProgress < 3 do - val prevOffset = offset nextToken() - if offset == prevOffset then noProgress += 1 else noProgress = 0 + if offset <= prevOffset then + noProgress += 1 + else + prevOffset = offset + noProgress = 0 if debugTokenStream then println(s"\nSTOP SKIP AT ${sourcePos().line + 1}, $this in $currentRegion") if token == OUTDENT then dropUntil(_.isInstanceOf[Indented]) From 69c16f8d1e6cd93e9827b581a664371c4888b167 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Fri, 13 Sep 2024 12:25:43 +0200 Subject: [PATCH 564/827] Cache signature in SingleDenotation for matchDegree; reduce denot calls in widens --- .../dotty/tools/dotc/core/Denotations.scala | 28 +++++++++++++++++-- .../src/dotty/tools/dotc/core/Types.scala | 12 ++++++-- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 2418aba1978b..5f58d3a6bf08 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -626,6 +626,30 @@ object Denotations { throw ex case _ => Signature.NotAMethod + private var myCurrentJavaSig: Signature = uninitialized + private var myCurrentJavaSigRunId: RunId = NoRunId + private var myCurrentScala2Sig: Signature = uninitialized + private var myCurrentScala2SigRunId: RunId = NoRunId + private var myCurrentSig: Signature = uninitialized + private var myCurrentSigRunId: RunId = NoRunId + + def currentSignature(sourceLanguage: SourceLanguage)(using Context): Signature = sourceLanguage match + case SourceLanguage.Java => + if myCurrentJavaSigRunId != ctx.runId then + myCurrentJavaSig = signature(sourceLanguage) + myCurrentJavaSigRunId = ctx.runId + myCurrentJavaSig + case SourceLanguage.Scala2 => + if myCurrentScala2SigRunId != ctx.runId then + myCurrentScala2Sig = signature(sourceLanguage) + myCurrentScala2SigRunId = ctx.runId + myCurrentScala2Sig + case SourceLanguage.Scala3 => + if myCurrentSigRunId != ctx.runId then + myCurrentSig = signature(sourceLanguage) + 
myCurrentSigRunId = ctx.runId + myCurrentSig + def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this else newLikeThis(symbol, info, pre, isRefinedMethod) @@ -1033,8 +1057,8 @@ object Denotations { val thisLanguage = SourceLanguage(symbol) val otherLanguage = SourceLanguage(other.symbol) val commonLanguage = SourceLanguage.commonLanguage(thisLanguage, otherLanguage) - val sig = signature(commonLanguage) - val otherSig = other.signature(commonLanguage) + val sig = currentSignature(commonLanguage) + val otherSig = other.currentSignature(commonLanguage) sig.matchDegree(otherSig) match case FullMatch => !alwaysCompareTypes || info.matches(other.info) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 12de7f465f91..aba8c3bb31fd 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1311,7 +1311,8 @@ object Types extends TypeUtils { final def widen(using Context): Type = this match case _: TypeRef | _: MethodOrPoly => this // fast path for most frequent cases case tp: TermRef => // fast path for next most frequent case - if tp.isOverloaded then tp else tp.underlying.widen + val denot = tp.denot + if denot.isOverloaded then tp else denot.info.widen case tp: SingletonType => tp.underlying.widen case tp: ExprType => tp.resultType.widen case tp => @@ -1325,7 +1326,10 @@ object Types extends TypeUtils { * base type by applying one or more `underlying` dereferences. */ final def widenSingleton(using Context): Type = stripped match { - case tp: SingletonType if !tp.isOverloaded => tp.underlying.widenSingleton + case tp: TermRef => + val denot = tp.denot + if denot.isOverloaded then this else denot.info.widenSingleton + case tp: SingletonType => tp.underlying.widenSingleton case _ => this } @@ -1333,7 +1337,9 @@ object Types extends TypeUtils { * base type, while also skipping Expr types. 
*/ final def widenTermRefExpr(using Context): Type = stripTypeVar match { - case tp: TermRef if !tp.isOverloaded => tp.underlying.widenExpr.widenTermRefExpr + case tp: TermRef => + val denot = tp.denot + if denot.isOverloaded then this else denot.info.widenExpr.widenTermRefExpr case _ => this } From d9e13e5736b4c0c14305f0a3e3a4bbfb188c65c9 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 18 Sep 2024 15:38:33 +0200 Subject: [PATCH 565/827] Revert cache for signature in denot --- .../dotty/tools/dotc/core/Denotations.scala | 28 ++----------------- 1 file changed, 2 insertions(+), 26 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 5f58d3a6bf08..2418aba1978b 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -626,30 +626,6 @@ object Denotations { throw ex case _ => Signature.NotAMethod - private var myCurrentJavaSig: Signature = uninitialized - private var myCurrentJavaSigRunId: RunId = NoRunId - private var myCurrentScala2Sig: Signature = uninitialized - private var myCurrentScala2SigRunId: RunId = NoRunId - private var myCurrentSig: Signature = uninitialized - private var myCurrentSigRunId: RunId = NoRunId - - def currentSignature(sourceLanguage: SourceLanguage)(using Context): Signature = sourceLanguage match - case SourceLanguage.Java => - if myCurrentJavaSigRunId != ctx.runId then - myCurrentJavaSig = signature(sourceLanguage) - myCurrentJavaSigRunId = ctx.runId - myCurrentJavaSig - case SourceLanguage.Scala2 => - if myCurrentScala2SigRunId != ctx.runId then - myCurrentScala2Sig = signature(sourceLanguage) - myCurrentScala2SigRunId = ctx.runId - myCurrentScala2Sig - case SourceLanguage.Scala3 => - if myCurrentSigRunId != ctx.runId then - myCurrentSig = signature(sourceLanguage) - myCurrentSigRunId = ctx.runId - myCurrentSig - def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this else newLikeThis(symbol, info, pre, isRefinedMethod) @@ -1057,8 +1033,8 @@ object Denotations { val thisLanguage = SourceLanguage(symbol) val otherLanguage = SourceLanguage(other.symbol) val commonLanguage = SourceLanguage.commonLanguage(thisLanguage, otherLanguage) - val sig = currentSignature(commonLanguage) - val otherSig = other.currentSignature(commonLanguage) + val sig = signature(commonLanguage) + val otherSig = other.signature(commonLanguage) sig.matchDegree(otherSig) match case FullMatch => !alwaysCompareTypes || info.matches(other.info) From ee511b067a83bf81e3fdf4649b0ade606afa84c7 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Thu, 19 Sep 2024 13:57:54 +0200 Subject: [PATCH 566/827] Escape the '\' between " --- project/Build.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 6c6666b9f62b..3e5f595024b6 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -889,8 +889,8 @@ object Build { extraClasspath ++= Seq(dottyCompiler, dottyInterfaces, asm, dottyStaging, dottyTastyInspector, tastyCore, compilerInterface) } - val wrappedArgs = (if (printTasty) args else insertClasspathInArgs(args, extraClasspath.mkString(File.pathSeparator))).map(arg => "\""+ arg + "\"") - val fullArgs = main :: defaultOutputDirectory ::: wrappedArgs 
+ val wrappedArgs = if (printTasty) args else insertClasspathInArgs(args, extraClasspath.mkString(File.pathSeparator)) + val fullArgs = main :: defaultOutputDirectory ::: wrappedArgs.map("\""+ _ + "\"").map(_.replace("\\", "\\\\")) (Compile / runMain).toTask(fullArgs.mkString(" ", " ", "")) }.evaluated, From 7aa4ac9d169b2c60dd6325ae6568abdb29a5c94b Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Thu, 19 Sep 2024 14:05:48 +0200 Subject: [PATCH 567/827] Also escape \ in the default output directory --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 3e5f595024b6..82ca537279bf 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -890,7 +890,7 @@ object Build { } val wrappedArgs = if (printTasty) args else insertClasspathInArgs(args, extraClasspath.mkString(File.pathSeparator)) - val fullArgs = main :: defaultOutputDirectory ::: wrappedArgs.map("\""+ _ + "\"").map(_.replace("\\", "\\\\")) + val fullArgs = main :: (defaultOutputDirectory ::: wrappedArgs).map("\""+ _ + "\"").map(_.replace("\\", "\\\\")) (Compile / runMain).toTask(fullArgs.mkString(" ", " ", "")) }.evaluated, From 86a51edd870b03ed49e90090c9d320d3a0242247 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 19 Sep 2024 13:07:18 +0200 Subject: [PATCH 568/827] Set reference version to 3.5.2-RC1 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 6c6666b9f62b..5268fc863fb8 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -91,7 +91,7 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.5.1-RC2" + val referenceVersion = "3.5.2-RC1" val baseVersion = "3.6.0" // Will be required by some automation later From b9f86dc66e5b475ba159069eef1057210819a743 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 19 Sep 2024 18:10:54 +0200 Subject: [PATCH 569/827] Test for #21614 --- tests/neg-custom-args/captures/i21614.check | 17 +++++++++++++++++ tests/neg-custom-args/captures/i21614.scala | 12 ++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 tests/neg-custom-args/captures/i21614.check create mode 100644 tests/neg-custom-args/captures/i21614.scala diff --git a/tests/neg-custom-args/captures/i21614.check b/tests/neg-custom-args/captures/i21614.check new file mode 100644 index 000000000000..14b468db4c8e --- /dev/null +++ b/tests/neg-custom-args/captures/i21614.check @@ -0,0 +1,17 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i21614.scala:9:33 ---------------------------------------- +9 | files.map((f: F) => new Logger(f)) // error, Q: can we make this pass (see #19076)? + | ^ + | Found: (f : F^) + | Required: File^ + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i21614.scala:12:12 --------------------------------------- +12 | files.map(new Logger(_)) // error, Q: can we improve the error message? + | ^^^^^^^^^^^^^ + | Found: Logger{val f: (_$1 : File^{files*})}^ + | Required: Logger{val f: File^?}^? + | + | Note that the universal capability `cap` + | cannot be included in capture set ? 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i21614.scala b/tests/neg-custom-args/captures/i21614.scala new file mode 100644 index 000000000000..a5ed25d818a5 --- /dev/null +++ b/tests/neg-custom-args/captures/i21614.scala @@ -0,0 +1,12 @@ +import language.experimental.captureChecking +import caps.Capability +import caps.unbox + +trait File extends Capability +class Logger(f: File^) extends Capability // <- will work if we remove the extends clause + +def mkLoggers1[F <: File^](@unbox files: List[F]): List[Logger^] = + files.map((f: F) => new Logger(f)) // error, Q: can we make this pass (see #19076)? + +def mkLoggers2(@unbox files: List[File^]): List[Logger^] = + files.map(new Logger(_)) // error, Q: can we improve the error message? From 806823919d8a4a29877dab5a0c5a9c4cb4df5658 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 20 Sep 2024 10:38:01 +0200 Subject: [PATCH 570/827] Bump Inkuire version to fix it for the new Scala versions (#21611) It seems like Inkuire depended on the way some type lambdas were encoded in TASTy which caused it to falsely reject a ton of valid functions. This update also includes a slight change to the ordering heuristic, so that more complicated names of functions are shown lower in the lost and synthetic names are shown last. Inkuire changes: - `1.0.0-M8` https://github.com/VirtusLab/Inkuire/releases/tag/v1.0.0-M8 - `1.0.0-M9` https://github.com/VirtusLab/Inkuire/releases/tag/v1.0.0-M9 --- project/DocumentationWebsite.scala | 2 +- .../src/dotty/tools/scaladoc/tasty/InkuireSupport.scala | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/project/DocumentationWebsite.scala b/project/DocumentationWebsite.scala index 5f8e499af62f..5b05168b7f27 100644 --- a/project/DocumentationWebsite.scala +++ b/project/DocumentationWebsite.scala @@ -43,7 +43,7 @@ object DocumentationWebsite { import _root_.scala.concurrent._ import _root_.scala.concurrent.duration.Duration import ExecutionContext.Implicits.global - val inkuireVersion = "v1.0.0-M7" + val inkuireVersion = "v1.0.0-M9" val inkuireLink = s"https://github.com/VirtusLab/Inkuire/releases/download/$inkuireVersion/inkuire.js" val inkuireDestinationFile = baseDest / "dotty_res" / "scripts" / "inkuire.js" sbt.IO.touch(inkuireDestinationFile) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala index 8a703cfb5d24..d5eebd1ab798 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala @@ -184,10 +184,10 @@ trait InkuireSupport(using DocContext) extends Resources: else ownerNameChain(sym.owner) :+ sym.normalizedName private def viableSymbol(s: Symbol): Boolean = - !s.flags.is(Flags.Private) && - !s.flags.is(Flags.Protected) && - !s.flags.is(Flags.Override) && - !s.flags.is(Flags.Synthetic) + !s.flags.is(Flags.Private) && + !s.flags.is(Flags.Protected) && + !s.flags.is(Flags.Override) && + !s.flags.is(Flags.Synthetic) private def varName(t: Inkuire.TypeLike): Option[String] = t match { case tpe: Inkuire.Type => Some(tpe.name.name) From b410f30614e8e684e2e8f10e8702b6738bee0c37 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 20 Sep 2024 20:01:08 +0200 Subject: [PATCH 571/827] Tighten closure extractor in TreeInfo The previous extractor for closures matches also arbitrary blocks that ended in a (possible deeply nested) closure. This caused wrong use sets in #21620. 
The new definition is stricter. There is also a new blockEndingInclosure extractor that keeps the old behavior. Fixes #21620 --- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 24 +++++++++++++++---- .../dotty/tools/dotc/typer/Migrations.scala | 2 +- tests/neg-custom-args/captures/i21620.check | 13 ++++++++++ tests/neg-custom-args/captures/i21620.scala | 10 ++++++++ tests/pos-custom-args/captures/i21620.scala | 11 +++++++++ 5 files changed, 54 insertions(+), 6 deletions(-) create mode 100644 tests/neg-custom-args/captures/i21620.check create mode 100644 tests/neg-custom-args/captures/i21620.scala create mode 100644 tests/pos-custom-args/captures/i21620.scala diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 5b89c9bbacd1..d121157b223e 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -814,17 +814,31 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case _ => false } - /** An extractor for closures, either contained in a block or standalone. + /** An extractor for closures, possibly typed, and possibly including the + * definition of the ananonymous def. */ object closure { - def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match { - case Block(_, expr) => unapply(expr) - case Closure(env, meth, tpt) => Some(env, meth, tpt) - case Typed(expr, _) => unapply(expr) + def unapply(tree: Tree)(using Context): Option[(List[Tree], Tree, Tree)] = tree match { + case Block((meth : DefDef) :: Nil, closure: Closure) if meth.symbol == closure.meth.symbol => + unapply(closure) + case Block(Nil, expr) => + unapply(expr) + case Closure(env, meth, tpt) => + Some(env, meth, tpt) + case Typed(expr, _) => + unapply(expr) case _ => None } } + /** An extractor for a closure or a block ending in one. This was + * previously `closure` before that one was tightened. + */ + object blockEndingInClosure: + def unapply(tree: Tree)(using Context): Option[(List[Tree], Tree, Tree)] = tree match + case Block(_, expr) => unapply(expr) + case _ => closure.unapply(tree) + /** An extractor for def of a closure contained the block of the closure. 
*/ object closureDef { def unapply(tree: Tree)(using Context): Option[DefDef] = tree match { diff --git a/compiler/src/dotty/tools/dotc/typer/Migrations.scala b/compiler/src/dotty/tools/dotc/typer/Migrations.scala index d6b95ceb93dc..7f27f27112a0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Migrations.scala +++ b/compiler/src/dotty/tools/dotc/typer/Migrations.scala @@ -57,7 +57,7 @@ trait Migrations: val nestedCtx = ctx.fresh.setNewTyperState() val res = typed(qual, pt1)(using nestedCtx) res match { - case closure(_, _, _) => + case blockEndingInClosure(_, _, _) => case _ => val recovered = typed(qual)(using ctx.fresh.setExploreTyperState()) val msg = OnlyFunctionsCanBeFollowedByUnderscore(recovered.tpe.widen, tree) diff --git a/tests/neg-custom-args/captures/i21620.check b/tests/neg-custom-args/captures/i21620.check new file mode 100644 index 000000000000..3a09ba978574 --- /dev/null +++ b/tests/neg-custom-args/captures/i21620.check @@ -0,0 +1,13 @@ +-- [E129] Potential Issue Warning: tests/neg-custom-args/captures/i21620.scala:5:6 ------------------------------------- +5 | x + | ^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i21620.scala:9:31 ---------------------------------------- +9 | val _: () -> () ->{x} Unit = f // error + | ^ + | Found: () ->{f} () ->{x} Unit + | Required: () -> () ->{x} Unit + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i21620.scala b/tests/neg-custom-args/captures/i21620.scala new file mode 100644 index 000000000000..a21a41a10863 --- /dev/null +++ b/tests/neg-custom-args/captures/i21620.scala @@ -0,0 +1,10 @@ +class C +def test(x: C^) = + val f = () => + def foo() = + x + () + println(s"hey: $x") + () => foo() + val _: () -> () ->{x} Unit = f // error + () diff --git a/tests/pos-custom-args/captures/i21620.scala b/tests/pos-custom-args/captures/i21620.scala new file mode 100644 index 000000000000..b2c382aa4c75 --- /dev/null +++ b/tests/pos-custom-args/captures/i21620.scala @@ -0,0 +1,11 @@ +class C +def test(x: C^) = + def foo() = + x + () + val f = () => + // println() // uncomenting would give an error, but with + // a different way of handling curried functions should be OK + () => foo() + val _: () -> () ->{x} Unit = f + () From 614314a9d12cff71c63978f7be500ba131b5e833 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 22 Sep 2024 12:24:37 +0200 Subject: [PATCH 572/827] Update compiler/src/dotty/tools/dotc/ast/TreeInfo.scala Co-authored-by: Dale Wijnand --- compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index d121157b223e..23610a0fcfeb 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -815,7 +815,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => } /** An extractor for closures, possibly typed, and possibly including the - * definition of the ananonymous def. + * definition of the anonymous def. 
*/ object closure { def unapply(tree: Tree)(using Context): Option[(List[Tree], Tree, Tree)] = tree match { From f69680d8982c3680ca9e051c6578c1776f142c9d Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Mon, 23 Sep 2024 08:44:02 +0200 Subject: [PATCH 573/827] Enable PC tests in test_windows_fast --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 309820f8f138..cb8977184328 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -228,7 +228,7 @@ jobs: uses: actions/checkout@v4 - name: Test - run: sbt ";scala3-bootstrapped/compile; scala3-bootstrapped/testCompilation" + run: sbt ";scala3-bootstrapped/compile; scala3-bootstrapped/testCompilation; scala3-presentation-compiler-bootstrapped/test; scala3-language-server/test" shell: cmd - name: build binary From c2287a25baa3470fdc0a42198062dbabf97aedb2 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Fri, 6 Sep 2024 18:04:35 +0200 Subject: [PATCH 574/827] Check if symbol was moved to a companion when bringing forward denotation While bringing forward the denotation to a new run, check if the symbol was moved from its owner to a companion object. If so, return NoDenotation, as that denotation seems to be a leftover from pre-MoveStatics phases in a previous run. --- compiler/src/dotty/tools/dotc/core/Denotations.scala | 7 ++++++- .../src/dotty/tools/dotc/core/SymDenotations.scala | 4 ++++ tests/pos-macros/i21271/Macro.scala | 12 ++++++++++++ tests/pos-macros/i21271/Test.scala | 1 + 4 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 tests/pos-macros/i21271/Macro.scala create mode 100644 tests/pos-macros/i21271/Test.scala diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 2418aba1978b..60a7555456bf 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } +import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, movedToCompanionClass, acceptStale, traceInvalid } import Contexts.* import Names.* import NameKinds.* @@ -755,6 +755,11 @@ object Denotations { } if (!symbol.exists) return updateValidity() if (!coveredInterval.containsPhaseId(ctx.phaseId)) return NoDenotation + // Moved to a companion class, likely at a later phase (in MoveStatics) + this match { + case symd: SymDenotation if movedToCompanionClass(symd) => return NoDenotation + case _ => + } if (ctx.debug) traceInvalid(this) staleSymbolError } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 3904228756a0..906e74735097 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -2680,6 +2680,10 @@ object SymDenotations { stillValidInOwner(denot) } + def movedToCompanionClass(denot: SymDenotation)(using Context): Boolean = + val ownerCompanion = denot.maybeOwner.companionClass + stillValid(ownerCompanion) && ownerCompanion.unforcedDecls.contains(denot.name, denot.symbol) + private[SymDenotations] def stillValidInOwner(denot: SymDenotation)(using Context): Boolean = try val owner = denot.maybeOwner.denot stillValid(owner) diff --git 
a/tests/pos-macros/i21271/Macro.scala b/tests/pos-macros/i21271/Macro.scala new file mode 100644 index 000000000000..09d29ecc65af --- /dev/null +++ b/tests/pos-macros/i21271/Macro.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +trait Schema +object Schema: + lazy val sampleDate: String = "" // lazy val requried to reproduce + + inline def derived: Schema = + annotations + new Schema {} + +inline def annotations: Int = ${ annotationsImpl } +def annotationsImpl(using Quotes): Expr[Int] = Expr(1) diff --git a/tests/pos-macros/i21271/Test.scala b/tests/pos-macros/i21271/Test.scala new file mode 100644 index 000000000000..c0ba38212b09 --- /dev/null +++ b/tests/pos-macros/i21271/Test.scala @@ -0,0 +1 @@ +val inputValueSchema = Schema.derived From 000f484dbb0e4a526207004a10c9e8f4a3037492 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Tue, 24 Sep 2024 12:06:10 -0400 Subject: [PATCH 575/827] Add better error reporting for inlined non-immutable paths Co-authored-by: Matt Bovel --- .../src/dotty/tools/dotc/reporting/messages.scala | 14 +++++++++++++- tests/neg/21538.check | 11 +++++++++++ tests/neg/21538.scala | 3 +++ 3 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 tests/neg/21538.check create mode 100644 tests/neg/21538.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 01eb2acfa4de..cb730efbfe89 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -1817,13 +1817,25 @@ class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context) } class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID): - def msg(using Context) = i"$tp is not a valid $usage, since it is not an immutable path" + def msg(using Context) = i"$tp is not a valid $usage, since it is not an immutable path" + inlineParamAddendum def explain(using Context) = i"""An immutable path is | - a reference to an immutable value, or | - a reference to `this`, or | - a selection of an immutable path with an immutable value.""" + def inlineParamAddendum(using Context) = + val sym = tp.termSymbol + if sym.isAllOf(Flags.InlineParam) then + i""" + |Inline parameters are not considered immutable paths and cannot be used as + |singleton types. + | + |Hint: Removing the `inline` qualifier from the `${sym.name}` parameter + |may help resolve this issue.""" + else "" + + class WrongNumberOfParameters(tree: untpd.Tree, foundCount: Int, pt: Type, expectedCount: Int)(using Context) extends SyntaxMsg(WrongNumberOfParametersID) { def msg(using Context) = s"Wrong number of parameters, expected: $expectedCount" diff --git a/tests/neg/21538.check b/tests/neg/21538.check new file mode 100644 index 000000000000..0e799bef3611 --- /dev/null +++ b/tests/neg/21538.check @@ -0,0 +1,11 @@ +-- [E083] Type Error: tests/neg/21538.scala:3:45 ----------------------------------------------------------------------- +3 |inline def foo[V](inline value: V)(using Bar[value.type]) : Unit = {} // error + | ^^^^^^^^^^ + | (value : V) is not a valid singleton type, since it is not an immutable path + | Inline parameters are not considered immutable paths and cannot be used as + | singleton types. + | + | Hint: Removing the `inline` qualifier from the `value` parameter + | may help resolve this issue. 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/21538.scala b/tests/neg/21538.scala new file mode 100644 index 000000000000..761e9cde678a --- /dev/null +++ b/tests/neg/21538.scala @@ -0,0 +1,3 @@ +trait Bar[T] +given [T]: Bar[T] with {} +inline def foo[V](inline value: V)(using Bar[value.type]) : Unit = {} // error \ No newline at end of file From 4ae882f85aaec5271d51a5ad7c261eba90108b82 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Tue, 24 Sep 2024 12:41:41 -0400 Subject: [PATCH 576/827] Filter opaque modifier from object documentation Co-authored-by: Matt Bovel --- scaladoc-testcases/src/tests/opaqueTypes.scala | 6 +++++- .../src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/scaladoc-testcases/src/tests/opaqueTypes.scala b/scaladoc-testcases/src/tests/opaqueTypes.scala index 33cc7ab9ff91..c248632092bd 100644 --- a/scaladoc-testcases/src/tests/opaqueTypes.scala +++ b/scaladoc-testcases/src/tests/opaqueTypes.scala @@ -6,4 +6,8 @@ opaque type Permissions = Int opaque type PermissionChoice = Int -//opaque type Permission <: Permissions & PermissionChoice = Int TODO: #112 \ No newline at end of file +//opaque type Permission <: Permissions & PermissionChoice = Int TODO: #112 + +object Foo: + opaque type Bar + = Int \ No newline at end of file diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 497b58b6ed2c..d3c93aaba8c7 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -314,7 +314,7 @@ trait ClassLikeSupport: def parseObject(classDef: ClassDef, signatureOnly: Boolean = false): Member = mkClass(classDef)( // All objects are final so we do not need final modifier! 
- modifiers = classDef.symbol.getExtraModifiers().filter(_ != Modifier.Final), + modifiers = classDef.symbol.getExtraModifiers().filter(mod => mod != Modifier.Final && mod != Modifier.Opaque), signatureOnly = signatureOnly ) From f684bacbb8f149f6fad92b373472315c1725d677 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 24 Sep 2024 18:06:07 +0100 Subject: [PATCH 577/827] Make right assoc extx method fwd refs error --- .../src/dotty/tools/dotc/ast/Desugar.scala | 20 ++++++++++--- tests/neg/i16815.check | 28 +++++++++++++++++++ tests/neg/i16815.scala | 20 +++++++++++++ 3 files changed, 64 insertions(+), 4 deletions(-) create mode 100644 tests/neg/i16815.check create mode 100644 tests/neg/i16815.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 659701b02371..5c468721fd43 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1099,8 +1099,8 @@ object desugar { paramss match case rightParam :: paramss1 => // `rightParam` must have a single parameter and without `given` flag - def badRightAssoc(problem: String) = - report.error(em"right-associative extension method $problem", mdef.srcPos) + def badRightAssoc(problem: String, pos: SrcPos) = + report.error(em"right-associative extension method $problem", pos) extParamss ++ mdef.paramss rightParam match @@ -1116,11 +1116,23 @@ object desugar { // // If you change the names of the clauses below, also change them in right-associative-extension-methods.md val (leftTyParamsAndLeadingUsing, leftParamAndTrailingUsing) = extParamss.span(isUsingOrTypeParamClause) + + val names = (for ps <- mdef.paramss; p <- ps yield p.name).toSet[Name] + + val tt = new untpd.UntypedTreeTraverser: + def traverse(tree: Tree)(using Context): Unit = tree match + case tree: Ident if names.contains(tree.name) => + badRightAssoc(s"cannot have a forward reference to ${tree.name}", tree.srcPos) + case _ => traverseChildren(tree) + + for ts <- leftParamAndTrailingUsing; t <- ts do + tt.traverse(t) + leftTyParamsAndLeadingUsing ::: rightTyParams ::: rightParam :: leftParamAndTrailingUsing ::: paramss1 else - badRightAssoc("cannot start with using clause") + badRightAssoc("cannot start with using clause", mdef.srcPos) case _ => - badRightAssoc("must start with a single parameter") + badRightAssoc("must start with a single parameter", mdef.srcPos) case _ => // no value parameters, so not an infix operator. 
extParamss ++ mdef.paramss diff --git a/tests/neg/i16815.check b/tests/neg/i16815.check new file mode 100644 index 000000000000..8f2f5c57d405 --- /dev/null +++ b/tests/neg/i16815.check @@ -0,0 +1,28 @@ +-- Error: tests/neg/i16815.scala:3:37 ---------------------------------------------------------------------------------- +3 |extension [C1 >: Chain <: Chain](c2: c1.Tail) // error + | ^^ + | right-associative extension method cannot have a forward reference to c1 +-- Error: tests/neg/i16815.scala:6:24 ---------------------------------------------------------------------------------- +6 |extension [C1](c2: (C1, C2)) // error + | ^^ + | right-associative extension method cannot have a forward reference to C2 +-- Error: tests/neg/i16815.scala:9:19 ---------------------------------------------------------------------------------- +9 |extension [C1](c2: C2) // error + | ^^ + | right-associative extension method cannot have a forward reference to C2 +-- Error: tests/neg/i16815.scala:12:24 --------------------------------------------------------------------------------- +12 |extension [C1](c2: (C1, C2, C3)) // error // error + | ^^ + | right-associative extension method cannot have a forward reference to C2 +-- Error: tests/neg/i16815.scala:12:28 --------------------------------------------------------------------------------- +12 |extension [C1](c2: (C1, C2, C3)) // error // error + | ^^ + | right-associative extension method cannot have a forward reference to C3 +-- Error: tests/neg/i16815.scala:15:48 --------------------------------------------------------------------------------- +15 |extension [C1](str: String)(using z: (str.type, C2)) // error + | ^^ + | right-associative extension method cannot have a forward reference to C2 +-- Error: tests/neg/i16815.scala:19:31 --------------------------------------------------------------------------------- +19 |extension [D1 <: Int](D2: (D1, D2)) // error + | ^^ + | right-associative extension method cannot have a forward reference to D2 diff --git a/tests/neg/i16815.scala b/tests/neg/i16815.scala new file mode 100644 index 000000000000..595f75e40df4 --- /dev/null +++ b/tests/neg/i16815.scala @@ -0,0 +1,20 @@ +trait Chain { type Tail <: Chain } + +extension [C1 >: Chain <: Chain](c2: c1.Tail) // error + def ra1_:[C2 <: C1](c1: C1): C2 = ??? + +extension [C1](c2: (C1, C2)) // error + def ra2_:[C2 <: C1](c1: (C1, C2)): C2 = ??? + +extension [C1](c2: C2) // error + def ra3_:[C2 <: C1](c1: C1): C2 = ??? + +extension [C1](c2: (C1, C2, C3)) // error // error + def ra4_:[C2 <: C1, C3 <: C1](c1: (C1, C2)): C2 = ??? + +extension [C1](str: String)(using z: (str.type, C2)) // error + def ra5_:[C2 <: Int](c1: C1): C2 = ??? + +type D2 = String +extension [D1 <: Int](D2: (D1, D2)) // error + def sa2_:[D2 <: D1](D1: (D1, D2)): D2 = ??? From d02d4b86dfe281cca18760d49f026a2a9b332e53 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 25 Sep 2024 16:43:07 +0100 Subject: [PATCH 578/827] Avoid erasure/preErasure issues around Any in transformIsInstanceOf The testType Any is erased to Object, but the expr type Int isn't erased to Integer, so then it fails as Int !<: Object. We avoid the problem by feeding in AnyVal, leading to a (possibly elided) non-null test only. 
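
For reference, the shape this guards against is distilled in the added test
tests/pos/i21544.scala:

    class Test():
      def m1(xs: List[Boolean]) = for (x: Any) <- xs yield x

The typed pattern `(x: Any)` boils down to a runtime type test against Any on an
unboxed primitive element, i.e. exactly the testType/expr combination described
above; with this change that test degenerates to the (possibly elided) non-null
check instead of tripping over the erased Object comparison.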
--- .../src/dotty/tools/dotc/transform/TypeTestsCasts.scala | 6 +++++- tests/pos/i21544.scala | 2 ++ 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21544.scala diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 082c239c6443..45596e1d47f6 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -358,7 +358,11 @@ object TypeTestsCasts { report.error(em"$untestable cannot be used in runtime type tests", tree.srcPos) constant(expr, Literal(Constant(false))) case _ => - val erasedTestType = erasure(testType) + val erasedTestType = + if testType.isAny && expr.tpe.isPrimitiveValueType then + defn.AnyValType + else + erasure(testType) transformIsInstanceOf(expr, erasedTestType, erasedTestType, flagUnrelated) } diff --git a/tests/pos/i21544.scala b/tests/pos/i21544.scala new file mode 100644 index 000000000000..45da101e7490 --- /dev/null +++ b/tests/pos/i21544.scala @@ -0,0 +1,2 @@ +class Test(): + def m1(xs: List[Boolean]) = for (x: Any) <- xs yield x From 07dce3f9f1065b2bb4de9d0aa39aa69d5eebd40e Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 25 Sep 2024 21:31:49 +0100 Subject: [PATCH 579/827] Tighten java annot value parsing --- .../dotty/tools/dotc/parsing/JavaParsers.scala | 4 ++-- tests/pos/i20026/J.java | 4 ++++ tests/pos/i20026/S.scala | 1 + tests/pos/i20026/TestInstance.java | 15 +++++++++++++++ 4 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i20026/J.java create mode 100644 tests/pos/i20026/S.scala create mode 100644 tests/pos/i20026/TestInstance.java diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 79282b0e5223..8a9eca2c1e67 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -369,8 +369,8 @@ object JavaParsers { def annotation(): Option[Tree] = { def classOrId(): Tree = val id = qualId() - if in.lookaheadToken == CLASS then - in.nextToken() + if in.token == DOT && in.lookaheadToken == CLASS then + accept(DOT) accept(CLASS) TypeApply( Select( diff --git a/tests/pos/i20026/J.java b/tests/pos/i20026/J.java new file mode 100644 index 000000000000..e950024ed913 --- /dev/null +++ b/tests/pos/i20026/J.java @@ -0,0 +1,4 @@ +package p; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class J { } diff --git a/tests/pos/i20026/S.scala b/tests/pos/i20026/S.scala new file mode 100644 index 000000000000..7da04d6b6bbe --- /dev/null +++ b/tests/pos/i20026/S.scala @@ -0,0 +1 @@ +class S diff --git a/tests/pos/i20026/TestInstance.java b/tests/pos/i20026/TestInstance.java new file mode 100644 index 000000000000..8db79ad88d89 --- /dev/null +++ b/tests/pos/i20026/TestInstance.java @@ -0,0 +1,15 @@ +package p; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Inherited +public @interface TestInstance { + enum Lifecycle { PER_CLASS, PER_METHOD; } + Lifecycle value(); +} From eb5c3e81304eee5ce75c2796bd4b7b16d2d1b19b Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 16 Sep 2024 10:29:51 +0100 Subject: [PATCH 580/827] Avoid cyclic errors forcing default arg types --- 
compiler/src/dotty/tools/dotc/typer/Applications.scala | 4 ++++ tests/neg/19414-desugared.check | 3 +-- tests/neg/19414.check | 3 +-- tests/neg/given-ambiguous-default-2.check | 8 ++++---- tests/pos/i21568.scala | 6 ++++++ 5 files changed, 16 insertions(+), 8 deletions(-) create mode 100644 tests/pos/i21568.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 11a95ce23f93..992f283154ca 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -696,6 +696,10 @@ trait Applications extends Compatibility { fail(MissingArgument(methodType.paramNames(n), methString)) def tryDefault(n: Int, args1: List[Arg]): Unit = { + if !success then + missingArg(n) // fail fast before forcing the default arg tpe, to avoid cyclic errors + return + val sym = methRef.symbol val testOnly = this.isInstanceOf[TestApplication[?]] diff --git a/tests/neg/19414-desugared.check b/tests/neg/19414-desugared.check index c21806e16c2c..cc51ee471553 100644 --- a/tests/neg/19414-desugared.check +++ b/tests/neg/19414-desugared.check @@ -8,7 +8,6 @@ | writer = | /* ambiguous: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] */ | summon[Writer[B]] - | , - | this.given_BodySerializer_B$default$2[B]) + | ) | |But both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B]. diff --git a/tests/neg/19414.check b/tests/neg/19414.check index 6804546df037..016e3942c825 100644 --- a/tests/neg/19414.check +++ b/tests/neg/19414.check @@ -8,7 +8,6 @@ | evidence$1 = | /* ambiguous: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] */ | summon[Writer[B]] - | , - | this.given_BodySerializer_B$default$2[B]) + | ) | |But both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B]. diff --git a/tests/neg/given-ambiguous-default-2.check b/tests/neg/given-ambiguous-default-2.check index cbe8b972a389..4d473a301340 100644 --- a/tests/neg/given-ambiguous-default-2.check +++ b/tests/neg/given-ambiguous-default-2.check @@ -1,9 +1,9 @@ -- [E172] Type Error: tests/neg/given-ambiguous-default-2.scala:18:23 -------------------------------------------------- 18 |def f: Unit = summon[C] // error: Ambiguous given instances | ^ - |No best given instance of type C was found for parameter x of method summon in object Predef. - |I found: + | No best given instance of type C was found for parameter x of method summon in object Predef. + | I found: | - | given_C(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A], this.given_C$default$2) + | given_C(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A]) | - |But both given instance a1 and given instance a2 match type A. + | But both given instance a1 and given instance a2 match type A. diff --git a/tests/pos/i21568.scala b/tests/pos/i21568.scala new file mode 100644 index 000000000000..87184956ea79 --- /dev/null +++ b/tests/pos/i21568.scala @@ -0,0 +1,6 @@ +class Lang(name: String) +object Lang { + val Default = Lang("") + def apply(language: String): Lang = ??? + def apply(maybeLang: Option[String], default: Lang = Default): Lang = ??? 
+} From af0283f1c9e3ded09be7b5949755921c82f7c0fd Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 4 Sep 2024 14:09:40 +0100 Subject: [PATCH 581/827] Allow imports nested in packagings to shadow --- compiler/src/dotty/tools/dotc/core/ContextOps.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Namer.scala | 2 +- tests/pos/i21405.scala | 7 +++++++ 3 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i21405.scala diff --git a/compiler/src/dotty/tools/dotc/core/ContextOps.scala b/compiler/src/dotty/tools/dotc/core/ContextOps.scala index 57c369a08de6..c307b6ac569e 100644 --- a/compiler/src/dotty/tools/dotc/core/ContextOps.scala +++ b/compiler/src/dotty/tools/dotc/core/ContextOps.scala @@ -132,7 +132,7 @@ object ContextOps: } def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx) { - if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree) + if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree).setNewScope else ctx } end ContextOps diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 6167db62fbe0..5d5d7d1054ea 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -395,7 +395,7 @@ class Namer { typer: Typer => def recur(stat: Tree): Context = stat match { case pcl: PackageDef => val pkg = createPackageSymbol(pcl.pid) - index(pcl.stats)(using ctx.fresh.setOwner(pkg.moduleClass)) + index(pcl.stats)(using ctx.packageContext(pcl, pkg)) invalidateCompanions(pkg, Trees.flatten(pcl.stats map expanded)) setDocstring(pkg, stat) ctx diff --git a/tests/pos/i21405.scala b/tests/pos/i21405.scala new file mode 100644 index 000000000000..4fcc1302882b --- /dev/null +++ b/tests/pos/i21405.scala @@ -0,0 +1,7 @@ +package o { class IO } +package p { class IO } +import o._ +package q { + import p._ + class D extends IO +} From bac87814faf2492087fcd9e99f6eecbc63ea2b29 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 26 Sep 2024 16:14:47 +0200 Subject: [PATCH 582/827] Handle suspension due to macro call completed in arbitrary phases Previously we only supported suspension in Typer and Inliner. In the added test case this happens in PostTyper, but I've seen it happen in Mixin too. Fixes #18517. 
--- compiler/src/dotty/tools/dotc/core/Phases.scala | 14 +++++++++----- .../dotty/tools/dotc/transform/Inlining.scala | 8 +------- tests/pos-macros/i18517/Caller.scala | 17 +++++++++++++++++ tests/pos-macros/i18517/Macro.scala | 7 +++++++ tests/pos-macros/i18517/User.scala | 6 ++++++ 5 files changed, 40 insertions(+), 12 deletions(-) create mode 100644 tests/pos-macros/i18517/Caller.scala create mode 100644 tests/pos-macros/i18517/Macro.scala create mode 100644 tests/pos-macros/i18517/User.scala diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 5dff95fc51fb..85df3f9f2c18 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -378,14 +378,18 @@ object Phases { () else run - catch case ex: Throwable if !ctx.run.enrichedErrorMessage => - println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) - throw ex + buf += unitCtx.compilationUnit + catch + case _: CompilationUnit.SuspendException => // this unit will be run again in `Run#compileSuspendedUnits` + case ex: Throwable if !ctx.run.enrichedErrorMessage => + println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) + throw ex finally ctx.run.advanceUnit() - buf += unitCtx.compilationUnit end if end for - buf.result() + val res = buf.result() + ctx.run.nn.checkSuspendedUnits(res) + res end runOn /** Convert a compilation unit's tree to a string; can be overridden */ diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 335d5a38931a..751636c7d806 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -36,13 +36,7 @@ class Inlining extends MacroTransform, IdentityDenotTransformer { override def run(using Context): Unit = if ctx.compilationUnit.needsInlining || ctx.compilationUnit.hasMacroAnnotations then - try super.run - catch case _: CompilationUnit.SuspendException => () - - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = - val newUnits = super.runOn(units).filterNot(_.suspended) - ctx.run.nn.checkSuspendedUnits(newUnits) - newUnits + super.run override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { diff --git a/tests/pos-macros/i18517/Caller.scala b/tests/pos-macros/i18517/Caller.scala new file mode 100644 index 000000000000..3f5ce9eee903 --- /dev/null +++ b/tests/pos-macros/i18517/Caller.scala @@ -0,0 +1,17 @@ +package dummy + +trait BG { + val description: { type Structure } + type Structure = description.Structure +} + +abstract class Caller extends BG { + type Foo >: this.type <: this.type + + transparent inline def generate2() = + ${Macro.impl() } + + final val description = { + generate2() + } +} diff --git a/tests/pos-macros/i18517/Macro.scala b/tests/pos-macros/i18517/Macro.scala new file mode 100644 index 000000000000..d18b07e910a5 --- /dev/null +++ b/tests/pos-macros/i18517/Macro.scala @@ -0,0 +1,7 @@ +package dummy + +import scala.quoted.* + +object Macro: + def impl()(using quotes:Quotes) : Expr[Any] = + '{ null } diff --git a/tests/pos-macros/i18517/User.scala b/tests/pos-macros/i18517/User.scala new file mode 100644 index 000000000000..8216c581937b --- /dev/null +++ b/tests/pos-macros/i18517/User.scala @@ -0,0 +1,6 @@ +package dummy + +trait User: + final def bar(cell:Any) : Unit = + (cell: cell.type) match + case 
c: (Caller & cell.type) => () From 0ee87625e80566ae52e236030b4af70f039c40fb Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 28 May 2024 19:07:05 +0200 Subject: [PATCH 583/827] Expr#show: Don't crash when the expression contains an unsupported type (like a SkolemType) When the SkolemType appears as the prefix of a TypeRef, we avoid it by going using `qualifier` which is defined in QuotesImpl to widen skolem, but skolems can appear in any position, and so before this change we would get a compiler crash in the added test case where the skolem appears as the prefix of a TermRef. We fix this by adding fallback cases in the quotes pretty-printer, now for the test case we get: Test.f.ho(((arg: < does not have a source representation>.x.type) => arg)) Which isn't great, but better than a crash. Maybe we should run `Type#deskolemized` on a type before trying to print it in SourceCode/Extractors, but currently these files are intentionally defined to not depend on compiler internals and do not have a `Context` so we cannot even call `deskolemized` on them. Alternatively, maybe SkolemType should be a tasty-reflect constructor but that would also be a pretty big change. --- .../quoted/runtime/impl/printers/Extractors.scala | 4 ++++ .../quoted/runtime/impl/printers/SourceCode.scala | 8 +++++--- tests/pos-macros/skolem/Macro_1.scala | 10 ++++++++++ tests/pos-macros/skolem/Test_2.scala | 9 +++++++++ tests/run-macros/i19905.check | 4 ++-- 5 files changed, 30 insertions(+), 5 deletions(-) create mode 100644 tests/pos-macros/skolem/Macro_1.scala create mode 100644 tests/pos-macros/skolem/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index acf66fcf2009..82be54a9d793 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -177,6 +177,8 @@ object Extractors { this += "Alternatives(" ++= patterns += ")" case TypedOrTest(tree, tpt) => this += "TypedOrTest(" += tree += ", " += tpt += ")" + case tree => + this += s"" } def visitConstant(x: Constant): this.type = x match { @@ -241,6 +243,8 @@ object Extractors { this += "MatchCase(" += pat += ", " += rhs += ")" case FlexibleType(tp) => this += "FlexibleType(" += tp += ")" + case tp => + this += s"" } def visitSignature(sig: Signature): this.type = { diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index 9503177ff738..a1f54c5a2069 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -1292,7 +1292,9 @@ object SourceCode { val sym = annot.tpe.typeSymbol sym != Symbol.requiredClass("scala.forceInline") && sym.maybeOwner != Symbol.requiredPackage("scala.annotation.internal") - case x => cannotBeShownAsSource(x.show(using Printer.TreeStructure)) + case x => + cannotBeShownAsSource(x.show(using Printer.TreeStructure)) + false } printAnnotations(annots) if (annots.nonEmpty) this += " " @@ -1463,8 +1465,8 @@ object SourceCode { } } - private def cannotBeShownAsSource(x: String): Nothing = - throw new Exception(s"$x does not have a source representation") + private def cannotBeShownAsSource(x: String): this.type = + this += s"<$x does not have a source representation>" private object SpecialOp { def unapply(arg: Tree): Option[(String, List[Term])] = arg match { diff 
--git a/tests/pos-macros/skolem/Macro_1.scala b/tests/pos-macros/skolem/Macro_1.scala new file mode 100644 index 000000000000..65b14cffbc5b --- /dev/null +++ b/tests/pos-macros/skolem/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +object Macro { + + def impl(expr: Expr[Any])(using Quotes): Expr[Unit] = + println(expr.show) + '{ () } + + inline def macr(inline x: Any): Unit = ${impl('x)} +} diff --git a/tests/pos-macros/skolem/Test_2.scala b/tests/pos-macros/skolem/Test_2.scala new file mode 100644 index 000000000000..e243b8844c23 --- /dev/null +++ b/tests/pos-macros/skolem/Test_2.scala @@ -0,0 +1,9 @@ +trait Foo: + val x: Int + def ho(p: x.type => x.type): Unit = () + +object Test { + var f: Foo = ??? + Macro.macr: + f.ho(arg => arg) +} diff --git a/tests/run-macros/i19905.check b/tests/run-macros/i19905.check index 36ba7772bfdb..47e9d86e3662 100644 --- a/tests/run-macros/i19905.check +++ b/tests/run-macros/i19905.check @@ -1,3 +1,3 @@ -java.lang.Exception: NoPrefix() does not have a source representation -java.lang.Exception: NoPrefix() does not have a source representation + + NoPrefix() From 905cbd147349fcffcd5509850c36ad342dc4fed4 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 26 Sep 2024 17:13:47 +0100 Subject: [PATCH 584/827] Fix CompletionTest.importAnnotationAfterImport expectations So, before my change, importing java.lang.annotation and then importing "annotation" will return java.lang.annotation, but complete as scala.annotation - so the completion logic (in scopeCompletions) is wrong somewhere. After my change they both return java.lang.annotation, so looks like I might have unintentionally made that logic work for this case - just need to update the test expectation. --- .../test/dotty/tools/languageserver/CompletionTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index 8034b4c8d40b..38deb4c40c0f 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -987,7 +987,7 @@ class CompletionTest { @Test def importAnnotationAfterImport : Unit = code"""import java.lang.annotation; import annot${m1}""" - .completion(("annotation", Module, "scala.annotation")) + .completion(("annotation", Module, "java.lang.annotation")) @Test def completeTemplateConstrArgType: Unit = { code"""import scala.concurrent.Future From d2cf0fb967111168cae5ff880a9d1de898650234 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 27 Sep 2024 11:23:48 +0200 Subject: [PATCH 585/827] Allow export statements in AnyVal --- compiler/src/dotty/tools/dotc/typer/Checking.scala | 2 +- tests/run/export-anyval.check | 1 + tests/run/export-anyval.scala | 12 ++++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 tests/run/export-anyval.check create mode 100644 tests/run/export-anyval.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 7f5ac955fa12..700bd483ff38 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -727,7 +727,7 @@ object Checking { report.error(ValueClassesMayNotDefineNonParameterField(clazz, stat.symbol), stat.srcPos) case _: DefDef if stat.symbol.isConstructor => report.error(ValueClassesMayNotDefineASecondaryConstructor(clazz, stat.symbol), 
stat.srcPos) - case _: MemberDef | _: Import | EmptyTree => + case _: MemberDef | _: Import | _: Export | EmptyTree => // ok case _ => report.error(ValueClassesMayNotContainInitalization(clazz), stat.srcPos) diff --git a/tests/run/export-anyval.check b/tests/run/export-anyval.check new file mode 100644 index 000000000000..1c2f472bb006 --- /dev/null +++ b/tests/run/export-anyval.check @@ -0,0 +1 @@ +Hello from export \ No newline at end of file diff --git a/tests/run/export-anyval.scala b/tests/run/export-anyval.scala new file mode 100644 index 000000000000..26fb2230781d --- /dev/null +++ b/tests/run/export-anyval.scala @@ -0,0 +1,12 @@ +class Foo(val x: String) + + +class Bar(val y: Foo) extends AnyVal: + export y.* + def foo: String = x +end Bar + +@main def Test = + val a = Bar(Foo("Hello from export")) + println(a.foo) + From 14acdc06067a202f381fa9066a961cebb1314d6a Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 28 Sep 2024 14:58:05 +0200 Subject: [PATCH 586/827] Implement SIP 64 as non-experimental --- .../src/dotty/tools/dotc/ast/Desugar.scala | 2 +- compiler/src/dotty/tools/dotc/ast/untpd.scala | 1 - .../dotty/tools/dotc/parsing/Parsers.scala | 20 ++++++-------- .../tools/dotc/printing/RefinedPrinter.scala | 4 ++- docs/_docs/internals/syntax.md | 10 +++++-- docs/_docs/reference/syntax.md | 27 +++++++++++++++---- library/src/scala/compiletime/package.scala | 1 - .../AutoImplementAbstractMembersSuite.scala | 4 ++- tests/neg/empty-given.scala | 4 +-- tests/neg/i12348.check | 6 ++--- .../stdlibExperimentalDefinitions.scala | 1 - tests/warn/abstract-givens-new.check | 6 +---- tests/warn/abstract-givens-new.scala | 3 ++- 13 files changed, 53 insertions(+), 36 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 5c468721fd43..e1a6b97fc7d3 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1261,7 +1261,7 @@ object desugar { str.toTermName.asSimpleName /** Extract a synthesized given name from a type tree. This is used for - * both anonymous givens and (under x.modularity) deferred givens. + * both anonymous givens and deferred givens. 
* @param followArgs if true include argument types in the name */ private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 60309d4d83bd..935e42d5e05c 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -119,7 +119,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree - // `paramName: tycon as ownName`, ownName != EmptyTermName only under x.modularity case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 8a173faa3cec..5af9b105a5c1 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -994,8 +994,8 @@ object Parsers { skipParams() lookahead.isColon && { - !in.featureEnabled(Feature.modularity) - || { // with modularity language import, a `:` at EOL after an identifier represents a single identifier given + !sourceVersion.isAtLeast(`3.6`) + || { // in the new given syntax, a `:` at EOL after an identifier represents a single identifier given // Example: // given C: // def f = ... 
@@ -1833,7 +1833,7 @@ object Parsers { infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, isOperator = !followingIsVararg() && !isPureArrow - && !(isIdent(nme.as) && in.featureEnabled(Feature.modularity)) + && !(isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`)) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -2226,18 +2226,19 @@ object Parsers { def contextBound(pname: TypeName): Tree = val t = toplevelTyp() val ownName = - if isIdent(nme.as) && in.featureEnabled(Feature.modularity) then + if isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`) then in.nextToken() ident() else EmptyTermName ContextBoundTypeTree(t, pname, ownName) - /** ContextBounds ::= ContextBound | `{` ContextBound {`,` ContextBound} `}` + /** ContextBounds ::= ContextBound [`:` ContextBounds] + * | `{` ContextBound {`,` ContextBound} `}` */ def contextBounds(pname: TypeName): List[Tree] = if in.isColon then in.nextToken() - if in.token == LBRACE && in.featureEnabled(Feature.modularity) + if in.token == LBRACE && sourceVersion.isAtLeast(`3.6`) then inBraces(commaSeparated(() => contextBound(pname))) else contextBound(pname) :: contextBounds(pname) else if in.token == VIEWBOUND then @@ -4189,7 +4190,7 @@ object Parsers { def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) val nameStart = in.offset - var newSyntaxAllowed = in.featureEnabled(Feature.modularity) + var newSyntaxAllowed = sourceVersion.isAtLeast(`3.6`) val hasEmbeddedColon = !in.isColon && followingIsGivenDefWithColon() val name = if isIdent && hasEmbeddedColon then ident() else EmptyTermName @@ -4293,11 +4294,6 @@ object Parsers { // old-style abstract given if name.isEmpty then syntaxError(em"Anonymous given cannot be abstract, or maybe you want to define a concrete given and are missing a `()` argument?", in.lastOffset) - if newSyntaxAllowed then - warning( - em"""This defines an abstract given, which is deprecated. Use a `deferred` given instead. 
- |Or, if you intend to define a concrete given, follow the type with `()` arguments.""", - in.lastOffset) DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else // structural instance diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index ea729e9549d5..b229c7ec29d9 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -24,6 +24,8 @@ import TypeApplications.* import NameKinds.{WildcardParamName, DefaultGetterName} import util.Chars.isOperatorPart import config.{Config, Feature} +import config.Feature.sourceVersion +import config.SourceVersion.* import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.ast.untpd.{MemberDef, Modifiers, PackageDef, RefTree, Template, TypeDef, ValOrDefDef} @@ -751,7 +753,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case GenAlias(pat, expr) => toText(pat) ~ " = " ~ toText(expr) case ContextBounds(bounds, cxBounds) => - if Feature.enabled(Feature.modularity) then + if sourceVersion.isAtLeast(`3.6`) then def boundsText(bounds: Tree) = bounds match case ContextBoundTypeTree(tpt, _, ownName) => toText(tpt) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index f82e9c998b4d..0cde7bc127aa 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -223,7 +223,9 @@ TypeArgs ::= ‘[’ Types ‘]’ Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] ContextBounds(typeBounds, tps) -ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBounds ::= ContextBound + | ContextBound `:` ContextBounds -- to be deprecated + | '{' ContextBound {',' ContextBound} '}' ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} NamesAndTypes ::= NameAndType {‘,’ NameAndType} @@ -464,7 +466,7 @@ TypeDef ::= id [HkTypeParamClause] {FunParamClause} TypeAndCtxBounds TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef | ‘enum’ EnumDef - | ‘given’ GivenDef + | ‘given’ (GivenDef | OldGivenDef) ClassDef ::= id ClassConstr [Template] ClassDef(mods, name, tparams, templ) ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses with DefDef(_, , Nil, vparamss, EmptyTree, EmptyTree) as first stat ConstrMods ::= {Annotation} [AccessModifier] @@ -483,6 +485,10 @@ GivenConditional ::= DefTypeParamClause | GivenType GivenType ::= AnnotType1 {id [nl] AnnotType1} +OldGivenDef ::= [OldGivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -- syntax up to Scala 3.5, to be deprecated in the future +OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index 5d984c762a89..adf25c9342fa 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -214,7 +214,11 @@ ParamValueType ::= Type [‘*’] TypeArgs ::= ‘[’ Types ‘]’ Refinement ::= :<<< 
[RefineDcl] {semi [RefineDcl]} >>> TypeBounds ::= [‘>:’ Type] [‘<:’ Type] -TypeAndCtxBounds ::= TypeBounds {‘:’ Type} +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound + | ContextBound `:` ContextBounds -- to be deprecated + | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} ``` @@ -437,16 +441,29 @@ TypeDef ::= id [HkTypeParamClause] {FunParamClause}TypeBounds TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef | ‘enum’ EnumDef - | ‘given’ GivenDef + | ‘given’ (GivenDef | OldGivenDef) ClassDef ::= id ClassConstr [Template] ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present -GivenType ::= AnnotType {id [nl] AnnotType} + +GivenDef ::= [id ':'] GivenSig +GivenSig ::= GivenImpl + | '(' ')' '=>' GivenImpl + | GivenConditional '=>' GivenSig +GivenImpl ::= GivenType ([‘=’ Expr] | TemplateBody) + | ConstrApps TemplateBody +GivenConditional ::= DefTypeParamClause + | DefTermParamClause + | '(' FunArgTypes ')' + | GivenType +GivenType ::= AnnotType1 {id [nl] AnnotType1} + +OldGivenDef ::= [OldGivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -- syntax up to Scala 3.5, to be deprecated in the future +OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index a3896a1eeb06..8215ae2452a3 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -52,7 +52,6 @@ def uninitialized: Nothing = ??? * that implement the enclosing trait and that do not contain an explicit overriding * definition of that given. */ -@experimental @compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait") def deferred: Nothing = ??? diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala index ffe4e293ba30..2df69cc85af2 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala @@ -1089,11 +1089,13 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: | def foo(x: Int): Int | def bar(x: String): String | - |given Foo with + |given Foo { | | override def foo(x: Int): Int = ??? | | override def bar(x: String): String = ??? 
+ | + |} |""".stripMargin ) diff --git a/tests/neg/empty-given.scala b/tests/neg/empty-given.scala index 10daf5ac009a..cf7566724cc2 100644 --- a/tests/neg/empty-given.scala +++ b/tests/neg/empty-given.scala @@ -1,3 +1,3 @@ -given { // error +given { def foo = 1 // error -} // error \ No newline at end of file +} \ No newline at end of file diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index 55806fa5ca1b..8d0a24a60308 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -1,4 +1,4 @@ --- [E040] Syntax Error: tests/neg/i12348.scala:2:15 -------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i12348.scala:2:16 -------------------------------------------------------------------- 2 | given inline x: Int = 0 // error - | ^ - | 'with' expected, but identifier found + | ^ + | an identifier expected, but ':' found diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 15ccd38f860c..e28c2240a414 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -83,7 +83,6 @@ val experimentalDefinitionInLibrary = Set( // New feature: modularity "scala.Precise", "scala.annotation.internal.WitnessNames", - "scala.compiletime.package$package$.deferred", "scala.runtime.stdLibPatches.Predef$.is", // New feature: functions with erased parameters. diff --git a/tests/warn/abstract-givens-new.check b/tests/warn/abstract-givens-new.check index 197d9bcb4f3e..8b137891791f 100644 --- a/tests/warn/abstract-givens-new.check +++ b/tests/warn/abstract-givens-new.check @@ -1,5 +1 @@ --- Warning: tests/warn/abstract-givens-new.scala:7:22 ------------------------------------------------------------------ -7 | given intC: Int is C // warn - | ^ - | This defines an abstract given, which is deprecated. Use a `deferred` given instead. - | Or, if you intend to define a concrete given, follow the type with `()` arguments. 
+ diff --git a/tests/warn/abstract-givens-new.scala b/tests/warn/abstract-givens-new.scala index b38fd11c4458..2ecf700f18f7 100644 --- a/tests/warn/abstract-givens-new.scala +++ b/tests/warn/abstract-givens-new.scala @@ -4,6 +4,7 @@ class C: trait T: given Int is C // ok - given intC: Int is C // warn + given intC: Int is C // ok for now, will be warning given intC2: (Int is C)() // ok given intC3: Int is C {} // also ok + From 3fd727d2cb6f5284db529fbbad202a6f91d0a979 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 28 Sep 2024 18:17:20 +0200 Subject: [PATCH 587/827] Under -source future, warn if old given and context bound syntax is used --- .../tools/dotc/config/MigrationVersion.scala | 1 + .../tools/dotc/config/SourceVersion.scala | 2 ++ .../dotty/tools/dotc/parsing/Parsers.scala | 22 +++++++++++++++- .../neg/context-bounds-migration-future.check | 4 +++ tests/neg/infix.scala | 2 +- tests/neg/tracked.check | 8 +++--- tests/neg/tracked.scala | 2 +- tests/warn/abstract-givens-new.check | 6 ++++- tests/warn/abstract-givens-new.scala | 2 +- tests/warn/old-givens.check | 25 +++++++++++++++++++ tests/warn/old-givens.scala | 18 +++++++++++++ 11 files changed, 83 insertions(+), 9 deletions(-) create mode 100644 tests/warn/old-givens.check create mode 100644 tests/warn/old-givens.scala diff --git a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala index 2a4e252fa695..3da716abbc40 100644 --- a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala @@ -30,6 +30,7 @@ enum MigrationVersion(val warnFrom: SourceVersion, val errorFrom: SourceVersion) case ImportRename extends MigrationVersion(future, future) case ParameterEnclosedByParenthesis extends MigrationVersion(future, future) case XmlLiteral extends MigrationVersion(future, future) + case GivenSyntax extends MigrationVersion(future, never) require(warnFrom.ordinal <= errorFrom.ordinal) diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index caf1187614b7..3b2e2d420d4f 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -16,6 +16,8 @@ enum SourceVersion: // !!! Keep in sync with scala.runtime.stdlibPatches.language !!! case `future-migration`, `future` + case `never` // needed for MigrationVersion.errorFrom if we never want to issue an error + val isMigrating: Boolean = toString.endsWith("-migration") def stable: SourceVersion = diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 5af9b105a5c1..f94c14079f59 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -2240,7 +2240,16 @@ object Parsers { in.nextToken() if in.token == LBRACE && sourceVersion.isAtLeast(`3.6`) then inBraces(commaSeparated(() => contextBound(pname))) - else contextBound(pname) :: contextBounds(pname) + else + val bound = contextBound(pname) + val rest = + if in.isColon then + report.errorOrMigrationWarning( + em"Multiple context bounds should be enclosed in `{ ... 
}`", + in.sourcePos(), MigrationVersion.GivenSyntax) + contextBounds(pname) + else Nil + bound :: rest else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", @@ -4261,6 +4270,9 @@ object Parsers { in.nextToken() newSignature() else if hasEmbeddedColon then + report.errorOrMigrationWarning( + em"This old given syntax is no longer supported; use `=>` instead of `:`", + in.sourcePos(), MigrationVersion.GivenSyntax) newSyntaxAllowed = false val tparamsOld = typeParamClauseOpt(ParamOwner.Given) newLineOpt() @@ -4294,6 +4306,11 @@ object Parsers { // old-style abstract given if name.isEmpty then syntaxError(em"Anonymous given cannot be abstract, or maybe you want to define a concrete given and are missing a `()` argument?", in.lastOffset) + if newSyntaxAllowed then + report.errorOrMigrationWarning( + em"""This defines an abstract given, which is no longer supported. Use a `deferred` given instead. + |Or, if you intend to define a concrete given, follow the type with `()` arguments.""", + in.sourcePos(in.lastOffset), MigrationVersion.GivenSyntax) DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else // structural instance @@ -4483,6 +4500,9 @@ object Parsers { /** with Template, with EOL interpreted */ def withTemplate(constr: DefDef, parents: List[Tree]): Template = + report.errorOrMigrationWarning( + em"Given member definitions starting with `with` are no longer supported; use `{...}` or `:` followed by newline instead", + in.sourcePos(), MigrationVersion.GivenSyntax) accept(WITH) val (self, stats) = templateBody(parents, rewriteWithColon = false) Template(constr, parents, Nil, self, stats) diff --git a/tests/neg/context-bounds-migration-future.check b/tests/neg/context-bounds-migration-future.check index f56da5d6b28d..f517a1e335c9 100644 --- a/tests/neg/context-bounds-migration-future.check +++ b/tests/neg/context-bounds-migration-future.check @@ -4,3 +4,7 @@ | method foo does not take more parameters | | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/context-bounds-migration-future.scala:6:6 -------------------------------------------------------- +6 |given [T]: C[T] = C[T]() + | ^ + | This old given syntax is no longer supported; use `=>` instead of `:` diff --git a/tests/neg/infix.scala b/tests/neg/infix.scala index dda638c829f9..bfd68a17e656 100644 --- a/tests/neg/infix.scala +++ b/tests/neg/infix.scala @@ -8,7 +8,7 @@ class C: def +(x: Int): Int = ??? object C: - given AnyRef with + given AnyRef: extension (x: C) infix def iop (y: Int) = ??? def mop (y: Int) = ??? 
diff --git a/tests/neg/tracked.check b/tests/neg/tracked.check index ae734e7aa0b4..14a4d2a08300 100644 --- a/tests/neg/tracked.check +++ b/tests/neg/tracked.check @@ -22,10 +22,10 @@ 17 | tracked type T = Int // error // error | ^^^^ | end of statement expected but 'type' found --- Error: tests/neg/tracked.scala:20:29 -------------------------------------------------------------------------------- -20 | given g2(using tracked val x: Int): C = C(x) // error - | ^^^^^^^^^^^^^^^^^^ - | method parameter x may not be a `val` +-- Error: tests/neg/tracked.scala:20:25 -------------------------------------------------------------------------------- +20 | given g2: (tracked val x: Int) => C = C(x) // error + | ^^^^^^^^^^^^^^^^^^ + | method parameter x may not be a `val` -- Error: tests/neg/tracked.scala:4:21 --------------------------------------------------------------------------------- 4 |class C2(tracked var x: Int) // error | ^ diff --git a/tests/neg/tracked.scala b/tests/neg/tracked.scala index 8d315a7b89ac..9f874ca3c0da 100644 --- a/tests/neg/tracked.scala +++ b/tests/neg/tracked.scala @@ -17,4 +17,4 @@ object D: tracked type T = Int // error // error object E: - given g2(using tracked val x: Int): C = C(x) // error + given g2: (tracked val x: Int) => C = C(x) // error diff --git a/tests/warn/abstract-givens-new.check b/tests/warn/abstract-givens-new.check index 8b137891791f..a33a109db1b4 100644 --- a/tests/warn/abstract-givens-new.check +++ b/tests/warn/abstract-givens-new.check @@ -1 +1,5 @@ - +-- Warning: tests/warn/abstract-givens-new.scala:7:22 ------------------------------------------------------------------ +7 | given intC: Int is C // warn + | ^ + | This defines an abstract given, which is no longer supported. Use a `deferred` given instead. + | Or, if you intend to define a concrete given, follow the type with `()` arguments. diff --git a/tests/warn/abstract-givens-new.scala b/tests/warn/abstract-givens-new.scala index 2ecf700f18f7..c18ebe46acf7 100644 --- a/tests/warn/abstract-givens-new.scala +++ b/tests/warn/abstract-givens-new.scala @@ -4,7 +4,7 @@ class C: trait T: given Int is C // ok - given intC: Int is C // ok for now, will be warning + given intC: Int is C // warn given intC2: (Int is C)() // ok given intC3: Int is C {} // also ok diff --git a/tests/warn/old-givens.check b/tests/warn/old-givens.check new file mode 100644 index 000000000000..ec237ae8fd93 --- /dev/null +++ b/tests/warn/old-givens.check @@ -0,0 +1,25 @@ +-- Warning: tests/warn/old-givens.scala:8:20 --------------------------------------------------------------------------- +8 | given intC: C[Int] // warn + | ^ + | This defines an abstract given, which is no longer supported. Use a `deferred` given instead. + | Or, if you intend to define a concrete given, follow the type with `()` arguments. 
+-- Warning: tests/warn/old-givens.scala:11:8 --------------------------------------------------------------------------- +11 | given [T]: Ord[T] with // warn // warn + | ^ + | This old given syntax is no longer supported; use `=>` instead of `:` +-- Warning: tests/warn/old-givens.scala:11:20 -------------------------------------------------------------------------- +11 | given [T]: Ord[T] with // warn // warn + | ^ + |Given member definitions starting with `with` are no longer supported; use `{...}` or `:` followed by newline instead +-- Warning: tests/warn/old-givens.scala:14:8 --------------------------------------------------------------------------- +14 | given [T](using Ord[T]): Ord[List[T]] with // warn // warn + | ^ + | This old given syntax is no longer supported; use `=>` instead of `:` +-- Warning: tests/warn/old-givens.scala:14:40 -------------------------------------------------------------------------- +14 | given [T](using Ord[T]): Ord[List[T]] with // warn // warn + | ^ + |Given member definitions starting with `with` are no longer supported; use `{...}` or `:` followed by newline instead +-- Warning: tests/warn/old-givens.scala:17:15 -------------------------------------------------------------------------- +17 | def f[T: Ord : C]() = ??? // warn + | ^ + | Multiple context bounds should be enclosed in `{ ... }` diff --git a/tests/warn/old-givens.scala b/tests/warn/old-givens.scala new file mode 100644 index 000000000000..83e650df47d3 --- /dev/null +++ b/tests/warn/old-givens.scala @@ -0,0 +1,18 @@ +//> using options -source future +trait Ord[T]: + def compare(x: T, y: T): Boolean + +class C[T] + +trait T: + given intC: C[Int] // warn + given intC2: C[Int] () // OK + + given [T]: Ord[T] with // warn // warn + def compare(x: T, y: T): Boolean = ??? + + given [T](using Ord[T]): Ord[List[T]] with // warn // warn + def compare(x: List[T], y: List[T]): Boolean = ??? + + def f[T: Ord : C]() = ??? // warn + From 20e942ba03eb44ec9580dcb1ce1e6a323e24853e Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 28 Sep 2024 18:51:24 +0200 Subject: [PATCH 588/827] Allow context bounds on type members in 3.6 It was forgotten before to enable this. --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index f94c14079f59..978c89398ecc 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -4024,7 +4024,7 @@ object Parsers { case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => makeTypeDef(typeAndCtxBounds(tname)) case _ if (staged & StageKind.QuotedPattern) != 0 - || in.featureEnabled(Feature.modularity) && in.isColon => + || sourceVersion.isAtLeast(`3.6`) && in.isColon => makeTypeDef(typeAndCtxBounds(tname)) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) From 3cff3be904dfd08afb08fc2cf7e808a9d89a8d7a Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 28 Sep 2024 19:16:12 +0200 Subject: [PATCH 589/827] Drop modularity import from tests that no longer need it. 
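For illustration only (this sketch is not one of the changed test files): with SIP 64 implemented as non-experimental in 3.6, code such as the following no longer needs `import language.experimental.modularity` or `-source future` in order to use named context bounds:

```scala
// Hypothetical example, assuming a user-defined Ord type class;
// compiles under plain Scala 3.6 without any experimental import.
trait Ord[T]:
  def compare(x: T, y: T): Int

def max[T: Ord as ord](x: T, y: T): T =
  if ord.compare(x, y) >= 0 then x else y
```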
--- tests/neg/cb-companion-leaks.scala | 2 +- tests/neg/deferred-givens.scala | 2 +- tests/neg/deferredSummon.scala | 2 +- tests/pos-custom-args/captures/logger.scala | 1 - tests/pos-custom-args/captures/nested-classes.scala | 1 - tests/pos/FromString-cb-companion.scala | 2 +- tests/pos/FromString-typeparam.scala | 1 - tests/pos/cb-companion-joins.scala | 1 - tests/pos/deferred-givens-singletons.scala | 2 +- tests/pos/deferred-givens.scala | 2 +- tests/pos/dep-context-bounds.scala | 2 +- tests/pos/i21189-alt.scala | 2 -- tests/pos/i21189.scala | 2 -- tests/pos/typeclasses-arrow0.scala | 2 -- tests/run/Providers.scala | 1 - tests/run/byname-given.scala | 2 -- 16 files changed, 7 insertions(+), 20 deletions(-) diff --git a/tests/neg/cb-companion-leaks.scala b/tests/neg/cb-companion-leaks.scala index 07155edb05dc..d9d8f3d6d19b 100644 --- a/tests/neg/cb-companion-leaks.scala +++ b/tests/neg/cb-companion-leaks.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity -source future -explain +//> using options -language:experimental.modularity -explain class C[Self] diff --git a/tests/neg/deferred-givens.scala b/tests/neg/deferred-givens.scala index 8a1bcb2b50fc..4de79120a1c7 100644 --- a/tests/neg/deferred-givens.scala +++ b/tests/neg/deferred-givens.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity -source future + import compiletime.deferred class Ctx diff --git a/tests/neg/deferredSummon.scala b/tests/neg/deferredSummon.scala index cddde82535fb..39a775cf78bf 100644 --- a/tests/neg/deferredSummon.scala +++ b/tests/neg/deferredSummon.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity + object Test: given Int = compiletime.deferred // error diff --git a/tests/pos-custom-args/captures/logger.scala b/tests/pos-custom-args/captures/logger.scala index 81eeb521fee5..c2cfed0462b6 100644 --- a/tests/pos-custom-args/captures/logger.scala +++ b/tests/pos-custom-args/captures/logger.scala @@ -1,5 +1,4 @@ import language.experimental.saferExceptions -import language.experimental.modularity class FileSystem extends caps.Capability diff --git a/tests/pos-custom-args/captures/nested-classes.scala b/tests/pos-custom-args/captures/nested-classes.scala index 4a76a88c03ff..1c624d37cee1 100644 --- a/tests/pos-custom-args/captures/nested-classes.scala +++ b/tests/pos-custom-args/captures/nested-classes.scala @@ -1,5 +1,4 @@ import language.experimental.captureChecking -import language.experimental.modularity import annotation.{capability, constructorOnly} class IO extends caps.Capability diff --git a/tests/pos/FromString-cb-companion.scala b/tests/pos/FromString-cb-companion.scala index d086420761ee..de525629a028 100644 --- a/tests/pos/FromString-cb-companion.scala +++ b/tests/pos/FromString-cb-companion.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity -source future +//> using options -language:experimental.modularity trait FromString[Self]: def fromString(s: String): Self diff --git a/tests/pos/FromString-typeparam.scala b/tests/pos/FromString-typeparam.scala index 893bcfd3decc..a989e679cd94 100644 --- a/tests/pos/FromString-typeparam.scala +++ b/tests/pos/FromString-typeparam.scala @@ -1,4 +1,3 @@ -//> using options -language:experimental.modularity -source future trait FromString[A]: def fromString(s: String): A diff --git a/tests/pos/cb-companion-joins.scala b/tests/pos/cb-companion-joins.scala index 97e0a8a7e4ac..91df784bb2ce 100644 --- a/tests/pos/cb-companion-joins.scala +++ b/tests/pos/cb-companion-joins.scala @@ 
-1,5 +1,4 @@ import language.experimental.modularity -import language.future trait M[Self]: extension (x: Self) def combine (y: Self): String diff --git a/tests/pos/deferred-givens-singletons.scala b/tests/pos/deferred-givens-singletons.scala index 60a881340b75..c03bd59ccf20 100644 --- a/tests/pos/deferred-givens-singletons.scala +++ b/tests/pos/deferred-givens-singletons.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity -source future +// //> using options -language:experimental.modularity -source future import compiletime.* trait A: diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala index 0ad751fcc7e0..68980a92f602 100644 --- a/tests/pos/deferred-givens.scala +++ b/tests/pos/deferred-givens.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity -source future + import compiletime.* class Ord[Elem] given Ord[Double]() diff --git a/tests/pos/dep-context-bounds.scala b/tests/pos/dep-context-bounds.scala index c724d92e9809..d89bec5d85ee 100644 --- a/tests/pos/dep-context-bounds.scala +++ b/tests/pos/dep-context-bounds.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity -source future +//> using options -language:experimental.modularity trait A: type Self diff --git a/tests/pos/i21189-alt.scala b/tests/pos/i21189-alt.scala index 10a55ec25185..d99a95f0d67e 100644 --- a/tests/pos/i21189-alt.scala +++ b/tests/pos/i21189-alt.scala @@ -1,5 +1,3 @@ -//> using options -source:future -language:experimental.modularity - class MySortedSet[T : Ord] extends SortedSet[T] trait Ord[T] diff --git a/tests/pos/i21189.scala b/tests/pos/i21189.scala index ea27f88402de..556f470945eb 100644 --- a/tests/pos/i21189.scala +++ b/tests/pos/i21189.scala @@ -1,5 +1,3 @@ -//> using options -source:future -language:experimental.modularity - class MySortedSet[T : Ord] extends SortedSet[T] trait Ord[T] diff --git a/tests/pos/typeclasses-arrow0.scala b/tests/pos/typeclasses-arrow0.scala index d7d85e6b7400..279d2367e185 100644 --- a/tests/pos/typeclasses-arrow0.scala +++ b/tests/pos/typeclasses-arrow0.scala @@ -1,5 +1,3 @@ -//> using options -language:experimental.modularity -source future - class Common: trait Ord[A]: diff --git a/tests/run/Providers.scala b/tests/run/Providers.scala index 8c5bf20bc02e..445781c49987 100644 --- a/tests/run/Providers.scala +++ b/tests/run/Providers.scala @@ -1,4 +1,3 @@ -import language.experimental.modularity import compiletime.constValue import compiletime.ops.int.S diff --git a/tests/run/byname-given.scala b/tests/run/byname-given.scala index d18ebb221a68..05d0d652ff68 100644 --- a/tests/run/byname-given.scala +++ b/tests/run/byname-given.scala @@ -1,5 +1,3 @@ -//> using options -language:experimental.modularity -source future - @main def Test = var x: Int = 0 given () => Int = x From ca6b8f7c3ed79848a57a299265dd86f20f69e5f8 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 29 Sep 2024 20:08:40 +0200 Subject: [PATCH 590/827] Make never an illegal source version --- compiler/src/dotty/tools/dotc/config/SourceVersion.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 3b2e2d420d4f..3a7285751827 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -34,7 +34,7 @@ object SourceVersion extends Property.Key[SourceVersion]: def defaultSourceVersion = `3.6` /** language versions that 
may appear in a language import, are deprecated, but not removed from the standard library. */ - val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) + val illegalSourceVersionNames = List("3.1-migration", "never").map(_.toTermName) /** language versions that the compiler recognises. */ val validSourceVersionNames = values.toList.map(_.toString.toTermName) From 8f4210ebba52eb24cd68447fc002760d91920344 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Mon, 30 Sep 2024 15:07:00 +0200 Subject: [PATCH 591/827] Check parents non-empty before calling reduceLeft --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 992f283154ca..c8eb5b145db1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1962,7 +1962,9 @@ trait Applications extends Compatibility { def widenPrefix(alt: TermRef): Type = alt.prefix.widen match case pre: (TypeRef | ThisType) if pre.typeSymbol.is(Module) => - pre.parents.reduceLeft(TypeComparer.andType(_, _)) + val ps = pre.parents + if ps.isEmpty then pre + else ps.reduceLeft(TypeComparer.andType(_, _)) case wpre => wpre /** If two alternatives have the same symbol, we pick the one with the most From 0c721dcffb473b92c71e58569090313577f5368c Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Sep 2024 11:31:57 +0200 Subject: [PATCH 592/827] Update docs to new conventions Also: Some reshuffling and editing of existing material. --- .../contextual/by-name-context-parameters.md | 2 +- .../reference/contextual/context-bounds.md | 193 +++++++- .../_docs/reference/contextual/conversions.md | 2 +- .../reference/contextual/deferred-givens.md | 57 +++ .../reference/contextual/derivation-macro.md | 4 +- docs/_docs/reference/contextual/derivation.md | 26 +- .../reference/contextual/extension-methods.md | 2 +- .../reference/contextual/given-imports.md | 2 +- docs/_docs/reference/contextual/givens.md | 181 ++------ .../_docs/reference/contextual/more-givens.md | 204 +++++++++ .../contextual/multiversal-equality.md | 2 +- .../reference/contextual/previous-givens.md | 233 ++++++++++ .../contextual/relationship-implicits.md | 10 +- .../reference/contextual/type-classes.md | 36 +- .../reference/contextual/using-clauses.md | 2 +- docs/_docs/reference/enums/enums.md | 12 +- .../experimental/erased-defs-spec.md | 6 +- .../experimental/numeric-literals.md | 4 +- .../reference/experimental/typeclasses.md | 431 +----------------- .../reference/metaprogramming/macros-spec.md | 2 +- .../_docs/reference/metaprogramming/macros.md | 4 +- docs/sidebar.yml | 4 + 22 files changed, 764 insertions(+), 655 deletions(-) create mode 100644 docs/_docs/reference/contextual/deferred-givens.md create mode 100644 docs/_docs/reference/contextual/more-givens.md create mode 100644 docs/_docs/reference/contextual/previous-givens.md diff --git a/docs/_docs/reference/contextual/by-name-context-parameters.md b/docs/_docs/reference/contextual/by-name-context-parameters.md index 7c517abe9406..e903ac2642c3 100644 --- a/docs/_docs/reference/contextual/by-name-context-parameters.md +++ b/docs/_docs/reference/contextual/by-name-context-parameters.md @@ -12,7 +12,7 @@ trait Codec[T]: given intCodec: Codec[Int] = ??? 
-given optionCodec[T](using ev: => Codec[T]): Codec[Option[T]] with +given optionCodec: [T] => (ev: => Codec[T]) => Codec[Option[T]]: def write(xo: Option[T]) = xo match case Some(x) => ev.write(x) case None => diff --git a/docs/_docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md index 11d57c8cbd52..ef32dc7b08c2 100644 --- a/docs/_docs/reference/contextual/context-bounds.md +++ b/docs/_docs/reference/contextual/context-bounds.md @@ -4,50 +4,205 @@ title: "Context Bounds" nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-bounds.html --- -A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter. Using a context bound, the `maximum` function of the last section can be written like this: +A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter. These patterns are commonplace when modelling type classes in Scala. Using a context bound, the `maximum` function of the [last section](./using-clauses.md) can be written like this: ```scala def maximum[T: Ord](xs: List[T]): T = xs.reduceLeft(max) ``` -A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `using Ord[T]`. The context parameter(s) generated from context bounds -are added as follows: +A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `using Ord[T]`, which is added to the signature of the enclosing method. The generated parameter is called a _witness_ for the context bound. - - If the method parameters end in an implicit parameter list or using clause, - context parameters are added in front of that list. - - Otherwise they are added as a separate parameter clause at the end. - -Example: +For instance the `maximum` method above expands to +```scala +def maximum[T](xs: List[T])(using Ord[T]): T = ... +``` +Context bounds can be combined with subtype bounds. If both are present, subtype bounds come first, e.g. ```scala -def f[T: C1 : C2, U: C3](x: T)(using y: U, z: V): R +def f[T <: B : C](x: T): R = ... ``` -would expand to +## Named Context Bounds +A context bound can be given a name with an `as` clause. For example, assume the following trait definitions. +```scala + trait SemiGroup[A]: + extension (x: A) def combine(y: A): A + + trait Monoid[A] extends SemiGroup[A]: + def unit: A +``` +We can write `reduce` function over lists of monoid instances like this: ```scala -def f[T, U](x: T)(using _: C1[T], _: C2[T], _: C3[U], y: U, z: V): R + def reduce[A: Monoid as m](xs: List[A]): A = + xs.foldLeft(m.unit)(_ `combine` _) ``` +We use `as x` after the type of a context bound to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before. -Context bounds can be combined with subtype bounds. If both are present, subtype bounds come first, e.g. +In a context bound with a naming clause the witness parameter carries the given name. For instance the expanded signature of `reduce` would be +```scala + def reduce[A](xs: List[A])(using m: Monoid[A]): A +``` +Since the context parameter now has a name, it can be referred +to in the body of `reduce`. An example is the `m.unit` reference in the definition above. + +If the context bound does not carry an `as` clause, the generated witness parameter gets a compiler-synthesized name. 
However, a [currently experimental +language extension](../experimental/default-names-context-bounds.md) would in this case give the context parameter the same name as the bound type parameter. + +Named context bounds were introduced in Scala 3.6. + +## Aggregate Context Bounds +A type parameter can have several context bounds. If there are multiple bounds, they are written inside braces `{...}`. Example: ```scala -def g[T <: B : C](x: T): R = ... + trait: + def showMax[X : {Ord, Show}](x: X, y: X): String + class B extends A: + def showMax[X : {Ord as ordering, Show as show}](x: X, y: X): String = + show.asString(ordering.max(x, y)) ``` -## Migration +This syntax is valid from Scala 3.6. The previous syntax used +chains of `:` clauses, as in `[X : Ord : Show]`. This syntax is still available but will be deprecated and removed over time. -To ease migration, context bounds in Dotty map in Scala 3.0 to old-style implicit parameters -for which arguments can be passed either with a `(using ...)` clause or with a normal application. From Scala 3.1 on, they will map to context parameters instead, as is described above. +## Placement of Generated Context Parameters -If the source version is `future-migration`, any pairing of an evidence +The witness context parameter(s) generated from context bounds are added as follows: + + 1. If one of the bounds is referred to by its name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. + 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. + 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. + +Rules (2) and (3) match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility with Scala 2 and earlier Scala 3 versions is maintained. + +**Examples:** + + 1. By rule 3, + ```scala + def f[T: {C1, C2}](x: T): R + ``` + expands to + ```scala + def f[T](x: T)(using C1, C2): R + ``` + Equally by rule 3, + ```scala + def f[T: {C1 as c1, C2 as c2}](x: T): R + ``` + expands to + ```scala + def f[T](x: T)(using c1: C1, c2: C2): R + + 2. By rule 2, + ```scala + def f[T: {C1, C2}, U: C3](x: T)(using y: U, z: V): R + ``` + expands to + ```scala + def f[T, U](x: T)(using _: C1[T], _: C2[T], _: C3[U], y: U, z: V): R + ``` + The same expansion occurs if `y` and `z` are Scala 2 style `implicit` parameters. + 3. Assume the following trait definition: + ```scala + trait Parser[P]: + type Input + type Result + ``` + Here is a method `run` that runs a parser on an input of the required type: + ```scala + def run[P : Parser as p](in: p.Input): p.Result + ``` + By rule 1, this method definition is expanded to: + ```scala + def run[P](using p: Parser[P]](in: p.Input): p.Result + ``` + Note that the `using` clause is placed in front of the explicit parameter clause `(in: p.Result)` so that + the type `p.Result` can legally refer to the context parameter `p`. + +### Migration + +To ease migration, context bounds map in Scala 3.0 - 3.5 to old-style implicit parameters +for which arguments can be passed either with a `(using ...)` clause or with a normal application. From Scala 3.6 on, they will map to context parameters instead, as is described above. 
+ +If the source version is `3.6-migration`, any pairing of an evidence context parameter stemming from a context bound with a normal argument will give a migration warning. The warning indicates that a `(using ...)` clause is needed instead. The rewrite can be done automatically under `-rewrite`. +## Context Bounds for Polymorphic Functions + +From Scala 3.6 on, context bounds can also be used in polymorphic function types and polymorphic function literals: + +```scala +type Comparer = [X: Ord] => (x: X, y: X) => Boolean +val less: Comparer = [X: Ord as ord] => (x: X, y: X) => + ord.compare(x, y) < 0 +``` + +The expansion of such context bounds is analogous to the expansion in method types, except that instead of adding a using clause in a method, we insert a [context function type](./context-functions.md). + +For instance, the `type` and `val` definitions above would expand to +```scala +type Comparer = [X] => (x: X, y: X) => Ord[X] ?=> Boolean +val less: Comparer = [X] => (x: X, y: X) => (ord: Ord[X]) ?=> + ord.compare(x, y) < 0 +``` + +The expansion of using clauses does look inside alias types. For instance, +here is a variation of the previous example that uses a parameterized type alias: +```scala +type Cmp[X] = (x: X, y: X) => Ord[X] ?=> Boolean +type Comparer2 = [X: Ord] => Cmp[X] +``` +The expansion of the right hand side of `Comparer2` expands the `Cmp[X]` alias +and then inserts the context function at the same place as what's done for `Comparer`. + +### Context Bounds for Type Members + +From Scala 3.6 on, context bounds can not only used for type parameters but also for abstract type members. + +**Example**: + +```scala + class Collection: + type Element: Ord +``` + +These context bounds have to expand differently from context bounds for type parameters since there is no parameter list to accommodate any generated witnesses. Instead, context bounds for abstract types map to +[deferred givens](./deferred-givens.md). + +For instance, the `Collection` class above expands to: +```scala + class Collection: + type Element + given Ord[Element] = deferred +``` +As is explain in the [section on deferred givens](./deferred-givens.md), `deferred` is a special name defined in the `scala.compiletime` package. + + ## Syntax +The new syntax of context bounds is as follows: + ```ebnf -TypeParamBounds ::= [SubtypeBounds] {ContextBound} -ContextBound ::= ‘:’ Type +TypeParamBounds ::= TypeAndCtxBounds +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound + | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] ``` + +The syntax of function types and function literals +is generalized as follows to allow context bounds for generic type parameters. 
+ +```ebnf +FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type + | DefTypeParamClause '=>' Type +FunExpr ::= FunParams (‘=>’ | ‘?=>’) Expr + | DefTypeParamClause ‘=>’ Expr +``` +The syntax for abstract type members is generalized as follows to allow context bounds: + +```scala +TypeDef ::= id [TypeParamClause] TypeAndCtxBounds +``` \ No newline at end of file diff --git a/docs/_docs/reference/contextual/conversions.md b/docs/_docs/reference/contextual/conversions.md index 1ce8d42074e7..cb063b949a71 100644 --- a/docs/_docs/reference/contextual/conversions.md +++ b/docs/_docs/reference/contextual/conversions.md @@ -12,7 +12,7 @@ abstract class Conversion[-T, +U] extends (T => U): ``` For example, here is an implicit conversion from `String` to `Token`: ```scala -given Conversion[String, Token] with +given Conversion[String, Token]: def apply(str: String): Token = new KeyWord(str) ``` Using an alias this can be expressed more concisely as: diff --git a/docs/_docs/reference/contextual/deferred-givens.md b/docs/_docs/reference/contextual/deferred-givens.md new file mode 100644 index 000000000000..232c15afda35 --- /dev/null +++ b/docs/_docs/reference/contextual/deferred-givens.md @@ -0,0 +1,57 @@ +--- +layout: doc-page +title: "Deferred Givens" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/deferred-givens.html +--- + +Scala 3.6 introduces a new way to implement a given definition in a trait like this: +```scala +given T = deferred +``` +Such givens can be implemented automatically in subclasses. `deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. + +Deferred givens allow a clean implementation of context bounds in traits, +as in the following example: +```scala +trait Sorted: + type Element : Ord + +class SortedSet[A : Ord as ord] extends Sorted: + type Element = A +``` +The compiler expands this to the following implementation. +```scala +trait Sorted: + type Element + given Ord[Element] = compiletime.deferred + +class SortedSet[A](using ord: Ord[A]) extends Sorted: + type Element = A + override given Ord[Element] = ord +``` + +The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. + +One can also provide an explicit implementation of a deferred given, as in the following example: + +```scala +class SortedString[A] extends Sorted: + type Element = String + override given Ord[String] = ... +``` + +Note that the implementing given needs an `override` modifier since the `deferred` given in class `Sorted` counts as a concrete (i.e. not abstract) definition. In a sense, the `deferred` right hand side in `Sorted` is like a (magic, compiler-supported) macro, with the peculiarity that the macro's implementation also affects subclasses. + +## Abstract Givens + +A given may also be an abstract member, with the restriction that it must have an explicit name. 
Example: + +```scala +trait HasOrd[T]: + given ord: Ord[T] +``` +An abstract given has the form `given name: Type` without a right-hand side or arguments to the type. + +Since Scala 3.6, abstract givens are made redundant by deferred givens. Deferred givens can replace abstract givens. They have better ergonomics, since deferred givens get naturally implemented in inheriting classes, so there is no longer any need for boilerplate to fill in definitions of abstract givens. + +It is therefore recommended that software architectures relying on abstract givens be migrated to use deferred givens instead. Abstract givens are still supported in Scala 3.6, but will likely be deprecated and phased out over time. diff --git a/docs/_docs/reference/contextual/derivation-macro.md b/docs/_docs/reference/contextual/derivation-macro.md index 4b8dcffec846..6540a5d68a4b 100644 --- a/docs/_docs/reference/contextual/derivation-macro.md +++ b/docs/_docs/reference/contextual/derivation-macro.md @@ -135,10 +135,10 @@ trait Eq[T]: def eqv(x: T, y: T): Boolean object Eq: - given Eq[String] with + given Eq[String]: def eqv(x: String, y: String) = x == y - given Eq[Int] with + given Eq[Int]: def eqv(x: Int, y: Int) = x == y def eqProduct[T](body: (T, T) => Boolean): Eq[T] = diff --git a/docs/_docs/reference/contextual/derivation.md b/docs/_docs/reference/contextual/derivation.md index ed0e005c1bd4..cbf736d88034 100644 --- a/docs/_docs/reference/contextual/derivation.md +++ b/docs/_docs/reference/contextual/derivation.md @@ -19,9 +19,9 @@ The `derives` clause generates the following given instances for the `Eq`, `Orde companion object of `Tree`: ```scala -given [T: Eq] : Eq[Tree[T]] = Eq.derived -given [T: Ordering] : Ordering[Tree[T]] = Ordering.derived -given [T: Show] : Show[Tree[T]] = Show.derived +given [T: Eq] => Eq[Tree[T]] = Eq.derived +given [T: Ordering] => Ordering[Tree[T]] = Ordering.derived +given [T: Show] => Show[Tree[T]] = Show.derived ``` We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Show` instances are _derived instances_. @@ -29,7 +29,7 @@ We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Sho **Note:** `derived` can be used manually, this is useful when you do not have control over the definition. For example we can implement `Ordering` for `Option`s like so: ```scala -given [T: Ordering]: Ordering[Option[T]] = Ordering.derived +given [T: Ordering] => Ordering[Option[T]] = Ordering.derived ``` It is discouraged to directly refer to the `derived` member if you can use a `derives` clause instead. @@ -44,7 +44,7 @@ For a class/trait/object/enum `DerivingType[T_1, ..., T_N] derives TC`, a derive The general "shape" of the derived instance is as follows: ```scala -given [...](using ...): TC[ ... DerivingType[...] ... ] = TC.derived +given [...] => (...) => TC[ ... DerivingType[...] ... ] = TC.derived ``` `TC.derived` should be an expression that conforms to the expected type on the left, potentially elaborated using term and/or type inference. @@ -62,7 +62,7 @@ There are two further cases depending on the kinds of arguments: The generated instance is then: ```scala -given [T_1: TC, ..., T_N: TC]: TC[DerivingType[T_1, ..., T_N]] = TC.derived +given [T_1: TC, ..., T_N: TC] => TC[DerivingType[T_1, ..., T_N]] = TC.derived ``` This is the most common case, and is the one that was highlighted in the introduction. 
@@ -92,7 +92,7 @@ This section covers cases where you can pair arguments of `F` and `DerivingType` The general shape will then be: ```scala -given [...]: TC[ [...] =>> DerivingType[...] ] = TC.derived +given [...] => TC[ [...] =>> DerivingType[...] ] = TC.derived ``` Where of course `TC` and `DerivingType` are applied to types of the correct kind. @@ -114,7 +114,7 @@ given TC[ [A_1, ..., A_K] =>> DerivingType ] = TC.derived If `F` takes fewer arguments than `DerivingType` (`K < N`), we fill in the remaining leftmost slots with type parameters of the given: ```scala -given [T_1, ... T_(N-K)]: TC[[A_1, ..., A_K] =>> DerivingType[T_1, ... T_(N-K), A_1, ..., A_K]] = TC.derived +given [T_1, ... T_(N-K)] => TC[[A_1, ..., A_K] =>> DerivingType[T_1, ... T_(N-K), A_1, ..., A_K]] = TC.derived ``` ### `TC` is the `CanEqual` type class @@ -142,7 +142,7 @@ generates the following given instance: ```scala object MyClass: ... - given [A_L, A_R, G_L[_], G_R[_]](using CanEqual[A_L, A_R]): CanEqual[MyClass[A_L, G_L], MyClass[A_R, G_R]] = CanEqual.derived + given [A_L, A_R, G_L[_], G_R[_]] => CanEqual[A_L, A_R] => CanEqual[MyClass[A_L, G_L], MyClass[A_R, G_R]] = CanEqual.derived ``` ### `TC` is not valid for automatic derivation @@ -419,7 +419,7 @@ trait Eq[T]: def eqv(x: T, y: T): Boolean object Eq: - given Eq[Int] with + given Eq[Int]: def eqv(x: Int, y: Int) = x == y def check(x: Any, y: Any, elem: Eq[?]): Boolean = @@ -468,7 +468,7 @@ In this case the code that is generated by the inline expansion for the derived following, after a little polishing, ```scala -given derived$Eq[T](using eqT: Eq[T]): Eq[Lst[T]] = +given derived$Eq[T] => (eqT: Eq[T]) => Eq[Lst[T]] = eqSum(summon[Mirror.Of[Lst[T]]], {/* cached lazily */ List( eqProduct(summon[Mirror.Of[Cns[T]]], {/* cached lazily */ @@ -491,12 +491,12 @@ As a third example, using a higher-level library such as Shapeless, the type cla `derived` method as, ```scala -given eqSum[A](using inst: => K0.CoproductInstances[Eq, A]): Eq[A] with +given eqSum: [A] => (inst: => K0.CoproductInstances[Eq, A]) => Eq[A]: def eqv(x: A, y: A): Boolean = inst.fold2(x, y)(false)( [t] => (eqt: Eq[t], t0: t, t1: t) => eqt.eqv(t0, t1) ) -given eqProduct[A](using inst: => K0.ProductInstances[Eq, A]): Eq[A] with +given eqProduct: [A] => (inst: => K0.ProductInstances[Eq, A]) => Eq[A]: def eqv(x: A, y: A): Boolean = inst.foldLeft2(x, y)(true: Boolean)( [t] => (acc: Boolean, eqt: Eq[t], t0: t, t1: t) => Complete(!eqt.eqv(t0, t1))(false)(true) diff --git a/docs/_docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md index 8b9a3df5b84c..2aaa6a90e536 100644 --- a/docs/_docs/reference/contextual/extension-methods.md +++ b/docs/_docs/reference/contextual/extension-methods.md @@ -225,7 +225,7 @@ object List: extension [T](xs: List[List[T]]) def flatten: List[T] = xs.foldLeft(List.empty[T])(_ ++ _) - given [T: Ordering]: Ordering[List[T]] with + given [T: Ordering] => Ordering[List[T]]: extension (xs: List[T]) def < (ys: List[T]): Boolean = ... end List diff --git a/docs/_docs/reference/contextual/given-imports.md b/docs/_docs/reference/contextual/given-imports.md index 28442581e408..c9247b01183f 100644 --- a/docs/_docs/reference/contextual/given-imports.md +++ b/docs/_docs/reference/contextual/given-imports.md @@ -61,7 +61,7 @@ For instance, assuming the object ```scala object Instances: given intOrd: Ordering[Int] = ... - given listOrd[T: Ordering]: Ordering[List[T]] = ... + given listOrd: [T: Ordering] => Ordering[List[T]] = ... 
given ec: ExecutionContext = ... given im: Monoid[Int] = ... ``` diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index 5499fc39b53c..b7be460c9a34 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -14,28 +14,25 @@ trait Ord[T]: def < (y: T) = compare(x, y) < 0 def > (y: T) = compare(x, y) > 0 -given intOrd: Ord[Int] with +given intOrd: Ord[Int]: def compare(x: Int, y: Int) = if x < y then -1 else if x > y then +1 else 0 -given listOrd[T](using ord: Ord[T]): Ord[List[T]] with +given listOrd: [T: Ord] => Ord[List[T]]: def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 case (_, Nil) => +1 case (x :: xs1, y :: ys1) => - val fst = ord.compare(x, y) + val fst = summon[Ord[T]].compare(x, y) if fst != 0 then fst else compare(xs1, ys1) ``` This code defines a trait `Ord` with two given instances. `intOrd` defines a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens -for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` -themselves. The `using` clause in `listOrd` defines a condition: There must be a -given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. -Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]`. The clause `[T: Ord]` is a [context bound](./context-bounds.md) which defines a condition: There must be a given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). ## Anonymous Givens @@ -43,9 +40,9 @@ The name of a given can be left out. So the definitions of the last section can also be expressed like this: ```scala -given Ord[Int] with +given Ord[Int]: ... -given [T](using Ord[T]): Ord[List[T]] with +given [T: Ord] => Ord[List[T]]: ... ``` @@ -60,8 +57,7 @@ given_Ord_List ``` The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between -given instances of types that are "too similar". To avoid conflicts one can -use named instances. +given instances of types that are "too similar". To avoid conflicts one can use named instances. **Note:** To ensure robust binary compatibility, publicly available libraries should prefer named instances. @@ -82,152 +78,41 @@ Alias givens can be anonymous as well, e.g. ```scala given Position = enclosingTree.position -given (using config: Config): Factory = MemoizingFactory(config) -``` - -An alias given can have type parameters and context parameters just like any other given, -but it can only implement a single type. - -## Abstract Givens - -A given may be an abstract member, with the restriction that it must have an explicit name. 
- -```scala -trait HasOrd[T]: - given ord: Ord[T] -``` - -## More Structural Givens - -If an alias given instance is analogous to a lazy val, -and a structural given instance is analogous to an object, -albeit an object with an explicit type, -then a structural given may also be specified without an explicit type: - -```scala -class IntOrd extends Ord[Int]: - def compare(x: Int, y: Int) = - if x < y then -1 else if x > y then +1 else 0 - -given IntOrd() -``` - -Compare this syntax to: - -```scala -object intOrd extends IntOrd() -``` - -The empty parentheses are optional in the extends clause when defining a class, -but are required when defining a given. - -Further mixins are allowed as usual: - -```scala -given IntOrd() with OrdOps[Int] -``` - -## Given Macros - -Given aliases can have the `inline` and `transparent` modifiers. -Example: - -```scala -transparent inline given mkAnnotations[A, T]: Annotations[A, T] = ${ - // code producing a value of a subtype of Annotations -} -``` - -Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. - -Given instances can have the `inline` but not `transparent` modifiers as their type is already known from the signature. -Example: - -```scala -trait Show[T] { - inline def show(x: T): String -} - -inline given Show[Foo] with { - /*transparent*/ inline def show(x: Foo): String = ${ ... } -} - -def app = - // inlines `show` method call and removes the call to `given Show[Foo]` - summon[Show[Foo]].show(foo) -``` -Note that the inline methods within the given instances may be `transparent`. - -The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. -This is used to help dead code elimination of the given instances that are not used after inlining. - - -## Pattern-Bound Given Instances - -Given instances can also appear in patterns. Example: - -```scala -for given Context <- applicationContexts do - -pair match - case (ctx @ given Context, y) => ... -``` - -In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` -instance named `ctx` is established by matching against the first half of the `pair` selector. - -In each case, a pattern-bound given instance consists of `given` and a type `T`. The pattern matches exactly the same selectors as the type ascription pattern `_: T`. - -## Negated Givens - -Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement the analogue of a "negated" search in implicit resolution, -where a query Q1 fails if some other query Q2 succeeds and Q1 succeeds if Q2 fails. With the new cleaned up behavior these techniques no longer work. -But the new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) now implements negation directly. 
- 
-
-For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit
-search for `Q` fails, for example:
-
-```scala
-import scala.util.NotGiven
-
-trait Tagged[A]
-
-case class Foo[A](value: Boolean)
-object Foo:
-  given fooTagged[A](using Tagged[A]): Foo[A] = Foo(true)
-  given fooNotTagged[A](using NotGiven[Tagged[A]]): Foo[A] = Foo(false)
-
-@main def test(): Unit =
-  given Tagged[Int]()
-  assert(summon[Foo[Int]].value) // fooTagged is found
-  assert(!summon[Foo[String]].value) // fooNotTagged is found
 ```
 
 ## Given Instance Initialization
 
-A given instance without type or context parameters is initialized on-demand, the first
-time it is accessed. If a given has type or context parameters, a fresh instance
-is created for each reference.
+An unconditional given instance without parameters is initialized on-demand, the first
+time it is accessed. If the given is a simple alias to some immutable value, the given is implemented as a simple forwarder, without incurring the cost of a field to hold a cached value. If a given is conditional, a fresh instance is created for each reference.
 
 ## Syntax
 
-Here is the syntax for given instances:
+Here is the full syntax for given instances. Some of these forms of givens are explained on a separate page: [Other Forms of Givens](../more-givens.md).
 
 ```ebnf
-TmplDef ::= ...
-           | ‘given’ GivenDef
-GivenDef ::= [GivenSig] StructuralInstance
-           | [GivenSig] AnnotType ‘=’ Expr
-           | [GivenSig] AnnotType
-GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’
-StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ TemplateBody]
+TmplDef ::= ... | 'given' GivenDef
+GivenDef ::= [id ':'] GivenSig
+GivenSig ::= GivenImpl
+            | '(' ')' '=>' GivenImpl
+            | GivenConditional '=>' GivenSig
+GivenImpl ::= GivenType ([‘=’ Expr] | TemplateBody)
+            | ConstrApps TemplateBody
+GivenConditional ::= DefTypeParamClause
+                  | DefTermParamClause
+                  | '(' FunArgTypes ')'
+                  | GivenType
+GivenType ::= AnnotType1 {id [nl] AnnotType1}
 ```
 
-A given instance starts with the reserved word `given` and an optional _signature_. The signature
-defines a name and/or parameters for the instance. It is followed by `:`. There are three kinds
-of given instances:
+A given instance starts with the reserved word `given`, which is followed by
+
+ - An optional name and a colon
+ - An optional list of conditions.
+ - The implemented type(s) and their implementation, in two forms: alias givens and structural givens.
+   - An _alias given_ implements a single type with a right hand side following `=`.
+   - A _structural given_ implements one or more class constructors with a
+     list of member definitions in a template body.
 
-- A _structural instance_ contains one or more types or constructor applications,
-  followed by `with` and a template body that contains member definitions of the instance.
-- An _alias instance_ contains a type, followed by `=` and a right-hand side expression.
-- An _abstract instance_ contains just the name and type, which is not followed by anything.
+**Note** Parts of the given syntax have changed in Scala 3.6. The original syntax from Scala 3.0 on is described in a separate page [Previous Given Syntax](../previous-givens.md). The original syntax is still supported for now but will be deprecated and phased out over time.
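To keep the two implementation forms apart, here is a self-contained sketch (an illustration added for clarity; the `Ord` trait below is a trimmed-down stand-in for the one defined at the top of this page, and `IntOrd` is an assumed helper class):

```scala
trait Ord[T]:
  def compare(x: T, y: T): Int

class IntOrd extends Ord[Int]:
  def compare(x: Int, y: Int) =
    if x < y then -1 else if x > y then +1 else 0

// Alias given: implements a single type, the right-hand side follows `=`.
given Ord[Int] = IntOrd()

// Structural given: implements a constructor, member definitions follow `:`.
given Ord[String]:
  def compare(x: String, y: String) = x.compareTo(y)
```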
diff --git a/docs/_docs/reference/contextual/more-givens.md b/docs/_docs/reference/contextual/more-givens.md new file mode 100644 index 000000000000..3d0076543cd7 --- /dev/null +++ b/docs/_docs/reference/contextual/more-givens.md @@ -0,0 +1,204 @@ +--- +layout: doc-page +title: "Other Forms Of Givens" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/givens.html +--- + +The concept of given instances is quite general. This page covers forms of givens that were not treated before. + +## Simple Structural Givens + +Some givens simply instantiate a class without needing an alias or additional member declarations. Example: + +```scala +class IntOrd extends Ord[Int]: + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given IntOrd() +``` +In this case, the given clause consists of just a class creation expression, such as `IntOrd()` above. + +## Conditional Givens with Parameters + +Conditional givens can also be defined with parameters. Example: +```scala +given (config: Config) => Factory = MemoizingFactory(config) +``` +Here, `(config: Config)` describes a context parameter expressing a condition: We can synthesize a given `Factory` _provided_ we can synthesize a given `config` of type `Config`. + +Type parameters and context parameters can be combined. For instance the `listOrd` instance above could alternatively be expressed like this: +```scala +given listOrd: [T] => Ord[T] => Ord[List[T]]: + ... + def compare(x: List[T], y: List[T]) = ... +``` +As the example shows, each parameter section is followed by an `=>`. + +It is also possible to name context parameters: +```scala +given listOrd: [T] => (ord: Ord[T]) => Ord[List[T]]: + ... +``` + +## By Name Givens + +We sometimes find it necessary that a given alias is re-evaluated each time it is called. For instance, say we have a mutable variable `curCtx` and we want to define a given that returns the current value of that variable. A normal given alias will not do since by default given aliases are mapped to lazy vals. + +In general, we want to avoid re-evaluation of givens. But there are situations like the one above where we want to specify _by-name_ evaluation instead. This is achieved by writing a conditional given with an empty parameter list: +```scala + val curCtx: Context + given context: () => Context = curCtx +``` +With this definition, each time a `Context` is summoned we evaluate `context` function, which produces the current value of `curCtx`. + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. +Example: + +```scala +transparent inline given mkAnnotations: [A, T] => Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` + +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +Structural givens can also have the `inline` modifier. But the `transparent` modifier is not allowed for them as their type is already known from the signature. + +Example: + +```scala +trait Show[T]: + inline def show(x: T): String + +inline given Show[Foo]: + inline def show(x: Foo): String = ${ ... } + +def app = + // inlines `show` method call and removes the call to `given Show[Foo]` + summon[Show[Foo]].show(foo) +``` +Note that inline methods within given instances may be `transparent`. + + + +## Pattern-Bound Given Instances + +Given instances can also appear in patterns. 
Example: + +```scala +for given Context <- applicationContexts do + +pair match + case (ctx @ given Context, y) => ... +``` + +In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` +instance named `ctx` is established by matching against the first half of the `pair` selector. + +In each case, a pattern-bound given instance consists of `given` and a type `T`. The pattern matches exactly the same selectors as the type ascription pattern `_: T`. + +## Negated Givens + + +We sometimes want to have an implicit search succeed if a given instance for some other type is _not_ available. There is a special class [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) that implements this kind of negation. + +For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit +search for `Q` fails, for example: + +```scala +import scala.util.NotGiven + +trait Tagged[A] + +case class Foo[A](value: Boolean) +object Foo: + given fooTagged: [A] => Tagged[A] => Foo[A] = Foo(true) + given fooNotTagged: [A] => NotGiven[Tagged[A]] => Foo[A] = Foo(false) + +@main def test(): Unit = + given Tagged[Int]() + assert(summon[Foo[Int]].value) // fooTagged is found + assert(!summon[Foo[String]].value) // fooNotTagged is found +``` + +## Summary + +Here is a summary of common forms of given clauses: + +```scala + // Simple typeclass + given Ord[Int]: + def compare(x: Int, y: Int) = ... + + // Parameterized typeclass with context bound + given [A: Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with context parameter + given [A] => Ord[A] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with named context parameter + given [A] => (ord: Ord[A]) => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Simple alias + given Ord[Int] = IntOrd() + + // Parameterized alias with context bound + given [A: Ord] => Ord[List[A]] = + ListOrd[A] + + // Parameterized alias with context parameter + given [A] => Ord[A] => Ord[List[A]] = + ListOrd[A] + + // Deferred given + given Context = deferred + + // By-name given + given () => Context = curCtx +``` + +All of these clauses also exist in named form: +```scala + // Simple typeclass + given intOrd: Ord[Int]: + def compare(x: Int, y: Int) = ... + + // Parameterized typeclass with context bound + given listOrd: [A: Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with context parameter + given listOrd: [A] => Ord[A] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with named context parameter + given listOrd: [A] => (ord: Ord[A]) => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... 
+ + // Simple alias + given intOrd: Ord[Int] = IntOrd() + + // Parameterized alias with context bound + given listOrd: [A: Ord] => Ord[List[A]] = + ListOrd[A] + + // Parameterized alias with context parameter + given listOrd: [A] => Ord[A] => Ord[List[A]] = + ListOrd[A] + + // Abstract or deferred given + given context: Context = deferred + + // By-name given + given context: () => Context = curCtx +``` diff --git a/docs/_docs/reference/contextual/multiversal-equality.md b/docs/_docs/reference/contextual/multiversal-equality.md index 6258973c0cda..fb980853ea8e 100644 --- a/docs/_docs/reference/contextual/multiversal-equality.md +++ b/docs/_docs/reference/contextual/multiversal-equality.md @@ -109,7 +109,7 @@ By the usual rules of [type class derivation](./derivation.md), this generates the following `CanEqual` instance in the companion object of `Box`: ```scala -given [T, U](using CanEqual[T, U]): CanEqual[Box[T], Box[U]] = +given [T, U] => CanEqual[T, U] => CanEqual[Box[T], Box[U]] = CanEqual.derived ``` diff --git a/docs/_docs/reference/contextual/previous-givens.md b/docs/_docs/reference/contextual/previous-givens.md new file mode 100644 index 000000000000..dc88daaab691 --- /dev/null +++ b/docs/_docs/reference/contextual/previous-givens.md @@ -0,0 +1,233 @@ +--- +layout: doc-page +title: "Previous Given Syntax" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/previous-givens.html +--- + +Given instances (or, simply, "givens") define "canonical" values of certain types +that serve for synthesizing arguments to [context parameters](./using-clauses.md). Example: + +```scala +trait Ord[T]: + def compare(x: T, y: T): Int + extension (x: T) + def < (y: T) = compare(x, y) < 0 + def > (y: T) = compare(x, y) > 0 + +given intOrd: Ord[Int] with + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given listOrd[T](using ord: Ord[T]): Ord[List[T]] with + + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if fst != 0 then fst else compare(xs1, ys1) + +``` + +This code defines a trait `Ord` with two given instances. `intOrd` defines +a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` +themselves. The `using` clause in `listOrd` defines a condition: There must be a +given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. +Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). + +## Anonymous Givens + +The name of a given can be left out. So the definitions +of the last section can also be expressed like this: + +```scala +given Ord[Int] with + ... +given [T](using Ord[T]): Ord[List[T]] with + ... +``` + +If the name of a given is missing, the compiler will synthesize a name from +the implemented type(s). + +**Note:** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: + +```scala +given_Ord_Int +given_Ord_List +``` + +The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between +given instances of types that are "too similar". To avoid conflicts one can +use named instances. 
+ +**Note:** To ensure robust binary compatibility, publicly available libraries should prefer named instances. + +## Alias Givens + +An alias can be used to define a given instance that is equal to some expression. Example: + +```scala +given global: ExecutionContext = ForkJoinPool() +``` + +This creates a given `global` of type `ExecutionContext` that resolves to the right +hand side `ForkJoinPool()`. +The first time `global` is accessed, a new `ForkJoinPool` is created, which is then +returned for this and all subsequent accesses to `global`. This operation is thread-safe. + +Alias givens can be anonymous as well, e.g. + +```scala +given Position = enclosingTree.position +given (using config: Config): Factory = MemoizingFactory(config) +``` + +An alias given can have type parameters and context parameters just like any other given, +but it can only implement a single type. + +## Abstract Givens + +A given may be an abstract member, with the restriction that it must have an explicit name. + +```scala +trait HasOrd[T]: + given ord: Ord[T] +``` + +## More Structural Givens + +If an alias given instance is analogous to a lazy val, +and a structural given instance is analogous to an object, +albeit an object with an explicit type, +then a structural given may also be specified without an explicit type: + +```scala +class IntOrd extends Ord[Int]: + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given IntOrd() +``` + +Compare this syntax to: + +```scala +object intOrd extends IntOrd() +``` + +The empty parentheses are optional in the extends clause when defining a class, +but are required when defining a given. + +Further mixins are allowed as usual: + +```scala +given IntOrd() with OrdOps[Int] +``` + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. +Example: + +```scala +transparent inline given mkAnnotations[A, T]: Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` + +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +Given instances can have the `inline` but not `transparent` modifiers as their type is already known from the signature. +Example: + +```scala +trait Show[T] { + inline def show(x: T): String +} + +inline given Show[Foo] with { + /*transparent*/ inline def show(x: Foo): String = ${ ... } +} + +def app = + // inlines `show` method call and removes the call to `given Show[Foo]` + summon[Show[Foo]].show(foo) +``` +Note that the inline methods within the given instances may be `transparent`. + +The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. +This is used to help dead code elimination of the given instances that are not used after inlining. + + +## Pattern-Bound Given Instances + +Given instances can also appear in patterns. Example: + +```scala +for given Context <- applicationContexts do + +pair match + case (ctx @ given Context, y) => ... +``` + +In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` +instance named `ctx` is established by matching against the first half of the `pair` selector. + +In each case, a pattern-bound given instance consists of `given` and a type `T`. 
The pattern matches exactly the same selectors as the type ascription pattern `_: T`. + +## Negated Givens + +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement the analogue of a "negated" search in implicit resolution, +where a query Q1 fails if some other query Q2 succeeds and Q1 succeeds if Q2 fails. With the new cleaned up behavior these techniques no longer work. +But the new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) now implements negation directly. + +For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit +search for `Q` fails, for example: + +```scala +import scala.util.NotGiven + +trait Tagged[A] + +case class Foo[A](value: Boolean) +object Foo: + given fooTagged[A](using Tagged[A]): Foo[A] = Foo(true) + given fooNotTagged[A](using NotGiven[Tagged[A]]): Foo[A] = Foo(false) + +@main def test(): Unit = + given Tagged[Int]() + assert(summon[Foo[Int]].value) // fooTagged is found + assert(!summon[Foo[String]].value) // fooNotTagged is found +``` + +## Given Instance Initialization + +A given instance without type or context parameters is initialized on-demand, the first +time it is accessed. If a given has type or context parameters, a fresh instance +is created for each reference. + +## Syntax + +Here is the syntax for given instances: + +```ebnf +TmplDef ::= ... + | ‘given’ GivenDef +GivenDef ::= [GivenSig] StructuralInstance + | [GivenSig] AnnotType ‘=’ Expr + | [GivenSig] AnnotType +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ TemplateBody] +``` + +A given instance starts with the reserved word `given` and an optional _signature_. The signature +defines a name and/or parameters for the instance. It is followed by `:`. There are three kinds +of given instances: + +- A _structural instance_ contains one or more types or constructor applications, + followed by `with` and a template body that contains member definitions of the instance. +- An _alias instance_ contains a type, followed by `=` and a right-hand side expression. +- An _abstract instance_ contains just the name and type, which is not followed by anything. diff --git a/docs/_docs/reference/contextual/relationship-implicits.md b/docs/_docs/reference/contextual/relationship-implicits.md index fce07f51151a..4ff38f709200 100644 --- a/docs/_docs/reference/contextual/relationship-implicits.md +++ b/docs/_docs/reference/contextual/relationship-implicits.md @@ -15,7 +15,7 @@ Given instances can be mapped to combinations of implicit objects, classes and i 1. Given instances without parameters are mapped to implicit objects. For instance, ```scala - given intOrd: Ord[Int] with { ... } + given intOrd: Ord[Int] { ... } ``` maps to @@ -27,7 +27,7 @@ Given instances can be mapped to combinations of implicit objects, classes and i 2. Parameterized givens are mapped to combinations of classes and implicit methods. For instance, ```scala - given listOrd[T](using ord: Ord[T]): Ord[List[T]] with { ... } + given listOrd: [T] => (ord: Ord[T]) => Ord[List[T]] { ... } ``` maps to @@ -63,8 +63,8 @@ final implicit def given_Context = ctx Anonymous given instances get compiler synthesized names, which are generated in a reproducible way from the implemented type(s). 
For example, if the names of the `IntOrd` and `ListOrd` givens above were left out, the following names would be synthesized instead: ```scala -given given_Ord_Int: Ord[Int] with { ... } -given given_Ord_List[T](using ord: Ord[T]): Ord[List[T]] with { ... } +given given_Ord_Int: Ord[Int] { ... } +given given_Ord_List: [T] => (ord: Ord[T]) => Ord[List[T]] { ... } ``` The synthesized type names are formed from @@ -153,7 +153,7 @@ implicit def stringToToken(str: String): Token = new Keyword(str) one can write ```scala -given stringToToken: Conversion[String, Token] with +given stringToToken: Conversion[String, Token]: def apply(str: String): Token = KeyWord(str) ``` diff --git a/docs/_docs/reference/contextual/type-classes.md b/docs/_docs/reference/contextual/type-classes.md index 6a15ac3a83d4..0e1ccdf8d2c8 100644 --- a/docs/_docs/reference/contextual/type-classes.md +++ b/docs/_docs/reference/contextual/type-classes.md @@ -27,7 +27,7 @@ trait Monoid[T] extends SemiGroup[T]: An implementation of this `Monoid` type class for the type `String` can be the following: ```scala -given Monoid[String] with +given Monoid[String]: extension (x: String) def combine (y: String): String = x.concat(y) def unit: String = "" ``` @@ -35,7 +35,7 @@ given Monoid[String] with Whereas for the type `Int` one could write the following: ```scala -given Monoid[Int] with +given Monoid[Int]: extension (x: Int) def combine (y: Int): Int = x + y def unit: Int = 0 ``` @@ -43,22 +43,8 @@ given Monoid[Int] with This monoid can now be used as _context bound_ in the following `combineAll` method: ```scala -def combineAll[T: Monoid](xs: List[T]): T = - xs.foldLeft(summon[Monoid[T]].unit)(_.combine(_)) -``` - -To get rid of the `summon[...]` we can define a `Monoid` object as follows: - -```scala -object Monoid: - def apply[T](using m: Monoid[T]) = m -``` - -Which would allow to re-write the `combineAll` method this way: - -```scala -def combineAll[T: Monoid](xs: List[T]): T = - xs.foldLeft(Monoid[T].unit)(_.combine(_)) +def combineAll[T: Monoid as m](xs: List[T]): T = + xs.foldLeft(m.unit)(_.combine(_)) ``` ## Functors @@ -77,7 +63,7 @@ Which could read as follows: "A `Functor` for the type constructor `F[_]` repres This way, we could define an instance of `Functor` for the `List` type: ```scala -given Functor[List] with +given Functor[List]: def map[A, B](x: List[A], f: A => B): List[B] = x.map(f) // List already has a `map` method ``` @@ -109,7 +95,7 @@ trait Functor[F[_]]: The instance of `Functor` for `List` now becomes: ```scala -given Functor[List] with +given Functor[List]: extension [A](xs: List[A]) def map[B](f: A => B): List[B] = xs.map(f) // List already has a `map` method @@ -159,7 +145,7 @@ end Monad A `List` can be turned into a monad via this `given` instance: ```scala -given listMonad: Monad[List] with +given listMonad: Monad[List]: def pure[A](x: A): List[A] = List(x) extension [A](xs: List[A]) @@ -176,7 +162,7 @@ it explicitly. 
`Option` is an other type having the same kind of behaviour: ```scala -given optionMonad: Monad[Option] with +given optionMonad: Monad[Option]: def pure[A](x: A): Option[A] = Option(x) extension [A](xo: Option[A]) @@ -223,7 +209,7 @@ type ConfigDependent[Result] = Config => Result The monad instance will look like this: ```scala -given configDependentMonad: Monad[ConfigDependent] with +given configDependentMonad: Monad[ConfigDependent]: def pure[A](x: A): ConfigDependent[A] = config => x @@ -244,7 +230,7 @@ type ConfigDependent = [Result] =>> Config => Result Using this syntax would turn the previous `configDependentMonad` into: ```scala -given configDependentMonad: Monad[[Result] =>> Config => Result] with +given configDependentMonad: Monad[[Result] =>> Config => Result]: def pure[A](x: A): Config => A = config => x @@ -259,7 +245,7 @@ end configDependentMonad It is likely that we would like to use this pattern with other kinds of environments than our `Config` trait. The Reader monad allows us to abstract away `Config` as a type _parameter_, named `Ctx` in the following definition: ```scala -given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with +given readerMonad: [Ctx] => Monad[[X] =>> Ctx => X]: def pure[A](x: A): Ctx => A = ctx => x diff --git a/docs/_docs/reference/contextual/using-clauses.md b/docs/_docs/reference/contextual/using-clauses.md index 9177a2f47dc9..9d03a7d2cec5 100644 --- a/docs/_docs/reference/contextual/using-clauses.md +++ b/docs/_docs/reference/contextual/using-clauses.md @@ -115,7 +115,7 @@ Multiple `using` clauses are matched left-to-right in applications. Example: ```scala object global extends Universe { type Context = ... } -given ctx : global.Context with { type Symbol = ...; type Kind = ... } +given ctx : global.Context { type Symbol = ...; type Kind = ... } given sym : ctx.Symbol given kind: ctx.Kind diff --git a/docs/_docs/reference/enums/enums.md b/docs/_docs/reference/enums/enums.md index 8d4fca3268b0..4cad29cbd76a 100644 --- a/docs/_docs/reference/enums/enums.md +++ b/docs/_docs/reference/enums/enums.md @@ -147,16 +147,13 @@ We now want to deprecate the `Pluto` case. First we add the `scala.deprecated` a Outside the lexical scopes of `enum Planet` or `object Planet`, references to `Planet.Pluto` will produce a deprecation warning, but within those scopes we can still reference it to implement introspection over the deprecated cases: ```scala -trait Deprecations[T <: reflect.Enum] { +trait Deprecations[T <: reflect.Enum]: extension (t: T) def isDeprecatedCase: Boolean -} -object Planet { - given Deprecations[Planet] with { +object Planet: + given Deprecations[Planet]: extension (p: Planet) def isDeprecatedCase = p == Pluto - } -} ``` We could imagine that a library may use [type class derivation](../contextual/derivation.md) to automatically provide an instance for `Deprecations`. @@ -167,7 +164,8 @@ If you want to use the Scala-defined enums as [Java enums](https://docs.oracle.c the class `java.lang.Enum`, which is imported by default, as follows: ```scala -enum Color extends Enum[Color] { case Red, Green, Blue } +enum Color extends Enum[Color]: + case Red, Green, Blue ``` The type parameter comes from the Java enum [definition](https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Enum.html) and should be the same as the type of the enum. 
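As an illustrative sketch (not part of the original page): because `Color` now extends `java.lang.Enum[Color]`, the standard Java enum API becomes available on its cases, for example `compareTo`, which orders cases by declaration order:

```scala
enum Color extends Enum[Color]:
  case Red, Green, Blue

// `compareTo` is inherited from java.lang.Enum and compares ordinals,
// so Red (declared first) compares as less than Green.
val cmp: Int = Color.Red.compareTo(Color.Green)   // negative value
```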
diff --git a/docs/_docs/reference/experimental/erased-defs-spec.md b/docs/_docs/reference/experimental/erased-defs-spec.md index 59dfed92da2a..1861b734bb47 100644 --- a/docs/_docs/reference/experimental/erased-defs-spec.md +++ b/docs/_docs/reference/experimental/erased-defs-spec.md @@ -34,9 +34,9 @@ TODO: complete 3. Functions * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R` - * `(given x1: T1, erased x2: T2, ..., xN: TN) => y: (given T1, erased T2, ..., TN) => R` - * `(given erased T1) => R <:< erased T1 => R` - * `(given T1, erased T2) => R <:< (T1, erased T2) => R` + * `(using x1: T1, erased x2: T2, ..., xN: TN) => y: (using T1, erased T2, ..., TN) => R` + * `(using erased T1) => R <:< erased T1 => R` + * `(using T1, erased T2) => R <:< (T1, erased T2) => R` * ... Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`). The `erased` parameters must match exactly in their respective positions. diff --git a/docs/_docs/reference/experimental/numeric-literals.md b/docs/_docs/reference/experimental/numeric-literals.md index 8b7aaa23f9e0..8317e9ff83c4 100644 --- a/docs/_docs/reference/experimental/numeric-literals.md +++ b/docs/_docs/reference/experimental/numeric-literals.md @@ -168,7 +168,7 @@ To accept `BigFloat` literals, all that's needed in addition is a `given` instan `FromDigits.Floating[BigFloat]`: ```scala - given FromDigits: FromDigits.Floating[BigFloat] with + given FromDigits: FromDigits.Floating[BigFloat]: def fromDigits(digits: String) = apply(digits) end BigFloat ``` @@ -205,7 +205,7 @@ object BigFloat: class FromDigits extends FromDigits.Floating[BigFloat]: def fromDigits(digits: String) = apply(digits) - given FromDigits with + given FromDigits: override inline def fromDigits(digits: String) = ${ fromDigitsImpl('digits) } diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index 08839ffe58eb..c366c40779b9 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -4,7 +4,7 @@ title: "Better Support for Type Classes" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html --- -Martin Odersky, 8.1.2024, edited 5.4.2024 +Martin Odersky, 8.1.2024, edited 5.4.2024 and 30.9.2024 A type class in Scala is a pattern where we define @@ -18,13 +18,14 @@ a bit cumbersome and limiting for standard generic programming patterns. Much ha This note shows that with some fairly small and reasonable tweaks to Scala's syntax and typing rules we can obtain a much better scheme for working with type classes, or do generic programming in general. The bulk of the suggested improvements has been implemented and is available -under source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: +in under source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: ``` scala compile -source:future -language:experimental.modularity ``` -It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. The order of exposition described in this note is different from the planned proposals of SIPs. 
This doc is not a guide on how to sequence details, but instead wants to present a vision of what is possible. For instance, we start here with a feature (Self types and `is` syntax) that has turned out to be controversial and that will probably be proposed only late in the sequence of SIPs. +It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. This SIP has been accepted for inclusion in the language and will be released in Scala 3.6. The remaining elements +that concern type classes are described in the following. There is also a separate [page on modularity improvements](../modularity.md) that describes proposed additions not directly related to type classes. ## Generalizing Context Bounds @@ -145,70 +146,6 @@ This makes writing instance definitions and using clauses quite pleasant. Exampl (more examples will follow below) - - -## Naming Context Bounds - -Context bounds are a convenient and legible abbreviation. A problem so far is that they are always anonymous, -one cannot name the using parameter to which a context bound expands. - -For instance, consider a `reduce` method over `Monoid`s defined like this: - -```scala -def reduce[A : Monoid](xs: List[A]): A = ??? -``` -Since we don't have a name for the `Monoid` instance of `A`, we need to resort to `summon` in the body of `reduce`: -```scala -def reduce[A : Monoid](xs: List[A]): A = - xs.foldLeft(summon Monoid[A])(_ `combine` _) -``` -That's generally considered too painful to write and read, hence people usually adopt one of two alternatives. Either, eschew context bounds and switch to using clauses: -```scala -def reduce[A](xs: List[A])(using m: Monoid[A]): A = - xs.foldLeft(m)(_ `combine` _) -``` -Or, plan ahead and define a "trampoline" method in `Monoid`'s companion object: -```scala - trait Monoid[A] extends SemiGroup[A]: - def unit: A - object Monoid: - def unit[A](using m: Monoid[A]): A = m.unit - ... - def reduce[A : Monoid](xs: List[A]): A = - xs.foldLeft(Monoid.unit)(_ `combine` _) -``` -This is all accidental complexity which can be avoided by the following proposal. - -**Proposal:** Allow to name a context bound, like this: -```scala - def reduce[A : Monoid as m](xs: List[A]): A = - xs.foldLeft(m.unit)(_ `combine` _) -``` - -We use `as x` after the type to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before. - -**Benefits:** The new syntax is simple and clear. -It avoids the awkward choice between concise context bounds that can't be named and verbose using clauses that can. - -### New Syntax for Aggregate Context Bounds - -Aggregate context bounds like `A : X : Y` are not obvious to read, and it becomes worse when we add names, e.g. `A : X as x : Y as y`. - -**Proposal:** Allow to combine several context bounds inside `{...}`, analogous -to import clauses. Example: - -```scala - trait: - def showMax[X : {Ordering, Show}](x: X, y: X): String - class B extends A: - def showMax[X : {Ordering as ordering, Show as show}](x: X, y: X): String = - show.asString(ordering.max(x, y)) -``` - -The old syntax with multiple `:` should be phased out over time. - -**Benefits:** The new syntax is much clearer than the old one, in particular for newcomers that don't know context bounds well. 
- ### Better Default Names for Context Bounds So far, an unnamed context bound for a type parameter gets a synthesized fresh name. It would be much more useful if it got the name of the constrained type parameter instead, translated to be a term name. This means our `reduce` method over monoids would not even need an `as` binding. We could simply formulate it as follows: @@ -233,330 +170,7 @@ The default naming convention reduces the need for named context bounds. But nam - They give an explanation what a single unnamed context bound expands to. -### Expansion of Context Bounds - -Context bounds are currently translated to implicit parameters in the last parameter list of a method or class. This is a problem if a context bound is mentioned in one of the preceding parameter types. For example, consider a type class of parsers with associated type members `Input` and `Result` describing the input type on which the parsers operate and the type of results they produce: -```scala -trait Parser[P]: - type Input - type Result -``` -Here is a method `run` that runs a parser on an input of the required type: - -```scala -def run[P : Parser](in: P.Input): P.Result -``` -Or, making clearer what happens by using an explicit name for the context bound: -```scala -def run[P : Parser as p](in: p.Input): p.Result -``` -With the current translation this does not work since it would be expanded to: -```scala - def run[P](x: p.Input)(using p: Parser[P]): p.Result -``` -Note that the `p` in `p.Input` refers to the `p` introduced in the using clause, which comes later. So this is ill-formed. - -This problem would be fixed by changing the translation of context bounds so that they expand to using clauses immediately after the type parameter. But such a change is infeasible, for two reasons: - - 1. It would be a binary-incompatible change. - 2. Putting using clauses earlier can impair type inference. A type in - a using clause can be constrained by term arguments coming before that - clause. Moving the using clause first would miss those constraints, which could cause ambiguities in implicit search. - -But there is an alternative which is feasible: - -**Proposal:** Map the context bounds of a method or class as follows: - - 1. If one of the bounds is referred to by its term name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. - 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. - 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. - -Rules (2) and (3) are the status quo, and match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility is maintained. - -**Discussion** More refined rules could be envisaged where context bounds are spread over different using clauses so that each comes as late as possible. But it would make matters more complicated and the gain in expressiveness is not clear to me. - -Named (either explicitly, or by default) context bounds in givens that produce classes are mapped to tracked val's of these classes (see #18958). This allows -references to these parameters to be precise, so that information about dependent type members is preserved. 
- - -## Context Bounds for Type Members - -It's not very orthogonal to allow subtype bounds for both type parameters and abstract type members, but context bounds only for type parameters. What's more, we don't even have the fallback of an explicit using clause for type members. The only alternative is to also introduce a set of abstract givens that get implemented in each subclass. This is extremely heavyweight and opaque to newcomers. - -**Proposal**: Allow context bounds for type members. Example: - -```scala - class Collection: - type Element : Ord -``` - -The question is how these bounds are expanded. Context bounds on type parameters -are expanded into using clauses. But for type members this does not work, since we cannot refer to a member type of a class in a parameter type of that class. What we are after is an equivalent of using parameter clauses but represented as class members. - -**Proposal:** Introduce a new way to implement a given definition in a trait like this: -```scala -given T = deferred -``` -`deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. - -Deferred givens allow a clean implementation of context bounds in traits, -as in the following example: -```scala -trait Sorted: - type Element : Ord - -class SortedSet[A : Ord] extends Sorted: - type Element = A -``` -The compiler expands this to the following implementation: -```scala -trait Sorted: - type Element - given Ord[Element] = compiletime.deferred - -class SortedSet[A](using A: Ord[A]) extends Sorted: - type Element = A - override given Ord[Element] = A // i.e. the A defined by the using clause -``` - -The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. - -**Benefits:** - - - Better orthogonality, type parameters and abstract type members now accept the same kinds of bounds. - - Better ergonomics, since deferred givens get naturally implemented in inheriting classes, no need for boilerplate to fill in definitions of abstract givens. - -**Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. -It is a concrete definition where the compiler will provide the correct implementation. - -### Abolish Abstract Givens - -With `deferred` givens there is no need anymore to also define abstract givens. The two mechanisms are very similar, but the user experience for -deferred givens is generally more ergonomic. Abstract givens also are uncomfortably close to concrete class instances. 
Their syntax clashes -with the quite common case where we want to establish a given without any nested definitions. For instance, consider a given that constructs a type tag: -```scala -class Tag[T] -``` -Then this works: -```scala -given Tag[String]() -given Tag[String] with {} -``` -But the following more natural syntax fails: -```scala -given Tag[String] -``` -The last line gives a rather cryptic error: -``` -1 |given Tag[String] - | ^ - | anonymous given cannot be abstract -``` -The underlying problem is that abstract givens are very rare (and should become completely unnecessary once deferred givens are introduced), yet occupy a syntax that looks very close to the more common case of concrete -typeclasses without nested definitions. - -**Proposal:** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. Deprecate the current version of abstract givens, and remove them in a future Scala version. - -**Benefits:** - - - Simplification of the language since a feature is dropped - - Eliminate non-obvious and misleading syntax. - -The only downside is that deferred givens are restricted to be used in traits, whereas abstract givens are also allowed in abstract classes. But I would be surprised if actual code relied on that difference, and such code could in any case be easily rewritten to accommodate the restriction. - -## New Given Syntax - -A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. Dissonances and irregularities should be avoided. - -When designing Scala 3, I believe that, by and large, we achieved that goal, except in one area, which is the syntax of givens. There _are_ some glaring dissonances, as seen in this code for defining an ordering on lists: -```scala -given [A](using Ord[A]): Ord[List[A]] with - def compare(x: List[A], y: List[A]) = ... -``` -The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need of `with` sticks out like a sore thumb. - -Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. Since the current syntax is unnatural and esoteric, this means it's difficult to discover and very foreign even after that. This makes it much harder to learn and apply givens than it need be. - -The previous conditional given syntax was inspired from method definitions. If we add the optional name to the previous example, we obtain something akin to an implicit method in Scala 2: -```scala -given listOrd[A](using Ord[A]): Ord[List[A]] with - def compare(x: List[A], y: List[A]) = ... -``` -The anonymous syntax was then obtained by simply dropping the name. -But without a name, the syntax looks weird and inconsistent. - -This is a problem since at least for typeclasses, anonymous givens should be the norm. -Givens are like extends clauses. We state a _fact_, that a -type implements a type class, or that a value can be used implicitly. We don't need a name for that fact. It's analogous to extends clauses, where we state that a class is a subclass of some other class or trait. 
We would not think it useful to name an extends clause, it's simply a fact that is stated. -It's also telling that every other language that defines type classes uses anonymous syntax. Somehow, nobody ever found it necessary to name these instances. - -A more intuitive and in my opinion cleaner alternative is to decree that a given should always look like it _implements a type_. Conditional givens should look like they implement function types. The `Ord` typeclass instances for `Int` and `List` would then look like this: -```scala -given Ord[String]: - def compare(x: String, y: String) = ... - -given [A : Ord] => Ord[List[A]]: - def compare(x: List[A], y: List[A]) = ... -``` -The second, conditional instance looks like it implements the function type -```scala -[A : Ord] => Ord[List[A]] -``` -Another way to see this is as an implication: -If `A` is a type that is `Ord`, then `List[A]` is `Ord` (and the rest of the given clause gives the implementation that makes it so). -Equivalently, `A` is `Ord` _implies_ `List[A]` is `Ord`, hence the `=>`. - -Yet another related meaning is that the given clause establishes a _context function_ of type `[A: Ord] ?=> Ord[List[A]]` that is automatically applied to evidence arguments of type `Ord[A]` and that yields instances of type `Ord[List[A]]`. Since givens are in any case applied automatically to all their arguments, we don't need to specify that separately with `?=>`, a simple `=>` arrow is sufficiently clear and is easier to read. - -All these viewpoints are equivalent, in a deep sense. This is exactly the Curry Howard isomorphism, which equates function types and implications. - -In the new syntax, a `given` clause consists of the following elements: - - - An optional name binding `id :` - - Zero or more _conditions_, which introduce type or value parameters. Each precondition ends in a `=>`. - - the implemented _type_, - - an implementation which consists of either an `=` and an expression, - or a template body. - -**Examples:** - -Here is an enumeration of common forms of given definitions in the new syntax. We show the following use cases: - - 1. A simple typeclass instance, such as `Ord[Int]`. - 2. A parameterized type class instance, such as `Ord` for lists. - 3. A type class instance with an explicit context parameter. - 4. A type class instance with a named eexplicit context parameter. - 4. A simple given alias. - 5. A parameterized given alias - 6. A given alias with an explicit context parameter. - 8. An abstract or deferred given - 9. A by-name given, e.g. if we have a given alias of a mutable variable, and we - want to make sure that it gets re-evaluated on each access. -```scala - // Simple typeclass - given Ord[Int]: - def compare(x: Int, y: Int) = ... - - // Parameterized typeclass with context bound - given [A: Ord] => Ord[List[A]]: - def compare(x: List[A], y: List[A]) = ... - - // Parameterized typeclass with context parameter - given [A] => Ord[A] => Ord[List[A]]: - def compare(x: List[A], y: List[A]) = ... - - // Parameterized typeclass with named context parameter - given [A] => (ord: Ord[A]) => Ord[List[A]]: - def compare(x: List[A], y: List[A]) = ... 
- - // Simple alias - given Ord[Int] = IntOrd() - - // Parameterized alias with context bound - given [A: Ord] => Ord[List[A]] = - ListOrd[A] - - // Parameterized alias with context parameter - given [A] => Ord[A] => Ord[List[A]] = - ListOrd[A] - - // Abstract or deferred given - given Context = deferred - - // By-name given - given () => Context = curCtx -``` -Here are the same examples, with optional names provided: -```scala - // Simple typeclass - given intOrd: Ord[Int]: - def compare(x: Int, y: Int) = ... - - // Parameterized typeclass with context bound - given listOrd: [A: Ord] => Ord[List[A]]: - def compare(x: List[A], y: List[A]) = ... - - // Parameterized typeclass with context parameter - given listOrd: [A] => Ord[A] => Ord[List[A]]: - def compare(x: List[A], y: List[A]) = ... - - // Parameterized typeclass with named context parameter - given listOrd: [A] => (ord: Ord[A]) => Ord[List[A]]: - def compare(x: List[A], y: List[A]) = ... - - // Simple alias - given intOrd: Ord[Int] = IntOrd() - - // Parameterized alias with context bound - given listOrd: [A: Ord] => Ord[List[A]] = - ListOrd[A] - - // Parameterized alias with context parameter - given listOrd: [A] => Ord[A] => Ord[List[A]] = - ListOrd[A] - - // Abstract or deferred given - given context: Context = deferred - - // By-name given - given context: () => Context = curCtx -``` - -**By Name Givens** - -We sometimes find it necessary that a given alias is re-evaluated each time it is called. For instance, say we have a mutable variable `curCtx` and we want to define a given that returns the current value of that variable. A normal given alias will not do since by default given aliases are mapped to -lazy vals. - -In general, we want to avoid re-evaluation of the given. But there are situations like the one above where we want to specify _by-name_ evaluation instead. The proposed new syntax for this is shown in the last clause above. This is arguably the a natural way to express by-name givens. We want to use a conditional given, since these map to methods, but the set of preconditions is empty, hence the `()` parameter. Equivalently, under the context function viewpoint, we are defining a context function of the form `() ?=> T`, and these are equivalent to by-name parameters. - -Compare with the current best way to do achieve this, which is to use a dummy type parameter. -```scala - given [DummySoThatItsByName]: Context = curCtx -``` -This has the same effect, but feels more like a hack than a clean solution. - -**Dropping `with`** - -In the new syntax, all typeclass instances introduce definitions like normal -class bodies, enclosed in braces `{...}` or following a `:`. The irregular -requirement to use `with` is dropped. In retrospect, the main reason to introduce `with` was since a definition like - -```scala -given [A](using Ord[A]): Ord[List[A]]: - def compare(x: List[A], y: List[A]) = ... -``` -was deemed to be too cryptic, with the double meaning of colons. But since that syntax is gone, we don't need `with` anymore. There's still a double meaning of colons, e.g. in -```scala -given intOrd: Ord[Int]: - ... -``` -but since now both uses of `:` are very familiar (type ascription _vs_ start of nested definitions), it's manageable. Besides, the problem occurs only for named typeclass instances, which should be the exceptional case anyway. 
- - -**Possible ambiguities** - -If one wants to define a given for an a actual function type (which is probably not advisable in practice), one needs to enclose the function type in parentheses, i.e. `given ([A] => F[A])`. This is true in the currently implemented syntax and stays true for all discussed change proposals. - -The double meaning of : with optional prefix names is resolved as usual. A : at the end of a line starts a nested definition block. If for some obscure reason one wants to define a named given on multiple lines, one has to format it as follows: -```scala - given intOrd - : Ord = ... -``` - -**Summary** - -This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about a 900K definitions of `implicit def`s -in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again. - -Changing something introduced just recently in Scala 3 is not fun, -but I believe these adjustments are preferable to let bad syntax -sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code -starts migrating. - -Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time. - - -### Bonus: Fixing Singleton +## Fixing Singleton We know the current treatment of `Singleton` as a type bound is broken since `x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. @@ -578,7 +192,7 @@ def f[X: Singleton](x: X) = ... The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). -### Bonus: Precise Typing +##: Precise Typing This approach also presents a solution to the problem how to express precise type variables. We can introduce another special type class `Precise` and use it like this: @@ -588,28 +202,6 @@ def f[X: Precise](x: X) = ... Like a `Singleton` bound, a `Precise` bound disables automatic widening of singleton types or union types in inferred instances of type variable `X`. But there is no requirement that the type argument _must_ be a singleton. -## Summary of Syntax Changes - -Here is the complete context-free syntax for all proposed features. -Overall the syntax for givens becomes a lot simpler than what it was before. 
- -``` -TmplDef ::= 'given' GivenDef -GivenDef ::= [GivenConditional '=>'] GivenSig -GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} -GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) - | ConstrApps ['as' id] TemplateBody -GivenType ::= AnnotType {id [nl] AnnotType} - -TypeDef ::= id [TypeParamClause] TypeAndCtxBounds -TypeParamBounds ::= TypeAndCtxBounds -TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] -ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' -ContextBound ::= Type ['as' id] -``` - - - ## Examples @@ -880,17 +472,12 @@ Pattern2 ::= InfixPattern ['as' id] ## Summary -I have proposed some tweaks to Scala 3, which would greatly increase its usability for modular, type class based, generic programming. The proposed changes are: +I have proposed some tweaks to Scala 3, which would increase its usability for modular, type class based, generic programming. The proposed changes are: 1. Allow context bounds over classes that define a `Self` member type. - 1. Allow context bounds to be named with `as`. Use the bound parameter name as a default name for the generated context bound evidence. - 1. Add a new `{...}` syntax for multiple context bounds. - 1. Make context bounds also available for type members, which expand into a new form of deferred given. Phase out the previous abstract givens in favor of the new form. 1. Add a predefined type alias `is`. - 1. Introduce a new cleaner syntax of given clauses. - -It's interesting that givens, which are a very general concept in Scala, were "almost there" when it comes to full support of concepts and generic programming. We only needed to add a few usability tweaks to context bounds, -alongside two syntactic changes that supersede the previous forms of `given .. with` clauses and abstract givens. Also interesting is that the superseded syntax constructs were the two areas where we collectively felt that the previous solutions were a bit awkward, but we could not think of better ones at the time. It's very nice that more satisfactory solutions are now emerging. + 1. If a type parameter or member `T` has context bound `CB`, use `T` as the default name for the witness of `CB`. + 1. Cleanup `Singleton` and add a new trait `Precise` for non-widening instantiation of type variables., ## Conclusion diff --git a/docs/_docs/reference/metaprogramming/macros-spec.md b/docs/_docs/reference/metaprogramming/macros-spec.md index 27a0a2c1bdcb..261f9002e1e6 100644 --- a/docs/_docs/reference/metaprogramming/macros-spec.md +++ b/docs/_docs/reference/metaprogramming/macros-spec.md @@ -121,7 +121,7 @@ Finally, the object defines `valueOfConstant` (and `valueOfTuple`) which can tra ```scala object Type: - given of[T <: AnyKind](using Quotes): Type[T] = ... + given of: [T <: AnyKind] => Quotes => Type[T] = ... def show[T <: AnyKind](using Type[T])(using Quotes): String = ... def valueOfConstant[T](using Type[T])(using Quotes): Option[T] = ... def valueOfTuple[T <: Tuple](using Type[T])(using Quotes): Option[T] = ... diff --git a/docs/_docs/reference/metaprogramming/macros.md b/docs/_docs/reference/metaprogramming/macros.md index b63616185285..43eb5b733aeb 100644 --- a/docs/_docs/reference/metaprogramming/macros.md +++ b/docs/_docs/reference/metaprogramming/macros.md @@ -100,7 +100,7 @@ We can implement a `ToExpr` using a `given` definition that will add the definit In the following example we show how to implement a `ToExpr[Option[T]]` for any liftable type `T. 
```scala -given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with +given OptionToExpr: [T: {Type, ToExpr}] => ToExpr[Option[T]]: def apply(opt: Option[T])(using Quotes): Expr[Option[T]] = opt match case Some(x) => '{ Some[T]( ${Expr(x)} ) } @@ -420,7 +420,7 @@ These value extraction sub-patterns can be polymorphic using an instance of `Fro In the following example, we show the implementation of `OptionFromExpr` which internally uses the `FromExpr[T]` to extract the value using the `Expr(x)` pattern. ```scala -given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with +given OptionFromExpr: [T: {Type, FromExpr}] => FromExpr[Option[T]]: def unapply(x: Expr[Option[T]])(using Quotes): Option[Option[T]] = x match case '{ Some( ${Expr(x)} ) } => Some(Some(x)) diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 5048669ef664..9bea2d6c099d 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -34,7 +34,11 @@ subsection: - page: reference/contextual/givens.md - page: reference/contextual/using-clauses.md - page: reference/contextual/context-bounds.md + - page: reference/contextual/deferred-givens.md - page: reference/contextual/given-imports.md + - page: reference/contextual/more-givens.md + - page: reference/contextual/previous-givens.md + hidden: true - page: reference/contextual/extension-methods.md - page: reference/contextual/right-associative-extension-methods.md - page: reference/contextual/type-classes.md From 97455cfe3435c44fb2b10cda679f36318d16b27f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 13:17:40 +0000 Subject: [PATCH 593/827] Bump webrick from 1.8.1 to 1.8.2 in /docs/_spec Bumps [webrick](https://github.com/ruby/webrick) from 1.8.1 to 1.8.2. - [Release notes](https://github.com/ruby/webrick/releases) - [Commits](https://github.com/ruby/webrick/compare/v1.8.1...v1.8.2) --- updated-dependencies: - dependency-name: webrick dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- docs/_spec/Gemfile.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/_spec/Gemfile.lock b/docs/_spec/Gemfile.lock index b8e54fb6b4cb..c703a87bf993 100644 --- a/docs/_spec/Gemfile.lock +++ b/docs/_spec/Gemfile.lock @@ -41,7 +41,7 @@ GEM sass-listen (4.0.0) rb-fsevent (~> 0.9, >= 0.9.4) rb-inotify (~> 0.9, >= 0.9.7) - webrick (1.8.1) + webrick (1.8.2) PLATFORMS ruby From 907d73a2cb946d6e054d9198cc3b35974e79f323 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Sep 2024 17:51:43 +0200 Subject: [PATCH 594/827] Update most tests to new syntax I left some tests to use the old syntax, just so that we have early warnings for possible regressions. But most tests are now using the new syntax, so that we best reassurance that corner cases work. 
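
The rewrite applied below is mostly mechanical. As a rough sketch of the pattern (using a made-up `Show` type class rather than any of the test files touched here): an anonymous `given ... with` instance becomes `given ...:`, and a conditional instance written with a `using` clause becomes a given over a function-like type.

```scala
trait Show[T]:
  extension (x: T) def show: String

// old style (before this commit):
//   given Show[Int] with
//     extension (x: Int) def show = x.toString
//   given [A](using Show[A]): Show[List[A]] with
//     extension (xs: List[A]) def show = xs.map(_.show).mkString("[", ", ", "]")

// new style (after this commit):
given Show[Int]:
  extension (x: Int) def show = x.toString

given [A: Show] => Show[List[A]]:
  extension (xs: List[A]) def show = xs.map(_.show).mkString("[", ", ", "]")
```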
--- tests/init/crash/i6914.scala | 8 +++---- tests/init/crash/i7821.scala | 6 ++--- tests/neg-macros/BigFloat/BigFloat_1.scala | 4 ++-- tests/neg-macros/GenericNumLits/Even_1.scala | 2 +- tests/neg-macros/i11483/Test_2.scala | 2 +- tests/neg-macros/i17152/DFBits.scala | 4 ++-- tests/neg-macros/i19601/Macro.scala | 2 +- tests/neg-macros/i7919.scala | 6 ++--- .../GenericNumLits/Even_1.scala | 2 +- tests/neg/17579.check | 2 +- tests/neg/17579.scala | 2 +- tests/neg/19414.scala | 2 +- tests/neg/21538.scala | 2 +- tests/neg/abstract-givens.check | 2 +- tests/neg/abstract-givens.scala | 2 +- tests/neg/eql.scala | 2 +- tests/neg/exports.scala | 2 +- tests/neg/extmethod-overload.scala | 19 +++++++-------- .../neg/gadt-approximation-interaction.scala | 2 +- tests/neg/genericNumbers.scala | 6 ++--- tests/neg/given-loop-prevention.scala | 4 ++-- tests/neg/i10901.scala | 2 +- tests/neg/i11985.scala | 4 +--- tests/neg/i14177a.scala | 2 +- tests/neg/i15474b.scala | 2 +- tests/neg/i16453.scala | 2 +- tests/neg/i19328conversion.scala | 2 +- tests/neg/i5978.scala | 2 +- tests/neg/i6716.scala | 4 ++-- tests/neg/i7459.scala | 2 +- tests/neg/i8896-a.scala | 3 +-- tests/neg/i8896-b.scala | 3 +-- tests/neg/i9185.scala | 4 ++-- tests/neg/i9928.scala | 4 ++-- tests/neg/implicit-package-object.scala | 4 ++-- tests/neg/implied-for.scala | 4 ++-- tests/neg/import-given.scala | 2 +- tests/neg/missing-implicit6.check | 24 +++++++++---------- tests/neg/missing-implicit6.scala | 6 ++--- tests/neg/struct-given.scala | 2 +- tests/patmat/i6088.scala | 2 +- tests/pos-custom-args/captures/i16116.scala | 2 +- .../exprSummonWithTypeVar/Macro_1.scala | 4 ++-- tests/pos-macros/i13021/Width.scala | 2 +- tests/pos-macros/i18228.scala | 2 +- tests/pos-macros/macro-docs.scala | 6 ++--- tests/pos-macros/nil-liftable.scala | 2 +- tests/pos/20088.scala | 2 +- tests/pos/X.scala | 4 ++-- tests/pos/bson/bson.scala | 4 ++-- tests/pos/cbproxy-expansion.scala | 2 +- tests/pos/combine.scala | 2 +- tests/pos/end-given.scala | 2 +- tests/pos/erased-conforms.scala | 2 +- tests/pos/ext-override.scala | 2 +- tests/pos/givenFallback.scala | 6 ++--- tests/pos/hylolib-cb-extract.scala | 2 +- tests/pos/hylolib-cb/AnyCollection.scala | 8 ++----- tests/pos/hylolib-cb/AnyValue.scala | 2 +- tests/pos/hylolib-cb/BitArray.scala | 6 ++--- tests/pos/hylolib-cb/Integers.scala | 8 +++---- tests/pos/hylolib-cb/Slice.scala | 2 +- .../pos/hylolib-deferred-given-extract.scala | 4 ++-- .../AnyCollection.scala | 2 +- .../pos/hylolib-deferred-given/AnyValue.scala | 2 +- .../pos/hylolib-deferred-given/BitArray.scala | 6 ++--- .../pos/hylolib-deferred-given/HyArray.scala | 4 ++-- .../pos/hylolib-deferred-given/Integers.scala | 8 +++---- tests/pos/hylolib-deferred-given/Slice.scala | 2 +- tests/pos/i10259.scala | 2 +- tests/pos/i10929.scala | 4 ++-- tests/pos/i11174minimisation.scala | 2 +- tests/pos/i11175.scala | 4 ++-- tests/pos/i11243.scala | 8 +++---- tests/pos/i11538a.scala | 2 +- tests/pos/i11732.scala | 2 +- tests/pos/i11864.scala | 2 +- tests/pos/i12126.scala | 6 ++--- tests/pos/i12379b.scala | 2 +- tests/pos/i12591/Inner.scala | 2 +- tests/pos/i12910.scala | 4 ++-- tests/pos/i12945/A_1.scala | 2 +- tests/pos/i12949.scala | 6 ++--- tests/pos/i13001/Main_1.scala | 4 ++-- tests/pos/i13044.scala | 2 +- tests/pos/i13460.scala | 6 ++--- tests/pos/i13503.scala | 2 +- tests/pos/i13668.scala | 2 +- tests/pos/i13900.scala | 4 ++-- tests/pos/i14013.scala | 2 +- tests/pos/i14177b.scala | 10 ++++---- tests/pos/i14282.scala | 8 +++---- tests/pos/i14637.scala | 2 +- 
tests/pos/i15160.scala | 2 +- tests/pos/i15177.hylolib.scala | 2 +- tests/pos/i15183/decoder_1.scala | 6 ++--- tests/pos/i15264.scala | 6 ++--- tests/pos/i15331.scala | 2 +- tests/pos/i15664.scala | 2 +- tests/pos/i15670.scala | 4 ++-- tests/pos/i15867.scala | 2 +- tests/pos/i15867.specs2.scala | 2 +- tests/pos/i16104.scala | 2 +- tests/pos/i16596.more.scala | 2 +- tests/pos/i17002.scala | 2 +- tests/pos/i18062.scala | 2 +- tests/pos/i18175.scala | 4 ++-- tests/pos/i18211.scala | 2 +- tests/pos/i18253.orig.scala | 2 +- tests/pos/i18253.scala | 2 +- tests/pos/i18261.min/Main_0.scala | 2 +- tests/pos/i18261/DFBits_0.scala | 2 +- tests/pos/i18276a.scala | 2 +- tests/pos/i19404.scala | 2 +- tests/pos/i19407.scala | 2 +- tests/pos/i19623.scala | 2 +- tests/pos/i19724.scala | 2 +- tests/pos/i19749.scala | 2 +- tests/pos/i19857.scala | 2 +- tests/pos/i19942.1.scala | 4 ++-- tests/pos/i19955a.scala | 4 ++-- tests/pos/i19955b.scala | 4 ++-- tests/pos/i20053b.scala | 4 ++-- tests/pos/i20080.scala | 14 +++++------ tests/pos/i20344.scala | 2 +- tests/pos/i20377.scala | 2 +- tests/pos/i20572.scala | 4 ++-- tests/pos/i20858/defns_1.scala | 2 +- tests/pos/i21036.scala | 6 ++--- tests/pos/i21303/Test.scala | 4 ++-- tests/pos/i21303a/Test.scala | 6 ++--- tests/pos/i21320a.scala | 4 ++-- tests/pos/i21320b.scala | 4 ++-- tests/pos/i21352a/schema.scala | 2 +- tests/pos/i21352a/schemaDerivation.scala | 2 +- tests/pos/i21352b.scala | 4 ++-- tests/pos/i21352c.scala | 2 +- tests/pos/i21390.zio.scala | 2 +- tests/pos/i5915.scala | 4 ++-- tests/pos/i5978.scala | 8 +++---- tests/pos/i6716.scala | 2 +- tests/pos/i6900.scala | 2 +- tests/pos/i6914.scala | 4 ++-- tests/pos/i6938.scala | 6 ++--- tests/pos/i7056.scala | 2 +- tests/pos/i7375.scala | 4 ++-- tests/pos/i7413.scala | 2 +- tests/pos/i7586.scala | 11 +++++---- tests/pos/i7851.scala | 8 +++---- tests/pos/i7868.scala | 5 ++-- tests/pos/i7878.scala | 2 +- tests/pos/i8182.scala | 4 ++-- tests/pos/i8198.scala | 2 +- tests/pos/i8276.scala | 2 +- tests/pos/i8344-1.scala | 2 +- tests/pos/i8397.scala | 14 +++++------ tests/pos/i8623.scala | 2 +- tests/pos/i8825.scala | 2 +- tests/pos/i8927.scala | 2 +- tests/pos/i9342b.scala | 2 +- tests/pos/i9530.scala | 4 ++-- tests/pos/implicit-conversion.scala | 2 +- .../pos/implicit-prefix-disambiguation.scala | 2 +- tests/pos/interleaving-functor.scala | 2 +- tests/pos/mt-deskolemize.scala | 2 +- tests/pos/multi-given.scala | 2 ++ tests/pos/multiversal.scala | 2 +- tests/pos/not-looping-implicit.scala | 2 +- tests/pos/ord-over-tracked.scala | 2 +- tests/pos/parsercombinators-ctx-bounds.scala | 2 +- tests/pos/parsercombinators-givens-2.scala | 2 +- tests/pos/parsercombinators-givens.scala | 2 +- tests/pos/parsercombinators-this.scala | 2 +- tests/pos/phantom-Eq.scala | 2 +- tests/pos/phantom-Eq2/Phantom-Eq_1.scala | 2 +- tests/pos/phantom-Evidence.scala | 2 +- tests/pos/reference/delegates.scala | 18 +++++++------- tests/pos/reference/extension-methods.scala | 4 ++-- tests/pos/suspend-strawman/choices.scala | 2 +- tests/pos/suspend-strawman/generators.scala | 4 ++-- .../suspend-strawman/monadic-reflect.scala | 2 +- .../pos/suspend-strawman/simple-futures.scala | 2 +- tests/pos/the.scala | 2 +- .../pos/toplevel-opaque-xm/Logarithm_1.scala | 2 +- tests/pos/typeclass-aggregates.scala | 2 +- tests/pos/typeclasses-this.scala | 12 +++++----- tests/pos/typeclasses.scala | 10 ++++---- tests/run-macros/BigFloat/BigFloat_1.scala | 4 ++-- .../Derivation_1.scala | 2 +- tests/run/Signals.scala | 2 +- tests/run/Typeable.scala | 4 ++-- 
tests/run/abstract-givens.scala | 2 +- tests/run/cochis-example.scala | 2 +- tests/run/extension-specificity2.scala | 8 +++---- tests/run/extmethod-overload.scala | 2 +- tests/run/extmethods2.scala | 2 +- tests/run/fragables-extension.scala | 4 ++-- tests/run/genericNumLits.scala | 2 +- tests/run/given-eta.scala | 2 +- tests/run/i11050.scala | 6 ++--- tests/run/i11174.scala | 2 +- tests/run/i11174local.scala | 4 ++-- tests/run/i11542.scala | 2 +- tests/run/i11542a.scala | 2 +- tests/run/i11563.scala | 2 +- tests/run/i11583.scala | 4 ++-- tests/run/i11961.scala | 6 ++--- tests/run/i11966.scala | 4 ++-- tests/run/i12328.scala | 2 +- tests/run/i13146.scala | 6 ++--- tests/run/i13146a.scala | 4 ++-- tests/run/i13146poly.scala | 6 ++--- tests/run/i13304.scala | 4 ++-- tests/run/i13332shapeless.scala | 4 ++-- tests/run/i14150.scala | 2 +- tests/run/i17115.scala | 4 ++-- tests/run/i6716.scala | 2 +- tests/run/i7788.scala | 4 ++-- tests/run/i9011.scala | 4 ++-- tests/run/i9473.scala | 4 ++-- tests/run/i9530.scala | 2 +- tests/run/i9928.scala | 6 ++--- tests/run/ift-return.scala | 4 ++-- tests/run/implicit-alias.scala | 2 +- tests/run/implicit-specifity.scala | 12 +++++----- tests/run/implied-priority.scala | 6 ++--- tests/run/inline-numeric/Fractional.scala | 6 ++--- tests/run/inline-numeric/Integral.scala | 14 +++++------ tests/run/instances-anonymous.scala | 12 +++++----- tests/run/instances.scala | 10 ++++---- tests/run/poly-kinded-derives.scala | 14 +++++------ tests/run/publicInBinary/Lib_1.scala | 4 ++-- ...ng-context-implicits-with-conversion.scala | 2 +- tests/run/structural-contextual.scala | 2 +- tests/run/tagless.scala | 18 +++++++------- .../typeclass-derivation-doc-example.scala | 4 ++-- tests/run/tyql.scala | 2 +- tests/warn/implicit-conversions.scala | 6 ++--- 238 files changed, 451 insertions(+), 468 deletions(-) diff --git a/tests/init/crash/i6914.scala b/tests/init/crash/i6914.scala index 723b2ef94e0b..b5f848626772 100644 --- a/tests/init/crash/i6914.scala +++ b/tests/init/crash/i6914.scala @@ -5,7 +5,7 @@ object test1 { class ToExpr[T](using Liftable[T]) extends Conversion[T, Expr[T]] { def apply(x: T): Expr[T] = ??? } - given toExprFun[T](using Liftable[T]): ToExpr[T] with {} + given toExprFun: [T] => Liftable[T] => ToExpr[T]() given Liftable[Int] = ??? given Liftable[String] = ??? @@ -16,14 +16,12 @@ object test1 { def a: Expr[String] = "abc" } -object test2 { +object test2: - given autoToExpr[T](using Liftable[T]): Conversion[T, Expr[T]] with { + given autoToExpr: [T] => Liftable[T] => Conversion[T, Expr[T]]: def apply(x: T): Expr[T] = ??? - } given Liftable[Int] = ??? given Liftable[String] = ??? 
def a: Expr[String] = "abc" -} \ No newline at end of file diff --git a/tests/init/crash/i7821.scala b/tests/init/crash/i7821.scala index 1574801826bc..f99037573c75 100644 --- a/tests/init/crash/i7821.scala +++ b/tests/init/crash/i7821.scala @@ -3,9 +3,8 @@ object XObject { def anX: X = 5 - given ops: Object with { + given ops: Object: extension (x: X) def + (y: X): X = x + y - } } object MyXObject { @@ -13,9 +12,8 @@ object MyXObject { def anX: MyX = XObject.anX - given ops: Object with { + given ops: Object: extension (x: MyX) def + (y: MyX): MyX = x + y // error: warring: Infinite recursive call - } } object Main extends App { diff --git a/tests/neg-macros/BigFloat/BigFloat_1.scala b/tests/neg-macros/BigFloat/BigFloat_1.scala index 5bb5b49587bd..246e3dcd442d 100644 --- a/tests/neg-macros/BigFloat/BigFloat_1.scala +++ b/tests/neg-macros/BigFloat/BigFloat_1.scala @@ -35,7 +35,7 @@ object BigFloat extends App { def fromDigits(digits: String) = apply(digits) } - given BigFloatFromDigits with { + given BigFloatFromDigits { override inline def fromDigits(digits: String) = ${ BigFloatFromDigitsImpl('digits) } @@ -43,7 +43,7 @@ object BigFloat extends App { // Should be in StdLib: - given ToExpr[BigInt] with { + given ToExpr[BigInt] { def apply(x: BigInt)(using Quotes) = '{BigInt(${Expr(x.toString)})} } diff --git a/tests/neg-macros/GenericNumLits/Even_1.scala b/tests/neg-macros/GenericNumLits/Even_1.scala index 24bcf32cc7c0..5772d98d3808 100644 --- a/tests/neg-macros/GenericNumLits/Even_1.scala +++ b/tests/neg-macros/GenericNumLits/Even_1.scala @@ -16,7 +16,7 @@ object Even { def fromDigits(digits: String) = evenFromDigits(digits) } - given EvenFromDigits with { + given EvenFromDigits { override transparent inline def fromDigits(digits: String) = ${ EvenFromDigitsImpl('digits) } diff --git a/tests/neg-macros/i11483/Test_2.scala b/tests/neg-macros/i11483/Test_2.scala index 6fe975168684..e61716615d22 100644 --- a/tests/neg-macros/i11483/Test_2.scala +++ b/tests/neg-macros/i11483/Test_2.scala @@ -3,7 +3,7 @@ package x import scala.language.implicitConversions import scala.concurrent.Future -given FutureAsyncMonad: CpsMonad[Future] with +given FutureAsyncMonad: CpsMonad[Future]: def pure[T](t:T): Future[T] = ??? def impure[T](t:Future[T]): T = ??? def map[A,B](x:Future[A])(f: A=>B): Future[B] = ??? 
diff --git a/tests/neg-macros/i17152/DFBits.scala b/tests/neg-macros/i17152/DFBits.scala index dd0e8b88a962..a88485b56dc6 100644 --- a/tests/neg-macros/i17152/DFBits.scala +++ b/tests/neg-macros/i17152/DFBits.scala @@ -16,7 +16,7 @@ trait Baz trait Width[T]: type Out <: Int object Width: - given fromDFBoolOrBit[T <: DFBoolOrBit]: Width[T] with + given fromDFBoolOrBit: [T <: DFBoolOrBit] => Width[T]: type Out = 1 transparent inline given [T]: Width[T] = ${ getWidthMacro[T] } def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = @@ -38,7 +38,7 @@ private object CompanionsDFBits: type OutW <: Int def apply(value: R): DFValOf[DFBits[OutW]] object Candidate: - given fromDFUInt[W <: Int, R <: DFValOf[DFDecimal]]: Candidate[R] with + given fromDFUInt: [W <: Int, R <: DFValOf[DFDecimal]] => Candidate[R]: type OutW = W def apply(value: R): DFValOf[DFBits[W]] = import DFVal.Ops.bits diff --git a/tests/neg-macros/i19601/Macro.scala b/tests/neg-macros/i19601/Macro.scala index 8d6d22005017..06260ab8c981 100644 --- a/tests/neg-macros/i19601/Macro.scala +++ b/tests/neg-macros/i19601/Macro.scala @@ -10,7 +10,7 @@ object Macros { '{ () } } - given [A](using Type[A]): FromExpr[Assertion[A]] with { + given [A] => Type[A] => FromExpr[Assertion[A]] { def unapply(assertion: Expr[Assertion[A]])(using Quotes): Option[Assertion[A]] = { import quotes.reflect.* diff --git a/tests/neg-macros/i7919.scala b/tests/neg-macros/i7919.scala index e68965fc614f..74863282e09a 100644 --- a/tests/neg-macros/i7919.scala +++ b/tests/neg-macros/i7919.scala @@ -3,16 +3,16 @@ import scala.quoted.* object Test { def staged[T](using Quotes) = { import quotes.reflect.* - given typeT: Type[T] with {} // error + given typeT: Type[T] {} // error val tt = TypeRepr.of[T] '{ "in staged" } } - given Expr[Int] with {} // error + given Expr[Int] {} // error new Expr[Int] // error class Expr2 extends Expr[Int] // error - given Type[Int] with {} // error + given Type[Int] {} // error new Type[Int] // error class Type2 extends Type[Int] // error diff --git a/tests/neg-with-compiler/GenericNumLits/Even_1.scala b/tests/neg-with-compiler/GenericNumLits/Even_1.scala index 0867150dd944..7f5824b30957 100644 --- a/tests/neg-with-compiler/GenericNumLits/Even_1.scala +++ b/tests/neg-with-compiler/GenericNumLits/Even_1.scala @@ -16,7 +16,7 @@ object Even { def fromDigits(digits: String) = evenFromDigits(digits) } - given EvenFromDigits with { + given EvenFromDigits { override inline def fromDigits(digits: String) = ${ EvenFromDigitsImpl('digits) } diff --git a/tests/neg/17579.check b/tests/neg/17579.check index 1149f9c0faa4..24b7d354dcb6 100644 --- a/tests/neg/17579.check +++ b/tests/neg/17579.check @@ -25,6 +25,6 @@ | | longer explanation available when compiling with `-explain` -- [E147] Syntax Warning: tests/neg/17579.scala:19:6 ------------------------------------------------------------------- -19 | final given Object with {} // warning: modifier `final` is redundant for this definition +19 | final given Object() // warning: modifier `final` is redundant for this definition | ^^^^^ | Modifier final is redundant for this definition diff --git a/tests/neg/17579.scala b/tests/neg/17579.scala index 268199e9006f..0ffd20d4b267 100644 --- a/tests/neg/17579.scala +++ b/tests/neg/17579.scala @@ -16,7 +16,7 @@ class C: { // No error in this case, because the `given` is translated to a class // definition, for which `final` is redundant but not illegal. 
- final given Object with {} // warning: modifier `final` is redundant for this definition + final given Object() // warning: modifier `final` is redundant for this definition } { diff --git a/tests/neg/19414.scala b/tests/neg/19414.scala index bb275ad943b7..8843441e81f2 100644 --- a/tests/neg/19414.scala +++ b/tests/neg/19414.scala @@ -9,7 +9,7 @@ class Printer given Writer[JsValue] = ??? given Writer[JsObject] = ??? -given [B: Writer](using printer: Printer = new Printer): BodySerializer[B] = ??? +given [B: Writer] => (printer: Printer = new Printer) => BodySerializer[B] = ??? def f: Unit = summon[BodySerializer[JsObject]] // error: Ambiguous given instances diff --git a/tests/neg/21538.scala b/tests/neg/21538.scala index 761e9cde678a..66500277159e 100644 --- a/tests/neg/21538.scala +++ b/tests/neg/21538.scala @@ -1,3 +1,3 @@ trait Bar[T] -given [T]: Bar[T] with {} +given [T] => Bar[T]() inline def foo[V](inline value: V)(using Bar[value.type]) : Unit = {} // error \ No newline at end of file diff --git a/tests/neg/abstract-givens.check b/tests/neg/abstract-givens.check index 022c454c31f1..1430c5b6e950 100644 --- a/tests/neg/abstract-givens.check +++ b/tests/neg/abstract-givens.check @@ -1,5 +1,5 @@ -- Error: tests/neg/abstract-givens.scala:11:8 ------------------------------------------------------------------------- -11 | given s[T](using T): Seq[T] with // error +11 | given s: [T] => T => Seq[T]: // error | ^ |instance cannot be created, since def iterator: Iterator[A] in trait IterableOnce in package scala.collection is not defined -- [E164] Declaration Error: tests/neg/abstract-givens.scala:8:8 ------------------------------------------------------- diff --git a/tests/neg/abstract-givens.scala b/tests/neg/abstract-givens.scala index 5aa5bdee88e3..dbd4a7a85927 100644 --- a/tests/neg/abstract-givens.scala +++ b/tests/neg/abstract-givens.scala @@ -8,7 +8,7 @@ object Test extends T: given y(using Int): String = summon[Int].toString * 22 // error given z[T](using T): Seq[T] = List(summon[T]) // error - given s[T](using T): Seq[T] with // error + given s: [T] => T => Seq[T]: // error def apply(x: Int) = ??? override def length = ??? diff --git a/tests/neg/eql.scala b/tests/neg/eql.scala index 58378800bbc5..40ec1fb5d9ed 100644 --- a/tests/neg/eql.scala +++ b/tests/neg/eql.scala @@ -1,7 +1,7 @@ object lst: opaque type Lst[+T] = Any object Lst: - given lstCanEqual[T, U]: CanEqual[Lst[T], Lst[U]] = CanEqual.derived + given lstCanEqual: [T, U] => CanEqual[Lst[T], Lst[U]] = CanEqual.derived val Empty: Lst[Nothing] = ??? end lst diff --git a/tests/neg/exports.scala b/tests/neg/exports.scala index c187582c940d..459a56e88c4f 100644 --- a/tests/neg/exports.scala +++ b/tests/neg/exports.scala @@ -5,7 +5,7 @@ type PrinterType def print(bits: BitMap): Unit = ??? def status: List[String] = ??? 
- given bitmap: BitMap with {} + given bitmap: BitMap() } class Scanner { diff --git a/tests/neg/extmethod-overload.scala b/tests/neg/extmethod-overload.scala index 8fa7c05222a1..ef927c0be6ce 100644 --- a/tests/neg/extmethod-overload.scala +++ b/tests/neg/extmethod-overload.scala @@ -1,16 +1,15 @@ -object Test { - given a: AnyRef with - extension (x: Int) { +object Test: + + given a: AnyRef: + extension (x: Int) def |+| (y: Int) = x + y - } - given b: AnyRef with - extension (x: Int) { + + given b: AnyRef: + extension (x: Int) def |+| (y: String) = x + y.length - } + assert((1 |+| 2) == 3) // error ambiguous - locally { + locally: import b.|+| assert((1 |+| "2") == 2) // OK - } -} \ No newline at end of file diff --git a/tests/neg/gadt-approximation-interaction.scala b/tests/neg/gadt-approximation-interaction.scala index 5f010e4b784d..a6bfe0d44007 100644 --- a/tests/neg/gadt-approximation-interaction.scala +++ b/tests/neg/gadt-approximation-interaction.scala @@ -28,7 +28,7 @@ object GivenLookup { class Tag[T] - given ti: Tag[Int] with {} + given ti: Tag[Int]() def foo[T](t: T, ev: T SUB Int) = ev match { case SUB.Refl() => diff --git a/tests/neg/genericNumbers.scala b/tests/neg/genericNumbers.scala index 0c5769f7ba12..7c08caeb4e9d 100644 --- a/tests/neg/genericNumbers.scala +++ b/tests/neg/genericNumbers.scala @@ -7,13 +7,11 @@ object Test extends App { case class Even(n: Int) - given FromDigits[Even] with { - def fromDigits(digits: String): Even = { + given FromDigits[Even]: + def fromDigits(digits: String): Even = val intValue = digits.toInt if (intValue % 2 == 0) Even(intValue) else throw FromDigits.MalformedNumber() - } - } val e: Even = 1234 // error diff --git a/tests/neg/given-loop-prevention.scala b/tests/neg/given-loop-prevention.scala index 9d404b8c6d8e..9ad2163a4bf8 100644 --- a/tests/neg/given-loop-prevention.scala +++ b/tests/neg/given-loop-prevention.scala @@ -2,11 +2,11 @@ class Foo object Bar { - given Foo with {} + given Foo() given List[Foo] = List(summon[Foo]) // ok } object Baz { given List[Foo] = List(summon[Foo]) // error - given Foo with {} + given Foo() } diff --git a/tests/neg/i10901.scala b/tests/neg/i10901.scala index dc1ea6e6eef6..996a0753c2e7 100644 --- a/tests/neg/i10901.scala +++ b/tests/neg/i10901.scala @@ -53,7 +53,7 @@ object BugExp4Point2D { class C object Container: - given C with {} + given C() object Test: extension (x: String)(using C) diff --git a/tests/neg/i11985.scala b/tests/neg/i11985.scala index fee056594974..52313ab09c99 100644 --- a/tests/neg/i11985.scala +++ b/tests/neg/i11985.scala @@ -11,10 +11,8 @@ object Test { def get(t: TT): C } - given [T <: Tuple, C, EV <: TupleTypeIndex[T, C]]: TupleExtractor[T, C] with { + given [T <: Tuple, C, EV <: TupleTypeIndex[T, C]] => TupleExtractor[T, C]: def get(t: T): C = t.toArray.apply(toIntC[TupleTypeIndex[T, C]]).asInstanceOf[C] // error - } - transparent inline def toIntC[N <: Int]: Int = inline constValue[N] match diff --git a/tests/neg/i14177a.scala b/tests/neg/i14177a.scala index 237eaacb3b66..91a63bdd6345 100644 --- a/tests/neg/i14177a.scala +++ b/tests/neg/i14177a.scala @@ -2,5 +2,5 @@ import scala.compiletime.* trait C[A] -inline given [Tup <: Tuple]: C[Tup] with +inline given [Tup <: Tuple] => C[Tup]: val cs = summonAll[Tuple.Map[Tup, C]] // error: Tuple element types must be known at compile time diff --git a/tests/neg/i15474b.scala b/tests/neg/i15474b.scala index 0c04b9880e1c..0a99056f8a6e 100644 --- a/tests/neg/i15474b.scala +++ b/tests/neg/i15474b.scala @@ -3,6 +3,6 @@ import 
scala.language.implicitConversions object Test1: - given c: Conversion[ String, Int ] with + given c: Conversion[ String, Int ]: def apply(from: String): Int = from.toInt // warn: infinite loop in function body // nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i16453.scala b/tests/neg/i16453.scala index 00495c39e21a..b5767ac35417 100644 --- a/tests/neg/i16453.scala +++ b/tests/neg/i16453.scala @@ -12,7 +12,7 @@ def testScala3() = { given Conversion[Char, String] = ??? given Conversion[Char, Option[Int]] = ??? - given foo: Foo with + given foo: Foo: type T = Int given bar3: Int = 0 given baz3: Char = 'a' diff --git a/tests/neg/i19328conversion.scala b/tests/neg/i19328conversion.scala index 46dd1058b579..458bd5abb5b1 100644 --- a/tests/neg/i19328conversion.scala +++ b/tests/neg/i19328conversion.scala @@ -5,7 +5,7 @@ object i19328conversion: type Id[A] = A - given wrapId[A]: Conversion[A, Id[A]] with + given wrapId: [A] => Conversion[A, Id[A]]: def apply(x: A): Id[A] = x def bar(using bool: Boolean): Unit = () diff --git a/tests/neg/i5978.scala b/tests/neg/i5978.scala index 5dddfafb8726..b7e0344ec1c9 100644 --- a/tests/neg/i5978.scala +++ b/tests/neg/i5978.scala @@ -5,7 +5,7 @@ opaque type Position[Buffer] = Int trait TokenParser[Token, R] object TextParser { - given TP: TokenParser[Char, Position[CharSequence]] with {} + given TP: TokenParser[Char, Position[CharSequence]]() given FromCharToken(using T: TokenParser[Char, Position[CharSequence]]) : Conversion[Char, Position[CharSequence]] = ??? diff --git a/tests/neg/i6716.scala b/tests/neg/i6716.scala index eece8af9e560..a9826cb901c0 100644 --- a/tests/neg/i6716.scala +++ b/tests/neg/i6716.scala @@ -1,12 +1,12 @@ class Foo object Bar { - given Foo with {} + given Foo() given List[Foo] = List(summon[Foo]) // ok } object Baz { @annotation.nowarn given List[Foo] = List(summon[Foo]) // error - given Foo with {} + given Foo() } diff --git a/tests/neg/i7459.scala b/tests/neg/i7459.scala index a17f32b15afa..829132e5179d 100644 --- a/tests/neg/i7459.scala +++ b/tests/neg/i7459.scala @@ -22,7 +22,7 @@ trait Eq[T] { } object Eq { - given Eq[Int] with { + given Eq[Int] { def eqv(x: Int, y: Int) = x == y } diff --git a/tests/neg/i8896-a.scala b/tests/neg/i8896-a.scala index ae2cd6e88f6c..9f0953bfe939 100644 --- a/tests/neg/i8896-a.scala +++ b/tests/neg/i8896-a.scala @@ -4,8 +4,7 @@ trait Foo[A] object Example { - given Foo[Int] with { - } + given Foo[Int]() def foo0[A: Foo]: A => A = identity def foo1[A](implicit foo: Foo[A]): A => A = identity diff --git a/tests/neg/i8896-b.scala b/tests/neg/i8896-b.scala index a2559b00b3cc..f562d2d3b719 100644 --- a/tests/neg/i8896-b.scala +++ b/tests/neg/i8896-b.scala @@ -4,8 +4,7 @@ trait Foo[A] object Example { - given Foo[Int] with { - } + given Foo[Int]() def foo0[A: Foo]: A => A = identity def foo1[A](implicit foo: Foo[A]): A => A = identity diff --git a/tests/neg/i9185.scala b/tests/neg/i9185.scala index 34727eff1c46..8ec28135c1b9 100644 --- a/tests/neg/i9185.scala +++ b/tests/neg/i9185.scala @@ -1,8 +1,8 @@ trait M[F[_]] { def pure[A](x: A): F[A] } object M { extension [A, F[A]](x: A) def pure(using m: M[F]): F[A] = m.pure(x) - given listMonad: M[List] with { def pure[A](x: A): List[A] = List(x) } - given optionMonad: M[Option] with { def pure[A](x: A): Option[A] = Some(x) } + given listMonad: M[List] { def pure[A](x: A): List[A] = List(x) } + given optionMonad: M[Option] { def pure[A](x: A): Option[A] = Some(x) } val value1: List[String] = "ola".pure val 
value2 = "ola".pure // error val value3 = M.pure("ola") // error diff --git a/tests/neg/i9928.scala b/tests/neg/i9928.scala index a1034b1f20e9..cfe9cf663a85 100644 --- a/tests/neg/i9928.scala +++ b/tests/neg/i9928.scala @@ -2,7 +2,7 @@ trait Magic[F]: extension (x: Int) def read: F object Magic: - given Magic[String] with + given Magic[String]: extension(x: Int) def read: String = println("In string") s"$x" @@ -12,7 +12,7 @@ object Foo: import Magic.given def apply(s: String): Foo = s - given Magic[Foo] with + given Magic[Foo]: extension (x: Int) def read: Foo = println("In foo") Foo(s"$x") diff --git a/tests/neg/implicit-package-object.scala b/tests/neg/implicit-package-object.scala index 7b73d620b9b8..6d8c5d9ddff0 100644 --- a/tests/neg/implicit-package-object.scala +++ b/tests/neg/implicit-package-object.scala @@ -13,7 +13,7 @@ package A { given ToString[AB] = ab => println(ab) opaque type AC = String - given ToString[AC] with { + given ToString[AC] { def print(ac: AC): Unit = println(ac) } } @@ -31,7 +31,7 @@ package B { opaque type BC = String object BC { - given ToString[BC] with { + given ToString[BC] { def print(bc: BC): Unit = println(bc) } } diff --git a/tests/neg/implied-for.scala b/tests/neg/implied-for.scala index 87f762870400..cb43f799ee50 100644 --- a/tests/neg/implied-for.scala +++ b/tests/neg/implied-for.scala @@ -3,8 +3,8 @@ class B extends T class C extends T object A { - given b: B with {} - given c: C with {} + given b: B() + given c: C() } object Test extends App { diff --git a/tests/neg/import-given.scala b/tests/neg/import-given.scala index 080ed1e77ec5..1e5a9536c605 100644 --- a/tests/neg/import-given.scala +++ b/tests/neg/import-given.scala @@ -1,6 +1,6 @@ class TC object A { - given tc: TC with {} + given tc: TC() def foo(using TC) = () } object B { diff --git a/tests/neg/missing-implicit6.check b/tests/neg/missing-implicit6.check index 8c4cb331808b..7d6e16c0ec93 100644 --- a/tests/neg/missing-implicit6.check +++ b/tests/neg/missing-implicit6.check @@ -1,9 +1,9 @@ --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:34:8 ------------------------------------------------------ -34 | "a".xxx // error, no suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:32:8 ------------------------------------------------------ +32 | "a".xxx // error, no suggested import | ^^^^^^^ | value xxx is not a member of String --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:35:8 ------------------------------------------------------ -35 | 123.xxx // error, suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:33:8 ------------------------------------------------------ +33 | 123.xxx // error, suggested import | ^^^^^^^ | value xxx is not a member of Int, but could be made available as an extension method. | @@ -11,8 +11,8 @@ | | import Test.Ops.xxx | --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:36:8 ------------------------------------------------------ -36 | 123.yyy // error, suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:34:8 ------------------------------------------------------ +34 | 123.yyy // error, suggested import | ^^^^^^^ | value yyy is not a member of Int, but could be made available as an extension method. 
| @@ -20,16 +20,16 @@ | | import Test.Ops.yyy | --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:41:8 ------------------------------------------------------ -41 | 123.xxx // error, no suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:39:8 ------------------------------------------------------ +39 | 123.xxx // error, no suggested import | ^^^^^^^ | value xxx is not a member of Int --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:42:8 ------------------------------------------------------ -42 | 123.yyy // error, no suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:40:8 ------------------------------------------------------ +40 | 123.yyy // error, no suggested import | ^^^^^^^ | value yyy is not a member of Int --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:43:8 ------------------------------------------------------ -43 | 123.zzz // error, suggested import even though there's no instance of Bar in scope +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:41:8 ------------------------------------------------------ +41 | 123.zzz // error, suggested import even though there's no instance of Bar in scope | ^^^^^^^ | value zzz is not a member of Int, but could be made available as an extension method. | diff --git a/tests/neg/missing-implicit6.scala b/tests/neg/missing-implicit6.scala index ded6e5ba8fed..874ae77bb50f 100644 --- a/tests/neg/missing-implicit6.scala +++ b/tests/neg/missing-implicit6.scala @@ -7,13 +7,11 @@ trait Bar { } object instances { - given foo: Foo with { + given foo: Foo: type Out = Bar - } - given bar: Bar with { + given bar: Bar: type Out = Int - } } object Test { diff --git a/tests/neg/struct-given.scala b/tests/neg/struct-given.scala index 9bcd1630d448..13e0d98795fa 100644 --- a/tests/neg/struct-given.scala +++ b/tests/neg/struct-given.scala @@ -1,5 +1,5 @@ class C -given c[T]: C with +given c: [T] => C: def foo = 1 given d[T]: C = new C { def foo = 1 } diff --git a/tests/patmat/i6088.scala b/tests/patmat/i6088.scala index 8d8f676c0101..c88ae35a0c2f 100644 --- a/tests/patmat/i6088.scala +++ b/tests/patmat/i6088.scala @@ -17,7 +17,7 @@ enum ExprF[R[_],I] { /** Companion. 
*/ object ExprF { - given hfunctor: HFunctor[ExprF] with { + given hfunctor: HFunctor[ExprF] { def hmap[A[_], B[_]](nt: A ~> B): ([x] =>> ExprF[A,x]) ~> ([x] =>> ExprF[B,x]) = { new ~>[[x] =>> ExprF[A,x], [x] =>> ExprF[B,x]] { def apply[I](fa: ExprF[A,I]): ExprF[B,I] = fa match { diff --git a/tests/pos-custom-args/captures/i16116.scala b/tests/pos-custom-args/captures/i16116.scala index 979bfdbe4328..fdc386ac40e1 100644 --- a/tests/pos-custom-args/captures/i16116.scala +++ b/tests/pos-custom-args/captures/i16116.scala @@ -9,7 +9,7 @@ trait CpsMonad[F[_]] { object CpsMonad { type Aux[F[_],C] = CpsMonad[F] { type Context = C } - given CpsMonad[Future] with {} + given CpsMonad[Future]() } @experimental diff --git a/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala b/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala index 72bcbe8b6515..2fac9b9a7f8d 100644 --- a/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala +++ b/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala @@ -2,7 +2,7 @@ import scala.compiletime.{erasedValue, summonFrom} import scala.quoted._ -inline given summonAfterTypeMatch[T]: Any = +inline given summonAfterTypeMatch: [T] => Any = ${ summonAfterTypeMatchExpr[T] } private def summonAfterTypeMatchExpr[T: Type](using Quotes): Expr[Any] = @@ -10,4 +10,4 @@ private def summonAfterTypeMatchExpr[T: Type](using Quotes): Expr[Any] = trait Foo[T] -given IntFoo[T <: Int]: Foo[T] = ??? +given IntFoo: [T <: Int] => Foo[T] = ??? diff --git a/tests/pos-macros/i13021/Width.scala b/tests/pos-macros/i13021/Width.scala index a163e1b5ebf1..60c1b47b99d8 100644 --- a/tests/pos-macros/i13021/Width.scala +++ b/tests/pos-macros/i13021/Width.scala @@ -3,7 +3,7 @@ import scala.quoted.* trait Width[T]: type Out <: Int object Width: - transparent inline given [T]: Width[T] = ${ getWidthMacro[T] } + transparent inline given [T] => Width[T] = ${ getWidthMacro[T] } def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = '{ new Width[T] { diff --git a/tests/pos-macros/i18228.scala b/tests/pos-macros/i18228.scala index f0b8226fc135..2127ea7686f5 100644 --- a/tests/pos-macros/i18228.scala +++ b/tests/pos-macros/i18228.scala @@ -3,7 +3,7 @@ import scala.quoted.* case class QueryMeta[T](map: Map[String, String]) object QueryMeta: - given [T: Type]: FromExpr[QueryMeta[T]] = new FromExpr[QueryMeta[T]]: + given [T: Type] => FromExpr[QueryMeta[T]] = new FromExpr[QueryMeta[T]]: def unapply(expr: Expr[QueryMeta[T]])(using q: Quotes): Option[QueryMeta[T]] = import q.reflect.* expr match diff --git a/tests/pos-macros/macro-docs.scala b/tests/pos-macros/macro-docs.scala index f3cd6e3ef00a..820aae451486 100644 --- a/tests/pos-macros/macro-docs.scala +++ b/tests/pos-macros/macro-docs.scala @@ -2,12 +2,12 @@ import scala.quoted.* object MacrosMD_ToExpr { - given ToExpr[Boolean] with { + given ToExpr[Boolean] { def apply(b: Boolean)(using Quotes) = if (b) '{ true } else '{ false } } - given ToExpr[Int] with { + given ToExpr[Int] { def apply(n: Int)(using Quotes) = n match { case Int.MinValue => '{ Int.MinValue } case _ if n < 0 => '{ - ${ apply(-n) } } @@ -17,7 +17,7 @@ object MacrosMD_ToExpr { } } - given [T: ToExpr : Type]: ToExpr[List[T]] with { + given [T: ToExpr : Type] => ToExpr[List[T]] { def apply(xs: List[T])(using Quotes) = xs match { case head :: tail => '{ ${ Expr(head) } :: ${ apply(tail) } } case Nil => '{ Nil: List[T] } diff --git a/tests/pos-macros/nil-liftable.scala b/tests/pos-macros/nil-liftable.scala index a3277510d7bf..c95fc7698e5a 100644 --- a/tests/pos-macros/nil-liftable.scala +++ 
b/tests/pos-macros/nil-liftable.scala @@ -1,7 +1,7 @@ import scala.quoted.* class Test: - given NilToExpr: ToExpr[Nil.type] with { + given NilToExpr: ToExpr[Nil.type] { def apply(xs: Nil.type)(using Quotes): Expr[Nil.type] = '{ Nil } } diff --git a/tests/pos/20088.scala b/tests/pos/20088.scala index 308c5a0f0a91..30651ce9d39c 100644 --- a/tests/pos/20088.scala +++ b/tests/pos/20088.scala @@ -1,6 +1,6 @@ trait Foo trait Bar -given (using foo: Foo = new {}): Bar with {} +given (foo: Foo = new {}) => Bar() def Test = summon[Bar] diff --git a/tests/pos/X.scala b/tests/pos/X.scala index 04150b72beeb..72a14478b4cb 100644 --- a/tests/pos/X.scala +++ b/tests/pos/X.scala @@ -2,8 +2,8 @@ import scala.deriving.* trait FunctorK[F[_[_]]] object FunctorK { - given [C]: FunctorK[[F[_]] =>> C]() - given [T]: FunctorK[[F[_]] =>> Tuple1[F[T]]]() + given [C] => FunctorK[[F[_]] =>> C]() + given [T] => FunctorK[[F[_]] =>> Tuple1[F[T]]]() def derived[F[_[_]]](using m: Mirror { type MirroredType[X[_]] = F[X] ; type MirroredElemTypes[_[_]] }, r: FunctorK[m.MirroredElemTypes]): FunctorK[F] = new FunctorK[F] {} } diff --git a/tests/pos/bson/bson.scala b/tests/pos/bson/bson.scala index d901ee3e3a4f..2a1bc808fbc8 100644 --- a/tests/pos/bson/bson.scala +++ b/tests/pos/bson/bson.scala @@ -7,7 +7,7 @@ object BSONWriter extends BSONWriterInstances trait BSONHandler[T] extends BSONWriter[T] private[bson] trait BSONWriterInstances { - given mapWriter[V](using BSONWriter[V]): BSONDocumentWriter[Map[String, V]] = bson.mapWriter[V] + given mapWriter: [V] => BSONWriter[V] => BSONDocumentWriter[Map[String, V]] = bson.mapWriter[V] export bson.collectionWriter } @@ -21,7 +21,7 @@ object ¬ { private[bson] trait DefaultBSONHandlers extends LowPriorityHandlers private[bson] trait LowPriorityHandlers{ - given collectionWriter[T, Repr <: Iterable[T]](using BSONWriter[T], Repr ¬ Option[T]): BSONWriter[Repr] = ??? + given collectionWriter: [T, Repr <: Iterable[T]] => (BSONWriter[T], Repr ¬ Option[T]) => BSONWriter[Repr] = ??? private[bson] def mapWriter[V](implicit valueWriter: BSONWriter[V]): BSONDocumentWriter[Map[String, V]] = ??? } diff --git a/tests/pos/cbproxy-expansion.scala b/tests/pos/cbproxy-expansion.scala index ee145b62d4ed..bd7788ca0da0 100644 --- a/tests/pos/cbproxy-expansion.scala +++ b/tests/pos/cbproxy-expansion.scala @@ -6,7 +6,7 @@ def f1[S, T: TC[S] as tc](x: S, y: tc.Self) = () def f2[S, T: TC[S]](x: S, y: T.Self) = () def f3[S, T: TC[S]](x: S, y: Int) = () -given TC[String] with +given TC[String]: type Self = Int def unit = 42 diff --git a/tests/pos/combine.scala b/tests/pos/combine.scala index 930e5237e249..c9217e593dc8 100644 --- a/tests/pos/combine.scala +++ b/tests/pos/combine.scala @@ -2,7 +2,7 @@ trait Semigroup[A] { extension (x: A) def combine(y: A): A } given Semigroup[Int] = ??? -given [A, B](using Semigroup[A], Semigroup[B]): Semigroup[(A, B)] = ??? +given [A, B] => (Semigroup[A], Semigroup[B]) => Semigroup[(A, B)] = ??? 
object Test extends App { ((1, 1)) combine ((2, 2)) // doesn't compile ((1, 1): (Int, Int)) combine (2, 2) // compiles diff --git a/tests/pos/end-given.scala b/tests/pos/end-given.scala index 359d7d1b6a6b..0aacfb379c3c 100644 --- a/tests/pos/end-given.scala +++ b/tests/pos/end-given.scala @@ -1,3 +1,3 @@ -given Conversion[Int, String] with +given Conversion[Int, String]: def apply(x: Int) = "" end given diff --git a/tests/pos/erased-conforms.scala b/tests/pos/erased-conforms.scala index 1f366e0683c6..426490d5a53a 100644 --- a/tests/pos/erased-conforms.scala +++ b/tests/pos/erased-conforms.scala @@ -5,7 +5,7 @@ erased class <::<[-From, +To] extends ErasedTerm erased class =::=[From, To] extends (From <::< To) -erased given [X]: (X =::= X) = scala.compiletime.erasedValue +erased given [X] => (X =::= X) = scala.compiletime.erasedValue extension [From](x: From) inline def cast[To](using From <::< To): To = x.asInstanceOf[To] // Safe cast because we know `From <:< To` diff --git a/tests/pos/ext-override.scala b/tests/pos/ext-override.scala index d08439e13c9a..7c082695cbaa 100644 --- a/tests/pos/ext-override.scala +++ b/tests/pos/ext-override.scala @@ -7,6 +7,6 @@ trait Foo[T]: class Bla: def hi: String = "hi" object Bla: - given Foo[Bla] with + given Foo[Bla]: extension (x: Bla) def hi: String = x.hi diff --git a/tests/pos/givenFallback.scala b/tests/pos/givenFallback.scala index 760eb2b5aed2..12d667af05c1 100644 --- a/tests/pos/givenFallback.scala +++ b/tests/pos/givenFallback.scala @@ -1,13 +1,11 @@ trait TC[T] { def x: Int; def y: Int = 0 } -given [T]: TC[T] with { +given [T] => TC[T]: inline val x = 1 -} -given TC[Int] with { +given TC[Int]: inline val x = 2 inline override val y = 3 -} object Test extends App { val z: 2 = summon[TC[Int]].x diff --git a/tests/pos/hylolib-cb-extract.scala b/tests/pos/hylolib-cb-extract.scala index b80a88485a2b..0ea38d508093 100644 --- a/tests/pos/hylolib-cb-extract.scala +++ b/tests/pos/hylolib-cb-extract.scala @@ -14,5 +14,5 @@ class BitArray given Value[Boolean] {} -given Collection[BitArray] with +given Collection[BitArray]: type Element = Boolean diff --git a/tests/pos/hylolib-cb/AnyCollection.scala b/tests/pos/hylolib-cb/AnyCollection.scala index 50f4313e46ce..61300a7b2bfe 100644 --- a/tests/pos/hylolib-cb/AnyCollection.scala +++ b/tests/pos/hylolib-cb/AnyCollection.scala @@ -42,12 +42,12 @@ object AnyCollection { } -given anyCollectionIsCollection[T: Value]: Collection[AnyCollection[T]] with { +given anyCollectionIsCollection: [T: Value] => Collection[AnyCollection[T]]: type Element = T type Position = AnyValue - extension (self: AnyCollection[T]) { + extension (self: AnyCollection[T]) def startPosition = self._start() @@ -60,7 +60,3 @@ given anyCollectionIsCollection[T: Value]: Collection[AnyCollection[T]] with { def at(p: Position) = self._at(p) - - } - -} diff --git a/tests/pos/hylolib-cb/AnyValue.scala b/tests/pos/hylolib-cb/AnyValue.scala index b9d39869c09a..88709b7b4da2 100644 --- a/tests/pos/hylolib-cb/AnyValue.scala +++ b/tests/pos/hylolib-cb/AnyValue.scala @@ -58,7 +58,7 @@ object AnyValue { } -given anyValueIsValue: Value[AnyValue] with { +given anyValueIsValue: Value[AnyValue] { extension (self: AnyValue) { diff --git a/tests/pos/hylolib-cb/BitArray.scala b/tests/pos/hylolib-cb/BitArray.scala index 3a0b4658f747..0c8f98fb1ba4 100644 --- a/tests/pos/hylolib-cb/BitArray.scala +++ b/tests/pos/hylolib-cb/BitArray.scala @@ -318,7 +318,7 @@ object BitArray { } -given bitArrayPositionIsValue: Value[BitArray.Position] with { +given 
bitArrayPositionIsValue: Value[BitArray.Position] { extension (self: BitArray.Position) { @@ -335,7 +335,7 @@ given bitArrayPositionIsValue: Value[BitArray.Position] with { } -given bitArrayIsCollection: Collection[BitArray] with { +given bitArrayIsCollection: Collection[BitArray] { type Element = Boolean type Position = BitArray.Position @@ -361,7 +361,7 @@ given bitArrayIsCollection: Collection[BitArray] with { } -given bitArrayIsStringConvertible: StringConvertible[BitArray] with { +given bitArrayIsStringConvertible: StringConvertible[BitArray] { extension (self: BitArray) override def description: String = diff --git a/tests/pos/hylolib-cb/Integers.scala b/tests/pos/hylolib-cb/Integers.scala index b9bc203a88ea..02a0cd6df111 100644 --- a/tests/pos/hylolib-cb/Integers.scala +++ b/tests/pos/hylolib-cb/Integers.scala @@ -1,6 +1,6 @@ package hylo -given booleanIsValue: Value[Boolean] with { +given booleanIsValue: Value[Boolean] { extension (self: Boolean) { @@ -18,7 +18,7 @@ given booleanIsValue: Value[Boolean] with { } -given intIsValue: Value[Int] with { +given intIsValue: Value[Int] { extension (self: Int) { @@ -36,7 +36,7 @@ given intIsValue: Value[Int] with { } -given intIsComparable: Comparable[Int] with { +given intIsComparable: Comparable[Int] { extension (self: Int) { @@ -55,4 +55,4 @@ given intIsComparable: Comparable[Int] with { } -given intIsStringConvertible: StringConvertible[Int] with {} +given intIsStringConvertible: StringConvertible[Int] {} diff --git a/tests/pos/hylolib-cb/Slice.scala b/tests/pos/hylolib-cb/Slice.scala index b577ceeb3739..10d0ffd36fb0 100644 --- a/tests/pos/hylolib-cb/Slice.scala +++ b/tests/pos/hylolib-cb/Slice.scala @@ -24,7 +24,7 @@ final class Slice[Base: Collection as b]( } -given sliceIsCollection[T: Collection as c]: Collection[Slice[T]] with { +given sliceIsCollection: [T: Collection as c] => Collection[Slice[T]] { type Element = c.Element type Position = c.Position diff --git a/tests/pos/hylolib-deferred-given-extract.scala b/tests/pos/hylolib-deferred-given-extract.scala index 02d889dc9aac..220dad944198 100644 --- a/tests/pos/hylolib-deferred-given-extract.scala +++ b/tests/pos/hylolib-deferred-given-extract.scala @@ -13,7 +13,7 @@ trait Collection[Self]: class BitArray -given Value[Boolean] {} +given Value[Boolean]() -given Collection[BitArray] with +given Collection[BitArray]: type Element = Boolean diff --git a/tests/pos/hylolib-deferred-given/AnyCollection.scala b/tests/pos/hylolib-deferred-given/AnyCollection.scala index 55e453d6dc87..e2a946fca484 100644 --- a/tests/pos/hylolib-deferred-given/AnyCollection.scala +++ b/tests/pos/hylolib-deferred-given/AnyCollection.scala @@ -42,7 +42,7 @@ object AnyCollection { } -given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { +given anyCollectionIsCollection: [T] => (tIsValue: Value[T]) => Collection[AnyCollection[T]] { type Element = T //given elementIsValue: Value[Element] = tIsValue diff --git a/tests/pos/hylolib-deferred-given/AnyValue.scala b/tests/pos/hylolib-deferred-given/AnyValue.scala index 21f2965e102e..5c1b68a52366 100644 --- a/tests/pos/hylolib-deferred-given/AnyValue.scala +++ b/tests/pos/hylolib-deferred-given/AnyValue.scala @@ -58,7 +58,7 @@ object AnyValue { } -given anyValueIsValue: Value[AnyValue] with { +given anyValueIsValue: Value[AnyValue] { extension (self: AnyValue) { diff --git a/tests/pos/hylolib-deferred-given/BitArray.scala b/tests/pos/hylolib-deferred-given/BitArray.scala index 485f30472847..d653f5e4d630 100644 --- 
a/tests/pos/hylolib-deferred-given/BitArray.scala +++ b/tests/pos/hylolib-deferred-given/BitArray.scala @@ -318,7 +318,7 @@ object BitArray { } -given bitArrayPositionIsValue: Value[BitArray.Position] with { +given bitArrayPositionIsValue: Value[BitArray.Position] { extension (self: BitArray.Position) { @@ -335,7 +335,7 @@ given bitArrayPositionIsValue: Value[BitArray.Position] with { } -given bitArrayIsCollection: Collection[BitArray] with { +given bitArrayIsCollection: Collection[BitArray] { type Element = Boolean //given elementIsValue: Value[Boolean] = booleanIsValue @@ -364,7 +364,7 @@ given bitArrayIsCollection: Collection[BitArray] with { } -given bitArrayIsStringConvertible: StringConvertible[BitArray] with { +given bitArrayIsStringConvertible: StringConvertible[BitArray] { extension (self: BitArray) override def description: String = diff --git a/tests/pos/hylolib-deferred-given/HyArray.scala b/tests/pos/hylolib-deferred-given/HyArray.scala index 98632dcb65bc..e82ce06d920b 100644 --- a/tests/pos/hylolib-deferred-given/HyArray.scala +++ b/tests/pos/hylolib-deferred-given/HyArray.scala @@ -162,7 +162,7 @@ object HyArray { } -given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { +given hyArrayIsValue: [T] => (tIsValue: Value[T]) => Value[HyArray[T]] { extension (self: HyArray[T]) { @@ -179,7 +179,7 @@ given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { } -given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { +given hyArrayIsCollection: [T] => (tIsValue: Value[T]) => Collection[HyArray[T]] { type Element = T //given elementIsValue: Value[T] = tIsValue diff --git a/tests/pos/hylolib-deferred-given/Integers.scala b/tests/pos/hylolib-deferred-given/Integers.scala index b9bc203a88ea..02a0cd6df111 100644 --- a/tests/pos/hylolib-deferred-given/Integers.scala +++ b/tests/pos/hylolib-deferred-given/Integers.scala @@ -1,6 +1,6 @@ package hylo -given booleanIsValue: Value[Boolean] with { +given booleanIsValue: Value[Boolean] { extension (self: Boolean) { @@ -18,7 +18,7 @@ given booleanIsValue: Value[Boolean] with { } -given intIsValue: Value[Int] with { +given intIsValue: Value[Int] { extension (self: Int) { @@ -36,7 +36,7 @@ given intIsValue: Value[Int] with { } -given intIsComparable: Comparable[Int] with { +given intIsComparable: Comparable[Int] { extension (self: Int) { @@ -55,4 +55,4 @@ given intIsComparable: Comparable[Int] with { } -given intIsStringConvertible: StringConvertible[Int] with {} +given intIsStringConvertible: StringConvertible[Int] {} diff --git a/tests/pos/hylolib-deferred-given/Slice.scala b/tests/pos/hylolib-deferred-given/Slice.scala index 57cdb38f6e53..234b16dfc428 100644 --- a/tests/pos/hylolib-deferred-given/Slice.scala +++ b/tests/pos/hylolib-deferred-given/Slice.scala @@ -26,7 +26,7 @@ final class Slice[Base](using } -given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { +given sliceIsCollection: [T] => (c: Collection[T]) => Collection[Slice[T]] { type Element = c.Element //given elementIsValue: Value[Element] = c.elementIsValue diff --git a/tests/pos/i10259.scala b/tests/pos/i10259.scala index 101a3d869a04..e8da9e6a7be7 100644 --- a/tests/pos/i10259.scala +++ b/tests/pos/i10259.scala @@ -4,7 +4,7 @@ trait S[T] extends (T => T): def apply(x: T) = ??? 
extension (x: T) def show: String -given S[Int] with +given S[Int]: extension (x: Int) def show = x.toString val x = 10.show diff --git a/tests/pos/i10929.scala b/tests/pos/i10929.scala index e916e4547e59..358de9ce30a8 100644 --- a/tests/pos/i10929.scala +++ b/tests/pos/i10929.scala @@ -5,11 +5,11 @@ infix abstract class TupleOf[T, +A]: object TupleOf: - given TupleOf[EmptyTuple, Nothing] with + given TupleOf[EmptyTuple, Nothing]: type Mapped[+A] = EmptyTuple def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x - given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + given [A, Rest <: Tuple] => (tracked val tup: Rest TupleOf A) => TupleOf[A *: Rest, A]: type Mapped[+A] = A *: tup.Mapped[A] def map[B](x: A *: Rest)(f: A => B): Mapped[B] = (f(x.head) *: tup.map(x.tail)(f)) diff --git a/tests/pos/i11174minimisation.scala b/tests/pos/i11174minimisation.scala index c40c34552bb8..7e0aa42721bf 100644 --- a/tests/pos/i11174minimisation.scala +++ b/tests/pos/i11174minimisation.scala @@ -20,6 +20,6 @@ object EnumerateNames { class MainClass { enum Shape: case Point - inline given auto[T]: EnumerateNames[T] = EnumerateNames.derived[T] + inline given auto: [T] => EnumerateNames[T] = EnumerateNames.derived[T] def shapeNames: EnumerateNames[Shape] = EnumerateNames.derived[Shape] } \ No newline at end of file diff --git a/tests/pos/i11175.scala b/tests/pos/i11175.scala index bfefe84dde95..5a6f9fe852bb 100644 --- a/tests/pos/i11175.scala +++ b/tests/pos/i11175.scala @@ -10,10 +10,10 @@ object A object B -given Printer[A.type] with +given Printer[A.type]: def print(a:A.type):String = "a" -given Printer[B.type] with +given Printer[B.type]: def print(b:B.type):String = "b" diff --git a/tests/pos/i11243.scala b/tests/pos/i11243.scala index 7966df0c8243..42294ed5bf76 100644 --- a/tests/pos/i11243.scala +++ b/tests/pos/i11243.scala @@ -54,7 +54,7 @@ object WriterTest extends App { end Monad - given eitherMonad[Err]: Monad[[X] =>> Either[Err,X]] with + given eitherMonad: [Err] => Monad[[X] =>> Either[Err,X]]: def pure[A](a: A): Either[Err, A] = Right(a) extension [A,B](x: Either[Err,A]) def flatMap(f: A => Either[Err, B]) = { x match { @@ -63,7 +63,7 @@ object WriterTest extends App { } } - given optionMonad: Monad[Option] with + given optionMonad: Monad[Option]: def pure[A](a: A) = Some(a) extension[A,B](fa: Option[A]) def flatMap(f: A => Option[B]) = { @@ -75,7 +75,7 @@ object WriterTest extends App { } } - given listMonad: Monad[List] with + given listMonad: Monad[List]: def pure[A](a: A): List[A] = List(a) extension[A,B](x: List[A]) @@ -88,7 +88,7 @@ object WriterTest extends App { case class Transformer[F[_]: Monad,A](val wrapped: F[A]) - given transformerMonad[F[_]: Monad]: Monad[[X] =>> Transformer[F,X]] with { + given transformerMonad: [F[_]: Monad] => Monad[[X] =>> Transformer[F,X]] { def pure[A](a: A): Transformer[F,A] = Transformer(summon[Monad[F]].pure(a)) diff --git a/tests/pos/i11538a.scala b/tests/pos/i11538a.scala index 243900c43b44..51ecdb227ab9 100644 --- a/tests/pos/i11538a.scala +++ b/tests/pos/i11538a.scala @@ -3,7 +3,7 @@ package a: trait Printer[A]: def print(a: A): Unit - given Printer[String] with + given Printer[String]: def print(s: String) = println(s) package b: diff --git a/tests/pos/i11732.scala b/tests/pos/i11732.scala index a1138a94fcd7..c8c74eccc51b 100644 --- a/tests/pos/i11732.scala +++ b/tests/pos/i11732.scala @@ -6,7 +6,7 @@ trait TupleConversion[A, B] { } object TupleConversion { - inline given autoTupleConversion[Prod <: 
Product](using m: Mirror.ProductOf[Prod]): TupleConversion[Prod, m.MirroredElemTypes] = + inline given autoTupleConversion: [Prod <: Product] => (m: Mirror.ProductOf[Prod]) => TupleConversion[Prod, m.MirroredElemTypes] = new TupleConversion[Prod, m.MirroredElemTypes] { def to(a: Prod): m.MirroredElemTypes = Tuple.fromProductTyped(a) def from(b: m.MirroredElemTypes): Prod = m.fromProduct(b) diff --git a/tests/pos/i11864.scala b/tests/pos/i11864.scala index 4f7735f1c8c5..ba43336e13ca 100644 --- a/tests/pos/i11864.scala +++ b/tests/pos/i11864.scala @@ -40,7 +40,7 @@ final class CallbackTo[+A] { object CallbackTo { type MapGuard[A] = { type Out = A } - erased given MapGuard[A]: MapGuard[A] = compiletime.erasedValue + erased given MapGuard: [A] => MapGuard[A] = compiletime.erasedValue def traverse[A, B](ta: List[A]): CallbackTo[List[B]] = val x: CallbackTo[List[A] => List[B]] = ??? diff --git a/tests/pos/i12126.scala b/tests/pos/i12126.scala index cffa7fbcbbca..aac2f39d4d95 100644 --- a/tests/pos/i12126.scala +++ b/tests/pos/i12126.scala @@ -22,12 +22,12 @@ object Structures: def map2[B, C](fb: F[B], f: (A, B) => C): F[C] = flatMap(a => fb.map(b => f(a, b))) - given Monad[List] with + given Monad[List]: def pure[A](a: A) = List(a) extension[A](fa: List[A]) def flatMap[B](f: A => List[B]) = fa.flatMap(f) - given Monad[Option] with + given Monad[Option]: def pure[A](a: A) = Some(a) extension[A](fa: Option[A]) def flatMap[B](f: A => Option[B]) = fa.flatMap(f) @@ -41,7 +41,7 @@ object Structures: object Kleisli: def apply[F[_], A, B](f: A => F[B]): Kleisli[F, A, B] = f - given [F[_], A](using F: Monad[F]): Monad[[B] =>> Kleisli[F, A, B]] with + given [F[_], A] => (F: Monad[F]) => Monad[[B] =>> Kleisli[F, A, B]]: def pure[B](b: B) = Kleisli(_ => F.pure(b)) extension[B](k: Kleisli[F, A, B]) def flatMap[C](f: B => Kleisli[F, A, C]) = diff --git a/tests/pos/i12379b.scala b/tests/pos/i12379b.scala index 2a83f69bae3d..6586f7b85a42 100644 --- a/tests/pos/i12379b.scala +++ b/tests/pos/i12379b.scala @@ -4,7 +4,7 @@ inline def convFail[Of, From](inline from : From) : Unit = inline def convOK[Of, From](inline from : From)(using c : Conversion[From, Of]) : Unit = {} class Bar[T](value : T) -given [T <: Int] : Conversion[T, Bar[T]] = Bar(_) +given [T <: Int] => Conversion[T, Bar[T]] = Bar(_) @main def main : Unit = { convOK[Bar[1],1](1) diff --git a/tests/pos/i12591/Inner.scala b/tests/pos/i12591/Inner.scala index 2f8018c4d824..a0b3e2f6410d 100644 --- a/tests/pos/i12591/Inner.scala +++ b/tests/pos/i12591/Inner.scala @@ -4,7 +4,7 @@ package inner sealed trait Foo object Foo: trait TC[T] - given ofFoo[T <: Foo]: TC[T] = ??? + given ofFoo: [T <: Foo] => TC[T] = ??? trait Bar extends Foo import Foo.TC diff --git a/tests/pos/i12910.scala b/tests/pos/i12910.scala index e78408aa5e5f..4c0d4ac369e1 100644 --- a/tests/pos/i12910.scala +++ b/tests/pos/i12910.scala @@ -3,7 +3,7 @@ trait Type[T]: type varchar -given Type[varchar] with +given Type[varchar]: type Out = String class Placeholder[T, U] @@ -17,7 +17,7 @@ trait Encoder[P, X]: object Encoder: def apply[P, X](placeholder: P)(using e: Encoder[P, X]): X => String = e.encode - given [T, X]: Encoder[Placeholder[T, X], X] with + given [T, X] => Encoder[Placeholder[T, X], X]: def encode(x: X): String = ??? 
def Test = diff --git a/tests/pos/i12945/A_1.scala b/tests/pos/i12945/A_1.scala index 890171a63051..62519fde262b 100644 --- a/tests/pos/i12945/A_1.scala +++ b/tests/pos/i12945/A_1.scala @@ -3,7 +3,7 @@ object Lie: trait TC[-T]: type Out object TC: - given [W <: Int]: TC[Lie[W]] with + given [W <: Int] => TC[Lie[W]]: type Out = W val x = summon[Lie.TC[Lie[7]]] diff --git a/tests/pos/i12949.scala b/tests/pos/i12949.scala index 5a886aa894b3..71a8ce67d032 100644 --- a/tests/pos/i12949.scala +++ b/tests/pos/i12949.scala @@ -6,11 +6,11 @@ object Catch22: object Hodor: object TC: import Catch22.TC - given fromString[V <: String]: TC[V] = ??? - transparent inline given fromDouble[V <: Double]: TC[V] = + given fromString: [V <: String] => TC[V] = ??? + transparent inline given fromDouble: [V <: Double] => TC[V] = new TC[V]: type Out = Double - given fromInt[V <: Int]: TC[V] with + given fromInt: [V <: Int] => TC[V]: type Out = Int object Test: diff --git a/tests/pos/i13001/Main_1.scala b/tests/pos/i13001/Main_1.scala index ad46df9f2fe1..9cdce20ef3bf 100644 --- a/tests/pos/i13001/Main_1.scala +++ b/tests/pos/i13001/Main_1.scala @@ -4,13 +4,13 @@ trait Arbitrary[T] trait Gen[+T] object ArbitraryDerivation: - given deriveArb[A](using gen: DerivedGen[A]): Arbitrary[A] = ??? + given deriveArb: [A] => (gen: DerivedGen[A]) => Arbitrary[A] = ??? opaque type DerivedGen[A] = Gen[A] object DerivedGen extends DerivedGenInstances sealed abstract class DerivedGenInstances: - inline given derived[A](using gen: K0.Generic[A]): DerivedGen[A] = + inline given derived: [A] => (gen: K0.Generic[A]) => DerivedGen[A] = val dummy: DerivedGen[A] = ??? gen.derive(dummy, dummy) diff --git a/tests/pos/i13044.scala b/tests/pos/i13044.scala index 36299d9e8366..ce483b4a403c 100644 --- a/tests/pos/i13044.scala +++ b/tests/pos/i13044.scala @@ -36,7 +36,7 @@ trait SchemaDerivation { } } - inline given gen[A]: Schema[A] = derived + inline given gen: [A] => Schema[A] = derived } case class H(i: Int) diff --git a/tests/pos/i13460.scala b/tests/pos/i13460.scala index fd57cd9b8521..90b2a9aff1fe 100644 --- a/tests/pos/i13460.scala +++ b/tests/pos/i13460.scala @@ -5,7 +5,7 @@ class Lazy[A](obj: => A) { lazy val value: A = obj } object Lazy { - given [A](using obj: => A ): Lazy[A] = new Lazy(obj) + given [A] => (obj: => A) => Lazy[A] = new Lazy(obj) } trait MyTypeClass[A] { @@ -13,10 +13,10 @@ trait MyTypeClass[A] { } object MyTypeClass { - given IntTypeClass: MyTypeClass[Int] with + given IntTypeClass: MyTypeClass[Int]: def makeString(a: Int): String = a.toString - inline given derived[A](using m: Mirror.Of[A]): MyTypeClass[A] = + inline given derived: [A] => (m: Mirror.Of[A]) => MyTypeClass[A] = inline m match case p: Mirror.ProductOf[A] => productConverter(p) diff --git a/tests/pos/i13503.scala b/tests/pos/i13503.scala index c60b0e05862c..020ba437e141 100644 --- a/tests/pos/i13503.scala +++ b/tests/pos/i13503.scala @@ -1,5 +1,5 @@ trait First {type Out} -given First with {type Out = 123} +given First {type Out = 123} trait Second {type Out} transparent inline given (using f: First): Second = new Second {type Out = f.Out} diff --git a/tests/pos/i13668.scala b/tests/pos/i13668.scala index 2199e0eb82c5..aebdd0478407 100644 --- a/tests/pos/i13668.scala +++ b/tests/pos/i13668.scala @@ -1,6 +1,6 @@ class MyType() trait Candidate[R] -given Candidate[MyType] with {} +given Candidate[MyType]() class Fuzzy[W]() class Fuzzy1() class Bear() diff --git a/tests/pos/i13900.scala b/tests/pos/i13900.scala index e6197d7f3f33..8b23fc2bdc20 100644 --- 
a/tests/pos/i13900.scala +++ b/tests/pos/i13900.scala @@ -3,10 +3,10 @@ import scala.annotation.targetName opaque type Inlined[T] = T object Inlined: extension [T](inlined: Inlined[T]) def value: T = inlined - inline given fromValue[T <: Singleton]: Conversion[T, Inlined[T]] = + inline given fromValue: [T <: Singleton] => Conversion[T, Inlined[T]] = value => value @targetName("fromValueWide") - given fromValue[Wide]: Conversion[Wide, Inlined[Wide]] = value => value + given fromValue: [Wide] => Conversion[Wide, Inlined[Wide]] = value => value def forced[T](value: Any): Inlined[T] = value.asInstanceOf[T] extension [T <: Int](lhs: Inlined[T]) diff --git a/tests/pos/i14013.scala b/tests/pos/i14013.scala index d20112392002..e1a586af9d45 100644 --- a/tests/pos/i14013.scala +++ b/tests/pos/i14013.scala @@ -47,7 +47,7 @@ object App2 extends App { object Foo2 { case class Bar(i: Int) - private given BarOps: AnyRef with { + private given BarOps: AnyRef { extension (bar: Bar) def twice: Bar = Bar(bar.i * 2) } diff --git a/tests/pos/i14177b.scala b/tests/pos/i14177b.scala index 6da9a72ae551..4d3810116617 100644 --- a/tests/pos/i14177b.scala +++ b/tests/pos/i14177b.scala @@ -1,15 +1,15 @@ class T -inline given fail1: T with +inline given fail1: T: val cs = scala.compiletime.summonAll[EmptyTuple] -inline given fail2[X]: T with +inline given fail2: [X] => T: val cs = scala.compiletime.summonAll[EmptyTuple] -inline given fail3(using DummyImplicit): T with +inline given fail3: () => T: val cs = scala.compiletime.summonAll[EmptyTuple] inline given ok1: T = new T: val cs = scala.compiletime.summonAll[EmptyTuple] -inline given ok2[X]: T = new T: +inline given ok2: [X] => T = new T: val cs = scala.compiletime.summonAll[EmptyTuple] -inline given ok3(using DummyImplicit): T = new T: +inline given ok3: () => T = new T: val cs = scala.compiletime.summonAll[EmptyTuple] diff --git a/tests/pos/i14282.scala b/tests/pos/i14282.scala index 2cc3ff1226e2..2dd768183124 100644 --- a/tests/pos/i14282.scala +++ b/tests/pos/i14282.scala @@ -2,12 +2,12 @@ trait Foo[A] { inline def foo(): Unit } -inline given FooA[A]: Foo[A] with { +inline given FooA: [A] => Foo[A]: inline def foo(): Unit = println() -} + def test1 = FooA.foo() -inline given FooInt: Foo[Int] with { +inline given FooInt: Foo[Int]: inline def foo(): Unit = println() -} + def test2 = FooInt.foo() diff --git a/tests/pos/i14637.scala b/tests/pos/i14637.scala index 5ae3b5e7a881..512471f7b19a 100644 --- a/tests/pos/i14637.scala +++ b/tests/pos/i14637.scala @@ -3,6 +3,6 @@ class C object Givens: - given cOrdering: Ordering[C] with + given cOrdering: Ordering[C]: override def compare(c0: C, c1: C) = 0 val greeting = "we love Givens" \ No newline at end of file diff --git a/tests/pos/i15160.scala b/tests/pos/i15160.scala index cc55e0f5fb19..bef59f3c9d80 100644 --- a/tests/pos/i15160.scala +++ b/tests/pos/i15160.scala @@ -2,7 +2,7 @@ trait Eq[A] { def eqv(a1: A, a2: A): Boolean } -given stringEq: Eq[String] with { +given stringEq: Eq[String] { def eqv(a1: String, a2: String) = a1 == a2 } diff --git a/tests/pos/i15177.hylolib.scala b/tests/pos/i15177.hylolib.scala index 96cf87680a1c..be2f5900094b 100644 --- a/tests/pos/i15177.hylolib.scala +++ b/tests/pos/i15177.hylolib.scala @@ -6,6 +6,6 @@ trait Coll[Self]: extension (self: Self) def pos: Pos extension [Self: Coll](self: Self) def trigger = self.pos class Slice[Base] -given SliceIsColl[T: Coll as c]: Coll[Slice[T]] with +given SliceIsColl: [T: Coll as c] => Coll[Slice[T]]: type Pos = c.Pos extension (self: Slice[T]) def 
pos: Pos = ??? diff --git a/tests/pos/i15183/decoder_1.scala b/tests/pos/i15183/decoder_1.scala index 532fd7246c3a..fd8f21deffed 100644 --- a/tests/pos/i15183/decoder_1.scala +++ b/tests/pos/i15183/decoder_1.scala @@ -4,11 +4,11 @@ sealed trait Decoder[T] object Decoder { given Decoder[Double] = ??? - inline given summonEmptyTuple[H]: Tuple.Map[EmptyTuple.type, Decoder] = + inline given summonEmptyTuple: [H] => Tuple.Map[EmptyTuple.type, Decoder] = EmptyTuple - inline given summonTuple[H, T <: Tuple](using hd: Decoder[H], td: Tuple.Map[T, Decoder]): Tuple.Map[H *: T, Decoder] = + inline given summonTuple: [H, T <: Tuple] => (hd: Decoder[H], td: Tuple.Map[T, Decoder]) => Tuple.Map[H *: T, Decoder] = hd *: td - inline given derived[T](using m: Mirror.Of[T], d: Tuple.Map[m.MirroredElemTypes, Decoder]): Decoder[T] = ??? + inline given derived: [T] => (m: Mirror.Of[T], d: Tuple.Map[m.MirroredElemTypes, Decoder]) => Decoder[T] = ??? } diff --git a/tests/pos/i15264.scala b/tests/pos/i15264.scala index 18ca92df6cb1..57036393b90a 100644 --- a/tests/pos/i15264.scala +++ b/tests/pos/i15264.scala @@ -36,9 +36,9 @@ object repro: given gc: C[Int] = new C[Int] // these seem like they should work but don't - given gcq[V](using p0: priority.Prio0)(using c: C[V]): C[Q[V]] = new C[Q[V]] - given gbq[V](using p1: priority.Prio1)(using b: B[V]): B[Q[V]] = new B[Q[V]] - given gaq[V](using p2: priority.Prio2)(using a: A[V]): A[Q[V]] = new A[Q[V]] + given gcq: [V] => priority.Prio0 => C[V] => C[Q[V]] = new C[Q[V]] + given gbq: [V] => priority.Prio1 => B[V] => B[Q[V]] = new B[Q[V]] + given gaq: [V] => priority.Prio2 => A[V] => A[Q[V]] = new A[Q[V]] object test1: import repro.* diff --git a/tests/pos/i15331.scala b/tests/pos/i15331.scala index faf9992cb4f2..337493e73f05 100644 --- a/tests/pos/i15331.scala +++ b/tests/pos/i15331.scala @@ -4,7 +4,7 @@ object Test: trait Arrow {type Dom; type Codom} - given composeArrows[A, Arr1 <: Arrow, Arr2 <: Arrow]: Composable[Arr1 {type Dom = A}, Arr2 {type Codom = A}] with + given composeArrows: [A, Arr1 <: Arrow, Arr2 <: Arrow] => Composable[Arr1 {type Dom = A}, Arr2 {type Codom = A}]: def compose(a: Arr1 {type Dom = A}, b: Arr2 {type Codom = A}): Arrow {type Dom = b.Dom; type Codom = a.Codom} = ??? object arr1 extends Arrow { type Dom = Int; type Codom = Int} diff --git a/tests/pos/i15664.scala b/tests/pos/i15664.scala index 7db8fe8cf23f..7b00e9ec7e85 100644 --- a/tests/pos/i15664.scala +++ b/tests/pos/i15664.scala @@ -11,7 +11,7 @@ sealed abstract class ZManaged[-R, +E, +A] type RManaged[-R, +A] = ZManaged[R, Throwable, A] type ForZManaged[R, E] = [X] =>> ZManaged[R, E, X] -given zManagedCpsMonad[R, E]: CpsMonadInstanceContext[ForZManaged[R, E]] = ??? +given zManagedCpsMonad: [R, E] => CpsMonadInstanceContext[ForZManaged[R, E]] = ??? // Usage def failing[R, E](using diff --git a/tests/pos/i15670.scala b/tests/pos/i15670.scala index b46b3708fe4e..c07290321b20 100644 --- a/tests/pos/i15670.scala +++ b/tests/pos/i15670.scala @@ -5,8 +5,8 @@ trait Read[T] trait Codec[T] extends Read[T] trait CodecTypeProjection[C[_]] object JsonTransform { - given SetCodec[T, C[_]: CodecTypeProjection]: scala.Conversion[C[T], C[Set[T]]] = ??? - given SetCodecExp[T, C[_]: CodecTypeProjection](using codec: C[T]): C[Set[T]] = codec + given SetCodec: [T, C[_]: CodecTypeProjection] => scala.Conversion[C[T], C[Set[T]]] = ??? + given SetCodecExp: [T, C[_]: CodecTypeProjection] => (codec: C[T]) => C[Set[T]] = codec given Codec[String] = ??? given CodecTypeProjection[Read] = ??? 
} diff --git a/tests/pos/i15867.scala b/tests/pos/i15867.scala index 2e62177ba590..b4f552c6b8c5 100644 --- a/tests/pos/i15867.scala +++ b/tests/pos/i15867.scala @@ -4,7 +4,7 @@ enum SUB[-A, +B]: class Pow(self: Int): def **(other: Int): Int = math.pow(self, other).toInt -given fromList[T]: Conversion[List[T], Pow] = ??? +given fromList: [T] => Conversion[List[T], Pow] = ??? given fromInt: Conversion[Int, Pow] = Pow(_) diff --git a/tests/pos/i15867.specs2.scala b/tests/pos/i15867.specs2.scala index da89b2cba9f0..1e519169d868 100644 --- a/tests/pos/i15867.specs2.scala +++ b/tests/pos/i15867.specs2.scala @@ -1,5 +1,5 @@ class Foo: - given Conversion[String, Data] with + given Conversion[String, Data]: def apply(str: String): Data = new Data(str) class Data(str: String): diff --git a/tests/pos/i16104.scala b/tests/pos/i16104.scala index 7624d5c68a4a..16f935d0e641 100644 --- a/tests/pos/i16104.scala +++ b/tests/pos/i16104.scala @@ -8,7 +8,7 @@ def Case2 = { object WriteOf: final inline def tuple[T <: Tuple]: Write[T] = ??? - given EntryToJson[T]: scala.Conversion[T, JsonStructureEntry[T]] = ??? + given EntryToJson: [T] => scala.Conversion[T, JsonStructureEntry[T]] = ??? class JsonStructureEntry[T](t: T): def writeAs[X >: T](using Write[X]): util.Try[JsonVal] = ??? diff --git a/tests/pos/i16596.more.scala b/tests/pos/i16596.more.scala index 2470eb9eb3c2..c3cb0800be61 100644 --- a/tests/pos/i16596.more.scala +++ b/tests/pos/i16596.more.scala @@ -7,7 +7,7 @@ object NatExample { case class Succ[N <: Nat](prev: N) extends Nat given zero: Zero.type = Zero - given buildSucc[N <: Nat](using n: N): Succ[N] = Succ(n) + given buildSucc: [N <: Nat] => (n: N) => Succ[N] = Succ(n) def value[N <: Nat](using n: N): N = n diff --git a/tests/pos/i17002.scala b/tests/pos/i17002.scala index d33c1bd386d9..f0382f72b7eb 100644 --- a/tests/pos/i17002.scala +++ b/tests/pos/i17002.scala @@ -7,4 +7,4 @@ class Test object Test: @methOnly - given test2[T]: Test with {} + given test2: [T] => Test() diff --git a/tests/pos/i18062.scala b/tests/pos/i18062.scala index 48863c4349c7..d638f7449972 100644 --- a/tests/pos/i18062.scala +++ b/tests/pos/i18062.scala @@ -7,7 +7,7 @@ object WrapperConvert: implicit def id[F[_]]: WrapperConvert[F, F] = new WrapperConvert[F, F]: def conv[X](fx: F[X]): F[X] = fx -transparent inline given convertX[F[_], X](using wc: WrapperConvert[F, CB]): Conversion[F[X], X] = +transparent inline given convertX: [F[_], X] => (wc: WrapperConvert[F, CB]) => Conversion[F[X], X] = new Conversion[F[X], X]: def apply(fx: F[X]) = wc.conv(fx).get diff --git a/tests/pos/i18175.scala b/tests/pos/i18175.scala index 2480ddccc320..76879c10fb39 100644 --- a/tests/pos/i18175.scala +++ b/tests/pos/i18175.scala @@ -10,8 +10,8 @@ object Regex: abstract class Sanitizer[T] object Sanitizer: given Sanitizer[EmptyTuple] = ??? - given stringcase[T <: Tuple: Sanitizer]: Sanitizer[String *: T] = ??? - given optioncase[T <: Tuple: Sanitizer]: Sanitizer[Option[String] *: T] = ??? + given stringcase: [T <: Tuple: Sanitizer] => Sanitizer[String *: T] = ??? + given optioncase: [T <: Tuple: Sanitizer] => Sanitizer[Option[String] *: T] = ??? given Sanitizer[String] = ??? given Sanitizer[Option[String]] = ??? diff --git a/tests/pos/i18211.scala b/tests/pos/i18211.scala index c5ec30ba5d61..45a51932adce 100644 --- a/tests/pos/i18211.scala +++ b/tests/pos/i18211.scala @@ -21,7 +21,7 @@ object Indexes { next: Indexes[A, Tuple.Drop[T, S[IndexOf[A, T]]]] ): Indexes[A, T] = ??? 
- given empty[A, T <: Tuple](using IndexOf[A, T] =:= -1): Indexes[A, T] = ??? + given empty: [A, T <: Tuple] => (IndexOf[A, T] =:= -1) => Indexes[A, T] = ??? } class GetAll[A]: diff --git a/tests/pos/i18253.orig.scala b/tests/pos/i18253.orig.scala index 9efe1224ebfd..0eaf93ed9ac4 100644 --- a/tests/pos/i18253.orig.scala +++ b/tests/pos/i18253.orig.scala @@ -4,7 +4,7 @@ trait DFSInt[W <: Int] trait Candidate[R]: type OutW <: Int object Candidate: - given [W <: Int, R <: DFSInt[W]]: Candidate[R] with + given [W <: Int, R <: DFSInt[W]] => Candidate[R]: type OutW = W def foo[R](rhs: R)(using icR: Candidate[R]): DFSInt[Max[8, icR.OutW]] = ??? diff --git a/tests/pos/i18253.scala b/tests/pos/i18253.scala index 8f395ee8e943..c9d043a8804d 100644 --- a/tests/pos/i18253.scala +++ b/tests/pos/i18253.scala @@ -4,7 +4,7 @@ trait Foo[A] trait Bar[B]: type Out <: Int object Bar: - given inst[C <: Int]: Bar[C] with + given [C <: Int] => Bar[C]: type Out = C class Test: diff --git a/tests/pos/i18261.min/Main_0.scala b/tests/pos/i18261.min/Main_0.scala index 23d7cbe28198..4d8f79ca241c 100644 --- a/tests/pos/i18261.min/Main_0.scala +++ b/tests/pos/i18261.min/Main_0.scala @@ -2,4 +2,4 @@ type Id[T] = Any match { case Any => T } class Foo[A] object Foo: - given inst[X, Y <: Id[X]]: Foo[Y] = new Foo[Y] + given inst: [X, Y <: Id[X]] => Foo[Y] = new Foo[Y] diff --git a/tests/pos/i18261/DFBits_0.scala b/tests/pos/i18261/DFBits_0.scala index 63b5abb495ef..bbdf871fefb4 100644 --- a/tests/pos/i18261/DFBits_0.scala +++ b/tests/pos/i18261/DFBits_0.scala @@ -3,5 +3,5 @@ trait DFBits[W <: Int] trait Candidate[R]: type OutW <: Int object Candidate: - given [W <: Int, R <: Foo[DFBits[W]]]: Candidate[R] with + given [W <: Int, R <: Foo[DFBits[W]]] => Candidate[R]: type OutW = W diff --git a/tests/pos/i18276a.scala b/tests/pos/i18276a.scala index 46c2722fd8be..eb92066715ef 100644 --- a/tests/pos/i18276a.scala +++ b/tests/pos/i18276a.scala @@ -11,5 +11,5 @@ class ParsersBase { abstract class Parser[+T]: def map[U](f: T => U): Parser[U] = ??? - given [A, B, X]: Conversion[(A, B) => X, (A ~ B) => X] = ??? + given [A, B, X] => Conversion[(A, B) => X, (A ~ B) => X] = ??? } diff --git a/tests/pos/i19404.scala b/tests/pos/i19404.scala index 8d6d4406ebb2..d57027bc46e0 100644 --- a/tests/pos/i19404.scala +++ b/tests/pos/i19404.scala @@ -1,4 +1,4 @@ -given ipEncoder[IP <: IpAddress]: Encoder[IP] = Encoder[String].contramap(_.toString) +given ipEncoder: [IP <: IpAddress] => Encoder[IP] = Encoder[String].contramap(_.toString) class Encoder[A] { final def contramap[B](f: B => A): Encoder[B] = new Encoder[B] diff --git a/tests/pos/i19407.scala b/tests/pos/i19407.scala index b7440a53540d..11c8e746ac43 100644 --- a/tests/pos/i19407.scala +++ b/tests/pos/i19407.scala @@ -6,6 +6,6 @@ object Decoder: object GeneratedEnumDecoder: - given [A <: GeneratedEnum]: Decoder[A] = + given [A <: GeneratedEnum] => Decoder[A] = summon[Decoder[Int]] ??? \ No newline at end of file diff --git a/tests/pos/i19623.scala b/tests/pos/i19623.scala index 8ab8cde159a9..ca3e6b0806e2 100644 --- a/tests/pos/i19623.scala +++ b/tests/pos/i19623.scala @@ -22,7 +22,7 @@ trait RecordLike[R] { val r: %{ val name: String; } = ??? 
// originally derived in macro, use dummy instance instead - transparent inline given outputRecordLike[R <: %]: RecordLike[R] = null.asInstanceOf[ + transparent inline given outputRecordLike: [R <: %] => RecordLike[R] = null.asInstanceOf[ RecordLike[R] { type ElemTypes = String *: EmptyTuple } diff --git a/tests/pos/i19724.scala b/tests/pos/i19724.scala index 776cf9167890..3054a2541850 100644 --- a/tests/pos/i19724.scala +++ b/tests/pos/i19724.scala @@ -1,5 +1,5 @@ object repro: abstract class Mapper[A, B] extends (A => B) - given Mapper[Int, Double] with + given Mapper[Int, Double]: inline def apply(v: Int): Double = v.toDouble diff --git a/tests/pos/i19749.scala b/tests/pos/i19749.scala index 367c5f61bdce..bbffe3a94961 100644 --- a/tests/pos/i19749.scala +++ b/tests/pos/i19749.scala @@ -5,7 +5,7 @@ case class A(x: Int, y: String) trait SomeTrait[T] object SomeTrait: - given [T]: SomeTrait[T] with {} + given [T] => SomeTrait[T]() def f1[T](using p: Mirror.ProductOf[T]): Tuple.Elem[p.MirroredElemTypes, 0] = ??? diff --git a/tests/pos/i19857.scala b/tests/pos/i19857.scala index aeb6e49111c6..9414290fc369 100644 --- a/tests/pos/i19857.scala +++ b/tests/pos/i19857.scala @@ -18,6 +18,6 @@ type FromProduct[T <: Product] <: DFTypeAny = T match trait Width2[T] object Width2: - inline given [T]: Width2[T] = new Width2[T] {} + inline given [T] => Width2[T] = new Width2[T] {} val x = summon[Width2[Of[(DFBit, DFBit)]]] diff --git a/tests/pos/i19942.1.scala b/tests/pos/i19942.1.scala index 20f923886089..b4ae15bd2b2e 100644 --- a/tests/pos/i19942.1.scala +++ b/tests/pos/i19942.1.scala @@ -13,11 +13,11 @@ object DerivedAlternative: inline def apply[F[_]]: Alternative[F] = import DerivedAlternative.given summonInline[DerivedAlternative[F]].instance - given nested[F[_], G[_]]: DerivedAlternative[F <<< G] = ??? + given nested: [F[_], G[_]] => DerivedAlternative[F <<< G] = ??? object auto: object alternative: - transparent inline given [F[_]]: Alternative[F] = DerivedAlternative[F] + transparent inline given [F[_]] => Alternative[F] = DerivedAlternative[F] trait Test: import Test.* diff --git a/tests/pos/i19955a.scala b/tests/pos/i19955a.scala index b8ea95d41d24..4c89aa615511 100644 --- a/tests/pos/i19955a.scala +++ b/tests/pos/i19955a.scala @@ -2,7 +2,7 @@ trait Summon[R, T <: R]: type Out object Summon: - given [R, T <: R]: Summon[R, T] with + given [R, T <: R] => Summon[R, T]: type Out = R trait DFTypeAny @@ -14,7 +14,7 @@ trait Candidate[R]: type OutW <: Int object Candidate: type Aux[R, O <: Int] = Candidate[R] { type OutW = O } - given [W <: Int, R <: DFValOf[DFBits[W]]]: Candidate[R] with + given [W <: Int, R <: DFValOf[DFBits[W]]] => Candidate[R]: type OutW = W extension [L](lhs: L) def foo(using es: Summon[L, lhs.type]): Unit = ??? diff --git a/tests/pos/i19955b.scala b/tests/pos/i19955b.scala index 99e101b312b1..845b6df9d91f 100644 --- a/tests/pos/i19955b.scala +++ b/tests/pos/i19955b.scala @@ -3,11 +3,11 @@ trait Wrap[W] trait IsWrapOfInt[R]: type Out <: Int -given [W <: Int, R <: Wrap[W]]: IsWrapOfInt[R] with +given [W <: Int, R <: Wrap[W]] => IsWrapOfInt[R]: type Out = Int trait IsInt[U <: Int] -given [U <: Int]: IsInt[U] = ??? +given [U <: Int] => IsInt[U] = ??? extension [L](lhs: L) def get(using ev: IsWrapOfInt[L]): ev.Out = ??? extension (lhs: Int) def isInt(using IsInt[lhs.type]): Unit = ??? 
diff --git a/tests/pos/i20053b.scala b/tests/pos/i20053b.scala index 25180d56bbae..aedd4e88019c 100644 --- a/tests/pos/i20053b.scala +++ b/tests/pos/i20053b.scala @@ -1,10 +1,10 @@ trait Sub[R, T >: R] -given [R, T >: R]: Sub[R, T] with {} +given [R, T >: R] => Sub[R, T]() trait Candidate[-R]: type OutP -given [P]: Candidate[Option[P]] with +given [P] => Candidate[Option[P]]: type OutP = P extension [L](lhs: L) diff --git a/tests/pos/i20080.scala b/tests/pos/i20080.scala index dbf6843fcbc4..cd8dc15c8add 100644 --- a/tests/pos/i20080.scala +++ b/tests/pos/i20080.scala @@ -4,11 +4,11 @@ trait Zippable[-A, -B]: def zip(left: A, right: B): Out object Zippable extends ZippableLowPrio: - given append[A <: Tuple, B]: (Zippable[A, B] { type Out = Tuple.Append[A, B] }) = + given append: [A <: Tuple, B] => (Zippable[A, B] { type Out = Tuple.Append[A, B] }) = (left, right) => left :* right trait ZippableLowPrio: - given pair[A, B]: (Zippable[A, B] { type Out = (A, B) }) = + given pair: [A, B] => (Zippable[A, B] { type Out = (A, B) }) = (left, right) => (left, right) @@ -17,16 +17,16 @@ object Minimization: trait Fun1: type Out def apply(x: Any): Out - + type M[X] = X match case String => X - + def test[A] = - + val _: Fun1 { type Out = M[A] } = new Fun1: type Out = M[A] def apply(x: Any): Out = ??? - + val _: Fun1 { type Out = M[A] } = x => ??? - + val _: Fun1 { type Out = A match {case String => A} } = x => ??? diff --git a/tests/pos/i20344.scala b/tests/pos/i20344.scala index d3b2a060d6e2..0e8a3d8baabc 100644 --- a/tests/pos/i20344.scala +++ b/tests/pos/i20344.scala @@ -19,7 +19,7 @@ type Beta[R] = [F[_]] =>> GenBeta[R, R, R][F] object Beta: trait Params[+A, +B] trait BetaInstances: - given schrodingerRandomBetaForDouble[F[_]: Monad]: Beta[Double][F] = ??? + given schrodingerRandomBetaForDouble: [F[_]: Monad] => Beta[Double][F] = ??? object all extends BetaInstances diff --git a/tests/pos/i20377.scala b/tests/pos/i20377.scala index 7a4c0fccfd7e..661fa7adfca9 100644 --- a/tests/pos/i20377.scala +++ b/tests/pos/i20377.scala @@ -6,7 +6,7 @@ class ClassToMap[A]() abstract class ClassToFind[Rows <: AnyNamedTuple]: def mapped: NamedTuple.Map[Rows, ClassToMap] -given TDB: ClassToFind[(t1: Int, t2: String)] with +given TDB: ClassToFind[(t1: Int, t2: String)]: override def mapped = ( t1 = ClassToMap[Int](), t2 = ClassToMap[String]() diff --git a/tests/pos/i20572.scala b/tests/pos/i20572.scala index 4ee4490c839c..b1c1d549936e 100644 --- a/tests/pos/i20572.scala +++ b/tests/pos/i20572.scala @@ -1,7 +1,7 @@ //> using options -Werror trait Writes[T] trait Format[T] extends Writes[T] -given [T: List]: Writes[T] = null -given [T]: Format[T] = null +given [T: List] => Writes[T] = null +given [T] => Format[T] = null val _ = summon[Writes[Int]] diff --git a/tests/pos/i20858/defns_1.scala b/tests/pos/i20858/defns_1.scala index 7b4b84745b58..0479f30af22b 100644 --- a/tests/pos/i20858/defns_1.scala +++ b/tests/pos/i20858/defns_1.scala @@ -16,7 +16,7 @@ trait AutoLayer[A]: ): ZLayer[IAnyType[p.MirroredElemTypes], Nothing, A] object AutoLayer: - inline given derived[A](using p: Mirror.ProductOf[A]): AutoLayer[A] = { + inline given derived: [A] => (p: Mirror.ProductOf[A]) => AutoLayer[A] = { val a: ZIO[IAnyType[p.MirroredElemTypes], Nothing, A] = ??? 
new AutoLayer[A]: override def zlayer(using diff --git a/tests/pos/i21036.scala b/tests/pos/i21036.scala index 1c98346e4ef3..c80889a976eb 100644 --- a/tests/pos/i21036.scala +++ b/tests/pos/i21036.scala @@ -7,10 +7,10 @@ opaque type Id = String object Id: given SameRuntime[Id, String] = ??? -given BSONHandler[String] = ??? -given [T: BSONHandler]: BSONHandler[List[T]] = ??? +given BSONHandler[String] = ??? +given [T: BSONHandler] => BSONHandler[List[T]] = ??? -given opaqueWriter[T, A](using rs: SameRuntime[T, A], writer: BSONWriter[A]): BSONWriter[T] = ??? +given opaqueWriter: [T, A] => (rs: SameRuntime[T, A], writer: BSONWriter[A]) => BSONWriter[T] = ??? val x = summon[BSONHandler[List[Id]]] // this doesn't emit warning val y = summon[BSONWriter[List[Id]]] // this did emit warning diff --git a/tests/pos/i21303/Test.scala b/tests/pos/i21303/Test.scala index fe3efa6e38f3..be6d0df22850 100644 --- a/tests/pos/i21303/Test.scala +++ b/tests/pos/i21303/Test.scala @@ -11,11 +11,11 @@ trait TSNamedType[T] extends TSType[T] trait DefaultTSTypes extends JavaTSTypes trait JavaTSTypes { - given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSType[E] = ??? + given javaEnumTSType: [E <: java.lang.Enum[E]: ClassTag] => TSType[E] = ??? } object DefaultTSTypes extends DefaultTSTypes trait TSTypeMacros { - inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline given [T: Mirror.Of] => TSType[T] = derived[T] inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { val elemInstances = summonAll[m.MirroredElemTypes] ??? diff --git a/tests/pos/i21303a/Test.scala b/tests/pos/i21303a/Test.scala index 83a598b5f17f..af1471fe3c12 100644 --- a/tests/pos/i21303a/Test.scala +++ b/tests/pos/i21303a/Test.scala @@ -11,12 +11,12 @@ trait TSNamedType[T] extends TSType[T] trait DefaultTSTypes extends JavaTSTypes trait JavaTSTypes { - given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSType[E] = ??? - given javaEnumTSNamedType[E <: java.lang.Enum[E]: ClassTag]: TSNamedType[E] = ??? + given javaEnumTSType: [E <: java.lang.Enum[E]: ClassTag] => TSType[E] = ??? + given javaEnumTSNamedType: [E <: java.lang.Enum[E]: ClassTag] => TSNamedType[E] = ??? } object DefaultTSTypes extends DefaultTSTypes trait TSTypeMacros { - inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline given [T: Mirror.Of] => TSType[T] = derived[T] inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { val elemInstances = summonAll[m.MirroredElemTypes] ??? diff --git a/tests/pos/i21320a.scala b/tests/pos/i21320a.scala index 0a7e0d1941d1..03930f0b90c9 100644 --- a/tests/pos/i21320a.scala +++ b/tests/pos/i21320a.scala @@ -6,7 +6,7 @@ trait ConfigMonoid[T]: def orElse(main: T, defaults: T): T object ConfigMonoid: - given option[T]: ConfigMonoid[Option[T]] = ??? + given option: [T] => ConfigMonoid[Option[T]] = ??? 
inline def zeroTuple[C <: Tuple]: Tuple = inline erasedValue[C] match @@ -25,7 +25,7 @@ object ConfigMonoid: defaults ) - inline given derive[T](using m: Mirror.ProductOf[T]): ConfigMonoid[T] = + inline given derive: [T] => (m: Mirror.ProductOf[T]) => ConfigMonoid[T] = new ConfigMonoid[T]: def zero: T = m.fromProduct(zeroTuple[m.MirroredElemTypes]) def orElse(main: T, defaults: T): T = m.fromProduct(valueTuple[m.MirroredElemTypes, T](0, main, defaults)) diff --git a/tests/pos/i21320b.scala b/tests/pos/i21320b.scala index 6711d3d9d952..2c4963ec2f03 100644 --- a/tests/pos/i21320b.scala +++ b/tests/pos/i21320b.scala @@ -6,7 +6,7 @@ trait ConfigMonoid[T]: def orElse(main: T, defaults: T): T object ConfigMonoid: - given option[T]: ConfigMonoid[Option[T]] = ??? + given option: [T] => ConfigMonoid[Option[T]] = ??? inline def zeroTuple[C <: Tuple]: Tuple = inline erasedValue[C] match @@ -25,7 +25,7 @@ object ConfigMonoid: defaults ) - inline given derive[T](using m: Mirror.ProductOf[T]): ConfigMonoid[T] = + inline given derive: [T] => (m: Mirror.ProductOf[T]) => ConfigMonoid[T] = new ConfigMonoid[T]: def zero: T = m.fromProduct(zeroTuple[m.MirroredElemTypes]) def orElse(main: T, defaults: T): T = m.fromProduct(valueTuple[m.MirroredElemTypes, T](0, main, defaults)) diff --git a/tests/pos/i21352a/schema.scala b/tests/pos/i21352a/schema.scala index 486e1bb1ea34..c8b5dd3c2398 100644 --- a/tests/pos/i21352a/schema.scala +++ b/tests/pos/i21352a/schema.scala @@ -11,7 +11,7 @@ object Schema extends SchemaCompanionMacros: ??? trait SchemaCompanionMacros extends SchemaDerivation: - given derivedStringBasedUnionEnumeration[S](using IsUnionOf[String, S]): Schema[S] = + given derivedStringBasedUnionEnumeration: [S] => IsUnionOf[String, S] => Schema[S] = val x: Schema[S] = ??? x.name(None) diff --git a/tests/pos/i21352a/schemaDerivation.scala b/tests/pos/i21352a/schemaDerivation.scala index d34a59c51a61..9a64167ac98d 100644 --- a/tests/pos/i21352a/schemaDerivation.scala +++ b/tests/pos/i21352a/schemaDerivation.scala @@ -15,7 +15,7 @@ object CaseClass: sealed trait IsUnionOf[T, A] object IsUnionOf: - transparent inline given derived[T, A]: IsUnionOf[T, A] = ${ deriveImpl[T, A] } + transparent inline given derived: [T, A] => IsUnionOf[T, A] = ${ deriveImpl[T, A] } private def deriveImpl[T, A](using quotes: Quotes): Expr[IsUnionOf[T, A]] = ??? trait SchemaDerivation: diff --git a/tests/pos/i21352b.scala b/tests/pos/i21352b.scala index 6e1dfbd18e3c..ddf268000969 100644 --- a/tests/pos/i21352b.scala +++ b/tests/pos/i21352b.scala @@ -6,7 +6,7 @@ object serializer: implicit val UnitReader: Reader[Unit] = ??? implicit val StringReader: Reader[String] = ??? // A way to derive instances needs to be available - inline given superTypeReader[T: scala.reflect.ClassTag]: Reader[T] = ??? + inline given superTypeReader: [T: scala.reflect.ClassTag] => Reader[T] = ??? import serializer.Reader trait Codec[T] @@ -25,7 +25,7 @@ trait Communicate[F[_]]: def notification[X <: LSPNotification](notif: X, in: notif.In): F[Unit] object Communicate: - given codec[T: Reader]: Codec[T] = ??? + given codec: [T: Reader] => Codec[T] = ??? def channel[F[_]: Monadic](channel: Channel[F]) = new Communicate[F]: diff --git a/tests/pos/i21352c.scala b/tests/pos/i21352c.scala index 22169f3560a8..a06fd64b1fd8 100644 --- a/tests/pos/i21352c.scala +++ b/tests/pos/i21352c.scala @@ -7,7 +7,7 @@ trait ReadImplicits: import scala.deriving.* given roe: Read[Option[EmptyTuple]] = ??? given rou: Read[Option[Unit]] = ??? 
- given cons1[H, T <: Tuple](using Read[Option[H]], Read[Option[T]]): Read[Option[H *: T]] = ??? + given cons1: [H, T <: Tuple] => (Read[Option[H]], Read[Option[T]]) => Read[Option[H *: T]] = ??? trait Fragment: def query[B: Read]: String = ??? diff --git a/tests/pos/i21390.zio.scala b/tests/pos/i21390.zio.scala index 3aece69632b3..c04d99b64b27 100644 --- a/tests/pos/i21390.zio.scala +++ b/tests/pos/i21390.zio.scala @@ -5,7 +5,7 @@ class Has[A] object Has: class Union[B, C] object Union: - given HasHasUnion[B0 <: Has[?], C0 <: Has[?]]: Union[B0, C0] = ??? + given HasHasUnion: [B0 <: Has[?], C0 <: Has[?]] => Union[B0, C0] = ??? class Lay[+D]: def and1[B1 >: D, C1](that: Lay[C1])(using Has.Union[B1, C1]): Lay[B1 & C1] = ??? diff --git a/tests/pos/i5915.scala b/tests/pos/i5915.scala index acccbf64a684..c6df5eaebe53 100644 --- a/tests/pos/i5915.scala +++ b/tests/pos/i5915.scala @@ -2,8 +2,8 @@ trait RunDSL val rdsl = new RunDSL {} -given RunNNFExpr[B]: RunDSL = rdsl +given RunNNFExpr: [B] => RunDSL = rdsl -given RunNNFImpl[B]: RunDSL with { +given RunNNFImpl: [B] => RunDSL { //override def runDSL(b: NNF[B]): B = b.terminal } \ No newline at end of file diff --git a/tests/pos/i5978.scala b/tests/pos/i5978.scala index f1954b8c8275..d254d78e93aa 100644 --- a/tests/pos/i5978.scala +++ b/tests/pos/i5978.scala @@ -8,7 +8,7 @@ trait TokenParser[Token, R] package p1 { object TextParser { - given TP: TokenParser[Char, Position[CharSequence]] with {} + given TP: TokenParser[Char, Position[CharSequence]]() def f (using TokenParser[Char, Position[CharSequence]]) = ??? @@ -85,9 +85,9 @@ package p3 { package p4 { class TC - given A: TC with {} + given A: TC() - given B[X[_], Y]: TC with {} + given B: [X[_], Y] => TC() - given C(using TC): TC with {} + given C: TC => TC() } \ No newline at end of file diff --git a/tests/pos/i6716.scala b/tests/pos/i6716.scala index 617adc3c09f0..721a9eb9a794 100644 --- a/tests/pos/i6716.scala +++ b/tests/pos/i6716.scala @@ -2,7 +2,7 @@ trait Monad[T]: def id: String class Foo object Foo { - given Monad[Foo] with { def id = "Foo" } + given Monad[Foo] { def id = "Foo" } } opaque type Bar = Foo diff --git a/tests/pos/i6900.scala b/tests/pos/i6900.scala index 55587a18b4ba..1aa04b1ee177 100644 --- a/tests/pos/i6900.scala +++ b/tests/pos/i6900.scala @@ -8,7 +8,7 @@ object Test1 { // But not with newstyle /* - given [A]: Conversion[A, Foo[A]] with + given [A]: Conversion[A, Foo[A]]: def apply(a: A) = new Foo[A]: def foo[C]: C => A = _ => a */ diff --git a/tests/pos/i6914.scala b/tests/pos/i6914.scala index 2d8cf164f422..c4d840f7bbdc 100644 --- a/tests/pos/i6914.scala +++ b/tests/pos/i6914.scala @@ -5,7 +5,7 @@ object test1 { class ToExpr[T](using Liftable[T]) extends Conversion[T, Expr[T]] { def apply(x: T): Expr[T] = ??? } - given toExpr[T](using Liftable[T]): ToExpr[T] with {} + given toExpr: [T] => Liftable[T] => ToExpr[T]() given Liftable[Int] = ??? given Liftable[String] = ??? @@ -18,7 +18,7 @@ object test1 { object test2 { - given autoToExpr[T](using Liftable[T]): Conversion[T, Expr[T]] with { + given autoToExpr: [T] => Liftable[T] => Conversion[T, Expr[T]] { def apply(x: T): Expr[T] = ??? 
} diff --git a/tests/pos/i6938.scala b/tests/pos/i6938.scala index e58ab2829cf9..8127fc133f72 100644 --- a/tests/pos/i6938.scala +++ b/tests/pos/i6938.scala @@ -1,5 +1,5 @@ trait Foo[T] object Foo: - given [T]: Foo[Tuple1[T]] with {} - given [T, U]: Foo[(T, U)] with {} - given [T, U, V]: Foo[(T, U, V)] with {} \ No newline at end of file + given [T] => Foo[Tuple1[T]]() + given [T, U] => Foo[(T, U)]() + given [T, U, V] => Foo[(T, U, V)]() \ No newline at end of file diff --git a/tests/pos/i7056.scala b/tests/pos/i7056.scala index a347dfe7b519..fa3c132b58b3 100644 --- a/tests/pos/i7056.scala +++ b/tests/pos/i7056.scala @@ -9,7 +9,7 @@ trait T1[T] { extension (t1: T) def idnt1: Any } -given [T <: A](using PartialId[T]): T1[T] = new T1[T] { +given [T <: A] => PartialId[T] => T1[T] = new T1[T] { extension (t1: T) def idnt1: Any = ??? } diff --git a/tests/pos/i7375.scala b/tests/pos/i7375.scala index 9da548f99ce7..0320696b0c5d 100644 --- a/tests/pos/i7375.scala +++ b/tests/pos/i7375.scala @@ -5,7 +5,7 @@ trait Entity[M, T, P]: class GreetingPerson(private val name: String) object GreetingPerson: - given GreetingPersonEntity: Entity[GreetingPerson, GreetedPerson, String] with + given GreetingPersonEntity: Entity[GreetingPerson, GreetedPerson, String]: extension (me: GreetingPerson) def receive(sender: GreetedPerson)(msg: String)(using Entity[GreetedPerson, GreetingPerson, String]): Unit = println(f"Thanks for saying $msg, ${sender.name()}") @@ -15,7 +15,7 @@ object GreetingPerson: class GreetedPerson(private val name: String) object GreetedPerson: - given GreetedPersonEntity: Entity[GreetedPerson, GreetingPerson, String] with + given GreetedPersonEntity: Entity[GreetedPerson, GreetingPerson, String]: extension (me: GreetedPerson) def receive(sender: GreetingPerson)(msg: String)(using Entity[GreetingPerson, GreetedPerson, String]): Unit = println(f"Thanks for saying $msg, ${sender.name()}") diff --git a/tests/pos/i7413.scala b/tests/pos/i7413.scala index ebc0c5b2777a..2aaf87a1e82a 100644 --- a/tests/pos/i7413.scala +++ b/tests/pos/i7413.scala @@ -11,7 +11,7 @@ trait Greeter: case class MyFixture(name: String, greeter: Greeter) object Test1: - given conv: Conversion[0, Greeter] with + given conv: Conversion[0, Greeter]: def apply(x: 0): Greeter = ??? 
val g: Greeter = 0 diff --git a/tests/pos/i7586.scala b/tests/pos/i7586.scala index 364c99478337..b0ecd84e5aff 100644 --- a/tests/pos/i7586.scala +++ b/tests/pos/i7586.scala @@ -5,14 +5,15 @@ case class S[N <: Nat](pred: N) extends Nat type Z = Z.type given zero: Z = Z -given succ[N <: Nat](using n: N): S[N] = S(n) +given succ: [N <: Nat] => (n: N) => S[N] = S(n) case class Sum[N <: Nat, M <: Nat, R <: Nat](result: R) -given sumZ[N <: Nat](using n: N): Sum[Z, N, N] = Sum(n) -given sumS[N <: Nat, M <: Nat, R <: Nat]( - using sum: Sum[N, M, R] -): Sum[S[N], M, S[R]] = Sum(S(sum.result)) +given sumZ: [N <: Nat] => (n: N) => Sum[Z, N, N] = Sum(n) +given sumS: [N <: Nat, M <: Nat, R <: Nat] + => (sum: Sum[N, M, R]) + => Sum[S[N], M, S[R]] + = Sum(S(sum.result)) def add[N <: Nat, M <: Nat, R <: Nat](n: N, m: M)( using sum: Sum[N, M, R] diff --git a/tests/pos/i7851.scala b/tests/pos/i7851.scala index 16d28ad353f9..612a6abeb873 100644 --- a/tests/pos/i7851.scala +++ b/tests/pos/i7851.scala @@ -1,18 +1,18 @@ //> using options -experimental trait Wrappable[T] { } -given Wrappable[Float] with { } +given Wrappable[Float]() case class Wrapped[T: Wrappable](value: T) trait Wrapper[T] { type WrappedT } object Wrapper { type Aux[T <: Tuple, WrappedT0 <: Tuple] = Wrapper[T] { type WrappedT = WrappedT0 } } -given Wrapper[EmptyTuple] with { type WrappedT = EmptyTuple } +given Wrapper[EmptyTuple] { type WrappedT = EmptyTuple } -given [T: Wrappable]: Wrapper[T] with { type WrappedT = Wrapped[T] } +given [T: Wrappable] => Wrapper[T] { type WrappedT = Wrapped[T] } -given [H: Wrappable, T <: Tuple, WrappedT0 <: Tuple](using Wrapper.Aux[T, WrappedT0]): Wrapper[H *: T] with { +given [H: Wrappable, T <: Tuple, WrappedT0 <: Tuple] => Wrapper.Aux[T, WrappedT0] => Wrapper[H *: T] { type WrappedT = Wrapped[H] *: WrappedT0 } diff --git a/tests/pos/i7868.scala b/tests/pos/i7868.scala index f4d7da6acb5b..fa31bd131b0c 100644 --- a/tests/pos/i7868.scala +++ b/tests/pos/i7868.scala @@ -15,16 +15,15 @@ object Coproduct { object At { - given atHead[Head, Tail]: At[Head +: Tail, Head, 0] with { + given atHead: [Head, Tail] => At[Head +: Tail, Head, 0]: def cast: Head <:< Head +: Tail = summon[Head <:< Head +: Tail] - } given atTail[Head, Tail, Value, NextIndex <: Int] (using atNext: At[Tail, Value, NextIndex]) : At[Head +: Tail, Value, S[NextIndex]] with val cast: Value <:< Head +: Tail = atNext.cast - given [A](using A): (() => A) = { () => summon[A]} + given [A] => A => (() => A) = { () => summon[A] } } def upCast[A, B](a: A)(using erased evidence: (A <:< B) ): B = a.asInstanceOf[B] diff --git a/tests/pos/i7878.scala b/tests/pos/i7878.scala index 05a1b6093e6a..68815c35eb96 100644 --- a/tests/pos/i7878.scala +++ b/tests/pos/i7878.scala @@ -4,7 +4,7 @@ object Boom { import scala.compiletime.* trait Fail[A <: Int, B <: Int] - transparent inline given fail[X <: Int, Y <: Int]: Fail[X, Y] = { + transparent inline given fail: [X <: Int, Y <: Int] => Fail[X, Y] = { scala.compiletime.summonFrom { case t: Fail[X, y] if constValue[y] < constValue[Y] => ??? } diff --git a/tests/pos/i8182.scala b/tests/pos/i8182.scala index 9acf2941c570..08b40cb570ab 100644 --- a/tests/pos/i8182.scala +++ b/tests/pos/i8182.scala @@ -3,8 +3,8 @@ package example trait Show[-A]: extension (a: A) def show: String -given (using rec: Show[String]): Show[String] = ??? // must be Show[String] as the argument +given Show[String] => Show[String] = ??? // must be Show[String] as the argument -given (using rec: => Show[String]): Show[Option[String]] = ??? 
// must be byname argument +given (rec: => Show[String]) => Show[Option[String]] = ??? // must be byname argument def test = Option("").show diff --git a/tests/pos/i8198.scala b/tests/pos/i8198.scala index 5e4efa82924a..7ea8f67d25de 100644 --- a/tests/pos/i8198.scala +++ b/tests/pos/i8198.scala @@ -6,6 +6,6 @@ trait Eq[A] { case class Id[T](id: T) -given idEq[A](using eqA: Eq[A]): Eq[Id[A]] = new { +given idEq: [A] => (eqA: Eq[A]) => Eq[Id[A]] = new { extension (i1: Id[A]) def === (i2: Id[A]) = !(i1.id /== i2.id) } diff --git a/tests/pos/i8276.scala b/tests/pos/i8276.scala index e68cf009bc33..3e2ad15bd8f4 100644 --- a/tests/pos/i8276.scala +++ b/tests/pos/i8276.scala @@ -1,6 +1,6 @@ object NOTHING -inline given [A]: Conversion[NOTHING.type, Option[A]] = _ => None +inline given [A] => Conversion[NOTHING.type, Option[A]] = _ => None def apply[A](p: Vector[A], o: Option[A] = NOTHING): Unit = ??? diff --git a/tests/pos/i8344-1.scala b/tests/pos/i8344-1.scala index 92b393da2497..2707507f39f5 100644 --- a/tests/pos/i8344-1.scala +++ b/tests/pos/i8344-1.scala @@ -5,7 +5,7 @@ enum Datatype[T] { } object Datatype { - given [H, T <: STuple](using ht: Datatype[H], tt: Datatype[T]): Datatype[H *: T] = tt match { + given [H, T <: STuple] => (ht: Datatype[H], tt: Datatype[T]) => Datatype[H *: T] = tt match { case Datatype.Tuple(elems) => Datatype.Tuple(ht *: elems) } } diff --git a/tests/pos/i8397.scala b/tests/pos/i8397.scala index d0b97f84129d..707d8c1f1942 100644 --- a/tests/pos/i8397.scala +++ b/tests/pos/i8397.scala @@ -1,4 +1,4 @@ -given foo(using x: Int): AnyRef with +given foo: (x: Int) => AnyRef: type T = x.type // #7859 @@ -6,17 +6,17 @@ given foo(using x: Int): AnyRef with trait Lub2[A, B]: type Output -given [A <: C, B <: C, C]: Lub2[A, B] with +given [A <: C, B <: C, C] => Lub2[A, B]: type Output = C trait Lub[Union]: type Output -given [A]: Lub[A] with +given [A] => Lub[A]: type Output = A -given [Left, Right]( - using lubLeft: Lub[Right], lubRight: Lub[Right])( - using lub2: Lub2[lubLeft.Output, lubRight.Output]) - : Lub[Left | Right] with +given [Left, Right] + => (lubLeft: Lub[Right], lubRight: Lub[Right]) + => (lub2: Lub2[lubLeft.Output, lubRight.Output]) + => Lub[Left | Right]: type Output = lub2.Output diff --git a/tests/pos/i8623.scala b/tests/pos/i8623.scala index e0df48811e1c..69664bfd5148 100644 --- a/tests/pos/i8623.scala +++ b/tests/pos/i8623.scala @@ -11,7 +11,7 @@ def test1 = unseal.pos def test2 = - given QC with {} + given QC() def unseal(using qctx: QC): qctx.tasty.Tree = ??? 
unseal.pos diff --git a/tests/pos/i8825.scala b/tests/pos/i8825.scala index d7115babfdc4..a9c487b5404a 100644 --- a/tests/pos/i8825.scala +++ b/tests/pos/i8825.scala @@ -17,7 +17,7 @@ object Length { def instance[L <: HList, Out0 <: Nat]: Aux[L, Out0] = new Length[L] { type Out = Out0 } given hnilLength: Aux[HNil, Zero] = instance - given hconsLength[H, T <: HList] (using length: Length[T]): Aux[HCons[H, T], Succ[length.Out]] = instance // (*) + given hconsLength: [H, T <: HList] => (length: Length[T]) => Aux[HCons[H, T], Succ[length.Out]] = instance // (*) //given hconsLength[H, T <: HList, N <: Nat] (using length: Aux[T, N]): Aux[HCons[H, T], Succ[N]] = instance // (**) } diff --git a/tests/pos/i8927.scala b/tests/pos/i8927.scala index 2dfb419abab3..ab1e850ff8e6 100644 --- a/tests/pos/i8927.scala +++ b/tests/pos/i8927.scala @@ -17,7 +17,7 @@ sealed trait DPair[k <: AnyKind, K[_ <: k], +V[_ <: k]]: case _ => None object DPair: - given pair [k, K[_ <: k], V[_ <: k], C <: k]: Conversion[(K[C], V[C]), DPair[k, K, V]] = tup => + given pair: [k, K[_ <: k], V[_ <: k], C <: k] => Conversion[(K[C], V[C]), DPair[k, K, V]] = tup => case class dpair(key: K[C], value: V[C]) extends DPair[k, K, V]: type A = C dpair(tup._1, tup._2) diff --git a/tests/pos/i9342b.scala b/tests/pos/i9342b.scala index e317391f9bb8..36093876e989 100644 --- a/tests/pos/i9342b.scala +++ b/tests/pos/i9342b.scala @@ -1,7 +1,7 @@ trait Label[A]: def apply(v: A): String -given [A]: Label[A] = _.toString +given [A] => Label[A] = _.toString extension [A](x: A) inline def label(using inline l: Label[A]): String = l(x) diff --git a/tests/pos/i9530.scala b/tests/pos/i9530.scala index 32b2f26dbd6c..ab481bd64d1b 100644 --- a/tests/pos/i9530.scala +++ b/tests/pos/i9530.scala @@ -15,12 +15,12 @@ extension [A <: Animal](animal: A)(using diet: Diet[A]) def food4 = diet.food trait Monkey extends Animal -given Diet[Monkey] with +given Diet[Monkey]: type F = Banana def food: Seq[Banana] = Seq(new Banana("yellow"), Banana("green")) trait FoodOps -given FoodOps with +given FoodOps: extension [A <: Animal](using diet: Diet[A])(animal: A) def food5 = diet.food extension [A <: Animal](animal: A)(using diet: Diet[A]) def food6 = diet.food diff --git a/tests/pos/implicit-conversion.scala b/tests/pos/implicit-conversion.scala index a2d12b8ab709..9154b24681c9 100644 --- a/tests/pos/implicit-conversion.scala +++ b/tests/pos/implicit-conversion.scala @@ -1,6 +1,6 @@ object Test { // a problematic implicit conversion, should we flag it? - given Conversion[String, Int] with { + given Conversion[String, Int] { def apply(x: String): Int = Integer.parseInt(toString) } } \ No newline at end of file diff --git a/tests/pos/implicit-prefix-disambiguation.scala b/tests/pos/implicit-prefix-disambiguation.scala index f7843e7f5831..cd1906fe5043 100644 --- a/tests/pos/implicit-prefix-disambiguation.scala +++ b/tests/pos/implicit-prefix-disambiguation.scala @@ -4,7 +4,7 @@ class J[X] trait A: given I[B] = ??? - given (using I[B]): J[B] = ??? + given I[B] => J[B] = ??? 
object A extends A trait B extends A diff --git a/tests/pos/interleaving-functor.scala b/tests/pos/interleaving-functor.scala index b588e35f60a2..e4a3804ef928 100644 --- a/tests/pos/interleaving-functor.scala +++ b/tests/pos/interleaving-functor.scala @@ -7,7 +7,7 @@ object functorInterleaving: def map[A](x: F[A])[B](f: A => B): F[B] - given Functor[List] with + given Functor[List]: def map[A](x: List[A])[B](f: A => B): List[B] = x.map(f) diff --git a/tests/pos/mt-deskolemize.scala b/tests/pos/mt-deskolemize.scala index 34f38289b24d..df408e67d767 100644 --- a/tests/pos/mt-deskolemize.scala +++ b/tests/pos/mt-deskolemize.scala @@ -37,7 +37,7 @@ class MyExpr3 extends ProdExprAlt[(Prim, VecExpr[Prim], Prim)] trait Constable[E <: Expr]: def lit(v: ExtractValue[E]): E object Constable: - given [E <: Expr]: Constable[E] = ??? + given [E <: Expr] => Constable[E] = ??? object Test: def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = diff --git a/tests/pos/multi-given.scala b/tests/pos/multi-given.scala index 5553df36f1c9..2b404672e9cb 100644 --- a/tests/pos/multi-given.scala +++ b/tests/pos/multi-given.scala @@ -8,3 +8,5 @@ def foo(implicit a: A, b: B, c: C) = "foo" given A() with B given ops: A() with B() + +given ops2: A(), B diff --git a/tests/pos/multiversal.scala b/tests/pos/multiversal.scala index f7fa14264a3c..d6fd267555f5 100644 --- a/tests/pos/multiversal.scala +++ b/tests/pos/multiversal.scala @@ -1,7 +1,7 @@ object Test { import scala.CanEqual - given [X, Y](using CanEqual[X, Y]): CanEqual[List[X], List[Y]] = CanEqual.derived + given [X, Y] => CanEqual[X, Y] => CanEqual[List[X], List[Y]] = CanEqual.derived val b: Byte = 1 val c: Char = 2 diff --git a/tests/pos/not-looping-implicit.scala b/tests/pos/not-looping-implicit.scala index ebaf25e760f2..d99da915fbf4 100644 --- a/tests/pos/not-looping-implicit.scala +++ b/tests/pos/not-looping-implicit.scala @@ -30,7 +30,7 @@ object Schema { ??? } - inline given gen[A]: Schema[A] = derived[A] + inline given gen: [A] => Schema[A] = derived[A] } sealed trait InputValue diff --git a/tests/pos/ord-over-tracked.scala b/tests/pos/ord-over-tracked.scala index a9b4aba556e1..4016a575a838 100644 --- a/tests/pos/ord-over-tracked.scala +++ b/tests/pos/ord-over-tracked.scala @@ -6,7 +6,7 @@ trait Ord[T]: given Ord[Int] = ??? case class D(tracked val x: Int) -given [T <: D]: Ord[T] = (a, b) => a.x < b.x +given [T <: D] => Ord[T] = (a, b) => a.x < b.x def mySort[T: Ord](x: Array[T]): Array[T] = ??? 
diff --git a/tests/pos/parsercombinators-ctx-bounds.scala b/tests/pos/parsercombinators-ctx-bounds.scala index d77abea5e539..50338dbc2fa5 100644 --- a/tests/pos/parsercombinators-ctx-bounds.scala +++ b/tests/pos/parsercombinators-ctx-bounds.scala @@ -17,7 +17,7 @@ end Combinator final case class Apply[C, E](action: C => Option[E]) final case class Combine[A, B](first: A, second: B) -given apply[C, E]: Combinator[Apply[C, E]] with { +given apply: [C, E] => Combinator[Apply[C, E]] { type Context = C type Element = E extension(self: Apply[C, E]) { diff --git a/tests/pos/parsercombinators-givens-2.scala b/tests/pos/parsercombinators-givens-2.scala index 8349d69a30af..4b00c3801716 100644 --- a/tests/pos/parsercombinators-givens-2.scala +++ b/tests/pos/parsercombinators-givens-2.scala @@ -18,7 +18,7 @@ end Combinator final case class Apply[C, E](action: C => Option[E]) final case class Combine[A, B](first: A, second: B) -given apply[C, E]: Combinator[Apply[C, E]] with { +given apply: [C, E] => Combinator[Apply[C, E]] { type Context = C type Element = E extension(self: Apply[C, E]) { diff --git a/tests/pos/parsercombinators-givens.scala b/tests/pos/parsercombinators-givens.scala index 5b5588c93840..1fa8080b0688 100644 --- a/tests/pos/parsercombinators-givens.scala +++ b/tests/pos/parsercombinators-givens.scala @@ -18,7 +18,7 @@ end Combinator final case class Apply[C, E](action: C => Option[E]) final case class Combine[A, B](first: A, second: B) -given apply[C, E]: Combinator[Apply[C, E]] with { +given apply: [C, E] => Combinator[Apply[C, E]] { type Context = C type Element = E extension(self: Apply[C, E]) { diff --git a/tests/pos/parsercombinators-this.scala b/tests/pos/parsercombinators-this.scala index 70b423985400..19ac878c888f 100644 --- a/tests/pos/parsercombinators-this.scala +++ b/tests/pos/parsercombinators-this.scala @@ -19,7 +19,7 @@ end Combinator final case class Apply[C, E](action: C => Option[E]) final case class Combine[A, B](first: A, second: B) -given apply[C, E]: Combinator with { +given apply: [C, E] => Combinator { type Self = Apply[C, E] type Context = C type Element = E diff --git a/tests/pos/phantom-Eq.scala b/tests/pos/phantom-Eq.scala index 9f892d353b41..d844c4b110c6 100644 --- a/tests/pos/phantom-Eq.scala +++ b/tests/pos/phantom-Eq.scala @@ -29,5 +29,5 @@ object EqUtil { erased given eqByteNum: PhantomEq[Byte, Number] = compiletime.erasedValue erased given eqNumByte: PhantomEq[Number, Byte] = compiletime.erasedValue - erased given eqSeq[T, U](using erased PhantomEq[T, U]): PhantomEq[Seq[T], Seq[U]] = compiletime.erasedValue + erased given eqSeq: [T, U] => (erased PhantomEq[T, U]) => PhantomEq[Seq[T], Seq[U]] = compiletime.erasedValue } diff --git a/tests/pos/phantom-Eq2/Phantom-Eq_1.scala b/tests/pos/phantom-Eq2/Phantom-Eq_1.scala index 120c68174898..b041a4a87efe 100644 --- a/tests/pos/phantom-Eq2/Phantom-Eq_1.scala +++ b/tests/pos/phantom-Eq2/Phantom-Eq_1.scala @@ -14,6 +14,6 @@ object EqUtil { erased given eqDouble: PhantomEqEq[Double] = new PhantomEq[Double, Double] erased given eqByteNum: PhantomEq[Byte, Number] = new PhantomEq[Byte, Number] erased given eqNumByte: PhantomEq[Number, Byte] = new PhantomEq[Number, Byte] - erased given eqSeq[T, U] (using erased eq: PhantomEq[T, U]): PhantomEq[Seq[T], Seq[U]] = + erased given eqSeq: [T, U] => (erased eq: PhantomEq[T, U]) => PhantomEq[Seq[T], Seq[U]] = new PhantomEq[Seq[T], Seq[U]] } diff --git a/tests/pos/phantom-Evidence.scala b/tests/pos/phantom-Evidence.scala index ecdd84cbc481..f56ce3b798ee 100644 --- 
a/tests/pos/phantom-Evidence.scala +++ b/tests/pos/phantom-Evidence.scala @@ -26,5 +26,5 @@ object WithNormalState { object Utils { type =::=[From, To] - erased given tpEquals[A]: (A =::= A) = compiletime.erasedValue + erased given tpEquals: [A] => (A =::= A) = compiletime.erasedValue } diff --git a/tests/pos/reference/delegates.scala b/tests/pos/reference/delegates.scala index 1cc7a29eee7d..95b9856b687b 100644 --- a/tests/pos/reference/delegates.scala +++ b/tests/pos/reference/delegates.scala @@ -26,11 +26,11 @@ end Common object Instances extends Common: - given intOrd: Ord[Int] with + given intOrd: Ord[Int]: extension (x: Int) def compareTo(y: Int) = if (x < y) -1 else if (x > y) +1 else 0 - given listOrd[T](using Ord[T]): Ord[List[T]] with + given listOrd: [T] => Ord[T] => Ord[List[T]]: extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -49,13 +49,13 @@ object Instances extends Common: def second = xs.tail.head def third = xs.tail.tail.head - given listMonad: Monad[List] with + given listMonad: Monad[List]: extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = List(x) - given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with + given readerMonad: [Ctx] => Monad[[X] =>> Ctx => X]: extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -93,7 +93,7 @@ object Instances extends Common: object TastyImpl extends TastyAPI: type Symbol = String - given symDeco: SymDeco with + given symDeco: SymDeco: extension (sym: Symbol) def name = sym class D[T] @@ -122,7 +122,7 @@ object Instances extends Common: class Token(str: String) object Token: - given StringToToken: Conversion[String, Token] with + given StringToToken: Conversion[String, Token]: def apply(str: String): Token = new Token(str) val x: Token = "if" @@ -140,11 +140,11 @@ object PostConditions: end PostConditions object AnonymousInstances extends Common: - given Ord[Int] with + given Ord[Int]: extension (x: Int) def compareTo(y: Int) = if (x < y) -1 else if (x > y) +1 else 0 - given [T: Ord]: Ord[List[T]] with + given [T: Ord] => Ord[List[T]]: extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys).match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -165,7 +165,7 @@ object AnonymousInstances extends Common: : Convertible[List[From], List[To]] with extension (x: List[From]) def convert: List[To] = x.map(c.convert) - given Monoid[String] with + given Monoid[String]: extension (x: String) def combine(y: String): String = x.concat(y) def unit: String = "" diff --git a/tests/pos/reference/extension-methods.scala b/tests/pos/reference/extension-methods.scala index 64fd23322c1b..54310d6f6dd9 100644 --- a/tests/pos/reference/extension-methods.scala +++ b/tests/pos/reference/extension-methods.scala @@ -81,7 +81,7 @@ object ExtMethods: trait Ord[T]: extension (x: T) def less (y: T): Boolean object Ord: - given Ord[Int] with + given Ord[Int]: extension (x: Int) def less (y: Int): Boolean = x < y end Ord @@ -90,7 +90,7 @@ object ExtMethods: extension [T](xs: Lst[Lst[T]]) def flatten: Lst[T] = xs.foldLeft(Lst())(_ ++ _) - given ord[T: Ord]: Ord[Lst[T]] with + given ord: [T: Ord] => Ord[Lst[T]]: extension (xs: Lst[T]) def less (ys: Lst[T]): Boolean = ??? 
end Lst diff --git a/tests/pos/suspend-strawman/choices.scala b/tests/pos/suspend-strawman/choices.scala index 968c223d9c0b..e15626c6ecc7 100644 --- a/tests/pos/suspend-strawman/choices.scala +++ b/tests/pos/suspend-strawman/choices.scala @@ -7,7 +7,7 @@ trait Choice: // the handler def choices[T](body: Choice ?=> T): Seq[T] = boundary[Seq[T]]: - given Choice with + given Choice: def choose[A](choices: A*): A = suspend[A, Seq[T]](s => choices.flatMap(s.resume)) Seq(body) diff --git a/tests/pos/suspend-strawman/generators.scala b/tests/pos/suspend-strawman/generators.scala index a890196e6215..a5c73fe41ffe 100644 --- a/tests/pos/suspend-strawman/generators.scala +++ b/tests/pos/suspend-strawman/generators.scala @@ -15,7 +15,7 @@ object generate: var step: () => Unit = () => boundary[Unit]: - given CanProduce[T] with + given CanProduce[T]: def produce(x: T): Unit = nextElem = Some(x) suspend[Unit, Unit]: k => @@ -55,7 +55,7 @@ object Variant2: var step: () => Option[T] = () => boundary: - given CanProduce[T] with + given CanProduce[T]: def produce(x: T): Unit = suspend[Unit, Option[T]]: k => step = () => k.resume(()) diff --git a/tests/pos/suspend-strawman/monadic-reflect.scala b/tests/pos/suspend-strawman/monadic-reflect.scala index 84c5255c2a96..19bb3a283c8a 100644 --- a/tests/pos/suspend-strawman/monadic-reflect.scala +++ b/tests/pos/suspend-strawman/monadic-reflect.scala @@ -48,7 +48,7 @@ trait Monadic[M[_]: Monad]: */ def reify[R](prog: CanReflect[M] ?=> R): M[R] = boundary [M[R]]: - given CanReflect[M] with + given CanReflect[M]: def reflect[R2](mr: M[R2]): R2 = suspend [R2, M[R]] (k => mr.flatMap(k.resume)) pure(prog) diff --git a/tests/pos/suspend-strawman/simple-futures.scala b/tests/pos/suspend-strawman/simple-futures.scala index 0a80a74d49dc..8e75b0d4870a 100644 --- a/tests/pos/suspend-strawman/simple-futures.scala +++ b/tests/pos/suspend-strawman/simple-futures.scala @@ -32,7 +32,7 @@ object Future: // a handler for Async def async(body: Async ?=> Unit): Unit = boundary [Unit]: - given Async with + given Async: def await[T](f: Future[T]): T = f.result match case Some(x) => x case None => suspend[T, Unit](s => f.addWaiting(s.resume)) diff --git a/tests/pos/the.scala b/tests/pos/the.scala index 52c6e364fd92..90c56573c7cc 100644 --- a/tests/pos/the.scala +++ b/tests/pos/the.scala @@ -2,7 +2,7 @@ object Test { trait Foo { type T; val x: T } - given intFoo: Foo with { + given intFoo: Foo { type T = Int val x = 3 } diff --git a/tests/pos/toplevel-opaque-xm/Logarithm_1.scala b/tests/pos/toplevel-opaque-xm/Logarithm_1.scala index f2744aaae5af..436097fee478 100644 --- a/tests/pos/toplevel-opaque-xm/Logarithm_1.scala +++ b/tests/pos/toplevel-opaque-xm/Logarithm_1.scala @@ -14,7 +14,7 @@ object Logarithm { def exponent(l: Logarithm): Double = l - given AnyRef with { + given AnyRef { // This is the second way to unlift the logarithm type extension (x: Logarithm) def toDouble: Double = math.exp(x) extension (x: Logarithm) def + (y: Logarithm) = Logarithm(math.exp(x) + math.exp(y)) diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala index 5e4551b226b7..bf1a59247170 100644 --- a/tests/pos/typeclass-aggregates.scala +++ b/tests/pos/typeclass-aggregates.scala @@ -42,6 +42,6 @@ val y: Int = ??? 
: x.Self // given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = // new ord.OrdProxy with monoid.MonoidProxy {} -given [A](using ord: Ord { type Self = A }, monoid: Monoid { type Self = A}): ((Ord & Monoid) { type Self = A}) = +given [A] => (ord: Ord { type Self = A }, monoid: Monoid { type Self = A}) => ((Ord & Monoid) { type Self = A}) = new ord.OrdProxy with monoid.MonoidProxy {} diff --git a/tests/pos/typeclasses-this.scala b/tests/pos/typeclasses-this.scala index 33ccb8d9d653..ebe0dc348a06 100644 --- a/tests/pos/typeclasses-this.scala +++ b/tests/pos/typeclasses-this.scala @@ -36,7 +36,7 @@ end Common object Instances extends Common: - given intOrd: (Int is Ord) with + given intOrd: (Int is Ord): extension (x: Int) def compareTo(y: Int) = if x < y then -1 @@ -44,7 +44,7 @@ object Instances extends Common: else 0 // given [T](using tracked val ev: Ord { type Self = T}): Ord { type Self = List[T] } with - given [T: Ord]: (List[T] is Ord) with + given [T: Ord] => (List[T] is Ord): extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -53,7 +53,7 @@ object Instances extends Common: val fst = x.compareTo(y) if (fst != 0) fst else xs1.compareTo(ys1) - given listMonad: (List is Monad) with + given listMonad: (List is Monad): extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = @@ -61,7 +61,7 @@ object Instances extends Common: type Reader[Ctx] = [X] =>> Ctx => X - given readerMonad[Ctx]: (Reader[Ctx] is Monad) with + given readerMonad: [Ctx] => Reader[Ctx] is Monad: extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -83,7 +83,7 @@ object Instances extends Common: def maximum[T: Ord](xs: List[T]): T = xs.reduce(_ `max` _) - given descending[T: Ord]: (T is Ord) with + given descending: [T: Ord] => T is Ord: extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) def minimum[T: Ord](xs: List[T]) = @@ -123,7 +123,7 @@ class Sheep(val name: String): println(s"$name gets a haircut!") isNaked = true -given Sheep is Animal with +given Sheep is Animal: def apply(name: String) = Sheep(name) extension (self: Self) def name: String = self.name diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala index d0315a318310..40f992cbcb57 100644 --- a/tests/pos/typeclasses.scala +++ b/tests/pos/typeclasses.scala @@ -31,7 +31,7 @@ end Common object Instances extends Common: - given intOrd: (Int is Ord) with + given intOrd: Int is Ord: type Self = Int extension (x: Int) def compareTo(y: Int) = @@ -39,7 +39,7 @@ object Instances extends Common: else if x > y then +1 else 0 - given listOrd[T](using ord: T is Ord): (List[T] is Ord) with + given listOrd: [T] => (ord: T is Ord) => List[T] is Ord: extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -49,7 +49,7 @@ object Instances extends Common: if (fst != 0) fst else xs1.compareTo(ys1) end listOrd - given listMonad: (List is Monad) with + given listMonad: List is Monad: extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = @@ -60,7 +60,7 @@ object Instances extends Common: //given [Ctx] => Reader[Ctx] is Monad as readerMonad: - given readerMonad[Ctx]: (Reader[Ctx] is Monad) with + given readerMonad: [Ctx] => Reader[Ctx] is Monad: extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => 
f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -132,7 +132,7 @@ instance Sheep: Animal with */ // Implement the `Animal` trait for `Sheep`. -given (Sheep is Animal) with +given Sheep is Animal: def apply(name: String) = Sheep(name) extension (self: Self) def name: String = self.name diff --git a/tests/run-macros/BigFloat/BigFloat_1.scala b/tests/run-macros/BigFloat/BigFloat_1.scala index 5bb5b49587bd..246e3dcd442d 100644 --- a/tests/run-macros/BigFloat/BigFloat_1.scala +++ b/tests/run-macros/BigFloat/BigFloat_1.scala @@ -35,7 +35,7 @@ object BigFloat extends App { def fromDigits(digits: String) = apply(digits) } - given BigFloatFromDigits with { + given BigFloatFromDigits { override inline def fromDigits(digits: String) = ${ BigFloatFromDigitsImpl('digits) } @@ -43,7 +43,7 @@ object BigFloat extends App { // Should be in StdLib: - given ToExpr[BigInt] with { + given ToExpr[BigInt] { def apply(x: BigInt)(using Quotes) = '{BigInt(${Expr(x.toString)})} } diff --git a/tests/run-macros/quoted-liftable-derivation-macro-2/Derivation_1.scala b/tests/run-macros/quoted-liftable-derivation-macro-2/Derivation_1.scala index 1b45835006c0..1212866a3df0 100644 --- a/tests/run-macros/quoted-liftable-derivation-macro-2/Derivation_1.scala +++ b/tests/run-macros/quoted-liftable-derivation-macro-2/Derivation_1.scala @@ -9,7 +9,7 @@ object Lft { given Lft[Int] with def toExpr(x: Int)(using Quotes) = Expr(x) - inline given derived[T](using inline m: Mirror.Of[T]): Lft[T] = ${ derivedExpr('m) } + inline given derived: [T] => (inline m: Mirror.Of[T]) => Lft[T] = ${ derivedExpr('m) } private def derivedExpr[T](mirrorExpr: Expr[Mirror.Of[T]])(using Quotes, Type[T]): Expr[Lft[T]] = { val tpe = summonExprOrError[Type[T]] diff --git a/tests/run/Signals.scala b/tests/run/Signals.scala index bf5232bcd2d4..f6c67eb3df9f 100644 --- a/tests/run/Signals.scala +++ b/tests/run/Signals.scala @@ -29,7 +29,7 @@ package frp: object Signal: type Caller = Signal[?] - given noCaller: Caller(???) with + given noCaller: Caller(???): override def computeValue() = () end Signal diff --git a/tests/run/Typeable.scala b/tests/run/Typeable.scala index 18bf9a4deb6a..58a9838db9a1 100644 --- a/tests/run/Typeable.scala +++ b/tests/run/Typeable.scala @@ -27,13 +27,13 @@ object Typeable: class instanceOf[T: Typeable]: def unapply(x: Any): Option[T] = Typeable[T].cast(x) - given int: Typeable[Int] with + given int: Typeable[Int]: def cast(x: Any): Option[Int] = x match case x: Int => Some(x) case _ => None def describe = "Int" - given list[T: Typeable]: Typeable[List[T]] with + given list: [T: Typeable] => Typeable[List[T]]: def cast(x: Any): Option[List[T]] = x match case x: List[_] if x.forall(Typeable[T].cast(_).isDefined) => Some(x.asInstanceOf[List[T]]) case _ => None diff --git a/tests/run/abstract-givens.scala b/tests/run/abstract-givens.scala index 6ff966411dde..addbf524ba47 100644 --- a/tests/run/abstract-givens.scala +++ b/tests/run/abstract-givens.scala @@ -6,7 +6,7 @@ trait T: object Test extends T, App: given x: Int = 22 override given y(using Int): String = summon[Int].toString - given z[T](using T): Seq[T] with + given z: [T] => T => Seq[T]: override def apply(x: Int) = ??? override def length = ??? override def iterator = ??? 
diff --git a/tests/run/cochis-example.scala b/tests/run/cochis-example.scala index 93ce9323d2f7..652d58ef0d81 100644 --- a/tests/run/cochis-example.scala +++ b/tests/run/cochis-example.scala @@ -1,7 +1,7 @@ import Predef.{assert, $conforms as _} trait A { - given id[X]: (X => X) = x => x + given id: [X] => (X => X) = x => x def trans[X](x: X)(using f: X => X) = f(x) // (2) } object Test extends A with App{ diff --git a/tests/run/extension-specificity2.scala b/tests/run/extension-specificity2.scala index eeaad80a3687..aa1aef299163 100644 --- a/tests/run/extension-specificity2.scala +++ b/tests/run/extension-specificity2.scala @@ -4,11 +4,11 @@ trait Foo[F[_]]: def test1 = // Simplified from https://github.com/typelevel/spotted-leopards/issues/2 - given listFoo: Foo[List] with + given listFoo: Foo[List]: extension [A](fa: List[A]) def foo[B](fb: List[B]): Int = 1 - given functionFoo[T]: Foo[[A] =>> T => A] with + given functionFoo: [T] => Foo[[A] =>> T => A]: extension [A](fa: T => A) def foo[B](fb: T => B): Int = 2 @@ -23,9 +23,9 @@ def test2 = trait Bar2: extension (x: Int => 1) def bar(y: Int): Int - given bla1[T]: Bar1[T] with + given bla1: [T] => Bar1[T]: extension (x: T => T) def bar(y: T): Int = 1 - given bla2: Bar2 with + given bla2: Bar2: extension (x: Int => 1) def bar(y: Int): Int = 2 val f: Int => 1 = x => 1 diff --git a/tests/run/extmethod-overload.scala b/tests/run/extmethod-overload.scala index 4a9fe125a8a9..46239ed3290b 100644 --- a/tests/run/extmethod-overload.scala +++ b/tests/run/extmethod-overload.scala @@ -97,7 +97,7 @@ object Test extends App { extension (x: Int) def yy(y: Int) = x + y } - given AnyRef with + given AnyRef: extension (x: Int) { def yy (y: Int) = x - y } diff --git a/tests/run/extmethods2.scala b/tests/run/extmethods2.scala index f876b00a9974..a65082dc9e11 100644 --- a/tests/run/extmethods2.scala +++ b/tests/run/extmethods2.scala @@ -2,7 +2,7 @@ object Test extends App { class TC - given stringListOps(using TC): Object with { + given stringListOps: TC => Object { type T = List[String] extension (x: T) def foo(y: T) = (x ++ y, summon[TC]) extension (x: T) def bar(y: Int) = (x(0)(y), summon[TC]) diff --git a/tests/run/fragables-extension.scala b/tests/run/fragables-extension.scala index 417a612a3f04..1fc738af25d8 100644 --- a/tests/run/fragables-extension.scala +++ b/tests/run/fragables-extension.scala @@ -11,11 +11,11 @@ given Fragable[Int] = x => List(IntFrag(x)) given Fragable[String] = x => List(StringFrag(x)) -given [A: Fragable]: Fragable[List[A]] = +given [A: Fragable] => Fragable[List[A]] = x => x.flatMap(_.toFrags) given Fragable[EmptyTuple] = x => Nil -given [A: Fragable, B <: Tuple: Fragable]: Fragable[A *: B] = +given [A: Fragable, B <: Tuple: Fragable] => Fragable[A *: B] = x => x.head.toFrags ++ x.tail.toFrags def f[T: Fragable](x: T) = diff --git a/tests/run/genericNumLits.scala b/tests/run/genericNumLits.scala index 7f91ab0b3ef5..7d1737903d21 100644 --- a/tests/run/genericNumLits.scala +++ b/tests/run/genericNumLits.scala @@ -10,7 +10,7 @@ object Test extends App { case class Even(n: Int) - given FromDigits[Even] with { + given FromDigits[Even] { def fromDigits(digits: String): Even = { val intValue = digits.toInt if (intValue % 2 == 0) Even(intValue) diff --git a/tests/run/given-eta.scala b/tests/run/given-eta.scala index a01f1c441018..571125f2b4c5 100644 --- a/tests/run/given-eta.scala +++ b/tests/run/given-eta.scala @@ -12,7 +12,7 @@ def g(x: Int)(using d: D) (y: d.T): d.T = d.trans(y) val x = f assert(x(2)(3) == 6) - given D with + given 
D: type T = Int def trans(other: T) = 2 * other val y = g diff --git a/tests/run/i11050.scala b/tests/run/i11050.scala index 027812c013c4..34613fdaf5fd 100644 --- a/tests/run/i11050.scala +++ b/tests/run/i11050.scala @@ -107,9 +107,9 @@ trait Show[-T]: def show(x: T): String object Show: - given Show[Int] with { def show(x: Int) = s"$x" } - given Show[Char] with { def show(x: Char) = s"'$x'" } - given Show[String] with { def show(x: String) = s"$"$x$"" } + given Show[Int] { def show(x: Int) = s"$x" } + given Show[Char] { def show(x: Char) = s"'$x'" } + given Show[String] { def show(x: String) = s"$"$x$"" } inline def show[T](x: T): String = summonInline[Show[T]].show(x) diff --git a/tests/run/i11174.scala b/tests/run/i11174.scala index 644d3144d8d8..89759d37b076 100644 --- a/tests/run/i11174.scala +++ b/tests/run/i11174.scala @@ -38,7 +38,7 @@ class MainClass { case Square(width: Int, height: Int) extends Shape case Circle(radius: Int) extends Shape - given EnumerateNames[Int] with { + given EnumerateNames[Int] { def apply: String = "int" } inline given auto[T]:EnumerateNames[T] = EnumerateNames.derived diff --git a/tests/run/i11174local.scala b/tests/run/i11174local.scala index a0c9b8e253b9..1f18c67680d9 100644 --- a/tests/run/i11174local.scala +++ b/tests/run/i11174local.scala @@ -34,10 +34,10 @@ trait EnumerateNames[T] { } class MainClass { - given EnumerateNames[Int] with { + given EnumerateNames[Int] { def apply: String = "int" } - inline given auto[T]: EnumerateNames[T] = EnumerateNames.derived + inline given auto: [T] => EnumerateNames[T] = EnumerateNames.derived def deriveEnumerateNames[T](using en: EnumerateNames[T]) = en.apply def run = { diff --git a/tests/run/i11542.scala b/tests/run/i11542.scala index 32e940e745d0..2d799f4233f9 100644 --- a/tests/run/i11542.scala +++ b/tests/run/i11542.scala @@ -14,7 +14,7 @@ object demo { childReaders: List[Reader[_]] ) extends Reader[A] - inline given rdr[A <: Tuple](using m: deriving.Mirror.ProductOf[A]): Reader[A] = { + inline given rdr: [A <: Tuple] => (m: deriving.Mirror.ProductOf[A]) => Reader[A] = { new CombinedReader(m, summonReader[m.MirroredElemTypes]) } diff --git a/tests/run/i11542a.scala b/tests/run/i11542a.scala index db4142fb1a86..37a0c816d1f2 100644 --- a/tests/run/i11542a.scala +++ b/tests/run/i11542a.scala @@ -1,6 +1,6 @@ type Foo = Tuple2[Int, Int] // case class Foo(x: Int, y: Int) // works class Reader(m: deriving.Mirror.ProductOf[Foo]) -given reader1(using m: deriving.Mirror.ProductOf[Foo]): Reader = new Reader(m) +given reader1: (m: deriving.Mirror.ProductOf[Foo]) => Reader = new Reader(m) inline def summonReader(): Reader = compiletime.summonInline[Reader] @main def Test() = summonReader() diff --git a/tests/run/i11563.scala b/tests/run/i11563.scala index 97f60140e402..182bcc11927e 100644 --- a/tests/run/i11563.scala +++ b/tests/run/i11563.scala @@ -4,7 +4,7 @@ import scala.deriving.Mirror trait Printer[T]: def format: String -given Printer[String] with +given Printer[String]: def format: String = "String" inline given[T](using mirror: Mirror.ProductOf[T]): Printer[T] = Printer.derived[T] diff --git a/tests/run/i11583.scala b/tests/run/i11583.scala index fd4d63faa084..bbb00671e528 100644 --- a/tests/run/i11583.scala +++ b/tests/run/i11583.scala @@ -20,11 +20,11 @@ extension [A](a: A) @main def Test = - given Context with + given Context: type Type = String type Term = Boolean - given Env with + given Env: type Extra = Int val t1: (String, Boolean, Int) = true.:#:("hello")(23) diff --git a/tests/run/i11961.scala 
b/tests/run/i11961.scala index f289f6b415b6..1ecb205b2ba5 100644 --- a/tests/run/i11961.scala +++ b/tests/run/i11961.scala @@ -17,10 +17,10 @@ trait Printable[T]: object Printable: - given Printable[String] with + given Printable[String]: def print: Unit = println("STRING") - given Printable[Boolean] with + given Printable[Boolean]: def print: Unit = println("BOOLEAN") def printProduct[T](p: Mirror.ProductOf[T], elems: => List[Printable[_]]): Printable[T] = @@ -28,7 +28,7 @@ object Printable: def print: Unit = elems.foreach(_.print) - inline given derived[T](using m: Mirror.Of[T]): Printable[T] = + inline given derived: [T] => (m: Mirror.Of[T]) => Printable[T] = val elemInstances = summonAllPrintable[m.MirroredElemTypes] inline m match case p: Mirror.ProductOf[T] => printProduct(p, elemInstances) diff --git a/tests/run/i11966.scala b/tests/run/i11966.scala index 184e68f7599c..57956b9d7274 100644 --- a/tests/run/i11966.scala +++ b/tests/run/i11966.scala @@ -6,11 +6,11 @@ trait B[T: A]: trait C[T: A] extends B[T] -given a1: A[Int] with +given a1: A[Int]: def f = 1 class D extends C[Int]: - given a2: A[Int] with + given a2: A[Int]: def f = 2 @main def Test = D() diff --git a/tests/run/i12328.scala b/tests/run/i12328.scala index 0f9f00ca19aa..04e97c618e10 100644 --- a/tests/run/i12328.scala +++ b/tests/run/i12328.scala @@ -20,7 +20,7 @@ object Schema { new Schema[T] { } } - inline given gen[T]: Schema[T] = derived + inline given gen: [T] => Schema[T] = derived } @main def Test: Unit = { diff --git a/tests/run/i13146.scala b/tests/run/i13146.scala index cbaee29a5ec0..6d77e6ddc209 100644 --- a/tests/run/i13146.scala +++ b/tests/run/i13146.scala @@ -10,7 +10,7 @@ trait Eq[-T]: def eqv(x: T, y: T): Boolean object Eq: - given Eq[Int] with + given Eq[Int]: def eqv(x: Int, y: Int) = x == y def check(elem: Eq[_])(x: Any, y: Any): Boolean = @@ -31,7 +31,7 @@ object Eq: case ((x, y), elem) => check(elem)(x, y) } - inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + inline given derived: [T] => (m: Mirror.Of[T]) => Eq[T] = lazy val elemInstances = summonAll[m.MirroredElemTypes] inline m match case s: Mirror.SumOf[T] => eqSum(s, elemInstances) @@ -43,7 +43,7 @@ enum Opt[+T]: case Nn object Opt: - given derivedEq[T]: Eq[Opt[T]] = Eq.derived + given derivedEq: [T] => Eq[Opt[T]] = Eq.derived @main def Test(): Unit = import Opt.* diff --git a/tests/run/i13146a.scala b/tests/run/i13146a.scala index 37e135ac9f76..f9df675510e3 100644 --- a/tests/run/i13146a.scala +++ b/tests/run/i13146a.scala @@ -26,7 +26,7 @@ trait Eq[-T]: object Eq: - given Eq[Int] with + given Eq[Int]: def eqv(x: Int, y: Int) = x == y def check(elem: Eq[_])(x: Any, y: Any): Boolean = @@ -47,7 +47,7 @@ object Eq: case ((x, y), elem) => check(elem)(x, y) } - inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + inline given derived: [T] => (m: Mirror.Of[T]) => Eq[T] = lazy val elemInstances = summonAll[T, m.MirroredElemTypes] inline m match case s: Mirror.SumOf[T] => eqSum(s, elemInstances) diff --git a/tests/run/i13146poly.scala b/tests/run/i13146poly.scala index 849f4dc7eb52..1d011221b931 100644 --- a/tests/run/i13146poly.scala +++ b/tests/run/i13146poly.scala @@ -3,10 +3,10 @@ import scala.deriving.* trait Functor[F[_]] object Functor: - given [C]: Functor[[T] =>> C]() + given [C] => Functor[[T] =>> C]() given Functor[[T] =>> Tuple1[T]]() - given t2 [T]: Functor[[U] =>> (T, U)]() - given t3 [T, U]: Functor[[V] =>> (T, U, V)]() + given t2: [T] => Functor[[U] =>> (T, U)]() + given t3: [T, U] => Functor[[V] =>> (T, U, V)]() def 
derived[F[_]](using m: Mirror { type MirroredType[X] = F[X] ; type MirroredElemTypes[_] }, r: Functor[m.MirroredElemTypes]): Functor[F] = new Functor[F] {} diff --git a/tests/run/i13304.scala b/tests/run/i13304.scala index f60359b82d47..f8e50f004848 100644 --- a/tests/run/i13304.scala +++ b/tests/run/i13304.scala @@ -1,10 +1,10 @@ trait Zero[F[_]]: def zero[A]: F[A] -given Zero[List] with +given Zero[List]: def zero[A] = List.empty[A] -given Zero[Option] with +given Zero[Option]: def zero[A] = Option.empty[A] diff --git a/tests/run/i13332shapeless.scala b/tests/run/i13332shapeless.scala index 204980d8fe62..fe63a9ba6855 100644 --- a/tests/run/i13332shapeless.scala +++ b/tests/run/i13332shapeless.scala @@ -119,7 +119,7 @@ package GenericTestsAux2 { class Bar[A] object Bar { - given gen[A](using Generic[A]): Bar[A] = Bar() + given gen: [A: Generic] => Bar[A] = Bar() } class Outer1 { @@ -362,7 +362,7 @@ package shapeless { case Mirror.Sum { type MirroredType = T } => CListRefl[Elems] case Mirror.Product { type MirroredType = T } => Elems - transparent inline given [T](using m: Mirror.Of[T]): Generic[T] = apply[T] + transparent inline given [T: Mirror.Of] => Generic[T] = apply[T] transparent inline def apply[T](using m: Mirror.Of[T]) = new Generic[T] { type Repr = Generic.Repr[T, m.type, m.MirroredElemTypes] diff --git a/tests/run/i14150.scala b/tests/run/i14150.scala index 9ddfaf342bb9..89060333105b 100644 --- a/tests/run/i14150.scala +++ b/tests/run/i14150.scala @@ -22,7 +22,7 @@ object GetConstValue { } } - given empty : GetConstValue[EmptyTuple] with { + given empty : GetConstValue[EmptyTuple] { type Out = EmptyTuple def get : Out = EmptyTuple } diff --git a/tests/run/i17115.scala b/tests/run/i17115.scala index 5a7cac5d0dc1..5c82cf13a8ad 100644 --- a/tests/run/i17115.scala +++ b/tests/run/i17115.scala @@ -1,6 +1,6 @@ trait A[T <: Tuple] { val x: Int } -given empty: A[EmptyTuple] with { val x = 1 } -given inductive[Tup <: NonEmptyTuple](using A[Tuple.Tail[Tup]]): A[Tup] with { val x = summon[A[Tuple.Tail[Tup]]].x + 1 } +given empty: A[EmptyTuple] { val x = 1 } +given inductive: [Tup <: NonEmptyTuple] => A[Tuple.Tail[Tup]] => A[Tup] { val x = summon[A[Tuple.Tail[Tup]]].x + 1 } object Test: def main(args: Array[String]): Unit = diff --git a/tests/run/i6716.scala b/tests/run/i6716.scala index e793381cce1c..c0678c51fb30 100644 --- a/tests/run/i6716.scala +++ b/tests/run/i6716.scala @@ -4,7 +4,7 @@ trait Monad[T]: def id: String class Foo object Foo { - given Monad[Foo] with { def id = "Foo" } + given Monad[Foo] { def id = "Foo" } } opaque type Bar = Foo diff --git a/tests/run/i7788.scala b/tests/run/i7788.scala index 99d16ba1521c..3dc4d247ad94 100644 --- a/tests/run/i7788.scala +++ b/tests/run/i7788.scala @@ -4,9 +4,9 @@ trait Show[-A]: given Show[String] = x => x given Show[Int] = _.toString -given showEither[A,B](using sA: Show[A])(using Show[B]): Show[Either[A,B]] = +given showEither: [A,B] => (sA: Show[A]) => Show[B] => Show[Either[A,B]] = _.fold(a => s"Left(${summon[Show[A]].show(a)})", b => s"Right(${summon[Show[B]].show(b)})") -given [A,B](using sA: Show[A])(using sB: Show[B]): Show[(A,B)] = (a,b) => s"(${sA.show(a)}), ${sB.show(b)})" +given [A,B] => (sA: Show[A]) => (sB: Show[B]) => Show[(A,B)] = (a,b) => s"(${sA.show(a)}), ${sB.show(b)})" @main def Test = diff --git a/tests/run/i9011.scala b/tests/run/i9011.scala index 22ab9bc1e1a6..13afbf1a4297 100644 --- a/tests/run/i9011.scala +++ b/tests/run/i9011.scala @@ -10,7 +10,7 @@ trait Eq[T] { } object Eq { - given Eq[Int] with { + given 
Eq[Int] { def eqv(x: Int, y: Int) = x == y } @@ -40,7 +40,7 @@ object Eq { } } - inline given derived[T](using m: Mirror.Of[T]): Eq[T] = { + inline given derived: [T] => (m: Mirror.Of[T]) => Eq[T] = { val elemInstances = summonAll[m.MirroredElemTypes] inline m match { case s: Mirror.SumOf[T] => eqSum(s, elemInstances) diff --git a/tests/run/i9473.scala b/tests/run/i9473.scala index 384870ea42af..4b4e8d4c1d07 100644 --- a/tests/run/i9473.scala +++ b/tests/run/i9473.scala @@ -11,7 +11,7 @@ trait Eq[T] { } object Eq { - given Eq[Int] with { + given Eq[Int] { def eqv(x: Int, y: Int) = x == y } @@ -36,7 +36,7 @@ object Eq { } } - inline given derived[T](using m: Mirror.Of[T]): Eq[T] = { + inline given derived: [T] => (m: Mirror.Of[T]) => Eq[T] = { lazy val elemInstances = summonAll[m.MirroredElemTypes] inline m match { case s: Mirror.SumOf[T] => eqSum(s, elemInstances) diff --git a/tests/run/i9530.scala b/tests/run/i9530.scala index e0262764039f..0c003a4bf475 100644 --- a/tests/run/i9530.scala +++ b/tests/run/i9530.scala @@ -13,7 +13,7 @@ extension (using s: Scope)(expr: s.Expr) def f(using s: Scope)(x: s.Expr): (String, s.Value) = (x.show, x.eval) -given scope: Scope with +given scope: Scope: case class Expr(str: String) type Value = Int def expr(x: String) = Expr(x) diff --git a/tests/run/i9928.scala b/tests/run/i9928.scala index 3a3f818b17d3..6e4149ef6538 100644 --- a/tests/run/i9928.scala +++ b/tests/run/i9928.scala @@ -2,7 +2,7 @@ trait Magic[F]: extension (x: Int) def read: F trait LowPrio: - given Magic[String] with + given Magic[String]: extension(x: Int) def read: String = println("In string") s"$x" @@ -15,7 +15,7 @@ object test1: import Magic.given def apply(s: String): Foo = s - given Magic[Foo] with + given Magic[Foo]: extension (x: Int) def read: Foo = println("In foo") Foo(s"$x") @@ -25,7 +25,7 @@ object test1: object test2: object Magic extends LowPrio: - given Magic[Foo] with + given Magic[Foo]: extension (x: Int) def read: Foo = println("In foo") Foo(s"$x") diff --git a/tests/run/ift-return.scala b/tests/run/ift-return.scala index 021c73173051..b49f4c647ee0 100644 --- a/tests/run/ift-return.scala +++ b/tests/run/ift-return.scala @@ -11,9 +11,9 @@ def f(x: Boolean): A ?=> (c: Ctx) ?=> (Int, c.T) = (summon[A].x, summon[Ctx].y) @main def Test = - given A with + given A: val x = 22 - given Ctx with + given Ctx: type T = String val x = "abc" val y = "def" diff --git a/tests/run/implicit-alias.scala b/tests/run/implicit-alias.scala index 64117c3fc977..b376e1635496 100644 --- a/tests/run/implicit-alias.scala +++ b/tests/run/implicit-alias.scala @@ -60,7 +60,7 @@ object Test extends App { locally { println("with type params") - given t[X]: TC = new TC + given t: [X] => TC = new TC summon[TC] summon[TC] } diff --git a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index 9e59cf5f1869..b12fe5b2788f 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -5,19 +5,19 @@ object Show { def apply[T](implicit st: Show[T]): Int = st.i given showInt: Show[Int] = new Show[Int](0) - given fallback[T]: Show[T] = new Show[T](1) + given fallback: [T] => Show[T] = new Show[T](1) } class Generic object Generic { given gen: Generic = new Generic - given showGen[T](using Generic): Show[T] = new Show[T](2) + given showGen: [T] => Generic => Show[T] = new Show[T](2) } class Generic2 object Generic2 { opaque type HiPriority = AnyRef - given showGen[T]: (Show[T] & HiPriority) = new Show[T](2).asInstanceOf + given showGen: [T] => Show[T] & HiPriority 
= new Show[T](2).asInstanceOf } class SubGen extends Generic @@ -29,11 +29,11 @@ object Contextual { given ctx: Context() - given showGen[T](using Generic): Show[T] = new Show[T](2) + given showGen: [T] => Generic => Show[T] = new Show[T](2) - given showGen[T](using Generic, Context): Show[T] = new Show[T](3) + given showGen: [T] => (Generic, Context) => Show[T] = new Show[T](3) - given showGen[T](using SubGen): Show[T] = new Show[T](4) + given showGen: [T] => SubGen => Show[T] = new Show[T](4) } object Test extends App { diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index a9380e117875..52eb07f1679d 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -77,12 +77,12 @@ def test2a = { */ object Impl3 { trait LowPriority // A marker trait to indicate a lower priority - given t1[T]: E[T]("low") with LowPriority + given t1: [T] => E[T]("low"), LowPriority } object Override { - given over[T]: E[T]("hi") with {} + given over: [T] => E[T]("hi") } def test3 = { @@ -135,7 +135,7 @@ object HigherPriority { } object fallback5 { - given [T](using ev: E[T] = new E[T]("fallback")): (E[T] & HigherPriority.Type) = HigherPriority.inject(ev) + given [T] => (ev: E[T] = new E[T]("fallback")) => E[T] & HigherPriority.Type = HigherPriority.inject(ev) } def test5 = { diff --git a/tests/run/inline-numeric/Fractional.scala b/tests/run/inline-numeric/Fractional.scala index f1bc81246a43..e88442e42c9d 100644 --- a/tests/run/inline-numeric/Fractional.scala +++ b/tests/run/inline-numeric/Fractional.scala @@ -17,13 +17,13 @@ trait Fractional[T] extends Numeric[T]: transparent inline def /(inline y: T) = div(x, y) object Fractional: - given BigDecimalIsFractional: BigDecimalIsConflicted with Fractional[BigDecimal] with + given BigDecimalIsFractional: BigDecimalIsConflicted, Fractional[BigDecimal]: transparent inline def div(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x / y protected transparent inline def isNaN(inline x: BigDecimal): Boolean = false protected transparent inline def isNegZero(inline x: BigDecimal): Boolean = false - given DoubleIsFractional: Fractional[Double] with Ordering.DoubleIeeeOrdering with + given DoubleIsFractional: Fractional[Double], Ordering.DoubleIeeeOrdering: transparent inline def plus(inline x: Double, inline y: Double): Double = x + y transparent inline def minus(inline x: Double, inline y: Double): Double = x - y transparent inline def times(inline x: Double, inline y: Double): Double = x * y @@ -42,7 +42,7 @@ object Fractional: transparent inline def toFloat: Float = x.toFloat transparent inline def toDouble: Double = x - given FloatIsFractional: Fractional[Float] with Ordering.FloatIeeeOrdering with + given FloatIsFractional: Fractional[Float], Ordering.FloatIeeeOrdering: transparent inline def plus(inline x: Float, inline y: Float): Float = x + y transparent inline def minus(inline x: Float, inline y: Float): Float = x - y transparent inline def times(inline x: Float, inline y: Float): Float = x * y diff --git a/tests/run/inline-numeric/Integral.scala b/tests/run/inline-numeric/Integral.scala index 1a740a3e1d99..3b199aad6b11 100644 --- a/tests/run/inline-numeric/Integral.scala +++ b/tests/run/inline-numeric/Integral.scala @@ -19,11 +19,11 @@ trait Integral[T] extends Numeric[T]: transparent inline def /%(inline y: T) = (quot(x, y), rem(x, y)) object Integral: - given BigDecimalAsIfIntegral: Integral[BigDecimal] with BigDecimalIsConflicted with + given BigDecimalAsIfIntegral: Integral[BigDecimal], 
BigDecimalIsConflicted: transparent inline def quot(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x quot y transparent inline def rem(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x remainder y - given BigIntIsIntegral: Integral[BigInt] with Ordering.BigIntOrdering with + given BigIntIsIntegral: Integral[BigInt], Ordering.BigIntOrdering: transparent inline def plus(inline x: BigInt, inline y: BigInt): BigInt = x + y transparent inline def minus(inline x: BigInt, inline y: BigInt): BigInt = x - y transparent inline def times(inline x: BigInt, inline y: BigInt): BigInt = x * y @@ -41,7 +41,7 @@ object Integral: transparent inline def quot(inline x: BigInt, inline y: BigInt): BigInt = x / y transparent inline def rem(inline x: BigInt, inline y: BigInt): BigInt = x % y - given ByteIsIntegral: Integral[Byte] with Ordering.ByteOrdering with + given ByteIsIntegral: Integral[Byte], Ordering.ByteOrdering: transparent inline def plus(inline x: Byte, inline y: Byte): Byte = (x + y).toByte transparent inline def minus(inline x: Byte, inline y: Byte): Byte = (x - y).toByte transparent inline def times(inline x: Byte, inline y: Byte): Byte = (x * y).toByte @@ -59,7 +59,7 @@ object Integral: transparent inline def toFloat: Float = x.toFloat transparent inline def toDouble: Double = x.toDouble - given CharIsIntegral: Integral[Char] with Ordering.CharOrdering with + given CharIsIntegral: Integral[Char], Ordering.CharOrdering: transparent inline def plus(inline x: Char, inline y: Char): Char = (x + y).toChar transparent inline def minus(inline x: Char, inline y: Char): Char = (x - y).toChar transparent inline def times(inline x: Char, inline y: Char): Char = (x * y).toChar @@ -77,7 +77,7 @@ object Integral: transparent inline def toFloat: Float = x.toFloat transparent inline def toDouble: Double = x.toDouble - given IntIsIntegral: Integral[Int] with Ordering.IntOrdering with + given IntIsIntegral: Integral[Int], Ordering.IntOrdering: transparent inline def plus(inline x: Int, inline y: Int): Int = x + y transparent inline def minus(inline x: Int, inline y: Int): Int = x - y transparent inline def times(inline x: Int, inline y: Int): Int = x * y @@ -95,7 +95,7 @@ object Integral: transparent inline def toFloat: Float = x.toFloat transparent inline def toDouble: Double = x.toDouble - given LongIsIntegral: Integral[Long] with Ordering.LongOrdering with + given LongIsIntegral: Integral[Long], Ordering.LongOrdering: transparent inline def plus(inline x: Long, inline y: Long): Long = x + y transparent inline def minus(inline x: Long, inline y: Long): Long = x - y transparent inline def times(inline x: Long, inline y: Long): Long = x * y @@ -113,7 +113,7 @@ object Integral: transparent inline def toFloat: Float = x.toFloat transparent inline def toDouble: Double = x.toDouble - given ShortIsIntegral: Integral[Short] with Ordering.ShortOrdering with + given ShortIsIntegral: Integral[Short], Ordering.ShortOrdering: transparent inline def plus(inline x: Short, inline y: Short): Short = (x + y).toShort transparent inline def minus(inline x: Short, inline y: Short): Short = (x - y).toShort transparent inline def times(inline x: Short, inline y: Short): Short = (x * y).toShort diff --git a/tests/run/instances-anonymous.scala b/tests/run/instances-anonymous.scala index 7a42496504ee..7fc0f69ee736 100644 --- a/tests/run/instances-anonymous.scala +++ b/tests/run/instances-anonymous.scala @@ -16,7 +16,7 @@ object Test extends App { println(circle.circumference) - given AnyRef with { + given AnyRef { 
extension (xs: Seq[String]) def longestStrings: Seq[String] = { val maxLength = xs.map(_.length).max xs.filter(_.length == maxLength) @@ -45,7 +45,7 @@ object Test extends App { def unit: T } - given Monoid[String] with { + given Monoid[String] { extension (x: String) def combine(y: String): String = x.concat(y) def unit: String = "" } @@ -63,13 +63,13 @@ object Test extends App { val minimum: T } - given Ord[Int] with { + given Ord[Int] { extension (x: Int) def compareTo(y: Int) = if (x < y) -1 else if (x > y) +1 else 0 val minimum = Int.MinValue } - given [T: Ord]: Ord[List[T]] with { + given [T: Ord] => Ord[List[T]] { extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys).match { case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -101,14 +101,14 @@ object Test extends App { def pure[A](x: A): F[A] } - given Monad[List] with { + given Monad[List] { extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = List(x) } - given [Ctx]: Monad[[X] =>> Ctx => X] with { + given [Ctx] => Monad[[X] =>> Ctx => X] { extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = diff --git a/tests/run/instances.scala b/tests/run/instances.scala index 128ea0700e02..1e1d908cbddf 100644 --- a/tests/run/instances.scala +++ b/tests/run/instances.scala @@ -46,7 +46,7 @@ object Test extends App { trait Monoid[T] extends SemiGroup[T]: def unit: T - given StringMonoid: Monoid[String] with + given StringMonoid: Monoid[String]: extension (x: String) def combine(y: String): String = x.concat(y) def unit: String = "" @@ -63,12 +63,12 @@ object Test extends App { val minimum: T end Ord - given Ord[Int] with + given Ord[Int]: extension (x: Int) def compareTo(y: Int) = if (x < y) -1 else if (x > y) +1 else 0 val minimum = Int.MinValue - given listOrd[T: Ord]: Ord[List[T]] with + given listOrd: [T: Ord] => Ord[List[T]]: extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys).match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -99,13 +99,13 @@ object Test extends App { def pure[A](x: A): F[A] end Monad - given listMonad: Monad[List] with + given listMonad: Monad[List]: extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = List(x) - given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with + given readerMonad: [Ctx] => Monad[[X] =>> Ctx => X]: extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = diff --git a/tests/run/poly-kinded-derives.scala b/tests/run/poly-kinded-derives.scala index 0d3c7e9f8ffd..627d2780a802 100644 --- a/tests/run/poly-kinded-derives.scala +++ b/tests/run/poly-kinded-derives.scala @@ -22,10 +22,10 @@ object Test extends App { { trait Functor[F[_]] object Functor { - given [C]: Functor[[T] =>> C]() + given [C] => Functor[[T] =>> C]() given Functor[[T] =>> Tuple1[T]]() - given t2 [T]: Functor[[U] =>> (T, U)]() - given t3 [T, U]: Functor[[V] =>> (T, U, V)]() + given t2: [T] => Functor[[U] =>> (T, U)]() + given t3: [T, U] => Functor[[V] =>> (T, U, V)]() def derived[F[_]](using m: Mirror { type MirroredType[X] = F[X] ; type MirroredElemTypes[_] }, r: Functor[m.MirroredElemTypes]): Functor[F] = new Functor[F] {} } @@ -40,8 +40,8 @@ object Test extends App { { trait FunctorK[F[_[_]]] object FunctorK { - given [C]: FunctorK[[F[_]] =>> C]() - given [T]: FunctorK[[F[_]] =>> Tuple1[F[T]]]() + given [C] => FunctorK[[F[_]] =>> C]() + given [T] => FunctorK[[F[_]] 
=>> Tuple1[F[T]]]() def derived[F[_[_]]](using m: Mirror { type MirroredType[X[_]] = F[X] ; type MirroredElemTypes[_[_]] }, r: FunctorK[m.MirroredElemTypes]): FunctorK[F] = new FunctorK[F] {} } @@ -56,10 +56,10 @@ object Test extends App { { trait Bifunctor[F[_, _]] object Bifunctor { - given [C]: Bifunctor[[T, U] =>> C]() + given [C] => Bifunctor[[T, U] =>> C]() given Bifunctor[[T, U] =>> Tuple1[U]]() given t2: Bifunctor[[T, U] =>> (T, U)]() - given t3 [T]: Bifunctor[[U, V] =>> (T, U, V)]() + given t3: [T] => Bifunctor[[U, V] =>> (T, U, V)]() def derived[F[_, _]](using m: Mirror { type MirroredType[X, Y] = F[X, Y] ; type MirroredElemTypes[_, _] }, r: Bifunctor[m.MirroredElemTypes]): Bifunctor[F] = ??? } diff --git a/tests/run/publicInBinary/Lib_1.scala b/tests/run/publicInBinary/Lib_1.scala index d9936670a458..e7b5a0780d1c 100644 --- a/tests/run/publicInBinary/Lib_1.scala +++ b/tests/run/publicInBinary/Lib_1.scala @@ -47,9 +47,9 @@ class Qux() extends Foo(5, 5): trait A[T]: def f: T -@publicInBinary given A[Int] with +@publicInBinary given A[Int]: def f: Int = 1 -@publicInBinary given (using Double): A[Int] with +@publicInBinary given Double => A[Int]: def f: Int = 1 package inlines { diff --git a/tests/run/string-context-implicits-with-conversion.scala b/tests/run/string-context-implicits-with-conversion.scala index adcbae38830c..7173155e5bf9 100644 --- a/tests/run/string-context-implicits-with-conversion.scala +++ b/tests/run/string-context-implicits-with-conversion.scala @@ -4,7 +4,7 @@ object Lib { opaque type Showed = String - given [T](using show: Show[T]): Conversion[T, Showed] = x => show(x) + given [T] => (show: Show[T]) => Conversion[T, Showed] = x => show(x) trait Show[T] { def apply(x: T): String diff --git a/tests/run/structural-contextual.scala b/tests/run/structural-contextual.scala index e1d0890b73cd..43872856e4ed 100644 --- a/tests/run/structural-contextual.scala +++ b/tests/run/structural-contextual.scala @@ -15,7 +15,7 @@ type Person = ResolvingSelectable { @main def Test = - given Resolver with + given Resolver: def resolve(label: String) = label match case "name" => "Emma" case "age" => 8 diff --git a/tests/run/tagless.scala b/tests/run/tagless.scala index 5abc32d84578..7c1ffd3b433c 100644 --- a/tests/run/tagless.scala +++ b/tests/run/tagless.scala @@ -40,12 +40,12 @@ object Test extends App { add(lit(8), neg(add(lit(1), lit(2)))) // Base operations as type classes - given Exp[Int] with + given Exp[Int]: def lit(i: Int): Int = i def neg(t: Int): Int = -t def add(l: Int, r: Int): Int = l + r - given Exp[String] with + given Exp[String]: def lit(i: Int): String = i.toString def neg(t: String): String = s"(-$t)" def add(l: String, r: String): String = s"($l + $r)" @@ -65,10 +65,10 @@ object Test extends App { def tfm1[T: Exp : Mult] = add(lit(7), neg(mul(lit(1), lit(2)))) def tfm2[T: Exp : Mult] = mul(lit(7), tf1) - given Mult[Int] with + given Mult[Int]: def mul(l: Int, r: Int): Int = l * r - given Mult[String] with + given Mult[String]: def mul(l: String, r: String): String = s"$l * $r" println(tfm1[Int]) @@ -83,7 +83,7 @@ object Test extends App { } import Tree.* - given Exp[Tree] with Mult[Tree] with + given Exp[Tree], Mult[Tree]: def lit(i: Int): Tree = Node("Lit", Leaf(i.toString)) def neg(t: Tree): Tree = Node("Neg", t) def add(l: Tree, r: Tree): Tree = Node("Add", l , r) @@ -148,7 +148,7 @@ object Test extends App { def value[T](using Exp[T]): T } - given Exp[Wrapped] with + given Exp[Wrapped]: def lit(i: Int) = new Wrapped { def value[T](using e: Exp[T]): T = 
e.lit(i) } @@ -190,7 +190,7 @@ object Test extends App { // Added operation: negation pushdown enum NCtx { case Pos, Neg } - given [T](using e: Exp[T]): Exp[NCtx => T] with + given [T] => (e: Exp[T]) => Exp[NCtx => T]: import NCtx.* def lit(i: Int) = { case Pos => e.lit(i) @@ -209,7 +209,7 @@ object Test extends App { println(pushNeg(tf1[NCtx => String])) println(pushNeg(pushNeg(pushNeg(tf1))): String) - given [T](using e: Mult[T]): Mult[NCtx => T] with + given [T] => (e: Mult[T]) => Mult[NCtx => T]: import NCtx.* def mul(l: NCtx => T, r: NCtx => T): NCtx => T = { case Pos => e.mul(l(Pos), r(Pos)) @@ -222,7 +222,7 @@ object Test extends App { import IExp.* // Going from type class encoding to ADT encoding - given initialize: Exp[IExp] with + given initialize: Exp[IExp]: def lit(i: Int): IExp = Lit(i) def neg(t: IExp): IExp = Neg(t) def add(l: IExp, r: IExp): IExp = Add(l, r) diff --git a/tests/run/typeclass-derivation-doc-example.scala b/tests/run/typeclass-derivation-doc-example.scala index bc00311478bd..71c811608f5a 100644 --- a/tests/run/typeclass-derivation-doc-example.scala +++ b/tests/run/typeclass-derivation-doc-example.scala @@ -11,7 +11,7 @@ trait Eq[T] { } object Eq { - given Eq[Int] with { + given Eq[Int] { def eqv(x: Int, y: Int) = x == y } @@ -36,7 +36,7 @@ object Eq { } } - inline given derived[T](using m: Mirror.Of[T]): Eq[T] = { + inline given derived: [T] => (m: Mirror.Of[T]) => Eq[T] = { val elemInstances = summonAll[m.MirroredElemTypes] inline m match { case s: Mirror.SumOf[T] => eqSum(s, elemInstances) diff --git a/tests/run/tyql.scala b/tests/run/tyql.scala index 35777e9a4c13..8fe253b559ac 100644 --- a/tests/run/tyql.scala +++ b/tests/run/tyql.scala @@ -94,7 +94,7 @@ object Expr: extension [A <: AnyNamedTuple](x: A) def toRow: Join[A] = Join(x) /** Same as _.toRow, as an implicit conversion */ - given [A <: AnyNamedTuple]: Conversion[A, Expr.Join[A]] = Expr.Join(_) + given [A <: AnyNamedTuple] => Conversion[A, Expr.Join[A]] = Expr.Join(_) end Expr diff --git a/tests/warn/implicit-conversions.scala b/tests/warn/implicit-conversions.scala index 946d0c96fd5a..2896d7dc4447 100644 --- a/tests/warn/implicit-conversions.scala +++ b/tests/warn/implicit-conversions.scala @@ -5,11 +5,11 @@ class B object A { - given Conversion[A, B] with { + given Conversion[A, B] { def apply(x: A): B = ??? } - given Conversion[B, A] with { + given Conversion[B, A] { def apply(x: B): A = ??? } } @@ -17,7 +17,7 @@ object A { class C object D { - given Conversion[A, C] with { + given Conversion[A, C] { def apply(x: A): C = ??? } } From a99097804bef56c3d84efd0b61f0f602a33f9605 Mon Sep 17 00:00:00 2001 From: Oliver Bracevac Date: Mon, 30 Sep 2024 20:05:28 +0200 Subject: [PATCH 595/827] Let show behave more robustly for Recheck The pretty-printing logic for a Recheck phase applies the phase to the tree. But if there was a type error, then the pretty printing would have previously crashed the compiler. 
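A minimal, self-contained sketch of the fallback pattern this commit introduces is given below; `TypeError` and `addRecheckedTypes` are stand-ins for the compiler's own definitions, and the authoritative change is the one in `Recheck.show` shown in the diff that follows.

```scala
// Stand-in for dotc's TypeError; the real class lives in the compiler.
final class TypeError(msg: String) extends Exception(msg)

// If decorating the tree with rechecked types fails with a type error,
// pretty-print the undecorated tree instead of crashing the compiler.
def showRobustly[Tree](tree: Tree)(addRecheckedTypes: Tree => Tree): Tree =
  try addRecheckedTypes(tree)
  catch case _: TypeError => tree
```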
--- compiler/src/dotty/tools/dotc/transform/Recheck.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 03f0001110d3..ecf5c69d1f91 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -618,7 +618,13 @@ abstract class Recheck extends Phase, SymTransformer: override def show(tree: untpd.Tree)(using Context): String = atPhase(thisPhase): withMode(Mode.Printing): - super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) + val ttree0 = tree.asInstanceOf[tpd.Tree] + val ttree1 = + try + addRecheckedTypes.transform(ttree0) + catch + case _:TypeError => ttree0 + super.show(ttree1) end Recheck /** A class that can be used to test basic rechecking without any customaization */ From 87cdbc8c7851830c16de0c8c828cde0f7cd0ce48 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 1 Oct 2024 11:39:00 +0200 Subject: [PATCH 596/827] Make named tuples a standard feature - Deprecate experimental language import - Make named tuple features conditional on -source >= 3.6 instead - Make the NamedTuple object non-experimental. - Move NamedTuple it to src-bootstrapped since it relies on clause interleaving which is only standard in 3.6 as well. - Drop the experimental.namedTuple import from tests --- .../src/dotty/tools/dotc/config/Feature.scala | 2 - .../dotc/config/ScalaSettingsProperties.scala | 2 +- .../dotty/tools/dotc/parsing/Parsers.scala | 4 +- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- .../scala/NamedTuple.scala | 4 - .../runtime/stdLibPatches/language.scala | 1 + .../pc/tests/completion/CompletionSuite.scala | 8 +- tests/neg/i20517.check | 14 +-- tests/neg/i20517.scala | 1 - tests/neg/named-tuple-selectable.scala | 2 - tests/neg/named-tuples-2.check | 8 +- tests/neg/named-tuples-2.scala | 1 - tests/neg/named-tuples-3.check | 4 +- tests/neg/named-tuples-3.scala | 2 - tests/neg/named-tuples.check | 92 +++++++++---------- tests/neg/named-tuples.scala | 3 +- tests/new/test.scala | 2 - tests/pos/fieldsOf.scala | 2 - tests/pos/i20377.scala | 1 - tests/pos/i21300.scala | 6 +- tests/pos/named-tuple-combinators.scala | 1 - tests/pos/named-tuple-selectable.scala | 1 - tests/pos/named-tuple-selections.scala | 1 - tests/pos/named-tuple-unstable.scala | 1 - tests/pos/named-tuple-widen.scala | 1 - tests/pos/named-tuples-ops-mirror.scala | 1 - tests/pos/named-tuples1.scala | 1 - tests/pos/namedtuple-src-incompat.scala | 1 - tests/pos/tuple-ops.scala | 1 - .../stdlibExperimentalDefinitions.scala | 6 -- tests/run/named-patmatch.scala | 1 - tests/run/named-patterns.scala | 1 - tests/run/named-tuple-ops.scala | 1 - tests/run/named-tuples-xxl.scala | 1 - tests/run/named-tuples.scala | 1 - tests/run/tyql.scala | 1 - 36 files changed, 70 insertions(+), 112 deletions(-) rename library/{src => src-bootstrapped}/scala/NamedTuple.scala (98%) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 8b9a64924ace..ad20bab46c1e 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -34,7 +34,6 @@ object Feature: val pureFunctions = experimental("pureFunctions") val captureChecking = experimental("captureChecking") val into = experimental("into") - val namedTuples = experimental("namedTuples") val modularity = experimental("modularity") val betterMatchTypeExtractors = 
experimental("betterMatchTypeExtractors") val quotedPatternsWithPolymorphicFunctions = experimental("quotedPatternsWithPolymorphicFunctions") @@ -66,7 +65,6 @@ object Feature: (pureFunctions, "Enable pure functions for capture checking"), (captureChecking, "Enable experimental capture checking"), (into, "Allow into modifier on parameter types"), - (namedTuples, "Allow named tuples"), (modularity, "Enable experimental modularity features"), (betterMatchTypeExtractors, "Enable better match type extractors"), (betterFors, "Enable improvements in `for` comprehensions") diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala index 022916cc9f53..e42d2d53529e 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala @@ -25,7 +25,7 @@ object ScalaSettingsProperties: ScalaRelease.values.toList.map(_.show) def supportedSourceVersions: List[String] = - SourceVersion.values.toList.map(_.toString) + (SourceVersion.values.toList.diff(SourceVersion.illegalSourceVersionNames)).toList.map(_.toString) def supportedLanguageFeatures: List[ChoiceWithHelp[String]] = Feature.values.map((n, d) => ChoiceWithHelp(n.toString, d)) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 8a173faa3cec..3acaf009bd36 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -651,7 +651,7 @@ object Parsers { else leading :: Nil def maybeNamed(op: () => Tree): () => Tree = () => - if isIdent && in.lookahead.token == EQUALS && in.featureEnabled(Feature.namedTuples) then + if isIdent && in.lookahead.token == EQUALS && sourceVersion.isAtLeast(`3.6`) then atSpan(in.offset): val name = ident() in.nextToken() @@ -2137,7 +2137,7 @@ object Parsers { if namedOK && isIdent && in.lookahead.token == EQUALS then commaSeparated(() => namedArgType()) - else if tupleOK && isIdent && in.lookahead.isColon && in.featureEnabled(Feature.namedTuples) then + else if tupleOK && isIdent && in.lookahead.isColon && sourceVersion.isAtLeast(`3.6`) then commaSeparated(() => namedElem()) else commaSeparated(() => argType()) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 159ce8354a30..93ea3f3c3ae0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -789,7 +789,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def tryNamedTupleSelection() = val namedTupleElems = qual.tpe.widenDealias.namedTupleElementTypes val nameIdx = namedTupleElems.indexWhere(_._1 == selName) - if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then + if nameIdx >= 0 && sourceVersion.isAtLeast(`3.6`) then typed( untpd.Apply( untpd.Select(untpd.TypedSplice(qual), nme.apply), diff --git a/library/src/scala/NamedTuple.scala b/library/src-bootstrapped/scala/NamedTuple.scala similarity index 98% rename from library/src/scala/NamedTuple.scala rename to library/src-bootstrapped/scala/NamedTuple.scala index f237d1d487fe..71bcd26a16e2 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src-bootstrapped/scala/NamedTuple.scala @@ -1,9 +1,6 @@ package scala -import scala.language.experimental.clauseInterleaving -import annotation.experimental import compiletime.ops.boolean.* -@experimental object NamedTuple: 
/** The type to which named tuples get mapped to. For instance, @@ -133,7 +130,6 @@ object NamedTuple: end NamedTuple /** Separate from NamedTuple object so that we can match on the opaque type NamedTuple. */ -@experimental object NamedTupleDecomposition: import NamedTuple.* extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 547710d55293..b8d990cf56f5 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -97,6 +97,7 @@ object language: * @see [[https://dotty.epfl.ch/docs/reference/experimental/named-tuples]] */ @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") + @deprecated("The experimental.namedTuples language import is no longer needed since the feature is now standard", since = "3.6") object namedTuples /** Experimental support for new features for better modularity, including diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 1cd26858b934..57975d2c8e98 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -1988,8 +1988,7 @@ class CompletionSuite extends BaseCompletionSuite: @Test def `namedTuple completions` = check( - """|import scala.language.experimental.namedTuples - |import scala.NamedTuple.* + """|import scala.NamedTuple.* | |val person = (name = "Jamie", city = "Lausanne") | @@ -2000,8 +1999,7 @@ class CompletionSuite extends BaseCompletionSuite: @Test def `Selectable with namedTuple Fields member` = check( - """|import scala.language.experimental.namedTuples - |import scala.NamedTuple.* + """|import scala.NamedTuple.* | |class NamedTupleSelectable extends Selectable { | type Fields <: AnyNamedTuple @@ -2091,7 +2089,7 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin ) - @Test def `conflict-3` = + @Test def `conflict-3` = check( """|package a |object A { diff --git a/tests/neg/i20517.check b/tests/neg/i20517.check index 55aeff46572b..119c34025ee0 100644 --- a/tests/neg/i20517.check +++ b/tests/neg/i20517.check @@ -1,7 +1,7 @@ --- [E007] Type Mismatch Error: tests/neg/i20517.scala:10:43 ------------------------------------------------------------ -10 | def dep(foo: Foo[Any]): From[foo.type] = (elem = "") // error - | ^^^^^^^^^^^ - | Found: (elem : String) - | Required: NamedTuple.From[(foo : Foo[Any])] - | - | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i20517.scala:9:43 ------------------------------------------------------------- +9 | def dep(foo: Foo[Any]): From[foo.type] = (elem = "") // error + | ^^^^^^^^^^^ + | Found: (elem : String) + | Required: NamedTuple.From[(foo : Foo[Any])] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20517.scala b/tests/neg/i20517.scala index 11c4432434dd..342a7d86ca7e 100644 --- a/tests/neg/i20517.scala +++ b/tests/neg/i20517.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.namedTuples import NamedTuple.From case class Foo[+T](elem: T) diff --git a/tests/neg/named-tuple-selectable.scala b/tests/neg/named-tuple-selectable.scala index 5cf7e68654ef..c81eba1237ff 100644 --- a/tests/neg/named-tuple-selectable.scala 
+++ b/tests/neg/named-tuple-selectable.scala @@ -1,5 +1,3 @@ -import scala.language.experimental.namedTuples - class FromFields extends Selectable: type Fields = (i: Int) def selectDynamic(key: String) = diff --git a/tests/neg/named-tuples-2.check b/tests/neg/named-tuples-2.check index 0a52d5f3989b..daa1c0d69069 100644 --- a/tests/neg/named-tuples-2.check +++ b/tests/neg/named-tuples-2.check @@ -1,8 +1,8 @@ --- Error: tests/neg/named-tuples-2.scala:5:9 --------------------------------------------------------------------------- -5 | case (name, age) => () // error +-- Error: tests/neg/named-tuples-2.scala:4:9 --------------------------------------------------------------------------- +4 | case (name, age) => () // error | ^ | this case is unreachable since type (String, Int, Boolean) is not a subclass of class Tuple2 --- Error: tests/neg/named-tuples-2.scala:6:9 --------------------------------------------------------------------------- -6 | case (n, a, m, x) => () // error +-- Error: tests/neg/named-tuples-2.scala:5:9 --------------------------------------------------------------------------- +5 | case (n, a, m, x) => () // error | ^ | this case is unreachable since type (String, Int, Boolean) is not a subclass of class Tuple4 diff --git a/tests/neg/named-tuples-2.scala b/tests/neg/named-tuples-2.scala index 0507891e0549..b3917d9ad57c 100644 --- a/tests/neg/named-tuples-2.scala +++ b/tests/neg/named-tuples-2.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples def Test = val person = (name = "Bob", age = 33, married = true) person match diff --git a/tests/neg/named-tuples-3.check b/tests/neg/named-tuples-3.check index 2091c36191c0..2809836b4803 100644 --- a/tests/neg/named-tuples-3.check +++ b/tests/neg/named-tuples-3.check @@ -1,5 +1,5 @@ --- [E007] Type Mismatch Error: tests/neg/named-tuples-3.scala:7:16 ----------------------------------------------------- -7 |val p: Person = f // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples-3.scala:5:16 ----------------------------------------------------- +5 |val p: Person = f // error | ^ | Found: NamedTuple.NamedTuple[(Int, Any), (Int, String)] | Required: Person diff --git a/tests/neg/named-tuples-3.scala b/tests/neg/named-tuples-3.scala index 0f1215338b0a..21e6ed9b3741 100644 --- a/tests/neg/named-tuples-3.scala +++ b/tests/neg/named-tuples-3.scala @@ -1,5 +1,3 @@ -import language.experimental.namedTuples - def f: NamedTuple.NamedTuple[(Int, Any), (Int, String)] = ??? 
type Person = (name: Int, age: String) diff --git a/tests/neg/named-tuples.check b/tests/neg/named-tuples.check index db3cc703722f..8ec958b6a75d 100644 --- a/tests/neg/named-tuples.check +++ b/tests/neg/named-tuples.check @@ -1,101 +1,101 @@ --- Error: tests/neg/named-tuples.scala:9:19 ---------------------------------------------------------------------------- -9 | val illformed = (_2 = 2) // error +-- Error: tests/neg/named-tuples.scala:8:19 ---------------------------------------------------------------------------- +8 | val illformed = (_2 = 2) // error | ^^^^^^ | _2 cannot be used as the name of a tuple element because it is a regular tuple selector --- Error: tests/neg/named-tuples.scala:10:20 --------------------------------------------------------------------------- -10 | type Illformed = (_1: Int) // error - | ^^^^^^^ - | _1 cannot be used as the name of a tuple element because it is a regular tuple selector --- Error: tests/neg/named-tuples.scala:11:40 --------------------------------------------------------------------------- -11 | val illformed2 = (name = "", age = 0, name = true) // error +-- Error: tests/neg/named-tuples.scala:9:20 ---------------------------------------------------------------------------- +9 | type Illformed = (_1: Int) // error + | ^^^^^^^ + | _1 cannot be used as the name of a tuple element because it is a regular tuple selector +-- Error: tests/neg/named-tuples.scala:10:40 --------------------------------------------------------------------------- +10 | val illformed2 = (name = "", age = 0, name = true) // error | ^^^^^^^^^^^ | Duplicate tuple element name --- Error: tests/neg/named-tuples.scala:12:45 --------------------------------------------------------------------------- -12 | type Illformed2 = (name: String, age: Int, name: Boolean) // error +-- Error: tests/neg/named-tuples.scala:11:45 --------------------------------------------------------------------------- +11 | type Illformed2 = (name: String, age: Int, name: Boolean) // error | ^^^^^^^^^^^^^ | Duplicate tuple element name --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:20:20 ------------------------------------------------------ -20 | val _: NameOnly = person // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:19:20 ------------------------------------------------------ +19 | val _: NameOnly = person // error | ^^^^^^ | Found: (Test.person : (name : String, age : Int)) | Required: Test.NameOnly | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:21:18 ------------------------------------------------------ -21 | val _: Person = nameOnly // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:20:18 ------------------------------------------------------ +20 | val _: Person = nameOnly // error | ^^^^^^^^ | Found: (Test.nameOnly : (name : String)) | Required: Test.Person | | longer explanation available when compiling with `-explain` --- [E172] Type Error: tests/neg/named-tuples.scala:22:41 --------------------------------------------------------------- -22 | val _: Person = (name = "") ++ nameOnly // error +-- [E172] Type Error: tests/neg/named-tuples.scala:21:41 --------------------------------------------------------------- +21 | val _: Person = (name = "") ++ nameOnly // error | ^ | Cannot prove that Tuple.Disjoint[Tuple1[("name" : String)], Tuple1[("name" : String)]] =:= (true : Boolean). 
--- [E008] Not Found Error: tests/neg/named-tuples.scala:23:9 ----------------------------------------------------------- -23 | person._1 // error +-- [E008] Not Found Error: tests/neg/named-tuples.scala:22:9 ----------------------------------------------------------- +22 | person._1 // error | ^^^^^^^^^ | value _1 is not a member of (name : String, age : Int) --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:25:36 ------------------------------------------------------ -25 | val _: (age: Int, name: String) = person // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:24:36 ------------------------------------------------------ +24 | val _: (age: Int, name: String) = person // error | ^^^^^^ | Found: (Test.person : (name : String, age : Int)) | Required: (age : Int, name : String) | | longer explanation available when compiling with `-explain` --- Error: tests/neg/named-tuples.scala:27:17 --------------------------------------------------------------------------- -27 | val (name = x, agee = y) = person // error +-- Error: tests/neg/named-tuples.scala:26:17 --------------------------------------------------------------------------- +26 | val (name = x, agee = y) = person // error | ^^^^^^^^ | No element named `agee` is defined in selector type (name : String, age : Int) --- Error: tests/neg/named-tuples.scala:30:10 --------------------------------------------------------------------------- -30 | case (name = n, age = a) => () // error // error +-- Error: tests/neg/named-tuples.scala:29:10 --------------------------------------------------------------------------- +29 | case (name = n, age = a) => () // error // error | ^^^^^^^^ | No element named `name` is defined in selector type (String, Int) --- Error: tests/neg/named-tuples.scala:30:20 --------------------------------------------------------------------------- -30 | case (name = n, age = a) => () // error // error +-- Error: tests/neg/named-tuples.scala:29:20 --------------------------------------------------------------------------- +29 | case (name = n, age = a) => () // error // error | ^^^^^^^ | No element named `age` is defined in selector type (String, Int) --- [E172] Type Error: tests/neg/named-tuples.scala:32:27 --------------------------------------------------------------- -32 | val pp = person ++ (1, 2) // error +-- [E172] Type Error: tests/neg/named-tuples.scala:31:27 --------------------------------------------------------------- +31 | val pp = person ++ (1, 2) // error | ^ | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). --- [E172] Type Error: tests/neg/named-tuples.scala:35:18 --------------------------------------------------------------- -35 | person ++ (1, 2) match // error +-- [E172] Type Error: tests/neg/named-tuples.scala:34:18 --------------------------------------------------------------- +34 | person ++ (1, 2) match // error | ^ | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). 
--- Error: tests/neg/named-tuples.scala:38:17 --------------------------------------------------------------------------- -38 | val bad = ("", age = 10) // error +-- Error: tests/neg/named-tuples.scala:37:17 --------------------------------------------------------------------------- +37 | val bad = ("", age = 10) // error | ^^^^^^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:41:20 --------------------------------------------------------------------------- -41 | case (name = n, age) => () // error +-- Error: tests/neg/named-tuples.scala:40:20 --------------------------------------------------------------------------- +40 | case (name = n, age) => () // error | ^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:42:16 --------------------------------------------------------------------------- -42 | case (name, age = a) => () // error +-- Error: tests/neg/named-tuples.scala:41:16 --------------------------------------------------------------------------- +41 | case (name, age = a) => () // error | ^^^^^^^ | Illegal combination of named and unnamed tuple elements --- Error: tests/neg/named-tuples.scala:45:10 --------------------------------------------------------------------------- -45 | case (age = x) => // error +-- Error: tests/neg/named-tuples.scala:44:10 --------------------------------------------------------------------------- +44 | case (age = x) => // error | ^^^^^^^ | No element named `age` is defined in selector type Tuple --- [E172] Type Error: tests/neg/named-tuples.scala:47:27 --------------------------------------------------------------- -47 | val p2 = person ++ person // error +-- [E172] Type Error: tests/neg/named-tuples.scala:46:27 --------------------------------------------------------------- +46 | val p2 = person ++ person // error | ^ |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("name" : String), ("age" : String))] =:= (true : Boolean). --- [E172] Type Error: tests/neg/named-tuples.scala:48:43 --------------------------------------------------------------- -48 | val p3 = person ++ (first = 11, age = 33) // error +-- [E172] Type Error: tests/neg/named-tuples.scala:47:43 --------------------------------------------------------------- +47 | val p3 = person ++ (first = 11, age = 33) // error | ^ |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("first" : String), ("age" : String))] =:= (true : Boolean). --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:50:22 ------------------------------------------------------ -50 | val p5 = person.zip((first = 11, age = 33)) // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:49:22 ------------------------------------------------------ +49 | val p5 = person.zip((first = 11, age = 33)) // error | ^^^^^^^^^^^^^^^^^^^^^^ | Found: (first : Int, age : Int) | Required: NamedTuple.NamedTuple[(("name" : String), ("age" : String)), Tuple] | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:61:32 ------------------------------------------------------ -61 | val typo: (name: ?, age: ?) = (name = "he", ag = 1) // error +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:60:32 ------------------------------------------------------ +60 | val typo: (name: ?, age: ?) = (name = "he", ag = 1) // error | ^^^^^^^^^^^^^^^^^^^^^ | Found: (name : String, ag : Int) | Required: (name : ?, age : ?) 
diff --git a/tests/neg/named-tuples.scala b/tests/neg/named-tuples.scala index 8f78f7915206..daae6e26bac2 100644 --- a/tests/neg/named-tuples.scala +++ b/tests/neg/named-tuples.scala @@ -1,7 +1,6 @@ import annotation.experimental -import language.experimental.namedTuples -@experimental object Test: +object Test: type Person = (name: String, age: Int) val person = (name = "Bob", age = 33): (name: String, age: Int) diff --git a/tests/new/test.scala b/tests/new/test.scala index 18644422ab06..dc1891f3525c 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -1,5 +1,3 @@ -import language.experimental.namedTuples - type Person = (name: String, age: Int) trait A: diff --git a/tests/pos/fieldsOf.scala b/tests/pos/fieldsOf.scala index 2594dae2cbf7..08f20a1f7e8e 100644 --- a/tests/pos/fieldsOf.scala +++ b/tests/pos/fieldsOf.scala @@ -1,5 +1,3 @@ -import language.experimental.namedTuples - case class Person(name: String, age: Int) type PF = NamedTuple.From[Person] diff --git a/tests/pos/i20377.scala b/tests/pos/i20377.scala index 7a4c0fccfd7e..27b546402467 100644 --- a/tests/pos/i20377.scala +++ b/tests/pos/i20377.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples import NamedTuple.{NamedTuple, AnyNamedTuple} // Repros for bugs or questions diff --git a/tests/pos/i21300.scala b/tests/pos/i21300.scala index 22859482ef98..e7c7965b0e9a 100644 --- a/tests/pos/i21300.scala +++ b/tests/pos/i21300.scala @@ -1,17 +1,15 @@ -import scala.language.experimental.namedTuples - class Test[S <: String & Singleton](name: S): type NT = NamedTuple.NamedTuple[(S, "foo"), (Int, Long)] def nt: NT = ??? type Name = S - + type NT2 = NamedTuple.NamedTuple[(Name, "foo"), (Int, Long)] def nt2: NT2 = ??? def test = val foo = new Test("bar") - + foo.nt.bar foo.nt2.bar diff --git a/tests/pos/named-tuple-combinators.scala b/tests/pos/named-tuple-combinators.scala index a5134b2e7d26..c027ba688d02 100644 --- a/tests/pos/named-tuple-combinators.scala +++ b/tests/pos/named-tuple-combinators.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.namedTuples object Test: // original code from issue https://github.com/scala/scala3/issues/20427 diff --git a/tests/pos/named-tuple-selectable.scala b/tests/pos/named-tuple-selectable.scala index be5f0400e58c..0e1324f70ae6 100644 --- a/tests/pos/named-tuple-selectable.scala +++ b/tests/pos/named-tuple-selectable.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.namedTuples class FromFields extends Selectable: type Fields = (xs: List[Int], poly: [T] => (x: List[T]) => Option[T]) diff --git a/tests/pos/named-tuple-selections.scala b/tests/pos/named-tuple-selections.scala index c3569f21b323..7b73daad2e72 100644 --- a/tests/pos/named-tuple-selections.scala +++ b/tests/pos/named-tuple-selections.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.namedTuples object Test1: // original code from issue https://github.com/scala/scala3/issues/20439 diff --git a/tests/pos/named-tuple-unstable.scala b/tests/pos/named-tuple-unstable.scala index 6a6a36732a14..d15bdc578a3a 100644 --- a/tests/pos/named-tuple-unstable.scala +++ b/tests/pos/named-tuple-unstable.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.namedTuples import NamedTuple.{AnyNamedTuple, NamedTuple} trait Foo extends Selectable: diff --git a/tests/pos/named-tuple-widen.scala b/tests/pos/named-tuple-widen.scala index 410832e04c17..cc12a5f09b16 100644 --- a/tests/pos/named-tuple-widen.scala +++ b/tests/pos/named-tuple-widen.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples class A 
class B diff --git a/tests/pos/named-tuples-ops-mirror.scala b/tests/pos/named-tuples-ops-mirror.scala index f66eb89534fb..b8745cf785d5 100644 --- a/tests/pos/named-tuples-ops-mirror.scala +++ b/tests/pos/named-tuples-ops-mirror.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples import NamedTuple.* @FailsWith[HttpError] diff --git a/tests/pos/named-tuples1.scala b/tests/pos/named-tuples1.scala index 58e3fc065e61..532f1df7efd4 100644 --- a/tests/pos/named-tuples1.scala +++ b/tests/pos/named-tuples1.scala @@ -1,5 +1,4 @@ import annotation.experimental -import language.experimental.namedTuples @main def Test = val bob = (name = "Bob", age = 33): (name: String, age: Int) diff --git a/tests/pos/namedtuple-src-incompat.scala b/tests/pos/namedtuple-src-incompat.scala index 57451a4321b7..76eb5e4aa850 100644 --- a/tests/pos/namedtuple-src-incompat.scala +++ b/tests/pos/namedtuple-src-incompat.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples var age = 22 val x = (age = 1) val _: (age: Int) = x diff --git a/tests/pos/tuple-ops.scala b/tests/pos/tuple-ops.scala index 739b1ebeeb02..e89c0e8e51aa 100644 --- a/tests/pos/tuple-ops.scala +++ b/tests/pos/tuple-ops.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples import Tuple.* def test = diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 15ccd38f860c..7df4a05e6973 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -74,12 +74,6 @@ val experimentalDefinitionInLibrary = Set( // New feature: fromNullable for explicit nulls "scala.Predef$.fromNullable", - // New feature: named tuples - "scala.NamedTuple", - "scala.NamedTuple$", - "scala.NamedTupleDecomposition", - "scala.NamedTupleDecomposition$", - // New feature: modularity "scala.Precise", "scala.annotation.internal.WitnessNames", diff --git a/tests/run/named-patmatch.scala b/tests/run/named-patmatch.scala index e62497e4aa8f..6fe1934f008e 100644 --- a/tests/run/named-patmatch.scala +++ b/tests/run/named-patmatch.scala @@ -1,5 +1,4 @@ import annotation.experimental -import language.experimental.namedTuples @main def Test = locally: diff --git a/tests/run/named-patterns.scala b/tests/run/named-patterns.scala index 7c24dc8d683a..e92bbf751c22 100644 --- a/tests/run/named-patterns.scala +++ b/tests/run/named-patterns.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples object Test1: class Person(val name: String, val age: Int) diff --git a/tests/run/named-tuple-ops.scala b/tests/run/named-tuple-ops.scala index 076ab5028c6c..8c6db6f2fa1c 100644 --- a/tests/run/named-tuple-ops.scala +++ b/tests/run/named-tuple-ops.scala @@ -1,5 +1,4 @@ //> using options -source future -import language.experimental.namedTuples import scala.compiletime.asMatchable type City = (name: String, zip: Int, pop: Int) diff --git a/tests/run/named-tuples-xxl.scala b/tests/run/named-tuples-xxl.scala index 3a0a1e5e1294..8c831fb1d223 100644 --- a/tests/run/named-tuples-xxl.scala +++ b/tests/run/named-tuples-xxl.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples import NamedTuple.toTuple type Person = ( diff --git a/tests/run/named-tuples.scala b/tests/run/named-tuples.scala index 406c6195cf0f..c99393a403b3 100644 --- a/tests/run/named-tuples.scala +++ b/tests/run/named-tuples.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples import NamedTuple.* type Person = (name: String, age: Int) 
diff --git a/tests/run/tyql.scala b/tests/run/tyql.scala index 35777e9a4c13..b81e0c5bf6dd 100644 --- a/tests/run/tyql.scala +++ b/tests/run/tyql.scala @@ -1,4 +1,3 @@ -import language.experimental.namedTuples import NamedTuple.{NamedTuple, AnyNamedTuple} /* This is a demonstrator that shows how to map regular for expressions to From 0f2613c7aec3c222e191ca86301db1fb8127a716 Mon Sep 17 00:00:00 2001 From: Oliver Bracevac Date: Tue, 1 Oct 2024 11:43:51 +0200 Subject: [PATCH 597/827] Localize TypeError handling in addRecheckedTypes --- .../dotty/tools/dotc/transform/Recheck.scala | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index ecf5c69d1f91..7520767c918c 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -33,10 +33,13 @@ object Recheck: val addRecheckedTypes = new TreeMap: override def transform(tree: Tree)(using Context): Tree = - val tree1 = super.transform(tree) - tree.getAttachment(RecheckedType) match - case Some(tpe) => tree1.withType(tpe) - case None => tree1 + try + val tree1 = super.transform(tree) + tree.getAttachment(RecheckedType) match + case Some(tpe) => tree1.withType(tpe) + case None => tree1 + catch + case _:TypeError => tree extension (sym: Symbol)(using Context) @@ -618,13 +621,7 @@ abstract class Recheck extends Phase, SymTransformer: override def show(tree: untpd.Tree)(using Context): String = atPhase(thisPhase): withMode(Mode.Printing): - val ttree0 = tree.asInstanceOf[tpd.Tree] - val ttree1 = - try - addRecheckedTypes.transform(ttree0) - catch - case _:TypeError => ttree0 - super.show(ttree1) + super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) end Recheck /** A class that can be used to test basic rechecking without any customaization */ From 6263944205c0c5b784d7e36ac6beb2f4250ef90c Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 1 Oct 2024 12:17:42 +0200 Subject: [PATCH 598/827] Update reference docs --- .../named-tuples.md | 146 +++++++++--------- docs/sidebar.yml | 2 +- 2 files changed, 75 insertions(+), 73 deletions(-) rename docs/_docs/reference/{experimental => other-new-features}/named-tuples.md (88%) diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/other-new-features/named-tuples.md similarity index 88% rename from docs/_docs/reference/experimental/named-tuples.md rename to docs/_docs/reference/other-new-features/named-tuples.md index 3867b4d13f15..bf1ae4ca7046 100644 --- a/docs/_docs/reference/experimental/named-tuples.md +++ b/docs/_docs/reference/other-new-features/named-tuples.md @@ -1,10 +1,10 @@ --- layout: doc-page title: "Named Tuples" -nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/named-tuples.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/named-tuples.html --- -The elements of a tuple can now be named. Example: +Starting in Scala 3.6, the elements of a tuple can be named. Example: ```scala type Person = (name: String, age: Int) val Bob: Person = (name = "Bob", age = 33) @@ -94,6 +94,24 @@ Bob match case (age = x, name = y) => ... ``` +### Pattern Matching with Named Fields in General + +We allow named patterns not just for named tuples but also for case classes. 
For instance: +```scala +city match + case c @ City(name = "London") => println(p.population) + case City(name = n, zip = 1026, population = pop) => println(pop) +``` + +Named constructor patterns are analogous to named tuple patterns. In both cases + + - every name must match the name some field of the selector, + - names can come in any order, + - not all fields of the selector need to be matched. + +Named patterns are compatible with extensible pattern matching simply because +`unapply` results can be named tuples. + ### Expansion Named tuples are in essence just a convenient syntax for regular tuples. In the internal representation, a named tuple type is represented at compile time as a pair of two tuples. One tuple contains the names as literal constant string types, the other contains the element types. The runtime representation of a named tuples consists of just the element values, whereas the names are forgotten. This is achieved by declaring `NamedTuple` @@ -119,6 +137,47 @@ The translation of named tuples to instances of `NamedTuple` is fixed by the spe - All tuple operations also work with named tuples "out of the box". - Macro libraries can rely on this expansion. +### Computed Field Names + +The `Selectable` trait now has a `Fields` type member that can be instantiated +to a named tuple. + +```scala +trait Selectable: + type Fields <: NamedTuple.AnyNamedTuple +``` + +If `Fields` is instantiated in a subclass of `Selectable` to some named tuple type, +then the available fields and their types will be defined by that type. Assume `n: T` +is an element of the `Fields` type in some class `C` that implements `Selectable`, +that `c: C`, and that `n` is not otherwise legal as a name of a selection on `c`. +Then `c.n` is a legal selection, which expands to `c.selectDynamic("n").asInstanceOf[T]`. + +It is the task of the implementation of `selectDynamic` in `C` to ensure that its +computed result conforms to the predicted type `T` + +As an example, assume we have a query type `Q[T]` defined as follows: + +```scala +trait Q[T] extends Selectable: + type Fields = NamedTuple.Map[NamedTuple.From[T], Q] + def selectDynamic(fieldName: String) = ... +``` + +Assume in the user domain: +```scala +case class City(zipCode: Int, name: String, population: Int) +val city: Q[City] +``` +Then +```scala +city.zipCode +``` +has type `Q[Int]` and it expands to +```scala +city.selectDynamic("zipCode").asInstanceOf[Q[Int]] +``` + ### The NamedTuple.From Type The `NamedTuple` object contains a type definition @@ -137,33 +196,36 @@ then `NamedTuple.From[City]` is the named tuple (zip: Int, name: String, population: Int) ``` The same works for enum cases expanding to case classes, abstract types with case classes as upper bound, alias types expanding to case classes -and singleton types with case classes as underlying type. +and singleton types with case classes as underlying type (in terms of the implementation, the `classSymbol` of a type must be a case class). `From` is also defined on named tuples. If `NT` is a named tuple type, then `From[NT] = NT`. +### Operations on Named Tuples + +The operations on named tuples are defined in object [scala.NamedTuple](https://www.scala-lang.org/api/3.x/scala/NamedTuple$.html). + ### Restrictions -The following restrictions apply to named tuple elements: +The following restrictions apply to named tuples and named pattern arguments: - 1. Either all elements of a tuple are named or none are named. It is illegal to mix named and unnamed elements in a tuple. 
For instance, the following is in error: + 1. Either all elements of a tuple or constructor pattern are named or none are named. It is illegal to mix named and unnamed elements in a tuple. For instance, the following is in error: ```scala val illFormed1 = ("Bob", age = 33) // error ``` - 2. Each element name in a named tuple must be unique. For instance, the following is in error: + 2. Each element name in a named tuple or constructor pattern must be unique. For instance, the following is in error: ```scala val illFormed2 = (name = "", age = 0, name = true) // error ``` - 3. Named tuples can be matched with either named or regular patterns. But regular tuples and other selector types can only be matched with regular tuple patterns. For instance, the following is in error: + 3. Named tuples and case classes can be matched with either named or regular patterns. But regular tuples and other selector types can only be matched with regular tuple patterns. For instance, the following is in error: ```scala (tuple: Tuple) match case (age = x) => // error ``` - 4. Regular selector names `_1`, `_2`, ... are not allowed as names in named tuples. +## Syntax Changes -### Syntax - -The syntax of Scala is extended as follows to support named tuples: +The syntax of Scala is extended as follows to support named tuples and +named constructor arguments: ``` SimpleType ::= ... | ‘(’ NameAndType {‘,’ NameAndType} ‘)’ @@ -178,31 +240,11 @@ Patterns ::= Pattern {‘,’ Pattern} NamedPattern ::= id '=' Pattern ``` -### Named Pattern Matching - -We allow named patterns not just for named tuples but also for case classes. -For instance: -```scala -city match - case c @ City(name = "London") => println(p.population) - case City(name = n, zip = 1026, population = pop) => println(pop) -``` - -Named constructor patterns are analogous to named tuple patterns. In both cases - - - either all fields are named or none is, - - every name must match the name some field of the selector, - - names can come in any order, - - not all fields of the selector need to be matched. - -This revives SIP 43, with a much simpler desugaring than originally proposed. -Named patterns are compatible with extensible pattern matching simply because -`unapply` results can be named tuples. - ### Source Incompatibilities There are some source incompatibilities involving named tuples of length one. First, what was previously classified as an assignment could now be interpreted as a named tuple. Example: + ```scala var age: Int (age = 1) @@ -221,43 +263,3 @@ c f (age = 1) ``` will now construct a tuple as second operand instead of passing a named parameter. -### Computed Field Names - -The `Selectable` trait now has a `Fields` type member that can be instantiated -to a named tuple. - -```scala -trait Selectable: - type Fields <: NamedTuple.AnyNamedTuple -``` - -If `Fields` is instantiated in a subclass of `Selectable` to some named tuple type, -then the available fields and their types will be defined by that type. Assume `n: T` -is an element of the `Fields` type in some class `C` that implements `Selectable`, -that `c: C`, and that `n` is not otherwise legal as a name of a selection on `c`. -Then `c.n` is a legal selection, which expands to `c.selectDynamic("n").asInstanceOf[T]`. 
- -It is the task of the implementation of `selectDynamic` in `C` to ensure that its -computed result conforms to the predicted type `T` - -As an example, assume we have a query type `Q[T]` defined as follows: - -```scala -trait Q[T] extends Selectable: - type Fields = NamedTuple.Map[NamedTuple.From[T], Q] - def selectDynamic(fieldName: String) = ... -``` - -Assume in the user domain: -```scala -case class City(zipCode: Int, name: String, population: Int) -val city: Q[City] -``` -Then -```scala -city.zipCode -``` -has type `Q[Int]` and it expands to -```scala -city.selectDynamic("zipCode").asInstanceOf[Q[Int]] -``` diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 5048669ef664..0e06dbf41029 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -68,6 +68,7 @@ subsection: - page: reference/other-new-features/export.md - page: reference/other-new-features/opaques.md - page: reference/other-new-features/opaques-details.md + - page: reference/other-new-features/named-tuples.md - page: reference/other-new-features/open-classes.md - page: reference/other-new-features/parameter-untupling.md - page: reference/other-new-features/parameter-untupling-spec.md @@ -154,7 +155,6 @@ subsection: - page: reference/experimental/cc.md - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md - - page: reference/experimental/named-tuples.md - page: reference/experimental/modularity.md - page: reference/experimental/typeclasses.md - page: reference/experimental/runtimeChecked.md From 2fce61055f4ddbf676b1b691e384d825dc623cd9 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 1 Oct 2024 14:30:04 +0200 Subject: [PATCH 599/827] Define binary symbolic compiletime ops as `infix` That way they get pretty-printed as infix in error messages. --- library/src/scala/compiletime/ops/any.scala | 4 +-- .../src/scala/compiletime/ops/boolean.scala | 6 ++--- .../src/scala/compiletime/ops/double.scala | 22 ++++++++-------- library/src/scala/compiletime/ops/float.scala | 18 ++++++------- library/src/scala/compiletime/ops/int.scala | 26 +++++++++---------- library/src/scala/compiletime/ops/long.scala | 26 +++++++++---------- .../src/scala/compiletime/ops/string.scala | 2 +- 7 files changed, 52 insertions(+), 52 deletions(-) diff --git a/library/src/scala/compiletime/ops/any.scala b/library/src/scala/compiletime/ops/any.scala index e3f030c33634..b3c1930f9715 100644 --- a/library/src/scala/compiletime/ops/any.scala +++ b/library/src/scala/compiletime/ops/any.scala @@ -13,7 +13,7 @@ object any: * ``` * @syntax markdown */ - type ==[X, Y] <: Boolean + infix type ==[X, Y] <: Boolean /** Inequality comparison of two singleton types. * ```scala @@ -26,7 +26,7 @@ object any: * ``` * @syntax markdown */ - type !=[X, Y] <: Boolean + infix type !=[X, Y] <: Boolean /** Tests if a type is a constant. * ```scala diff --git a/library/src/scala/compiletime/ops/boolean.scala b/library/src/scala/compiletime/ops/boolean.scala index f6a8c3d3b37e..3e1b5650a519 100644 --- a/library/src/scala/compiletime/ops/boolean.scala +++ b/library/src/scala/compiletime/ops/boolean.scala @@ -25,7 +25,7 @@ object boolean: * ``` * @syntax markdown */ - type ^[X <: Boolean, Y <: Boolean] <: Boolean + infix type ^[X <: Boolean, Y <: Boolean] <: Boolean /** Conjunction of two `Boolean` singleton types. 
* ```scala @@ -37,7 +37,7 @@ object boolean: * ``` * @syntax markdown */ - type &&[X <: Boolean, Y <: Boolean] <: Boolean + infix type &&[X <: Boolean, Y <: Boolean] <: Boolean /** Disjunction of two `Boolean` singleton types. * ```scala @@ -49,4 +49,4 @@ object boolean: * ``` * @syntax markdown */ - type ||[X <: Boolean, Y <: Boolean] <: Boolean + infix type ||[X <: Boolean, Y <: Boolean] <: Boolean diff --git a/library/src/scala/compiletime/ops/double.scala b/library/src/scala/compiletime/ops/double.scala index 0e038904221e..4bb4527f14a9 100644 --- a/library/src/scala/compiletime/ops/double.scala +++ b/library/src/scala/compiletime/ops/double.scala @@ -11,7 +11,7 @@ object double: * ``` * @syntax markdown */ - type +[X <: Double, Y <: Double] <: Double + infix type +[X <: Double, Y <: Double] <: Double /** Subtraction of two `Double` singleton types. * ```scala @@ -22,7 +22,7 @@ object double: * ``` * @syntax markdown */ - type -[X <: Double, Y <: Double] <: Double + infix type -[X <: Double, Y <: Double] <: Double /** Multiplication of two `Double` singleton types. * ```scala @@ -33,7 +33,7 @@ object double: * ``` * @syntax markdown */ - type *[X <: Double, Y <: Double] <: Double + infix type *[X <: Double, Y <: Double] <: Double /** Integer division of two `Double` singleton types. * ```scala @@ -44,7 +44,7 @@ object double: * ``` * @syntax markdown */ - type /[X <: Double, Y <: Double] <: Double + infix type /[X <: Double, Y <: Double] <: Double /** Remainder of the division of `X` by `Y`. * ```scala @@ -55,7 +55,7 @@ object double: * ``` * @syntax markdown */ - type %[X <: Double, Y <: Double] <: Double + infix type %[X <: Double, Y <: Double] <: Double /** Less-than comparison of two `Double` singleton types. * ```scala @@ -67,7 +67,7 @@ object double: * ``` * @syntax markdown */ - type <[X <: Double, Y <: Double] <: Boolean + infix type <[X <: Double, Y <: Double] <: Boolean /** Greater-than comparison of two `Double` singleton types. * ```scala @@ -79,7 +79,7 @@ object double: * ``` * @syntax markdown */ - type >[X <: Double, Y <: Double] <: Boolean + infix type >[X <: Double, Y <: Double] <: Boolean /** Greater-or-equal comparison of two `Double` singleton types. * ```scala @@ -91,7 +91,7 @@ object double: * ``` * @syntax markdown */ - type >=[X <: Double, Y <: Double] <: Boolean + infix type >=[X <: Double, Y <: Double] <: Boolean /** Less-or-equal comparison of two `Double` singleton types. * ```scala @@ -103,7 +103,7 @@ object double: * ``` * @syntax markdown */ - type <=[X <: Double, Y <: Double] <: Boolean + infix type <=[X <: Double, Y <: Double] <: Boolean /** Absolute value of an `Double` singleton type. * ```scala @@ -114,7 +114,7 @@ object double: * ``` * @syntax markdown */ - type Abs[X <: Double] <: Double + infix type Abs[X <: Double] <: Double /** Negation of an `Double` singleton type. * ```scala @@ -181,4 +181,4 @@ object double: * ``` * @syntax markdown */ - type ToFloat[X <: Double] <: Float \ No newline at end of file + type ToFloat[X <: Double] <: Float diff --git a/library/src/scala/compiletime/ops/float.scala b/library/src/scala/compiletime/ops/float.scala index d7be87be3d9c..bd9b5c75f1f1 100644 --- a/library/src/scala/compiletime/ops/float.scala +++ b/library/src/scala/compiletime/ops/float.scala @@ -11,7 +11,7 @@ object float: * ``` * @syntax markdown */ - type +[X <: Float, Y <: Float] <: Float + infix type +[X <: Float, Y <: Float] <: Float /** Subtraction of two `Float` singleton types. 
* ```scala @@ -22,7 +22,7 @@ object float: * ``` * @syntax markdown */ - type -[X <: Float, Y <: Float] <: Float + infix type -[X <: Float, Y <: Float] <: Float /** Multiplication of two `Float` singleton types. * ```scala @@ -33,7 +33,7 @@ object float: * ``` * @syntax markdown */ - type *[X <: Float, Y <: Float] <: Float + infix type *[X <: Float, Y <: Float] <: Float /** Integer division of two `Float` singleton types. * ```scala @@ -44,7 +44,7 @@ object float: * ``` * @syntax markdown */ - type /[X <: Float, Y <: Float] <: Float + infix type /[X <: Float, Y <: Float] <: Float /** Remainder of the division of `X` by `Y`. * ```scala @@ -55,7 +55,7 @@ object float: * ``` * @syntax markdown */ - type %[X <: Float, Y <: Float] <: Float + infix type %[X <: Float, Y <: Float] <: Float /** Less-than comparison of two `Float` singleton types. * ```scala @@ -67,7 +67,7 @@ object float: * ``` * @syntax markdown */ - type <[X <: Float, Y <: Float] <: Boolean + infix type <[X <: Float, Y <: Float] <: Boolean /** Greater-than comparison of two `Float` singleton types. * ```scala @@ -79,7 +79,7 @@ object float: * ``` * @syntax markdown */ - type >[X <: Float, Y <: Float] <: Boolean + infix type >[X <: Float, Y <: Float] <: Boolean /** Greater-or-equal comparison of two `Float` singleton types. * ```scala @@ -91,7 +91,7 @@ object float: * ``` * @syntax markdown */ - type >=[X <: Float, Y <: Float] <: Boolean + infix type >=[X <: Float, Y <: Float] <: Boolean /** Less-or-equal comparison of two `Float` singleton types. * ```scala @@ -103,7 +103,7 @@ object float: * ``` * @syntax markdown */ - type <=[X <: Float, Y <: Float] <: Boolean + infix type <=[X <: Float, Y <: Float] <: Boolean /** Absolute value of an `Float` singleton type. * ```scala diff --git a/library/src/scala/compiletime/ops/int.scala b/library/src/scala/compiletime/ops/int.scala index ed4a3c3c3261..b8ec370421e2 100644 --- a/library/src/scala/compiletime/ops/int.scala +++ b/library/src/scala/compiletime/ops/int.scala @@ -29,7 +29,7 @@ object int: * ``` * @syntax markdown */ - type +[X <: Int, Y <: Int] <: Int + infix type +[X <: Int, Y <: Int] <: Int /** Subtraction of two `Int` singleton types. * ```scala @@ -40,7 +40,7 @@ object int: * ``` * @syntax markdown */ - type -[X <: Int, Y <: Int] <: Int + infix type -[X <: Int, Y <: Int] <: Int /** Multiplication of two `Int` singleton types. * ```scala @@ -51,7 +51,7 @@ object int: * ``` * @syntax markdown */ - type *[X <: Int, Y <: Int] <: Int + infix type *[X <: Int, Y <: Int] <: Int /** Integer division of two `Int` singleton types. * ```scala @@ -62,7 +62,7 @@ object int: * ``` * @syntax markdown */ - type /[X <: Int, Y <: Int] <: Int + infix type /[X <: Int, Y <: Int] <: Int /** Remainder of the division of `X` by `Y`. * ```scala @@ -73,7 +73,7 @@ object int: * ``` * @syntax markdown */ - type %[X <: Int, Y <: Int] <: Int + infix type %[X <: Int, Y <: Int] <: Int /** Binary left shift of `X` by `Y`. * ```scala @@ -84,7 +84,7 @@ object int: * ``` * @syntax markdown */ - type <<[X <: Int, Y <: Int] <: Int + infix type <<[X <: Int, Y <: Int] <: Int /** Binary right shift of `X` by `Y`. * ```scala @@ -95,7 +95,7 @@ object int: * ``` * @syntax markdown */ - type >>[X <: Int, Y <: Int] <: Int + infix type >>[X <: Int, Y <: Int] <: Int /** Binary right shift of `X` by `Y`, filling the left with zeros. * ```scala @@ -106,7 +106,7 @@ object int: * ``` * @syntax markdown */ - type >>>[X <: Int, Y <: Int] <: Int + infix type >>>[X <: Int, Y <: Int] <: Int /** Bitwise xor of `X` and `Y`. 
* ```scala @@ -117,7 +117,7 @@ object int: * ``` * @syntax markdown */ - type ^[X <: Int, Y <: Int] <: Int + infix type ^[X <: Int, Y <: Int] <: Int /** Less-than comparison of two `Int` singleton types. * ```scala @@ -129,7 +129,7 @@ object int: * ``` * @syntax markdown */ - type <[X <: Int, Y <: Int] <: Boolean + infix type <[X <: Int, Y <: Int] <: Boolean /** Greater-than comparison of two `Int` singleton types. * ```scala @@ -141,7 +141,7 @@ object int: * ``` * @syntax markdown */ - type >[X <: Int, Y <: Int] <: Boolean + infix type >[X <: Int, Y <: Int] <: Boolean /** Greater-or-equal comparison of two `Int` singleton types. * ```scala @@ -153,7 +153,7 @@ object int: * ``` * @syntax markdown */ - type >=[X <: Int, Y <: Int] <: Boolean + infix type >=[X <: Int, Y <: Int] <: Boolean /** Less-or-equal comparison of two `Int` singleton types. * ```scala @@ -165,7 +165,7 @@ object int: * ``` * @syntax markdown */ - type <=[X <: Int, Y <: Int] <: Boolean + infix type <=[X <: Int, Y <: Int] <: Boolean /** Bitwise and of `X` and `Y`. * ```scala diff --git a/library/src/scala/compiletime/ops/long.scala b/library/src/scala/compiletime/ops/long.scala index 25563ac70367..3bda31e4979c 100644 --- a/library/src/scala/compiletime/ops/long.scala +++ b/library/src/scala/compiletime/ops/long.scala @@ -27,7 +27,7 @@ object long: * ``` * @syntax markdown */ - type +[X <: Long, Y <: Long] <: Long + infix type +[X <: Long, Y <: Long] <: Long /** Subtraction of two `Long` singleton types. * ```scala @@ -38,7 +38,7 @@ object long: * ``` * @syntax markdown */ - type -[X <: Long, Y <: Long] <: Long + infix type -[X <: Long, Y <: Long] <: Long /** Multiplication of two `Long` singleton types. * ```scala @@ -49,7 +49,7 @@ object long: * ``` * @syntax markdown */ - type *[X <: Long, Y <: Long] <: Long + infix type *[X <: Long, Y <: Long] <: Long /** Integer division of two `Long` singleton types. * ```scala @@ -60,7 +60,7 @@ object long: * ``` * @syntax markdown */ - type /[X <: Long, Y <: Long] <: Long + infix type /[X <: Long, Y <: Long] <: Long /** Remainder of the division of `X` by `Y`. * ```scala @@ -71,7 +71,7 @@ object long: * ``` * @syntax markdown */ - type %[X <: Long, Y <: Long] <: Long + infix type %[X <: Long, Y <: Long] <: Long /** Binary left shift of `X` by `Y`. * ```scala @@ -82,7 +82,7 @@ object long: * ``` * @syntax markdown */ - type <<[X <: Long, Y <: Long] <: Long + infix type <<[X <: Long, Y <: Long] <: Long /** Binary right shift of `X` by `Y`. * ```scala @@ -93,7 +93,7 @@ object long: * ``` * @syntax markdown */ - type >>[X <: Long, Y <: Long] <: Long + infix type >>[X <: Long, Y <: Long] <: Long /** Binary right shift of `X` by `Y`, filling the left with zeros. * ```scala @@ -104,7 +104,7 @@ object long: * ``` * @syntax markdown */ - type >>>[X <: Long, Y <: Long] <: Long + infix type >>>[X <: Long, Y <: Long] <: Long /** Bitwise xor of `X` and `Y`. * ```scala @@ -115,7 +115,7 @@ object long: * ``` * @syntax markdown */ - type ^[X <: Long, Y <: Long] <: Long + infix type ^[X <: Long, Y <: Long] <: Long /** Less-than comparison of two `Long` singleton types. * ```scala @@ -127,7 +127,7 @@ object long: * ``` * @syntax markdown */ - type <[X <: Long, Y <: Long] <: Boolean + infix type <[X <: Long, Y <: Long] <: Boolean /** Greater-than comparison of two `Long` singleton types. 
* ```scala @@ -139,7 +139,7 @@ object long: * ``` * @syntax markdown */ - type >[X <: Long, Y <: Long] <: Boolean + infix type >[X <: Long, Y <: Long] <: Boolean /** Greater-or-equal comparison of two `Long` singleton types. * ```scala @@ -151,7 +151,7 @@ object long: * ``` * @syntax markdown */ - type >=[X <: Long, Y <: Long] <: Boolean + infix type >=[X <: Long, Y <: Long] <: Boolean /** Less-or-equal comparison of two `Long` singleton types. * ```scala @@ -163,7 +163,7 @@ object long: * ``` * @syntax markdown */ - type <=[X <: Long, Y <: Long] <: Boolean + infix type <=[X <: Long, Y <: Long] <: Boolean /** Bitwise and of `X` and `Y`. * ```scala diff --git a/library/src/scala/compiletime/ops/string.scala b/library/src/scala/compiletime/ops/string.scala index 63caa9ae6371..90515c62e55b 100644 --- a/library/src/scala/compiletime/ops/string.scala +++ b/library/src/scala/compiletime/ops/string.scala @@ -11,7 +11,7 @@ object string: * ``` * @syntax markdown */ - type +[X <: String, Y <: String] <: String + infix type +[X <: String, Y <: String] <: String /** Length of a `String` singleton type. * ```scala From 2eb52e489c747d80247b5f20534e610027d00dc9 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 1 Oct 2024 14:49:19 +0200 Subject: [PATCH 600/827] Fix ScalaSTM misleading syntax. The previous syntax `before copy (name = ...)` is now interpreted as a binary operation with a named tuple on the RHS. The correct syntax is `before.copy(name = ...)`. --- community-build/community-projects/scala-stm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/community-projects/scala-stm b/community-build/community-projects/scala-stm index cf204977752a..8d443ab107e7 160000 --- a/community-build/community-projects/scala-stm +++ b/community-build/community-projects/scala-stm @@ -1 +1 @@ -Subproject commit cf204977752af7ec2ca3b50c43f27daa6a628f49 +Subproject commit 8d443ab107e75e809848c2fa3ecd666043171ad5 From 7bbed2b0a73e9c4aef9fedf6473a319431a7df7f Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 1 Oct 2024 15:24:46 +0200 Subject: [PATCH 601/827] Revert SAM condition to what it was before Fixes #21676 --- .../src/dotty/tools/dotc/typer/Typer.scala | 4 +++- tests/pos/i21676.scala | 18 ++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21676.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 159ce8354a30..c4c3f6b2d439 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4569,7 +4569,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // convert function literal to SAM closure tree match { - case closure(Nil, id @ Ident(nme.ANON_FUN), _) + case blockEndingInClosure(Nil, id @ Ident(nme.ANON_FUN), _) if defn.isFunctionNType(wtp) && !defn.isFunctionNType(pt) => pt match { case SAMType(samMeth, samParent) @@ -4578,6 +4578,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // but this prevents case blocks from implementing polymorphic partial functions, // since we do not know the result parameter a priori. Have to wait until the // body is typechecked. + // Note: Need to come back to this when we clean up SAMs/PartialFunctions + // These conditions would most likely be affected by a precise spec. 
return toSAM(tree, samParent) case _ => } diff --git a/tests/pos/i21676.scala b/tests/pos/i21676.scala new file mode 100644 index 000000000000..2f94fda47be5 --- /dev/null +++ b/tests/pos/i21676.scala @@ -0,0 +1,18 @@ +def Test = + val members = collection.immutable.SortedSet.empty[String] + members.collect { + var upNumber = 0 + { + case m => + // upNumber += 1 + m + } + } + + members.collect { + var upNumber = 0 + { + m => m + } + } + From d00bb8e4e3c3d0210860a8c84423449d7d5e7362 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 1 Oct 2024 22:33:15 -0700 Subject: [PATCH 602/827] Always rewrite empty List() to Nil --- .../tools/dotc/transform/ArrayApply.scala | 2 +- .../tools/backend/jvm/ArrayApplyOptTest.scala | 36 +++++++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala index 98ca8f2e2b5b..1a6ec307e289 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala @@ -76,7 +76,7 @@ class ArrayApply extends MiniPhase { tree.args match // (a, b, c) ~> new ::(a, new ::(b, new ::(c, Nil))) but only for reference types case StripAscription(Apply(wrapArrayMeth, List(StripAscription(rest: JavaSeqLiteral)))) :: Nil - if defn.WrapArrayMethods().contains(wrapArrayMeth.symbol) => + if rest.elems.isEmpty || defn.WrapArrayMethods().contains(wrapArrayMeth.symbol) => Some(rest.elems) case _ => None else None diff --git a/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala b/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala index c99de8fcf956..a1fe40e58b56 100644 --- a/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala +++ b/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala @@ -161,6 +161,42 @@ class ArrayApplyOptTest extends DottyBytecodeTest { } } + @Test def emptyListApplyAvoidsIntermediateArray = + checkApplyAvoidsIntermediateArray("EmptyList"): + """import scala.collection.immutable.Nil + |class Foo { + | def meth1: List[String] = List() + | def meth2: List[String] = Nil + |} + """.stripMargin + + @Test def emptyRefListApplyAvoidsIntermediateArray = + checkApplyAvoidsIntermediateArray("EmptyListOfRef"): + """import scala.collection.immutable.Nil + |class Foo { + | def meth1: List[String] = List[String]() + | def meth2: List[String] = Nil + |} + """.stripMargin + + @Test def emptyPrimitiveListApplyAvoidsIntermediateArray = + checkApplyAvoidsIntermediateArray("EmptyListOfInt"): + """import scala.collection.immutable.Nil + |class Foo { + | def meth1: List[Int] = List() + | def meth2: List[Int] = Nil + |} + """.stripMargin + + @Test def primitiveListApplyAvoidsIntermediateArray = + checkApplyAvoidsIntermediateArray("ListOfInt"): + """import scala.collection.immutable.{ ::, Nil } + |class Foo { + | def meth1: List[Int] = List(1, 2, 3) + | def meth2: List[Int] = new ::(1, new ::(2, new ::(3, Nil))) + |} + """.stripMargin + @Test def testListApplyAvoidsIntermediateArray = { checkApplyAvoidsIntermediateArray("List"): """import scala.collection.immutable.{ ::, Nil } From 111124ff62c00accb2ce89b1e546a5e9c999c914 Mon Sep 17 00:00:00 2001 From: kenji yoshida <6b656e6a69@gmail.com> Date: Wed, 2 Oct 2024 20:00:05 +0900 Subject: [PATCH 603/827] fix `dotty.tools.dotc.config.Properties` scaladoc --- compiler/src/dotty/tools/dotc/config/Properties.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index 2a362a707ade..41cd14955759 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -10,7 +10,7 @@ import java.io.IOException import java.util.jar.Attributes.{ Name => AttributeName } import java.nio.charset.StandardCharsets -/** Loads `library.properties` from the jar. */ +/** Loads `compiler.properties` from the jar. */ object Properties extends PropertiesTrait { protected def propCategory: String = "compiler" protected def pickJarBasedOn: Class[PropertiesTrait] = classOf[PropertiesTrait] From 6e58d25acd99ff44533730867cfc09873cb95f7a Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 22 Aug 2024 15:37:26 +0200 Subject: [PATCH 604/827] Avoid using the current denotation in NamedType.disambiguate While recalculating denotation in NamedType we might call NamedType.disambiguate which uses a denotation to decide about the correct overloaded method. Using current denotation here might cause stale symbol errors, so instead we use the lastKnownDenotation, which should be enough for the use case here, as targetName should not change between phases/runs. Later in the denotation recalculation a similar thing happens with SourceLanguage.apply, where we also now avoid using currentDenotation, as whether the symbol comes from java or Scala 2 should also not change between phases/runs. --- .../dotty/tools/dotc/core/Denotations.scala | 2 +- .../dotty/tools/dotc/core/TypeErasure.scala | 9 +++++++-- .../src/dotty/tools/dotc/core/Types.scala | 5 ++++- tests/pos-macros/i20574/Exports.scala | 3 +++ tests/pos-macros/i20574/Macros.scala | 20 +++++++++++++++++++ .../pos-macros/i20574/OverloadedInline.scala | 13 ++++++++++++ tests/pos-macros/i20574/Test.scala | 5 +++++ 7 files changed, 53 insertions(+), 4 deletions(-) create mode 100644 tests/pos-macros/i20574/Exports.scala create mode 100644 tests/pos-macros/i20574/Macros.scala create mode 100644 tests/pos-macros/i20574/OverloadedInline.scala create mode 100644 tests/pos-macros/i20574/Test.scala diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 60a7555456bf..816b28177333 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -961,7 +961,7 @@ object Denotations { } def staleSymbolError(using Context): Nothing = - if symbol.isPackageObject && ctx.run != null && ctx.run.nn.isCompilingSuspended + if symbol.lastKnownDenotation.isPackageObject && ctx.run != null && ctx.run.nn.isCompilingSuspended then throw StaleSymbolTypeError(symbol) else throw StaleSymbolException(staleSymbolMsg) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index ce4956e6e847..9491bdab9de8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -24,11 +24,16 @@ enum SourceLanguage: object SourceLanguage: /** The language in which `sym` was defined. */ def apply(sym: Symbol)(using Context): SourceLanguage = - if sym.is(JavaDefined) then + // We might be using this method while recalculating the denotation, + // so let's use `lastKnownDenotation`. + // This is ok as the source of the symbol and whether it is inline should + // not change between runs/phases. 
+ val denot = sym.lastKnownDenotation + if denot.is(JavaDefined) then SourceLanguage.Java // Scala 2 methods don't have Inline set, except for the ones injected with `patchStdlibClass` // which are really Scala 3 methods. - else if sym.isClass && sym.is(Scala2x) || (sym.maybeOwner.is(Scala2x) && !sym.is(Inline)) then + else if denot.isClass && denot.is(Scala2x) || (denot.maybeOwner.lastKnownDenotation.is(Scala2x) && !denot.is(Inline)) then SourceLanguage.Scala2 else SourceLanguage.Scala3 diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index aba8c3bb31fd..8867f2fbe3db 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2491,7 +2491,10 @@ object Types extends TypeUtils { } private def disambiguate(d: Denotation)(using Context): Denotation = - disambiguate(d, currentSignature, currentSymbol.targetName) + // this method might be triggered while the denotation is already being recomputed + // in NamedType, so it's better to use lastKnownDenotation instead, as targetName + // should not change between phases/runs + disambiguate(d, currentSignature, currentSymbol.lastKnownDenotation.targetName) private def disambiguate(d: Denotation, sig: Signature | Null, target: Name)(using Context): Denotation = if (sig != null) diff --git a/tests/pos-macros/i20574/Exports.scala b/tests/pos-macros/i20574/Exports.scala new file mode 100644 index 000000000000..328d832fad88 --- /dev/null +++ b/tests/pos-macros/i20574/Exports.scala @@ -0,0 +1,3 @@ +object Exports{ + export OverloadedInline.* +} diff --git a/tests/pos-macros/i20574/Macros.scala b/tests/pos-macros/i20574/Macros.scala new file mode 100644 index 000000000000..a40c1f361ce1 --- /dev/null +++ b/tests/pos-macros/i20574/Macros.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object Macros{ + + inline def A() : String = { + ${ A_impl } + } + + def A_impl(using Quotes): Expr[String] = { + Expr("Whatever") + } + + inline def B[T]: Int = { + ${ B_Impl[T] } + } + + def B_Impl[T](using Quotes): Expr[Int] = { + Expr(0) + } +} diff --git a/tests/pos-macros/i20574/OverloadedInline.scala b/tests/pos-macros/i20574/OverloadedInline.scala new file mode 100644 index 000000000000..5bf2347c45c0 --- /dev/null +++ b/tests/pos-macros/i20574/OverloadedInline.scala @@ -0,0 +1,13 @@ +import Macros.* + +object OverloadedInline{ + + A() + inline def overloaded_inline[T]: Unit = { + overloaded_inline[T](0) + } + + inline def overloaded_inline[T](dummy: Int): Unit = { + val crash = B[T] + } +} diff --git a/tests/pos-macros/i20574/Test.scala b/tests/pos-macros/i20574/Test.scala new file mode 100644 index 000000000000..abc2b4eb0bc9 --- /dev/null +++ b/tests/pos-macros/i20574/Test.scala @@ -0,0 +1,5 @@ +import Exports.* + +object Test { + overloaded_inline[Unit] +} From fa72cb3249bd3e300e475dfd7210c7833347f491 Mon Sep 17 00:00:00 2001 From: Michel Charpentier Date: Wed, 2 Oct 2024 11:38:48 -0400 Subject: [PATCH 605/827] Fix documentation in Iterable.scala The documentation for zipAll is incorrect, e.g., List(1, 2, 3).zipAll(Set("x"), 0, "") produces a list, not a set. 
--- scala2-library-bootstrapped/src/scala/collection/Iterable.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scala2-library-bootstrapped/src/scala/collection/Iterable.scala b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala index 8f9142583b29..4a7a0129a9ce 100644 --- a/scala2-library-bootstrapped/src/scala/collection/Iterable.scala +++ b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala @@ -756,7 +756,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @param that the iterable providing the second half of each result pair * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. - * @return a new collection of type `That` containing pairs consisting of + * @return a new collection of the type of this $coll containing pairs consisting of * corresponding elements of this $coll and `that`. The length * of the returned collection is the maximum of the lengths of this $coll and `that`. * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. From e817eb1f4c0cd05164a15961f463dcd0196e744a Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Wed, 2 Oct 2024 20:30:15 +0200 Subject: [PATCH 606/827] Add a test --- tests/init-global/warn/Color.scala | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 tests/init-global/warn/Color.scala diff --git a/tests/init-global/warn/Color.scala b/tests/init-global/warn/Color.scala new file mode 100644 index 000000000000..ee1c00701940 --- /dev/null +++ b/tests/init-global/warn/Color.scala @@ -0,0 +1,10 @@ +enum Color: + case None, White, Black + +enum Player: + case Black, White + + val color: Color = + if this == Player.Black // warn + then Color.Black + else Color.White From c83ff66d9510bf16a701b81b12ec08fd02a778e9 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Mon, 30 Sep 2024 10:13:46 +0200 Subject: [PATCH 607/827] fix: Check if a PolyFunction TypeTree has no ByName parameters --- .../dotty/tools/dotc/reporting/messages.scala | 5 ++--- .../src/dotty/tools/dotc/typer/Typer.scala | 19 +++++++++++++++++-- tests/neg/21538.check | 4 ++-- tests/neg/i21652.check | 4 ++++ tests/neg/i21652.scala | 2 ++ 5 files changed, 27 insertions(+), 7 deletions(-) create mode 100644 tests/neg/i21652.check create mode 100644 tests/neg/i21652.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index cb730efbfe89..d65f9a9857e2 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -1829,12 +1829,11 @@ class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathI if sym.isAllOf(Flags.InlineParam) then i""" |Inline parameters are not considered immutable paths and cannot be used as - |singleton types. - | + |singleton types. 
+ | |Hint: Removing the `inline` qualifier from the `${sym.name}` parameter |may help resolve this issue.""" else "" - class WrongNumberOfParameters(tree: untpd.Tree, foundCount: Int, pt: Type, expectedCount: Int)(using Context) extends SyntaxMsg(WrongNumberOfParametersID) { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 159ce8354a30..2900a702a5d5 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1914,11 +1914,26 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer .showing(i"desugared fun $tree --> $desugared with pt = $pt", typr) } + /** Check that the PolyFunction doesn't have by-name parameters. + * Return the unchanged tree if it's valid, or EmptyTree otherwise. + */ + private def checkPolyTypeTree(tree: untpd.Tree)(using Context): untpd.Tree = + val untpd.PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ untpd.Function(vparamTypes, res)) = tree: @unchecked + var tree1 = tree + vparamTypes.foreach: + case t: ByNameTypeTree => + report.error("By-name parameters are not supported in Polymorphic Functions", t.srcPos) + tree1 = untpd.EmptyTree + case _ => + tree1 def typedPolyFunction(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val tree1 = desugar.normalizePolyFunction(tree) - if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree1), pt) - else typedPolyFunctionValue(tree1, pt) + checkPolyTypeTree(tree1) match + case tree2: untpd.PolyFunction => + if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree2), pt) + else typedPolyFunctionValue(tree2, pt) + case untpd.EmptyTree => TypeTree(NoType) def typedPolyFunctionValue(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val untpd.PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun) = tree: @unchecked diff --git a/tests/neg/21538.check b/tests/neg/21538.check index 0e799bef3611..e0bcb43f9356 100644 --- a/tests/neg/21538.check +++ b/tests/neg/21538.check @@ -3,8 +3,8 @@ | ^^^^^^^^^^ | (value : V) is not a valid singleton type, since it is not an immutable path | Inline parameters are not considered immutable paths and cannot be used as - | singleton types. - | + | singleton types. + | | Hint: Removing the `inline` qualifier from the `value` parameter | may help resolve this issue. 
| diff --git a/tests/neg/i21652.check b/tests/neg/i21652.check new file mode 100644 index 000000000000..14ca8e2dc9db --- /dev/null +++ b/tests/neg/i21652.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i21652.scala:1:15 ---------------------------------------------------------------------------------- +1 |def k: [A] => (=> A) => A = // error + | ^^^^ + | By-name parameters are not supported in Polymorphic Functions diff --git a/tests/neg/i21652.scala b/tests/neg/i21652.scala new file mode 100644 index 000000000000..a49d7f0eb1ce --- /dev/null +++ b/tests/neg/i21652.scala @@ -0,0 +1,2 @@ +def k: [A] => (=> A) => A = // error + [A] => a => a From 60e957c50ddcbf6c64377af1e413237d2063190d Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Thu, 3 Oct 2024 10:47:45 +0200 Subject: [PATCH 608/827] Let the poly functions validation be handled in Checking --- .../dotty/tools/dotc/core/Definitions.scala | 1 - .../src/dotty/tools/dotc/typer/Typer.scala | 20 ++----------------- tests/neg/i21652.check | 6 +++--- 3 files changed, 5 insertions(+), 22 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index f95bb3cea351..0195a4ddbf34 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1221,7 +1221,6 @@ class Definitions { /** Creates a refined `PolyFunction` with an `apply` method with the given info. */ def apply(mt: MethodOrPoly)(using Context): Type = - assert(isValidPolyFunctionInfo(mt), s"Not a valid PolyFunction refinement: $mt") RefinedType(PolyFunctionClass.typeRef, nme.apply, mt) /** Matches a refined `PolyFunction` type and extracts the apply info. diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2900a702a5d5..3ed34532aa8f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1914,26 +1914,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer .showing(i"desugared fun $tree --> $desugared with pt = $pt", typr) } - /** Check that the PolyFunction doesn't have by-name parameters. - * Return the unchanged tree if it's valid, or EmptyTree otherwise. 
- */ - private def checkPolyTypeTree(tree: untpd.Tree)(using Context): untpd.Tree = - val untpd.PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ untpd.Function(vparamTypes, res)) = tree: @unchecked - var tree1 = tree - vparamTypes.foreach: - case t: ByNameTypeTree => - report.error("By-name parameters are not supported in Polymorphic Functions", t.srcPos) - tree1 = untpd.EmptyTree - case _ => - tree1 - def typedPolyFunction(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val tree1 = desugar.normalizePolyFunction(tree) - checkPolyTypeTree(tree1) match - case tree2: untpd.PolyFunction => - if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree2), pt) - else typedPolyFunctionValue(tree2, pt) - case untpd.EmptyTree => TypeTree(NoType) + if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree1), pt) + else typedPolyFunctionValue(tree1, pt) def typedPolyFunctionValue(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val untpd.PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun) = tree: @unchecked diff --git a/tests/neg/i21652.check b/tests/neg/i21652.check index 14ca8e2dc9db..6cc024e1bb55 100644 --- a/tests/neg/i21652.check +++ b/tests/neg/i21652.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i21652.scala:1:15 ---------------------------------------------------------------------------------- +-- Error: tests/neg/i21652.scala:1:8 ----------------------------------------------------------------------------------- 1 |def k: [A] => (=> A) => A = // error - | ^^^^ - | By-name parameters are not supported in Polymorphic Functions + | ^^^^^^^^^^^^^^^^^ + |Implementation restriction: PolyFunction apply must have exactly one parameter list and optionally type arguments. No by-name nor varags are allowed. From 61764dd0f349911cb9e9a7a67f6d95dc1a260477 Mon Sep 17 00:00:00 2001 From: Hamza REMMAL Date: Mon, 6 May 2024 20:58:14 +0200 Subject: [PATCH 609/827] Add the possibility to create a typeSymbol in the Quotes API --- .../scala/quoted/runtime/impl/QuotesImpl.scala | 8 ++++++++ library/src/scala/quoted/Quotes.scala | 18 ++++++++++++++++++ .../quoted-sym-newtype/Macro_1.scala | 10 ++++++++++ .../pos-macros/quoted-sym-newtype/Test_2.scala | 2 ++ 4 files changed, 38 insertions(+) create mode 100644 tests/pos-macros/quoted-sym-newtype/Macro_1.scala create mode 100644 tests/pos-macros/quoted-sym-newtype/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index ce8d19aae46a..a81539fba2e8 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2649,6 +2649,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def newBind(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol = checkValidFlags(flags.toTermFlags, Flags.validBindFlags) dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Case, tpe) + + def newType(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol = + checkValidFlags(flags.toTypeFlags, Flags.validTypeFlags) + dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags | dotc.core.Flags.Deferred, tpe, privateWithin) + def noSymbol: Symbol = dotc.core.Symbols.NoSymbol private inline def checkValidFlags(inline flags: Flags, inline valid: Flags): Unit = @@ -2989,6 +2994,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler // Keep: aligned with 
Quotes's `newBind` doc private[QuotesImpl] def validBindFlags: Flags = Case // Flags that could be allowed: Implicit | Given | Erased + + private[QuotesImpl] def validTypeFlags: Flags = Private | Protected | Override | Deferred | Final | Infix | Local + end Flags given FlagsMethods: FlagsMethods with diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index fad769793bb7..3b762b73ba00 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -3963,6 +3963,24 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => // Keep: `flags` doc aligned with QuotesImpl's `validBindFlags` def newBind(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol + /** Generate a new type symbol with the given parent, name and type + * + * This symbol starts without an accompanying definition. + * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing + * this symbol to the TypeDef constructor. + * + * @param parent The owner of the type + * @param name The name of the type + * @param flags extra flags to with which symbol can be constructed. `Deferred` flag will be added. Can be `Private` | `Protected` | `Override` | `Deferred` | `Final` | `Infix` | `Local` + * @param tpe The rhs or bounds of the type + * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. + * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be + * direct or indirect children of the reflection context's owner. + */ + @experimental + // Keep: `flags` doc aligned with QuotesImpl's `validTypeFlags` + def newType(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol + /** Definition not available */ def noSymbol: Symbol diff --git a/tests/pos-macros/quoted-sym-newtype/Macro_1.scala b/tests/pos-macros/quoted-sym-newtype/Macro_1.scala new file mode 100644 index 000000000000..bb41c82553d6 --- /dev/null +++ b/tests/pos-macros/quoted-sym-newtype/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +inline def testMacro = ${ testImpl } + +def testImpl(using Quotes): Expr[Unit] = { + import quotes.reflect.* + val sym = Symbol.newType(Symbol.spliceOwner, "mytype", Flags.EmptyFlags, TypeRepr.of[String], Symbol.noSymbol) + assert(TypeDef(sym).show == "type mytype = java.lang.String") + '{ () } +} \ No newline at end of file diff --git a/tests/pos-macros/quoted-sym-newtype/Test_2.scala b/tests/pos-macros/quoted-sym-newtype/Test_2.scala new file mode 100644 index 000000000000..506f836ab4a5 --- /dev/null +++ b/tests/pos-macros/quoted-sym-newtype/Test_2.scala @@ -0,0 +1,2 @@ + +def test = testMacro \ No newline at end of file From 56893de676677292a09925b3fdece60b985caa42 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Wed, 21 Aug 2024 10:15:58 +0200 Subject: [PATCH 610/827] Split Symbol.newType into 2 methods and add tests --- .../quoted/runtime/impl/QuotesImpl.scala | 11 +++++--- library/src/scala/quoted/Quotes.scala | 26 ++++++++++++++++--- .../quote-sym-newboundedtype/Macro_1.scala | 22 ++++++++++++++++ .../quote-sym-newboundedtype/Test_2.scala | 4 +++ .../quote-sym-newtype-in-trait/Macro_1.scala | 18 +++++++++++++ .../quote-sym-newtype-in-trait/Test_2.scala | 4 +++ .../quote-sym-newtype/Macro_1.scala | 13 ++++++++++ .../pos-macros/quote-sym-newtype/Test_2.scala | 2 ++ .../quoted-sym-newtype/Macro_1.scala | 10 ------- .../quoted-sym-newtype/Test_2.scala | 2 -- 
.../stdlibExperimentalDefinitions.scala | 3 +++ 11 files changed, 96 insertions(+), 19 deletions(-) create mode 100644 tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala create mode 100644 tests/pos-macros/quote-sym-newboundedtype/Test_2.scala create mode 100644 tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala create mode 100644 tests/pos-macros/quote-sym-newtype-in-trait/Test_2.scala create mode 100644 tests/pos-macros/quote-sym-newtype/Macro_1.scala create mode 100644 tests/pos-macros/quote-sym-newtype/Test_2.scala delete mode 100644 tests/pos-macros/quoted-sym-newtype/Macro_1.scala delete mode 100644 tests/pos-macros/quoted-sym-newtype/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index a81539fba2e8..ba6a6bb56253 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2649,11 +2649,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def newBind(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol = checkValidFlags(flags.toTermFlags, Flags.validBindFlags) dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Case, tpe) - - def newType(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol = + + def newTypeAlias(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol = + checkValidFlags(flags.toTypeFlags, Flags.validTypeFlags) + dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags | dotc.core.Flags.Deferred, dotc.core.Types.TypeAlias(tpe), privateWithin) + + def newBoundedType(owner: Symbol, name: String, flags: Flags, tpe: TypeBounds, privateWithin: Symbol): Symbol = checkValidFlags(flags.toTypeFlags, Flags.validTypeFlags) dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags | dotc.core.Flags.Deferred, tpe, privateWithin) - + def noSymbol: Symbol = dotc.core.Symbols.NoSymbol private inline def checkValidFlags(inline flags: Flags, inline valid: Flags): Unit = @@ -2995,6 +2999,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler // Keep: aligned with Quotes's `newBind` doc private[QuotesImpl] def validBindFlags: Flags = Case // Flags that could be allowed: Implicit | Given | Erased + // Keep: aligned with Quotes's 'newType' doc private[QuotesImpl] def validTypeFlags: Flags = Private | Protected | Override | Deferred | Final | Infix | Local end Flags diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 3b762b73ba00..99cf04ec423c 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -3963,8 +3963,26 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => // Keep: `flags` doc aligned with QuotesImpl's `validBindFlags` def newBind(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol - /** Generate a new type symbol with the given parent, name and type - * + /** Generate a new type symbol for a type alias with the given parent, name and type + * + * This symbol starts without an accompanying definition. + * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing + * this symbol to the TypeDef constructor. + * + * @param parent The owner of the type + * @param name The name of the type + * @param flags extra flags to with which symbol can be constructed. 
`Deferred` flag will be added. Can be `Private` | `Protected` | `Override` | `Deferred` | `Final` | `Infix` | `Local` + * @param tpe The rhs the type alias + * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. + * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be + * direct or indirect children of the reflection context's owner. + */ + @experimental + // Keep: `flags` doc aligned with QuotesImpl's `validTypeFlags` + def newTypeAlias(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol + + /** Generate a new type symbol for a type bounds with the given parent, name and type + * * This symbol starts without an accompanying definition. * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing * this symbol to the TypeDef constructor. @@ -3972,14 +3990,14 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * @param parent The owner of the type * @param name The name of the type * @param flags extra flags to with which symbol can be constructed. `Deferred` flag will be added. Can be `Private` | `Protected` | `Override` | `Deferred` | `Final` | `Infix` | `Local` - * @param tpe The rhs or bounds of the type + * @param tpe The bounds of the type * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be * direct or indirect children of the reflection context's owner. */ @experimental // Keep: `flags` doc aligned with QuotesImpl's `validTypeFlags` - def newType(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol + def newBoundedType(parent: Symbol, name: String, flags: Flags, tpe: TypeBounds, privateWithin: Symbol): Symbol /** Definition not available */ def noSymbol: Symbol diff --git a/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala b/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala new file mode 100644 index 000000000000..b38a4304b9d2 --- /dev/null +++ b/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala @@ -0,0 +1,22 @@ +//> using options -experimental -Yno-experimental +import scala.quoted.* + +inline def testMacro = ${ testImpl } + +transparent inline def transparentTestMacro = ${ testImpl } + +def testImpl(using Quotes): Expr[Object] = { + import quotes.reflect.* + + def makeType(owner: Symbol): Symbol = + Symbol.newBoundedType(owner, "mytype", Flags.EmptyFlags, TypeBounds.lower(TypeRepr.of[String]), Symbol.noSymbol) + + val typeDef = TypeDef(makeType(Symbol.spliceOwner)) + // Expr printer does not work here, see comment: + // https://github.com/scala/scala3/pull/20347#issuecomment-2096824617 + assert(typeDef.toString == "TypeDef(mytype,TypeTree[TypeBounds(TypeRef(TermRef(ThisType(TypeRef(NoPrefix,module class java)),object lang),String),TypeRef(ThisType(TypeRef(NoPrefix,module class scala)),class Any))])") + + val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => List(makeType(sym)), None) + val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List(TypeDef(clsSymbol.typeMember("mytype")))) + Block(List(classDef), Apply(Select(New(TypeIdent(clsSymbol)), clsSymbol.primaryConstructor), List.empty)).asExprOf[Object] +} diff --git a/tests/pos-macros/quote-sym-newboundedtype/Test_2.scala 
b/tests/pos-macros/quote-sym-newboundedtype/Test_2.scala new file mode 100644 index 000000000000..2d479a09695a --- /dev/null +++ b/tests/pos-macros/quote-sym-newboundedtype/Test_2.scala @@ -0,0 +1,4 @@ +//> using options -experimental -Yno-experimental +def test = + testMacro + transparentTestMacro diff --git a/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala b/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala new file mode 100644 index 000000000000..1d07c5080e26 --- /dev/null +++ b/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala @@ -0,0 +1,18 @@ +//> using options -experimental -Yno-experimental +import scala.quoted.* + +inline def testMacro = ${ testImpl } + +transparent inline def transparentTestMacro = ${ testImpl } + +def testImpl(using Quotes): Expr[Object] = { + import quotes.reflect.* + + def makeType(owner: Symbol): Symbol = + Symbol.newTypeAlias(owner, "mytype", Flags.EmptyFlags, TypeRepr.of[String], Symbol.noSymbol) + + val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => List(makeType(sym)), None) + val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List(TypeDef(clsSymbol.typeMember("mytype")))) + + Block(List(classDef), Apply(Select(New(TypeIdent(clsSymbol)), clsSymbol.primaryConstructor), List.empty)).asExprOf[Object] +} diff --git a/tests/pos-macros/quote-sym-newtype-in-trait/Test_2.scala b/tests/pos-macros/quote-sym-newtype-in-trait/Test_2.scala new file mode 100644 index 000000000000..2d479a09695a --- /dev/null +++ b/tests/pos-macros/quote-sym-newtype-in-trait/Test_2.scala @@ -0,0 +1,4 @@ +//> using options -experimental -Yno-experimental +def test = + testMacro + transparentTestMacro diff --git a/tests/pos-macros/quote-sym-newtype/Macro_1.scala b/tests/pos-macros/quote-sym-newtype/Macro_1.scala new file mode 100644 index 000000000000..9973ba1e047e --- /dev/null +++ b/tests/pos-macros/quote-sym-newtype/Macro_1.scala @@ -0,0 +1,13 @@ +//> using options -experimental -Yno-experimental +import scala.quoted.* + +inline def testMacro = ${ testImpl } + +def testImpl(using Quotes): Expr[Unit] = { + import quotes.reflect.* + val sym = Symbol.newTypeAlias(Symbol.spliceOwner, "mytype", Flags.EmptyFlags, TypeRepr.of[String], Symbol.noSymbol) + val typeDef = TypeDef(sym) + assert(typeDef.show == "type mytype = java.lang.String") + + Block(List(typeDef), '{()}.asTerm).asExprOf[Unit] +} diff --git a/tests/pos-macros/quote-sym-newtype/Test_2.scala b/tests/pos-macros/quote-sym-newtype/Test_2.scala new file mode 100644 index 000000000000..5a272acbdda4 --- /dev/null +++ b/tests/pos-macros/quote-sym-newtype/Test_2.scala @@ -0,0 +1,2 @@ +//> using options -experimental -Yno-experimental +def test = testMacro diff --git a/tests/pos-macros/quoted-sym-newtype/Macro_1.scala b/tests/pos-macros/quoted-sym-newtype/Macro_1.scala deleted file mode 100644 index bb41c82553d6..000000000000 --- a/tests/pos-macros/quoted-sym-newtype/Macro_1.scala +++ /dev/null @@ -1,10 +0,0 @@ -import scala.quoted.* - -inline def testMacro = ${ testImpl } - -def testImpl(using Quotes): Expr[Unit] = { - import quotes.reflect.* - val sym = Symbol.newType(Symbol.spliceOwner, "mytype", Flags.EmptyFlags, TypeRepr.of[String], Symbol.noSymbol) - assert(TypeDef(sym).show == "type mytype = java.lang.String") - '{ () } -} \ No newline at end of file diff --git a/tests/pos-macros/quoted-sym-newtype/Test_2.scala b/tests/pos-macros/quoted-sym-newtype/Test_2.scala deleted file mode 100644 index 506f836ab4a5..000000000000 --- 
a/tests/pos-macros/quoted-sym-newtype/Test_2.scala +++ /dev/null @@ -1,2 +0,0 @@ - -def test = testMacro \ No newline at end of file diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 15ccd38f860c..bbabc376b07b 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -63,6 +63,9 @@ val experimentalDefinitionInLibrary = Set( "scala.quoted.Quotes.reflectModule.SymbolModule.newModule", "scala.quoted.Quotes.reflectModule.SymbolModule.freshName", "scala.quoted.Quotes.reflectModule.SymbolMethods.info", + // Added for 3.6.0, stabilize after feedback. + "scala.quoted.Quotes.reflectModule.SymbolModule.newBoundedType", + "scala.quoted.Quotes.reflectModule.SymbolModule.newTypeAlias", // New feature: functions with erased parameters. // Need erasedDefinitions enabled. From 75e4f09405de0c2555c8463689f34d0936212206 Mon Sep 17 00:00:00 2001 From: Piotr Chabelski Date: Thu, 3 Oct 2024 13:26:17 +0200 Subject: [PATCH 611/827] Bump Scala CLI to 1.5.1 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index db1cf16588aa..0507fec64c16 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -125,7 +125,7 @@ object Build { val mimaPreviousLTSDottyVersion = "3.3.0" /** Version of Scala CLI to download */ - val scalaCliLauncherVersion = "1.5.0" + val scalaCliLauncherVersion = "1.5.1" /** Version of Coursier to download for initializing the local maven repo of Scala command */ val coursierJarVersion = "2.1.10" From df6a1f82270322d278eaa6124812edda285bb0a4 Mon Sep 17 00:00:00 2001 From: Piotr Chabelski Date: Thu, 3 Oct 2024 13:27:23 +0200 Subject: [PATCH 612/827] Bump `scala-cli-setup` GH action to 1.5.1 --- .github/workflows/lts-backport.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 5a269d889d7c..52eb674c1c17 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.5.0 + - uses: VirtusLab/scala-cli-setup@v1.5.1 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} From 971e04553ad542d3af3f6b77db434e5d6d558e7a Mon Sep 17 00:00:00 2001 From: Piotr Chabelski Date: Thu, 3 Oct 2024 13:29:33 +0200 Subject: [PATCH 613/827] Bump `coursier` to 2.1.13 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 0507fec64c16..2a23f0202eb0 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -127,7 +127,7 @@ object Build { /** Version of Scala CLI to download */ val scalaCliLauncherVersion = "1.5.1" /** Version of Coursier to download for initializing the local maven repo of Scala command */ - val coursierJarVersion = "2.1.10" + val coursierJarVersion = "2.1.13" object CompatMode { final val BinaryCompatible = 0 From 9db0a00023dac264edf3f17fc9bef6aa048f06c2 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 3 Oct 2024 13:20:22 +0200 Subject: [PATCH 614/827] Address review comments --- .../src/scala/quoted/runtime/impl/QuotesImpl.scala | 14 +++++++++----- library/src/scala/quoted/Quotes.scala | 10 +++++----- 2 files changed, 14 
insertions(+), 10 deletions(-) diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index ba6a6bb56253..901e0038efd5 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2651,12 +2651,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Case, tpe) def newTypeAlias(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol = - checkValidFlags(flags.toTypeFlags, Flags.validTypeFlags) + checkValidFlags(flags.toTypeFlags, Flags.validTypeAliasFlags) + assert(!tpe.isInstanceOf[Types.TypeBounds], "Passed `tpe` into newTypeAlias should not represent TypeBounds") dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags | dotc.core.Flags.Deferred, dotc.core.Types.TypeAlias(tpe), privateWithin) def newBoundedType(owner: Symbol, name: String, flags: Flags, tpe: TypeBounds, privateWithin: Symbol): Symbol = - checkValidFlags(flags.toTypeFlags, Flags.validTypeFlags) - dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags | dotc.core.Flags.Deferred, tpe, privateWithin) + checkValidFlags(flags.toTypeFlags, Flags.validBoundedTypeFlags) + dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags, tpe, privateWithin) def noSymbol: Symbol = dotc.core.Symbols.NoSymbol @@ -2999,8 +3000,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler // Keep: aligned with Quotes's `newBind` doc private[QuotesImpl] def validBindFlags: Flags = Case // Flags that could be allowed: Implicit | Given | Erased - // Keep: aligned with Quotes's 'newType' doc - private[QuotesImpl] def validTypeFlags: Flags = Private | Protected | Override | Deferred | Final | Infix | Local + // Keep: aligned with Quotes's 'newBoundedType' doc + private[QuotesImpl] def validBoundedTypeFlags: Flags = Private | Protected | Override | Deferred | Final | Infix | Local + + // Keep: aligned with Quotes's `newTypeAlias` doc + private[QuotesImpl] def validTypeAliasFlags: Flags = Private | Protected | Override | Final | Infix | Local end Flags diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 99cf04ec423c..7a98d6f6f761 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -3971,14 +3971,14 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * * @param parent The owner of the type * @param name The name of the type - * @param flags extra flags to with which symbol can be constructed. `Deferred` flag will be added. Can be `Private` | `Protected` | `Override` | `Deferred` | `Final` | `Infix` | `Local` + * @param flags extra flags to with which symbol can be constructed. Can be `Private` | `Protected` | `Override` | `Final` | `Infix` | `Local` * @param tpe The rhs the type alias - * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. + * @param privateWithin the symbol within which this new type symbol should be private. May be noSymbol. * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be * direct or indirect children of the reflection context's owner. 
*/ @experimental - // Keep: `flags` doc aligned with QuotesImpl's `validTypeFlags` + // Keep: `flags` doc aligned with QuotesImpl's `validTypeAliasFlags` def newTypeAlias(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol /** Generate a new type symbol for a type bounds with the given parent, name and type @@ -3991,12 +3991,12 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * @param name The name of the type * @param flags extra flags to with which symbol can be constructed. `Deferred` flag will be added. Can be `Private` | `Protected` | `Override` | `Deferred` | `Final` | `Infix` | `Local` * @param tpe The bounds of the type - * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. + * @param privateWithin the symbol within which this new type symbol should be private. May be noSymbol. * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be * direct or indirect children of the reflection context's owner. */ @experimental - // Keep: `flags` doc aligned with QuotesImpl's `validTypeFlags` + // Keep: `flags` doc aligned with QuotesImpl's `validBoundedTypeFlags` def newBoundedType(parent: Symbol, name: String, flags: Flags, tpe: TypeBounds, privateWithin: Symbol): Symbol /** Definition not available */ From 63e42d3013c0627953aaded6c1546a89365735c4 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 3 Oct 2024 16:47:33 +0200 Subject: [PATCH 615/827] Add assertion to ensure: if the parents of a module is empty, it must be a package. --- compiler/src/dotty/tools/dotc/typer/Applications.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index c8eb5b145db1..17be2acc7378 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1963,7 +1963,10 @@ trait Applications extends Compatibility { def widenPrefix(alt: TermRef): Type = alt.prefix.widen match case pre: (TypeRef | ThisType) if pre.typeSymbol.is(Module) => val ps = pre.parents - if ps.isEmpty then pre + if ps.isEmpty then + // The parents of a module class are non-empty, unless the module is a package. + assert(pre.typeSymbol.is(Package), pre) + pre else ps.reduceLeft(TypeComparer.andType(_, _)) case wpre => wpre From 0a416d88b5810b1fc86877c38b17e6c833a4c5e9 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 3 Oct 2024 18:31:35 +0200 Subject: [PATCH 616/827] Add an adaptation step in Inliner We sometimes face a problem that we inline a reference `x: T` which upon further inlining is adapted to an expected type `x`. It only seems to occur in complicated scenarios. I could not completely narrow it down. But in any case it's safe to drop the widening cast in order to avoid a type error here. We do that in a last-effort adaptation step that's only enabled in the Inliner: Faced with an expression `x: T` and a singleton expected type `y.type` where `x.type <: y.type`, rewrite to `x`. 
--- compiler/src/dotty/tools/dotc/inlines/Inliner.scala | 6 ++++++ compiler/src/dotty/tools/dotc/typer/Typer.scala | 10 +++++++++- tests/pos/i21413.scala | 2 ++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21413.scala diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 7c79e972c126..103f3aac7630 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -957,6 +957,12 @@ class Inliner(val call: tpd.Tree)(using Context): case None => tree case _ => tree + + /** For inlining only: Given `(x: T)` with expected type `x.type`, replace the tree with `x`. + */ + override def healAdapt(tree: Tree, pt: Type)(using Context): Tree = (tree, pt) match + case (Typed(tree1, _), pt: SingletonType) if tree1.tpe <:< pt => tree1 + case _ => tree end InlineTyper /** Drop any side-effect-free bindings that are unused in expansion or other reachable bindings. diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 93ea3f3c3ae0..57bbc3ee98e8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4602,7 +4602,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def recover(failure: SearchFailureType) = if canDefineFurther(wtp) || canDefineFurther(pt) then readapt(tree) - else err.typeMismatch(tree, pt, failure) + else + val tree1 = healAdapt(tree, pt) + if tree1 ne tree then readapt(tree1) + else err.typeMismatch(tree, pt, failure) pt match case _: SelectionProto => @@ -4751,6 +4754,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } + /** Hook for inheriting Typers to do a last-effort adaptation. If a different + * tree is returned, we will readpat that one, ptherwise we issue a type error afterwards. + */ + protected def healAdapt(tree: Tree, pt: Type)(using Context): Tree = tree + /** True if this inline typer has already issued errors */ def hasInliningErrors(using Context): Boolean = false diff --git a/tests/pos/i21413.scala b/tests/pos/i21413.scala new file mode 100644 index 000000000000..d2dc52e34630 --- /dev/null +++ b/tests/pos/i21413.scala @@ -0,0 +1,2 @@ +val x = (aaa = 1).aaa +//val y = x.aaa \ No newline at end of file From c32e5354a256acf8ff237e19c5b1258235025b41 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 3 Oct 2024 21:45:02 +0200 Subject: [PATCH 617/827] Fix incorrect caching with path-dependent types The added test case used to fail Ycheck:typer with the seemingly identicals: Found: (a: (aa : A{type B = Int}), b: a.B): CCPoly[(aa : A{type B = Int})] Required: (a: (aa : A{type B = Int}), b: a.B): CCPoly[(aa : A{type B = Int})] In fact one of the `aa` is a a TypeVar instantiated to `A {type B = Int }`. The MethodType comparison failed the signature check because the `a.B` where `a` is backed by a type variable had a stale signature cached. Fixed by changing `isProvisional` to traverse ParamRefs. 
--- .../src/dotty/tools/dotc/core/Types.scala | 3 +++ tests/neg/i16842.check | 20 +++++++++++++++---- tests/neg/i16842.scala | 2 +- tests/pos/dep-poly-class.scala | 9 +++++++++ 4 files changed, 29 insertions(+), 5 deletions(-) create mode 100644 tests/pos/dep-poly-class.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index aba8c3bb31fd..cf73bda0d131 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -144,6 +144,9 @@ object Types extends TypeUtils { !t.isPermanentlyInstantiated || test(t.permanentInst, theAcc) case t: LazyRef => !t.completed || test(t.ref, theAcc) + case t: ParamRef => + (t: Type).mightBeProvisional = false // break cycles + test(t.underlying, theAcc) case _ => (if theAcc != null then theAcc else ProAcc()).foldOver(false, t) end if diff --git a/tests/neg/i16842.check b/tests/neg/i16842.check index 936b08f95dbb..8cad4bc7656f 100644 --- a/tests/neg/i16842.check +++ b/tests/neg/i16842.check @@ -1,4 +1,16 @@ --- Error: tests/neg/i16842.scala:24:7 ---------------------------------------------------------------------------------- -24 | Liter(SemanticArray[SemanticInt.type], x) // error - | ^ - | invalid new prefix (dim: Int): SemanticArray[SemanticInt.type] cannot replace ty.type in type ty.T +-- [E007] Type Mismatch Error: tests/neg/i16842.scala:24:8 ------------------------------------------------------------- +24 | Liter(SemanticArray[SemanticInt.type], x) // error // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: Int => SemanticArray[SemanticInt.type] + | Required: SemanticArray[SemanticType] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i16842.scala:24:41 ------------------------------------------------------------ +24 | Liter(SemanticArray[SemanticInt.type], x) // error // error + | ^ + | Found: (x : List[Expr2[SemanticInt.type]]) + | Required: ty.T + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than ty.T + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16842.scala b/tests/neg/i16842.scala index e9935b46c01d..1e7e5cc14339 100644 --- a/tests/neg/i16842.scala +++ b/tests/neg/i16842.scala @@ -21,5 +21,5 @@ def typecheckArrayLiter( a: ArrayLiter ): Liter[SemanticArray[SemanticType]] = { val x: List[Expr2[SemanticInt.type]] = List() - Liter(SemanticArray[SemanticInt.type], x) // error + Liter(SemanticArray[SemanticInt.type], x) // error // error } diff --git a/tests/pos/dep-poly-class.scala b/tests/pos/dep-poly-class.scala new file mode 100644 index 000000000000..3615b699ff3a --- /dev/null +++ b/tests/pos/dep-poly-class.scala @@ -0,0 +1,9 @@ +trait A: + type B + +class CCPoly[T <: A](a: T, b: a.B) + +object Test: + def test(): Unit = + val aa: A { type B = Int } = new A { type B = Int } + val x: CCPoly[aa.type] = CCPoly(aa, 1) From f931a6d66078f22be8f5820261662efa0074d645 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 4 Oct 2024 18:13:09 +0200 Subject: [PATCH 618/827] Use the NamedTuple type ops for the result types of the term ops --- .../src-bootstrapped/scala/NamedTuple.scala | 43 ++++++++----------- 1 file changed, 18 insertions(+), 25 deletions(-) diff --git a/library/src-bootstrapped/scala/NamedTuple.scala b/library/src-bootstrapped/scala/NamedTuple.scala index 71bcd26a16e2..acc318b2be99 100644 --- a/library/src-bootstrapped/scala/NamedTuple.scala 
+++ b/library/src-bootstrapped/scala/NamedTuple.scala @@ -25,6 +25,7 @@ object NamedTuple: extension [V <: Tuple](x: V) inline def withNames[N <: Tuple]: NamedTuple[N, V] = x + import NamedTupleDecomposition.{Names, DropNames} export NamedTupleDecomposition.{ Names, DropNames, apply, size, init, head, last, tail, take, drop, splitAt, ++, map, reverse, zip, toList, toArray, toIArray @@ -134,65 +135,57 @@ object NamedTupleDecomposition: import NamedTuple.* extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) /** The value (without the name) at index `n` of this tuple */ - inline def apply(n: Int): Tuple.Elem[V, n.type] = + inline def apply(n: Int): Elem[NamedTuple[N, V], n.type] = inline x.toTuple match - case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] - case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] + case tup: NonEmptyTuple => tup(n).asInstanceOf[Elem[NamedTuple[N, V], n.type]] + case tup => tup.productElement(n).asInstanceOf[Elem[NamedTuple[N, V], n.type]] /** The number of elements in this tuple */ - inline def size: Tuple.Size[V] = x.toTuple.size + inline def size: Size[NamedTuple[N, V]] = x.toTuple.size /** The first element value of this tuple */ - inline def head: Tuple.Elem[V, 0] = apply(0) + inline def head: Head[NamedTuple[N, V]] = apply(0) /** The last element value of this tuple */ - inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] + inline def last: Last[NamedTuple[N, V]] = apply(size - 1).asInstanceOf[Tuple.Last[V]] /** The tuple consisting of all elements of this tuple except the last one */ - inline def init: NamedTuple[Tuple.Init[N], Tuple.Init[V]] = - x.toTuple.take(size - 1).asInstanceOf[NamedTuple[Tuple.Init[N], Tuple.Init[V]]] + inline def init: Init[NamedTuple[N, V]] = + x.toTuple.take(size - 1).asInstanceOf[Init[NamedTuple[N, V]]] /** The tuple consisting of all elements of this tuple except the first one */ - inline def tail: NamedTuple[Tuple.Tail[N], Tuple.Tail[V]] = - x.toTuple.drop(1).asInstanceOf[NamedTuple[Tuple.Tail[N], Tuple.Tail[V]]] + inline def tail: Tail[NamedTuple[N, V]] = + x.toTuple.drop(1).asInstanceOf[Tail[NamedTuple[N, V]]] /** The tuple consisting of the first `n` elements of this tuple, or all * elements if `n` exceeds `size`. */ - inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = - x.toTuple.take(n) + inline def take(n: Int): Take[NamedTuple[N, V], n.type] = x.toTuple.take(n) /** The tuple consisting of all elements of this tuple except the first `n` ones, * or no elements if `n` exceeds `size`. */ - inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = - x.toTuple.drop(n) + inline def drop(n: Int): Drop[NamedTuple[N, V], n.type] = x.toTuple.drop(n) /** The tuple `(x.take(n), x.drop(n))` */ - inline def splitAt(n: Int): - (NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]], - NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]]) = - // would be nice if this could have type `Split[NamedTuple[N, V]]` instead, but - // we get a type error then. Similar for other methods here. - x.toTuple.splitAt(n) + inline def splitAt(n: Int): Split[NamedTuple[N, V], n.type] = x.toTuple.splitAt(n) /** The tuple consisting of all elements of this tuple followed by all elements * of tuple `that`. The names of the two tuples must be disjoint. 
*/ inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) - : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] + : Concat[NamedTuple[N, V], NamedTuple[N2, V2]] = x.toTuple ++ that.toTuple /** The named tuple consisting of all element values of this tuple mapped by * the polymorphic mapping function `f`. The names of elements are preserved. * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. */ - inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = + inline def map[F[_]](f: [t] => t => F[t]): Map[NamedTuple[N, V], F] = x.toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] /** The named tuple consisting of all elements of this tuple in reverse */ - inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = - x.toTuple.reverse + inline def reverse: Reverse[NamedTuple[N, V]] = x.toTuple.reverse /** The named tuple consisting of all elements values of this tuple zipped * with corresponding element values in named tuple `that`. @@ -201,7 +194,7 @@ object NamedTupleDecomposition: * The names of `x` and `that` at the same index must be the same. * The result tuple keeps the same names as the operand tuples. */ - inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = + inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): Zip[NamedTuple[N, V], NamedTuple[N, V2]] = x.toTuple.zip(that.toTuple) /** A list consisting of all element values */ From 12eb487179949815102d5a194ac25f5e7c26d0af Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 4 Oct 2024 18:34:19 +0200 Subject: [PATCH 619/827] Drop `asInstanceOf` from the NamedTuple term ops where possible --- library/src-bootstrapped/scala/NamedTuple.scala | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/library/src-bootstrapped/scala/NamedTuple.scala b/library/src-bootstrapped/scala/NamedTuple.scala index acc318b2be99..37ce031fba10 100644 --- a/library/src-bootstrapped/scala/NamedTuple.scala +++ b/library/src-bootstrapped/scala/NamedTuple.scala @@ -147,15 +147,14 @@ object NamedTupleDecomposition: inline def head: Head[NamedTuple[N, V]] = apply(0) /** The last element value of this tuple */ - inline def last: Last[NamedTuple[N, V]] = apply(size - 1).asInstanceOf[Tuple.Last[V]] + inline def last: Last[NamedTuple[N, V]] = apply(size - 1).asInstanceOf[Last[NamedTuple[N, V]]] /** The tuple consisting of all elements of this tuple except the last one */ inline def init: Init[NamedTuple[N, V]] = - x.toTuple.take(size - 1).asInstanceOf[Init[NamedTuple[N, V]]] + x.take(size - 1).asInstanceOf[Init[NamedTuple[N, V]]] /** The tuple consisting of all elements of this tuple except the first one */ - inline def tail: Tail[NamedTuple[N, V]] = - x.toTuple.drop(1).asInstanceOf[Tail[NamedTuple[N, V]]] + inline def tail: Tail[NamedTuple[N, V]] = x.toTuple.drop(1) /** The tuple consisting of the first `n` elements of this tuple, or all * elements if `n` exceeds `size`. @@ -182,7 +181,7 @@ object NamedTupleDecomposition: * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. 
*/ inline def map[F[_]](f: [t] => t => F[t]): Map[NamedTuple[N, V], F] = - x.toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] + x.toTuple.map(f) /** The named tuple consisting of all elements of this tuple in reverse */ inline def reverse: Reverse[NamedTuple[N, V]] = x.toTuple.reverse @@ -198,7 +197,7 @@ object NamedTupleDecomposition: x.toTuple.zip(that.toTuple) /** A list consisting of all element values */ - inline def toList: List[Tuple.Union[V]] = x.toTuple.toList.asInstanceOf[List[Tuple.Union[V]]] + inline def toList: List[Tuple.Union[V]] = x.toTuple.toList /** An array consisting of all element values */ inline def toArray: Array[Object] = x.toTuple.toArray From 6828fe708c0109e26fddc44072681dba2f95c75c Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 4 Oct 2024 18:49:35 +0200 Subject: [PATCH 620/827] Apply suggestions from code review --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 ++- docs/_docs/reference/other-new-features/named-tuples.md | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 57bbc3ee98e8..0ca4f9486f09 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4755,7 +4755,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } /** Hook for inheriting Typers to do a last-effort adaptation. If a different - * tree is returned, we will readpat that one, ptherwise we issue a type error afterwards. + * tree is returned, we will re-adapt that one, otherwise we issue a type error afterwards. +`` */ protected def healAdapt(tree: Tree, pt: Type)(using Context): Tree = tree diff --git a/docs/_docs/reference/other-new-features/named-tuples.md b/docs/_docs/reference/other-new-features/named-tuples.md index bf1ae4ca7046..3813db4defe1 100644 --- a/docs/_docs/reference/other-new-features/named-tuples.md +++ b/docs/_docs/reference/other-new-features/named-tuples.md @@ -99,7 +99,7 @@ Bob match We allow named patterns not just for named tuples but also for case classes. For instance: ```scala city match - case c @ City(name = "London") => println(p.population) + case c @ City(name = "London") => println(c.population) case City(name = n, zip = 1026, population = pop) => println(pop) ``` From 6a971b1eec85f887652482b492ad169c0e98a586 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Fri, 4 Oct 2024 19:06:17 +0200 Subject: [PATCH 621/827] Fix typos --- docs/_docs/reference/other-new-features/named-tuples.md | 4 ++-- library/src-bootstrapped/scala/NamedTuple.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/_docs/reference/other-new-features/named-tuples.md b/docs/_docs/reference/other-new-features/named-tuples.md index bf1ae4ca7046..0abc91e5ecf8 100644 --- a/docs/_docs/reference/other-new-features/named-tuples.md +++ b/docs/_docs/reference/other-new-features/named-tuples.md @@ -127,7 +127,7 @@ NamedTuple[("name", "age"), (String, Int)] A `NamedTuple[N, V]` type is publicly known to be a supertype (but not a subtype) of its value paramater `V`, which means that regular tuples can be assigned to named tuples but not _vice versa_. -The `NamedTuple` object contains a number of extension methods for named tuples hat mirror the same functions in `Tuple`. Examples are +The `NamedTuple` object contains a number of extension methods for named tuples that mirror the same functions in `Tuple`. 
Examples are `apply`, `head`, `tail`, `take`, `drop`, `++`, `map`, or `zip`. Similar to `Tuple`, the `NamedTuple` object also contains types such as `Elem`, `Head`, `Concat` that describe the results of these extension methods. @@ -154,7 +154,7 @@ that `c: C`, and that `n` is not otherwise legal as a name of a selection on `c` Then `c.n` is a legal selection, which expands to `c.selectDynamic("n").asInstanceOf[T]`. It is the task of the implementation of `selectDynamic` in `C` to ensure that its -computed result conforms to the predicted type `T` +computed result conforms to the predicted type `T`. As an example, assume we have a query type `Q[T]` defined as follows: diff --git a/library/src-bootstrapped/scala/NamedTuple.scala b/library/src-bootstrapped/scala/NamedTuple.scala index 37ce031fba10..f8a821dc45ef 100644 --- a/library/src-bootstrapped/scala/NamedTuple.scala +++ b/library/src-bootstrapped/scala/NamedTuple.scala @@ -186,7 +186,7 @@ object NamedTupleDecomposition: /** The named tuple consisting of all elements of this tuple in reverse */ inline def reverse: Reverse[NamedTuple[N, V]] = x.toTuple.reverse - /** The named tuple consisting of all elements values of this tuple zipped + /** The named tuple consisting of all element values of this tuple zipped * with corresponding element values in named tuple `that`. * If the two tuples have different sizes, * the extra elements of the larger tuple will be disregarded. From c76dc8087d34076dcb7b91fa3ba18da07f08d9c6 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 4 Oct 2024 19:07:15 +0200 Subject: [PATCH 622/827] Address some points in code review. --- docs/_docs/internals/syntax.md | 2 +- docs/_docs/reference/contextual/context-bounds.md | 9 ++++++--- docs/_docs/reference/syntax.md | 2 +- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 0cde7bc127aa..d0074bb503c2 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -486,7 +486,7 @@ GivenConditional ::= DefTypeParamClause GivenType ::= AnnotType1 {id [nl] AnnotType1} OldGivenDef ::= [OldGivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -- syntax up to Scala 3.5, to be deprecated in the future -OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} diff --git a/docs/_docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md index ef32dc7b08c2..a06cb2d79181 100644 --- a/docs/_docs/reference/contextual/context-bounds.md +++ b/docs/_docs/reference/contextual/context-bounds.md @@ -71,7 +71,7 @@ The witness context parameter(s) generated from context bounds are added as foll 1. If one of the bounds is referred to by its name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. - 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. + 3. 
Otherwise, the parameters arising from context bounds form a new using clause at the end. Rules (2) and (3) match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility with Scala 2 and earlier Scala 3 versions is maintained. @@ -151,11 +151,14 @@ val less: Comparer = [X] => (x: X, y: X) => (ord: Ord[X]) ?=> The expansion of using clauses does look inside alias types. For instance, here is a variation of the previous example that uses a parameterized type alias: ```scala -type Cmp[X] = (x: X, y: X) => Ord[X] ?=> Boolean +type Cmp[X] = (x: X, y: X) => Boolean type Comparer2 = [X: Ord] => Cmp[X] ``` The expansion of the right hand side of `Comparer2` expands the `Cmp[X]` alias -and then inserts the context function at the same place as what's done for `Comparer`. +and then inserts the context function at the same place as what's done for `Comparer`: +```scala + [X] => (x: X, y: X) => Ord[X] ?=> Boolean +``` ### Context Bounds for Type Members diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index adf25c9342fa..3e71718d0752 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -461,7 +461,7 @@ GivenConditional ::= DefTypeParamClause GivenType ::= AnnotType1 {id [nl] AnnotType1} OldGivenDef ::= [OldGivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -- syntax up to Scala 3.5, to be deprecated in the future -OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} From df75afc7a53e4f3bbd43483813719182d391021f Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 4 Oct 2024 19:11:51 +0200 Subject: [PATCH 623/827] Apply suggestions from code review Co-authored-by: Dimi Racordon --- docs/_docs/reference/contextual/context-bounds.md | 6 +++--- docs/_docs/reference/contextual/deferred-givens.md | 4 ++-- docs/_docs/reference/contextual/givens.md | 6 +++--- docs/_docs/reference/contextual/more-givens.md | 6 ++---- docs/_docs/reference/contextual/previous-givens.md | 7 +++---- docs/_docs/reference/experimental/typeclasses.md | 2 +- docs/_docs/reference/syntax.md | 6 +++--- 7 files changed, 17 insertions(+), 20 deletions(-) diff --git a/docs/_docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md index a06cb2d79181..60357b3f098d 100644 --- a/docs/_docs/reference/contextual/context-bounds.md +++ b/docs/_docs/reference/contextual/context-bounds.md @@ -199,10 +199,10 @@ The syntax of function types and function literals is generalized as follows to allow context bounds for generic type parameters. 
```ebnf -FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type +FunType ::= FunTypeArgs ('=>' | '?=>') Type | DefTypeParamClause '=>' Type -FunExpr ::= FunParams (‘=>’ | ‘?=>’) Expr - | DefTypeParamClause ‘=>’ Expr +FunExpr ::= FunParams ('=>' | '?=>') Expr + | DefTypeParamClause '=>' Expr ``` The syntax for abstract type members is generalized as follows to allow context bounds: diff --git a/docs/_docs/reference/contextual/deferred-givens.md b/docs/_docs/reference/contextual/deferred-givens.md index 232c15afda35..e63e26858d29 100644 --- a/docs/_docs/reference/contextual/deferred-givens.md +++ b/docs/_docs/reference/contextual/deferred-givens.md @@ -40,7 +40,7 @@ class SortedString[A] extends Sorted: override given Ord[String] = ... ``` -Note that the implementing given needs an `override` modifier since the `deferred` given in class `Sorted` counts as a concrete (i.e. not abstract) definition. In a sense, the `deferred` right hand side in `Sorted` is like a (magic, compiler-supported) macro, with the peculiarity that the macro's implementation also affects subclasses. +Note that the implementing given needs an `override` modifier since the `deferred` given in class `Sorted` counts as a concrete (i.e. not abstract) definition. In a sense, `deferred` on the right-hand side in `Sorted` is like a (magic, compiler-supported) macro, with the peculiarity that the macro's implementation also affects subclasses. ## Abstract Givens @@ -52,6 +52,6 @@ trait HasOrd[T]: ``` An abstract given has the form `given name: Type` without a right-hand side or arguments to the type. -Since Scala 3.6, abstract givens are made redundant by deferred givens. Deferred givens can replace abstract givens. They have better ergonomics, since deferred givens get naturally implemented in inheriting classes, so there is no longer any need for boilerplate to fill in definitions of abstract givens. +Since Scala 3.6, abstract givens are made redundant by deferred givens. Deferred givens have better ergonomics, since they get naturally implemented in inheriting classes, so there is no longer any need for boilerplate to fill in definitions of abstract givens. It is therefore recommended that software architectures relying on abstract givens be migrated to use deferred givens instead. Abstract givens are still supported in Scala 3.6, but will likely be deprecated and phased out over time. diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index b7be460c9a34..088ded2e8db4 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -83,11 +83,11 @@ given Position = enclosingTree.position ## Given Instance Initialization An unconditional given instance without parameters is initialized on-demand, the first -time it is accessed. If the given is a simple alias to some immutable value, the given is implemented as a simple forwarder, without incurring the cost of a field to hold a cached value. If a given is conditional, a fresh instance is created for each reference. +time it is accessed. If the given is a mere alias to some immutable value, the given is implemented as a simple forwarder, without incurring the cost of a field to hold a cached value. If a given is conditional, a fresh instance is created for each reference. ## Syntax -Here is the full syntax for given instances. Some of these forms of givens are explained on a separate page on [Other Forms of Givens](../more-givens.md). +Here is the full syntax for given instances. 
Some of these forms of givens are explained in a separate page: [Other Forms of Givens](../more-givens.md). ```ebnf Here is the complete context-free syntax for all proposed features. @@ -106,7 +106,7 @@ GivenConditional ::= DefTypeParamClause GivenType ::= AnnotType1 {id [nl] AnnotType1} ``` -A given instance starts with the reserved word `given`, which is followed by +A given instance starts with the reserved keyword `given`, which is followed by - An optional name and a colon - An optional list of conditions. diff --git a/docs/_docs/reference/contextual/more-givens.md b/docs/_docs/reference/contextual/more-givens.md index 3d0076543cd7..2f6dd63f7eab 100644 --- a/docs/_docs/reference/contextual/more-givens.md +++ b/docs/_docs/reference/contextual/more-givens.md @@ -43,14 +43,12 @@ given listOrd: [T] => (ord: Ord[T]) => Ord[List[T]]: ## By Name Givens -We sometimes find it necessary that a given alias is re-evaluated each time it is called. For instance, say we have a mutable variable `curCtx` and we want to define a given that returns the current value of that variable. A normal given alias will not do since by default given aliases are mapped to lazy vals. - -In general, we want to avoid re-evaluation of givens. But there are situations like the one above where we want to specify _by-name_ evaluation instead. This is achieved by writing a conditional given with an empty parameter list: +Though in general we want to avoid re-evaluating a given, there are situations where such a re-evaluation may be necessary. For instance, say we have a mutable variable `curCtx` and we want to define a given that returns the current value of that variable. A normal given alias will not do since by default given aliases are mapped to lazy vals. In this case, we can specify a _by-name_ evaluation insteadby writing a conditional given with an empty parameter list: ```scala val curCtx: Context given context: () => Context = curCtx ``` -With this definition, each time a `Context` is summoned we evaluate `context` function, which produces the current value of `curCtx`. +With this definition, each time a `Context` is summoned we evaluate the `context` function, which produces the current value of `curCtx`. ## Given Macros diff --git a/docs/_docs/reference/contextual/previous-givens.md b/docs/_docs/reference/contextual/previous-givens.md index dc88daaab691..a78d8a3751ea 100644 --- a/docs/_docs/reference/contextual/previous-givens.md +++ b/docs/_docs/reference/contextual/previous-givens.md @@ -161,7 +161,6 @@ Note that the inline methods within the given instances may be `transparent`. The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. This is used to help dead code elimination of the given instances that are not used after inlining. - ## Pattern-Bound Given Instances Given instances can also appear in patterns. Example: @@ -215,12 +214,12 @@ Here is the syntax for given instances: ```ebnf TmplDef ::= ... - | ‘given’ GivenDef + | 'given' GivenDef GivenDef ::= [GivenSig] StructuralInstance | [GivenSig] AnnotType ‘=’ Expr | [GivenSig] AnnotType -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ TemplateBody] +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ':' +StructuralInstance ::= ConstrApp {'with' ConstrApp} [‘with’ TemplateBody] ``` A given instance starts with the reserved word `given` and an optional _signature_. 
The signature diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index c366c40779b9..add5853e10ba 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -18,7 +18,7 @@ a bit cumbersome and limiting for standard generic programming patterns. Much ha This note shows that with some fairly small and reasonable tweaks to Scala's syntax and typing rules we can obtain a much better scheme for working with type classes, or do generic programming in general. The bulk of the suggested improvements has been implemented and is available -in under source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: +in source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: ``` scala compile -source:future -language:experimental.modularity diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index 3e71718d0752..0f78ff03583e 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -214,9 +214,9 @@ ParamValueType ::= Type [‘*’] TypeArgs ::= ‘[’ Types ‘]’ Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> TypeBounds ::= [‘>:’ Type] [‘<:’ Type] -TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +TypeAndCtxBounds ::= TypeBounds [':' ContextBounds] ContextBounds ::= ContextBound - | ContextBound `:` ContextBounds -- to be deprecated + | ContextBound ':' ContextBounds -- to be deprecated | '{' ContextBound {',' ContextBound} '}' ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} @@ -441,7 +441,7 @@ TypeDef ::= id [HkTypeParamClause] {FunParamClause}TypeBounds TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef | ‘enum’ EnumDef - | ‘given’ (GivenDef | OldGivenDef) + | 'given' (GivenDef | OldGivenDef) ClassDef ::= id ClassConstr [Template] ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] From ca7553ed9d81e4327f6998be77bb0f061f1be936 Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Sat, 5 Oct 2024 09:48:54 +0200 Subject: [PATCH 624/827] Add explanation for code --- tests/init-global/warn/Color.scala | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/init-global/warn/Color.scala b/tests/init-global/warn/Color.scala index ee1c00701940..59554c905cd0 100644 --- a/tests/init-global/warn/Color.scala +++ b/tests/init-global/warn/Color.scala @@ -4,7 +4,25 @@ enum Color: enum Player: case Black, White + // Explanation: See the desugaring below val color: Color = if this == Player.Black // warn then Color.Black else Color.White + +// From the desugaring of Player, we can see the field `Player.Black` is not yet +// initialized during evaluation of the first `new Player`: +// +// class Player: +// val color: Color = +// if this == Player.Black ... +// +// object Player: +// val Black: Player = new Player // <--- problem +// val White: Player = new Player +// +// +// The complex desugaring makes it difficult to see the initialization +// semantics and it is prone to make such hard-to-spot mistakes. +// +// Note: The desugaring above is simplified for presentation. 
From 127bb7bf5c24dbcac780d3f5c5bc7ef1ca1821e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 4 Oct 2024 17:27:07 +0200 Subject: [PATCH 625/827] Fix #20271: Bring for comprehension spec up to date. Since Scala 3.4, a `withFilter` is generated if and only if the generator has the `case` modifier. If it does not, the pattern must be irrefutable. --- docs/_spec/06-expressions.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/_spec/06-expressions.md b/docs/_spec/06-expressions.md index 5043e752ebe6..a633c30e0e4b 100644 --- a/docs/_spec/06-expressions.md +++ b/docs/_spec/06-expressions.md @@ -729,8 +729,9 @@ A _for loop_ `for (´\mathit{enums}\,´) ´e´` executes expression ´e´ for ea A _for comprehension_ `for (´\mathit{enums}\,´) yield ´e´` evaluates expression ´e´ for each binding generated by the enumerators ´\mathit{enums}´ and collects the results. An enumerator sequence always starts with a generator; this can be followed by further generators, value definitions, or guards. -A _generator_ `´p´ <- ´e´` produces bindings from an expression ´e´ which is matched in some way against pattern ´p´. -Optionally, `case` can appear in front of a generator pattern, this has no meaning in Scala 2 but will be [required in Scala 3 if `p` is not irrefutable](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html). +A _generator_ `´p´ <- ´e´` produces bindings from an expression ´e´ which is deconstructed by the pattern ´p´. +The pattern must be [irrefutable](08-pattern-matching.html#irrefutable-patterns). +A _conditional generator_ `case ´p´ <- ´e´` tests whether elements produced by ´e´ match the pattern and discards the ones that do not match. A _value definition_ `´p´ = ´e´` binds the value name ´p´ (or several names in a pattern ´p´) to the result of evaluating the expression ´e´. A _guard_ `if ´e´` contains a boolean expression which restricts enumerated bindings. @@ -738,7 +739,7 @@ The precise meaning of generators and guards is defined by translation to invoca These methods can be implemented in different ways for different carrier types. The translation scheme is as follows. -In a first step, every generator `´p´ <- ´e´`, where ´p´ is not [irrefutable](08-pattern-matching.html#patterns) for the type of ´e´ is replaced by +In a first step, every generator `case ´p´ <- ´e´` is replaced by ```scala ´p´ <- ´e´.withFilter { case ´p´ => true; case _ => false } @@ -772,7 +773,7 @@ Then, the following rules are applied repeatedly until all comprehensions have b ´e´.foreach { case ´p´ => for (´p'´ <- ´e'; ...´) ´e''´ } ``` - - A generator `´p´ <- ´e´` followed by a guard `if ´g´` is translated to a single generator `´p´ <- ´e´.withFilter((´x_1, ..., x_n´) => ´g\,´)` where ´x_1, ..., x_n´ are the free variables of ´p´. + - A generator `´p´ <- ´e´` followed by a guard `if ´g´` is translated to a single generator `´p´ <- ´e´.withFilter({ case ´p´ => ´g\,´ })`. - A generator `´p´ <- ´e´` followed by a value definition `´p'´ = ´e'´` is translated to the following generator of pairs of values, where ´x´ and ´x'´ are fresh names: From 3b9c7c88c5923b10e80578b000209e257dc4f95b Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 5 Oct 2024 13:27:21 +0200 Subject: [PATCH 626/827] Make failing pc tests work again in non-bootstrapped mode Bring back experimental NamedTuple in non-bootstrapped src/library. Needed to make SignatureHelpTests and CompletionTests to go through. 
--- .../scala/NamedTuple.scala | 228 ++++++++++++++++++ 1 file changed, 228 insertions(+) create mode 100644 library/src-non-bootstrapped/scala/NamedTuple.scala diff --git a/library/src-non-bootstrapped/scala/NamedTuple.scala b/library/src-non-bootstrapped/scala/NamedTuple.scala new file mode 100644 index 000000000000..f237d1d487fe --- /dev/null +++ b/library/src-non-bootstrapped/scala/NamedTuple.scala @@ -0,0 +1,228 @@ +package scala +import scala.language.experimental.clauseInterleaving +import annotation.experimental +import compiletime.ops.boolean.* + +@experimental +object NamedTuple: + + /** The type to which named tuples get mapped to. For instance, + * (name: String, age: Int) + * gets mapped to + * NamedTuple[("name", "age"), (String, Int)] + */ + opaque type NamedTuple[N <: Tuple, +V <: Tuple] >: V <: AnyNamedTuple = V + + /** A type which is a supertype of all named tuples */ + opaque type AnyNamedTuple = Any + + def apply[N <: Tuple, V <: Tuple](x: V): NamedTuple[N, V] = x + + def unapply[N <: Tuple, V <: Tuple](x: NamedTuple[N, V]): Some[V] = Some(x) + + /** A named tuple expression will desugar to a call to `build`. For instance, + * `(name = "Lyra", age = 23)` will desugar to `build[("name", "age")]()(("Lyra", 23))`. + */ + inline def build[N <: Tuple]()[V <: Tuple](x: V): NamedTuple[N, V] = x + + extension [V <: Tuple](x: V) + inline def withNames[N <: Tuple]: NamedTuple[N, V] = x + + export NamedTupleDecomposition.{ + Names, DropNames, + apply, size, init, head, last, tail, take, drop, splitAt, ++, map, reverse, zip, toList, toArray, toIArray + } + + extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) + + // ALL METHODS DEPENDING ON `toTuple` MUST BE EXPORTED FROM `NamedTupleDecomposition` + /** The underlying tuple without the names */ + inline def toTuple: V = x + + // This intentionally works for empty named tuples as well. I think NonEmptyTuple is a dead end + // and should be reverted, just like NonEmptyList is also appealing at first, but a bad idea + // in the end. + + // inline def :* [L] (x: L): NamedTuple[Append[N, ???], Append[V, L] = ??? + // inline def *: [H] (x: H): NamedTuple[??? *: N], H *: V] = ??? + + end extension + + /** The size of a named tuple, represented as a literal constant subtype of Int */ + type Size[X <: AnyNamedTuple] = Tuple.Size[DropNames[X]] + + /** The type of the element value at position N in the named tuple X */ + type Elem[X <: AnyNamedTuple, N <: Int] = Tuple.Elem[DropNames[X], N] + + /** The type of the first element value of a named tuple */ + type Head[X <: AnyNamedTuple] = Elem[X, 0] + + /** The type of the last element value of a named tuple */ + type Last[X <: AnyNamedTuple] = Tuple.Last[DropNames[X]] + + /** The type of a named tuple consisting of all elements of named tuple X except the first one */ + type Tail[X <: AnyNamedTuple] = Drop[X, 1] + + /** The type of the initial part of a named tuple without its last element */ + type Init[X <: AnyNamedTuple] = + NamedTuple[Tuple.Init[Names[X]], Tuple.Init[DropNames[X]]] + + /** The type of the named tuple consisting of the first `N` elements of `X`, + * or all elements if `N` exceeds `Size[X]`. + */ + type Take[X <: AnyNamedTuple, N <: Int] = + NamedTuple[Tuple.Take[Names[X], N], Tuple.Take[DropNames[X], N]] + + /** The type of the named tuple consisting of all elements of `X` except the first `N` ones, + * or no elements if `N` exceeds `Size[X]`. 
+ */ + type Drop[X <: AnyNamedTuple, N <: Int] = + NamedTuple[Tuple.Drop[Names[X], N], Tuple.Drop[DropNames[X], N]] + + /** The pair type `(Take(X, N), Drop[X, N]). */ + type Split[X <: AnyNamedTuple, N <: Int] = (Take[X, N], Drop[X, N]) + + /** Type of the concatenation of two tuples `X` and `Y` */ + type Concat[X <: AnyNamedTuple, Y <: AnyNamedTuple] = + NamedTuple[Tuple.Concat[Names[X], Names[Y]], Tuple.Concat[DropNames[X], DropNames[Y]]] + + /** The type of the named tuple `X` mapped with the type-level function `F`. + * If `X = (n1 : T1, ..., ni : Ti)` then `Map[X, F] = `(n1 : F[T1], ..., ni : F[Ti])`. + */ + type Map[X <: AnyNamedTuple, F[_ <: Tuple.Union[DropNames[X]]]] = + NamedTuple[Names[X], Tuple.Map[DropNames[X], F]] + + /** A named tuple with the elements of tuple `X` in reversed order */ + type Reverse[X <: AnyNamedTuple] = + NamedTuple[Tuple.Reverse[Names[X]], Tuple.Reverse[DropNames[X]]] + + /** The type of the named tuple consisting of all element values of + * named tuple `X` zipped with corresponding element values of + * named tuple `Y`. If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The names of `X` and `Y` at the same index must be the same. + * The result tuple keeps the same names as the operand tuples. + * For example, if + * ``` + * X = (n1 : S1, ..., ni : Si) + * Y = (n1 : T1, ..., nj : Tj) where j >= i + * ``` + * then + * ``` + * Zip[X, Y] = (n1 : (S1, T1), ..., ni: (Si, Ti)) + * ``` + * @syntax markdown + */ + type Zip[X <: AnyNamedTuple, Y <: AnyNamedTuple] = + Names[X] match + case Names[Y] => + NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] + + /** A type specially treated by the compiler to represent all fields of a + * class argument `T` as a named tuple. Or, if `T` is already a named tuple, + * `From[T]` is the same as `T`. + */ + type From[T] <: AnyNamedTuple + + /** The type of the empty named tuple */ + type Empty = NamedTuple[EmptyTuple, EmptyTuple] + + /** The empty named tuple */ + val Empty: Empty = EmptyTuple + +end NamedTuple + +/** Separate from NamedTuple object so that we can match on the opaque type NamedTuple. */ +@experimental +object NamedTupleDecomposition: + import NamedTuple.* + extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) + /** The value (without the name) at index `n` of this tuple */ + inline def apply(n: Int): Tuple.Elem[V, n.type] = + inline x.toTuple match + case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] + case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] + + /** The number of elements in this tuple */ + inline def size: Tuple.Size[V] = x.toTuple.size + + /** The first element value of this tuple */ + inline def head: Tuple.Elem[V, 0] = apply(0) + + /** The last element value of this tuple */ + inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] + + /** The tuple consisting of all elements of this tuple except the last one */ + inline def init: NamedTuple[Tuple.Init[N], Tuple.Init[V]] = + x.toTuple.take(size - 1).asInstanceOf[NamedTuple[Tuple.Init[N], Tuple.Init[V]]] + + /** The tuple consisting of all elements of this tuple except the first one */ + inline def tail: NamedTuple[Tuple.Tail[N], Tuple.Tail[V]] = + x.toTuple.drop(1).asInstanceOf[NamedTuple[Tuple.Tail[N], Tuple.Tail[V]]] + + /** The tuple consisting of the first `n` elements of this tuple, or all + * elements if `n` exceeds `size`. 
+ */ + inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = + x.toTuple.take(n) + + /** The tuple consisting of all elements of this tuple except the first `n` ones, + * or no elements if `n` exceeds `size`. + */ + inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = + x.toTuple.drop(n) + + /** The tuple `(x.take(n), x.drop(n))` */ + inline def splitAt(n: Int): + (NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]], + NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]]) = + // would be nice if this could have type `Split[NamedTuple[N, V]]` instead, but + // we get a type error then. Similar for other methods here. + x.toTuple.splitAt(n) + + /** The tuple consisting of all elements of this tuple followed by all elements + * of tuple `that`. The names of the two tuples must be disjoint. + */ + inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) + : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] + = x.toTuple ++ that.toTuple + + /** The named tuple consisting of all element values of this tuple mapped by + * the polymorphic mapping function `f`. The names of elements are preserved. + * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. + */ + inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = + x.toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] + + /** The named tuple consisting of all elements of this tuple in reverse */ + inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = + x.toTuple.reverse + + /** The named tuple consisting of all elements values of this tuple zipped + * with corresponding element values in named tuple `that`. + * If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The names of `x` and `that` at the same index must be the same. + * The result tuple keeps the same names as the operand tuples. + */ + inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = + x.toTuple.zip(that.toTuple) + + /** A list consisting of all element values */ + inline def toList: List[Tuple.Union[V]] = x.toTuple.toList.asInstanceOf[List[Tuple.Union[V]]] + + /** An array consisting of all element values */ + inline def toArray: Array[Object] = x.toTuple.toArray + + /** An immutable array consisting of all element values */ + inline def toIArray: IArray[Object] = x.toTuple.toIArray + + end extension + + /** The names of a named tuple, represented as a tuple of literal string values. */ + type Names[X <: AnyNamedTuple] <: Tuple = X match + case NamedTuple[n, _] => n + + /** The value types of a named tuple represented as a regular tuple. 
*/ + type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[_, x] => x From cea0f900ce295abebe9f8614cad66bd1d54862a8 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Sat, 5 Oct 2024 14:09:57 +0200 Subject: [PATCH 627/827] Drop scala3-presentation-compiler-non-bootstrapped --- .github/workflows/ci.yaml | 2 +- build.sbt | 1 - project/Build.scala | 27 +++++++++------------------ 3 files changed, 10 insertions(+), 20 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 79262b1ae7b7..95a6ed24df13 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -228,7 +228,7 @@ jobs: uses: actions/checkout@v4 - name: Test - run: sbt ";scala3-bootstrapped/compile; scala3-bootstrapped/testCompilation; scala3-presentation-compiler-bootstrapped/test; scala3-language-server/test" + run: sbt ";scala3-bootstrapped/compile; scala3-bootstrapped/testCompilation; scala3-presentation-compiler/test; scala3-language-server/test" shell: cmd - name: build binary diff --git a/build.sbt b/build.sbt index f357044c91ca..9d29bfcb6d6a 100644 --- a/build.sbt +++ b/build.sbt @@ -36,7 +36,6 @@ val `dist-linux-aarch64` = Build.`dist-linux-aarch64` val `community-build` = Build.`community-build` val `sbt-community-build` = Build.`sbt-community-build` val `scala3-presentation-compiler` = Build.`scala3-presentation-compiler` -val `scala3-presentation-compiler-bootstrapped` = Build.`scala3-presentation-compiler-bootstrapped` val sjsSandbox = Build.sjsSandbox val sjsJUnitTests = Build.sjsJUnitTests diff --git a/project/Build.scala b/project/Build.scala index 2a23f0202eb0..8993edc45ede 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1374,25 +1374,21 @@ object Build { ) lazy val `scala3-presentation-compiler` = project.in(file("presentation-compiler")) - .asScala3PresentationCompiler(NonBootstrapped) - lazy val `scala3-presentation-compiler-bootstrapped` = project.in(file("presentation-compiler")) - .asScala3PresentationCompiler(Bootstrapped) + .withCommonSettings(Bootstrapped) + .dependsOn(`scala3-compiler-bootstrapped`, `scala3-library-bootstrapped`) + .settings(presentationCompilerSettings) + .settings(scala3PresentationCompilerBuildInfo) .settings( // Add `-Yno-flexible-types` flag for bootstrap, see comments for `bootstrappedDottyCompilerSettings` Compile / scalacOptions += "-Yno-flexible-types" ) - def scala3PresentationCompiler(implicit mode: Mode): Project = mode match { - case NonBootstrapped => `scala3-presentation-compiler` - case Bootstrapped => `scala3-presentation-compiler-bootstrapped` - } - - def scala3PresentationCompilerBuildInfo(implicit mode: Mode) = + def scala3PresentationCompilerBuildInfo = Seq( ideTestsDependencyClasspath := { - val dottyLib = (dottyLibrary / Compile / classDirectory).value + val dottyLib = (`scala3-library-bootstrapped` / Compile / classDirectory).value val scalaLib = - (dottyLibrary / Compile / dependencyClasspath) + (`scala3-library-bootstrapped` / Compile / dependencyClasspath) .value .map(_.data) .filter(_.getName.matches("scala-library.*\\.jar")) @@ -2284,9 +2280,9 @@ object Build { // FIXME: we do not aggregate `bin` because its tests delete jars, thus breaking other tests def asDottyRoot(implicit mode: Mode): Project = project.withCommonSettings. - aggregate(`scala3-interfaces`, dottyLibrary, dottyCompiler, tastyCore, `scala3-sbt-bridge`, scala3PresentationCompiler). + aggregate(`scala3-interfaces`, dottyLibrary, dottyCompiler, tastyCore, `scala3-sbt-bridge`). 
bootstrappedAggregate(`scala2-library-tasty`, `scala2-library-cc-tasty`, `scala3-language-server`, `scala3-staging`, - `scala3-tasty-inspector`, `scala3-library-bootstrappedJS`, scaladoc). + `scala3-tasty-inspector`, `scala3-library-bootstrappedJS`, scaladoc, `scala3-presentation-compiler`). dependsOn(tastyCore). dependsOn(dottyCompiler). dependsOn(dottyLibrary). @@ -2396,11 +2392,6 @@ object Build { settings(commonBenchmarkSettings). enablePlugins(JmhPlugin) - def asScala3PresentationCompiler(implicit mode: Mode): Project = project.withCommonSettings. - dependsOn(dottyCompiler, dottyLibrary). - settings(presentationCompilerSettings). - settings(scala3PresentationCompilerBuildInfo) - def asDist(implicit mode: Mode): Project = project. enablePlugins(UniversalPlugin, RepublishPlugin). withCommonSettings. From 31b9249edb19ae4d2129632a884d113b26585cd0 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Sat, 5 Oct 2024 15:43:09 +0200 Subject: [PATCH 628/827] Revert #21707 --- .../scala/NamedTuple.scala | 228 ------------------ 1 file changed, 228 deletions(-) delete mode 100644 library/src-non-bootstrapped/scala/NamedTuple.scala diff --git a/library/src-non-bootstrapped/scala/NamedTuple.scala b/library/src-non-bootstrapped/scala/NamedTuple.scala deleted file mode 100644 index f237d1d487fe..000000000000 --- a/library/src-non-bootstrapped/scala/NamedTuple.scala +++ /dev/null @@ -1,228 +0,0 @@ -package scala -import scala.language.experimental.clauseInterleaving -import annotation.experimental -import compiletime.ops.boolean.* - -@experimental -object NamedTuple: - - /** The type to which named tuples get mapped to. For instance, - * (name: String, age: Int) - * gets mapped to - * NamedTuple[("name", "age"), (String, Int)] - */ - opaque type NamedTuple[N <: Tuple, +V <: Tuple] >: V <: AnyNamedTuple = V - - /** A type which is a supertype of all named tuples */ - opaque type AnyNamedTuple = Any - - def apply[N <: Tuple, V <: Tuple](x: V): NamedTuple[N, V] = x - - def unapply[N <: Tuple, V <: Tuple](x: NamedTuple[N, V]): Some[V] = Some(x) - - /** A named tuple expression will desugar to a call to `build`. For instance, - * `(name = "Lyra", age = 23)` will desugar to `build[("name", "age")]()(("Lyra", 23))`. - */ - inline def build[N <: Tuple]()[V <: Tuple](x: V): NamedTuple[N, V] = x - - extension [V <: Tuple](x: V) - inline def withNames[N <: Tuple]: NamedTuple[N, V] = x - - export NamedTupleDecomposition.{ - Names, DropNames, - apply, size, init, head, last, tail, take, drop, splitAt, ++, map, reverse, zip, toList, toArray, toIArray - } - - extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) - - // ALL METHODS DEPENDING ON `toTuple` MUST BE EXPORTED FROM `NamedTupleDecomposition` - /** The underlying tuple without the names */ - inline def toTuple: V = x - - // This intentionally works for empty named tuples as well. I think NonEmptyTuple is a dead end - // and should be reverted, just like NonEmptyList is also appealing at first, but a bad idea - // in the end. - - // inline def :* [L] (x: L): NamedTuple[Append[N, ???], Append[V, L] = ??? - // inline def *: [H] (x: H): NamedTuple[??? *: N], H *: V] = ??? 
- - end extension - - /** The size of a named tuple, represented as a literal constant subtype of Int */ - type Size[X <: AnyNamedTuple] = Tuple.Size[DropNames[X]] - - /** The type of the element value at position N in the named tuple X */ - type Elem[X <: AnyNamedTuple, N <: Int] = Tuple.Elem[DropNames[X], N] - - /** The type of the first element value of a named tuple */ - type Head[X <: AnyNamedTuple] = Elem[X, 0] - - /** The type of the last element value of a named tuple */ - type Last[X <: AnyNamedTuple] = Tuple.Last[DropNames[X]] - - /** The type of a named tuple consisting of all elements of named tuple X except the first one */ - type Tail[X <: AnyNamedTuple] = Drop[X, 1] - - /** The type of the initial part of a named tuple without its last element */ - type Init[X <: AnyNamedTuple] = - NamedTuple[Tuple.Init[Names[X]], Tuple.Init[DropNames[X]]] - - /** The type of the named tuple consisting of the first `N` elements of `X`, - * or all elements if `N` exceeds `Size[X]`. - */ - type Take[X <: AnyNamedTuple, N <: Int] = - NamedTuple[Tuple.Take[Names[X], N], Tuple.Take[DropNames[X], N]] - - /** The type of the named tuple consisting of all elements of `X` except the first `N` ones, - * or no elements if `N` exceeds `Size[X]`. - */ - type Drop[X <: AnyNamedTuple, N <: Int] = - NamedTuple[Tuple.Drop[Names[X], N], Tuple.Drop[DropNames[X], N]] - - /** The pair type `(Take(X, N), Drop[X, N]). */ - type Split[X <: AnyNamedTuple, N <: Int] = (Take[X, N], Drop[X, N]) - - /** Type of the concatenation of two tuples `X` and `Y` */ - type Concat[X <: AnyNamedTuple, Y <: AnyNamedTuple] = - NamedTuple[Tuple.Concat[Names[X], Names[Y]], Tuple.Concat[DropNames[X], DropNames[Y]]] - - /** The type of the named tuple `X` mapped with the type-level function `F`. - * If `X = (n1 : T1, ..., ni : Ti)` then `Map[X, F] = `(n1 : F[T1], ..., ni : F[Ti])`. - */ - type Map[X <: AnyNamedTuple, F[_ <: Tuple.Union[DropNames[X]]]] = - NamedTuple[Names[X], Tuple.Map[DropNames[X], F]] - - /** A named tuple with the elements of tuple `X` in reversed order */ - type Reverse[X <: AnyNamedTuple] = - NamedTuple[Tuple.Reverse[Names[X]], Tuple.Reverse[DropNames[X]]] - - /** The type of the named tuple consisting of all element values of - * named tuple `X` zipped with corresponding element values of - * named tuple `Y`. If the two tuples have different sizes, - * the extra elements of the larger tuple will be disregarded. - * The names of `X` and `Y` at the same index must be the same. - * The result tuple keeps the same names as the operand tuples. - * For example, if - * ``` - * X = (n1 : S1, ..., ni : Si) - * Y = (n1 : T1, ..., nj : Tj) where j >= i - * ``` - * then - * ``` - * Zip[X, Y] = (n1 : (S1, T1), ..., ni: (Si, Ti)) - * ``` - * @syntax markdown - */ - type Zip[X <: AnyNamedTuple, Y <: AnyNamedTuple] = - Names[X] match - case Names[Y] => - NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] - - /** A type specially treated by the compiler to represent all fields of a - * class argument `T` as a named tuple. Or, if `T` is already a named tuple, - * `From[T]` is the same as `T`. - */ - type From[T] <: AnyNamedTuple - - /** The type of the empty named tuple */ - type Empty = NamedTuple[EmptyTuple, EmptyTuple] - - /** The empty named tuple */ - val Empty: Empty = EmptyTuple - -end NamedTuple - -/** Separate from NamedTuple object so that we can match on the opaque type NamedTuple. 
*/ -@experimental -object NamedTupleDecomposition: - import NamedTuple.* - extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) - /** The value (without the name) at index `n` of this tuple */ - inline def apply(n: Int): Tuple.Elem[V, n.type] = - inline x.toTuple match - case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] - case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] - - /** The number of elements in this tuple */ - inline def size: Tuple.Size[V] = x.toTuple.size - - /** The first element value of this tuple */ - inline def head: Tuple.Elem[V, 0] = apply(0) - - /** The last element value of this tuple */ - inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] - - /** The tuple consisting of all elements of this tuple except the last one */ - inline def init: NamedTuple[Tuple.Init[N], Tuple.Init[V]] = - x.toTuple.take(size - 1).asInstanceOf[NamedTuple[Tuple.Init[N], Tuple.Init[V]]] - - /** The tuple consisting of all elements of this tuple except the first one */ - inline def tail: NamedTuple[Tuple.Tail[N], Tuple.Tail[V]] = - x.toTuple.drop(1).asInstanceOf[NamedTuple[Tuple.Tail[N], Tuple.Tail[V]]] - - /** The tuple consisting of the first `n` elements of this tuple, or all - * elements if `n` exceeds `size`. - */ - inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = - x.toTuple.take(n) - - /** The tuple consisting of all elements of this tuple except the first `n` ones, - * or no elements if `n` exceeds `size`. - */ - inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = - x.toTuple.drop(n) - - /** The tuple `(x.take(n), x.drop(n))` */ - inline def splitAt(n: Int): - (NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]], - NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]]) = - // would be nice if this could have type `Split[NamedTuple[N, V]]` instead, but - // we get a type error then. Similar for other methods here. - x.toTuple.splitAt(n) - - /** The tuple consisting of all elements of this tuple followed by all elements - * of tuple `that`. The names of the two tuples must be disjoint. - */ - inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) - : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] - = x.toTuple ++ that.toTuple - - /** The named tuple consisting of all element values of this tuple mapped by - * the polymorphic mapping function `f`. The names of elements are preserved. - * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. - */ - inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = - x.toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] - - /** The named tuple consisting of all elements of this tuple in reverse */ - inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = - x.toTuple.reverse - - /** The named tuple consisting of all elements values of this tuple zipped - * with corresponding element values in named tuple `that`. - * If the two tuples have different sizes, - * the extra elements of the larger tuple will be disregarded. - * The names of `x` and `that` at the same index must be the same. - * The result tuple keeps the same names as the operand tuples. 
- */ - inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = - x.toTuple.zip(that.toTuple) - - /** A list consisting of all element values */ - inline def toList: List[Tuple.Union[V]] = x.toTuple.toList.asInstanceOf[List[Tuple.Union[V]]] - - /** An array consisting of all element values */ - inline def toArray: Array[Object] = x.toTuple.toArray - - /** An immutable array consisting of all element values */ - inline def toIArray: IArray[Object] = x.toTuple.toIArray - - end extension - - /** The names of a named tuple, represented as a tuple of literal string values. */ - type Names[X <: AnyNamedTuple] <: Tuple = X match - case NamedTuple[n, _] => n - - /** The value types of a named tuple represented as a regular tuple. */ - type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match - case NamedTuple[_, x] => x From c3f13074d8579acd9f06cb271719ec74d8b5ce62 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Thu, 3 Oct 2024 13:58:34 +0200 Subject: [PATCH 629/827] Add Staging Issue messages and QuotedTypeMissing message --- .../tools/dotc/reporting/ErrorMessageID.scala | 1 + .../tools/dotc/reporting/MessageKind.scala | 2 ++ .../dotty/tools/dotc/reporting/messages.scala | 20 ++++++++++++++ .../dotty/tools/dotc/staging/HealType.scala | 26 +++++++++---------- tests/neg/i21696.check | 13 ++++++++++ tests/neg/i21696.scala | 7 +++++ 6 files changed, 56 insertions(+), 13 deletions(-) create mode 100644 tests/neg/i21696.check create mode 100644 tests/neg/i21696.scala diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index cb5e8a7b314c..db523c879ea2 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -215,6 +215,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case TailrecNestedCallID //errorNumber: 199 case FinalLocalDefID // errorNumber: 200 case NonNamedArgumentInJavaAnnotationID // errorNumber: 201 + case QuotedTypeMissingID // errorNumber: 202 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala b/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala index 10ad4f83d93d..bb02a08d2e46 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala @@ -22,6 +22,7 @@ enum MessageKind: case Compatibility case PotentialIssue case UnusedSymbol + case Staging /** Human readable message that will end up being shown to the user. 
* NOTE: This is only used in the situation where you have multiple words @@ -39,5 +40,6 @@ enum MessageKind: case MatchCaseUnreachable => "Match case Unreachable" case PotentialIssue => "Potential Issue" case UnusedSymbol => "Unused Symbol" + case Staging => "Staging Issue" case kind => kind.toString end MessageKind diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index d65f9a9857e2..97cd70113c2e 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -108,6 +108,9 @@ end CyclicMsg abstract class ReferenceMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): def kind = MessageKind.Reference +abstract class StagingMessage(errorId: ErrorMessageID)(using Context) extends Message(errorId): + override final def kind = MessageKind.Staging + abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(using Context) extends SyntaxMsg(errNo) { def explain(using Context) = { @@ -3323,3 +3326,20 @@ class NonNamedArgumentInJavaAnnotation(using Context) extends SyntaxMsg(NonNamed """ end NonNamedArgumentInJavaAnnotation + +final class QuotedTypeMissing(tpe: Type)(using Context) extends StagingMessage(QuotedTypeMissingID): + + private def witness = defn.QuotedTypeClass.typeRef.appliedTo(tpe) + + override protected def msg(using Context): String = + i"Reference to $tpe within quotes requires a given ${witness} in scope" + + override protected def explain(using Context): String = + i"""Referencing `$tpe` inside a quoted expression requires a `${witness}` to be in scope. + |Since Scala is subject to erasure at runtime, the type information will be missing during the execution of the code. + |`${witness}` is therefore needed to carry `$tpe`'s type information into the quoted code. + |Without an implicit `${witness}`, the type `$tpe` cannot be properly referenced within the expression. + |To resolve this, ensure that a `${witness}` is available, either through a context-bound or explicitly. + |""" + +end QuotedTypeMissing diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala index 5a26803c8137..a73f884fbac9 100644 --- a/compiler/src/dotty/tools/dotc/staging/HealType.scala +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -1,17 +1,19 @@ package dotty.tools.dotc package staging -import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.core.Decorators.* -import dotty.tools.dotc.core.Flags.* -import dotty.tools.dotc.core.StdNames.* -import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.core.Types.* -import dotty.tools.dotc.staging.StagingLevel.* -import dotty.tools.dotc.staging.QuoteTypeTags.* +import reporting.* -import dotty.tools.dotc.typer.Implicits.SearchFailureType -import dotty.tools.dotc.util.SrcPos +import core.Contexts.* +import core.Decorators.* +import core.Flags.* +import core.StdNames.* +import core.Symbols.* +import core.Types.* +import StagingLevel.* +import QuoteTypeTags.* + +import typer.Implicits.SearchFailureType +import util.SrcPos class HealType(pos: SrcPos)(using Context) extends TypeMap { @@ -98,9 +100,7 @@ class HealType(pos: SrcPos)(using Context) extends TypeMap { pos) tp case _ => - report.error(em"""Reference to $tp within quotes requires a given $reqType in scope. 
- | - |""", pos) + report.error(QuotedTypeMissing(tp), pos) tp } diff --git a/tests/neg/i21696.check b/tests/neg/i21696.check new file mode 100644 index 000000000000..9195263040b3 --- /dev/null +++ b/tests/neg/i21696.check @@ -0,0 +1,13 @@ +-- [E202] Staging Issue Error: tests/neg/i21696.scala:7:52 ------------------------------------------------------------- +7 |def foo[T](using Quotes): Expr[Thing[T]] = '{ Thing[T]() } // error + | ^ + | Reference to T within quotes requires a given scala.quoted.Type[T] in scope + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Referencing `T` inside a quoted expression requires a `scala.quoted.Type[T]` to be in scope. + | Since Scala is subject to erasure at runtime, the type information will be missing during the execution of the code. + | `scala.quoted.Type[T]` is therefore needed to carry `T`'s type information into the quoted code. + | Without an implicit `scala.quoted.Type[T]`, the type `T` cannot be properly referenced within the expression. + | To resolve this, ensure that a `scala.quoted.Type[T]` is available, either through a context-bound or explicitly. + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i21696.scala b/tests/neg/i21696.scala new file mode 100644 index 000000000000..7ec30a8a2e41 --- /dev/null +++ b/tests/neg/i21696.scala @@ -0,0 +1,7 @@ +//> using options -explain + +import scala.quoted.{Expr, Quotes} + +case class Thing[T]() + +def foo[T](using Quotes): Expr[Thing[T]] = '{ Thing[T]() } // error From e42d883c99c8f6747d78650e3683a228e9962342 Mon Sep 17 00:00:00 2001 From: Oliver Bracevac Date: Sun, 6 Oct 2024 23:53:06 +0200 Subject: [PATCH 630/827] Fix rewrite logic for old ` _` syntax A rewrite would previously produce uncompilable code if the access path to the eta-expanded function goes through at least one `def`. 
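For instance, a reduced sketch of the added test case (`tests/rewrites/i21394.scala`) shows the shape that was affected, with the receiver of the eta-expanded method reached through a `def`:

```scala
trait Container:
  def loopDef: Container
  def fun(x: Int)(y: Int): Int

def use(f: Int => Int => Int): Int = ???

def test(c: Container): Int =
  // Under `-rewrite -source future-migration`, the deprecated trailing `_`
  // is now dropped, producing `use(c.loopDef.fun)`, which still compiles.
  use(c.loopDef.fun _)
```
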
Fixes #21394 --- .../src/dotty/tools/dotc/typer/Migrations.scala | 9 ++++++--- .../dotty/tools/dotc/CompilationTests.scala | 1 + tests/rewrites/i21394.check | 17 +++++++++++++++++ tests/rewrites/i21394.scala | 17 +++++++++++++++++ 4 files changed, 41 insertions(+), 3 deletions(-) create mode 100644 tests/rewrites/i21394.check create mode 100644 tests/rewrites/i21394.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Migrations.scala b/compiler/src/dotty/tools/dotc/typer/Migrations.scala index 7f27f27112a0..f0d1d235a19c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Migrations.scala +++ b/compiler/src/dotty/tools/dotc/typer/Migrations.scala @@ -71,10 +71,13 @@ trait Migrations: } nestedCtx.typerState.commit() + def functionPrefixSuffix(arity: Int) = if (arity > 0) ("", "") else ("(() => ", "())") + lazy val (prefix, suffix) = res match { - case Block(mdef @ DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure) => - val arity = vparams.length - if (arity > 0) ("", "") else ("(() => ", "())") + case Block(DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure) => + functionPrefixSuffix(vparams.length) + case Block(ValDef(_, _, _) :: Nil, Block(DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure)) => + functionPrefixSuffix(vparams.length) case _ => ("(() => ", ")") } diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 0c5d5764949a..d7ef7f6f6085 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -63,6 +63,7 @@ class CompilationTests { compileFile("tests/rewrites/rewrites.scala", defaultOptions.and("-source", "3.0-migration").and("-rewrite", "-indent")), compileFile("tests/rewrites/rewrites3x.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/rewrites3x-fatal-warnings.scala", defaultOptions.and("-rewrite", "-source", "future-migration", "-Xfatal-warnings")), + compileFile("tests/rewrites/i21394.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/uninitialized-var.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/with-type-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/private-this.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), diff --git a/tests/rewrites/i21394.check b/tests/rewrites/i21394.check new file mode 100644 index 000000000000..50e6ab0901b6 --- /dev/null +++ b/tests/rewrites/i21394.check @@ -0,0 +1,17 @@ +trait Container: + def loopDef: Container + val loopVal: Container + def fun(x: Int)(y: Int): Int + +def test(c: Container): Int = + use(c.fun) + + use(c.loopDef.fun) + + use(c.loopVal.fun) + + use(c.loopDef.loopDef.fun) + + use(c.loopVal.loopVal.fun) + + use(c.loopVal.loopDef.fun) + + use(c.loopDef.loopVal.fun) + + use(c.loopVal.loopDef.loopVal.fun) + + use(c.loopVal.loopDef.loopVal.loopDef.fun) + +def use(f: Int => Int => Int): Int = ??? 
\ No newline at end of file diff --git a/tests/rewrites/i21394.scala b/tests/rewrites/i21394.scala new file mode 100644 index 000000000000..33f183fbed48 --- /dev/null +++ b/tests/rewrites/i21394.scala @@ -0,0 +1,17 @@ +trait Container: + def loopDef: Container + val loopVal: Container + def fun(x: Int)(y: Int): Int + +def test(c: Container): Int = + use(c.fun _) + + use(c.loopDef.fun _) + + use(c.loopVal.fun _) + + use(c.loopDef.loopDef.fun _) + + use(c.loopVal.loopVal.fun _) + + use(c.loopVal.loopDef.fun _) + + use(c.loopDef.loopVal.fun _) + + use(c.loopVal.loopDef.loopVal.fun _) + + use(c.loopVal.loopDef.loopVal.loopDef.fun _) + +def use(f: Int => Int => Int): Int = ??? \ No newline at end of file From fbe0e152ca3dc7149b1ca20e9e942985f79fab78 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Sat, 5 Oct 2024 13:55:59 +0200 Subject: [PATCH 631/827] Add support for clauseInterleaving in JVM generic signatures --- .../dotc/transform/GenericSignatures.scala | 51 +++++++++---------- tests/run/i21346.check | 10 ++++ tests/run/i21346.scala | 14 +++++ 3 files changed, 48 insertions(+), 27 deletions(-) create mode 100644 tests/run/i21346.check create mode 100644 tests/run/i21346.scala diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index b5b75450272c..1798d938272c 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -19,6 +19,7 @@ import config.Printers.transforms import reporting.trace import java.lang.StringBuilder +import scala.annotation.tailrec import scala.collection.mutable.ListBuffer /** Helper object to generate generic java signatures, as defined in @@ -294,36 +295,13 @@ object GenericSignatures { case ExprType(restpe) => jsig(defn.FunctionType(0).appliedTo(restpe)) - case PolyType(tparams, mtpe: MethodType) => - assert(tparams.nonEmpty) + case mtd: MethodOrPoly => + val (tparams, vparams, rte) = collectMethodParams(mtd) if (toplevel && !sym0.isConstructor) polyParamSig(tparams) - jsig(mtpe) - - // Nullary polymorphic method - case PolyType(tparams, restpe) => - assert(tparams.nonEmpty) - if (toplevel) polyParamSig(tparams) - builder.append("()") - methodResultSig(restpe) - - case mtpe: MethodType => - // erased method parameters do not make it to the bytecode. 
- def effectiveParamInfoss(t: Type)(using Context): List[List[Type]] = t match { - case t: MethodType if t.hasErasedParams => - t.paramInfos.zip(t.erasedParams).collect{ case (i, false) => i } - :: effectiveParamInfoss(t.resType) - case t: MethodType => t.paramInfos :: effectiveParamInfoss(t.resType) - case _ => Nil - } - val params = effectiveParamInfoss(mtpe).flatten - val restpe = mtpe.finalResultType builder.append('(') - // TODO: Update once we support varargs - params.foreach { tp => - jsig(tp) - } + for vparam <- vparams do jsig(vparam) builder.append(')') - methodResultSig(restpe) + methodResultSig(rte) case tp: AndType => // Only intersections appearing as the upper-bound of a type parameter @@ -475,4 +453,23 @@ object GenericSignatures { } else x } + + private def collectMethodParams(mtd: MethodOrPoly)(using Context): (List[TypeParamInfo], List[Type], Type) = + val tparams = ListBuffer.empty[TypeParamInfo] + val vparams = ListBuffer.empty[Type] + + @tailrec def recur(tpe: Type): Type = tpe match + case mtd: MethodType => + vparams ++= mtd.paramInfos.filterNot(_.hasAnnotation(defn.ErasedParamAnnot)) + recur(mtd.resType) + case PolyType(tps, tpe) => + tparams ++= tps + recur(tpe) + case _ => + tpe + end recur + + val rte = recur(mtd) + (tparams.toList, vparams.toList, rte) + end collectMethodParams } diff --git a/tests/run/i21346.check b/tests/run/i21346.check new file mode 100644 index 000000000000..abef81307955 --- /dev/null +++ b/tests/run/i21346.check @@ -0,0 +1,10 @@ +======'bar'====== + +(X,Y) +scala.Tuple2 +============ +======'foo'====== + +(X,Y,Z,A) +scala.Tuple4 +============ diff --git a/tests/run/i21346.scala b/tests/run/i21346.scala new file mode 100644 index 000000000000..40999c3e27dc --- /dev/null +++ b/tests/run/i21346.scala @@ -0,0 +1,14 @@ +// scalajs: --skip + +object Foo: + def foo[X, Y, Z](x: X, y: Y)[A](z: Z, a: A): (X, Y, Z, A) = (x, y, z, a) + def bar[X](x: X)[Y <: x.type](y: Y): (X, Y) = (x, y) + +@main def Test = + val mtds = Foo.getClass().getDeclaredMethods().filterNot(_.getName() == "writeReplace").sortBy(_.getName()) + for mtd <- mtds do + println(s"======'${mtd.getName()}'======") + println(mtd.getTypeParameters().mkString("<", ";", ">")) + println(mtd.getGenericParameterTypes().mkString("(", ",", ")")) + println(mtd.getGenericReturnType()) + println("============") \ No newline at end of file From 8c900f8dad2fd038b24a172afd73f6e0cbfdc05e Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Mon, 7 Oct 2024 08:19:40 +0200 Subject: [PATCH 632/827] Add Color.scala to black list --- .../test/dotc/neg-init-global-scala2-library-tasty.blacklist | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist index 48fe29ebc6bc..03b020db64d9 100644 --- a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist +++ b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist @@ -18,3 +18,4 @@ global-list.scala t5366.scala mutable-read7.scala t9115.scala +Color.scala \ No newline at end of file From 374cd4f7b5fd45c6d1ae96e7cbff7ca7f71010cf Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 7 Oct 2024 19:36:37 +0200 Subject: [PATCH 633/827] Always treat underscores as type bounds inside patterns Always treat underscores as type bounds inside patterns, even when `ctx.settings.XkindProjector.value == "underscores"`. Fixes #14952 and #21400. 
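As a concrete illustration, the gist of the added `tests/pos/14952.scala` is sketched below: with `-Xkind-projector:underscores` enabled, each underscore inside the match-type patterns must still be read as a pattern wildcard (a type bound), not as a kind-projector placeholder.

```scala
//> using options -Xkind-projector:underscores

import Tuple.*

// Every `_` in the patterns below is a wildcard, not a type-lambda hole.
type LiftP[F[_], T] <: Tuple =
  T match {
    case _ *: _ => F[Head[T]] *: LiftP[F, Tail[T]]
    case _ => EmptyTuple
  }
```
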
--- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 2 +- tests/pos/14952.scala | 9 +++++++++ tests/pos/21400.scala | 7 +++++++ tests/pos/21400b.scala | 10 ++++++++++ 4 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 tests/pos/14952.scala create mode 100644 tests/pos/21400.scala create mode 100644 tests/pos/21400b.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index d933e55a9823..5a3be6505715 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1988,7 +1988,7 @@ object Parsers { if isSimpleLiteral then SingletonTypeTree(simpleLiteral()) else if in.token == USCORE then - if ctx.settings.XkindProjector.value == "underscores" then + if ctx.settings.XkindProjector.value == "underscores" && !inMatchPattern then val start = in.skipToken() Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start)) else diff --git a/tests/pos/14952.scala b/tests/pos/14952.scala new file mode 100644 index 000000000000..350607b7ef77 --- /dev/null +++ b/tests/pos/14952.scala @@ -0,0 +1,9 @@ +//> using options -Xkind-projector:underscores + +import Tuple.* + +type LiftP[F[_], T] <: Tuple = + T match { + case _ *: _ => F[Head[T]] *: LiftP[F, Tail[T]] + case _ => EmptyTuple + } diff --git a/tests/pos/21400.scala b/tests/pos/21400.scala new file mode 100644 index 000000000000..6d1534703a00 --- /dev/null +++ b/tests/pos/21400.scala @@ -0,0 +1,7 @@ +//> using options -Xkind-projector:underscores + +import scala.compiletime.ops.int.S + +type IndexOf[T <: Tuple, E] <: Int = T match + case E *: _ => 0 + case _ *: es => 1 // S[IndexOf[es, E]] diff --git a/tests/pos/21400b.scala b/tests/pos/21400b.scala new file mode 100644 index 000000000000..9e8768d48261 --- /dev/null +++ b/tests/pos/21400b.scala @@ -0,0 +1,10 @@ +//> using options -Xkind-projector:underscores + +import scala.quoted.Type +import scala.quoted.Quotes + +def x[A](t: Type[A])(using Quotes): Boolean = t match + case '[_ *: _] => + true + case _ => + false From 936c009b8a33d4d673a71f4a69c148155ddf0886 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 2 Oct 2024 10:21:57 +0200 Subject: [PATCH 634/827] Quotes type printing: take `infix` type modifier into account This is similar to how the regular compiler `.show` handles `infix` but using explicit parens everywhere to not have to reimplement the precedence logic (maybe quote type printing should just use `.show` eventually). 
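A reduced sketch of the added `tests/run-macros/type-print` test illustrates the effect; the expected outputs in the comments are taken from the new check file.

```scala
import scala.quoted.*

inline def printTypeShort[T]: String = ${ printTypeShortImpl[T] }

def printTypeShortImpl[T: Type](using Quotes): Expr[String] =
  import quotes.reflect.*
  // The short-code printer now renders applications of `infix` type
  // constructors in infix form, with explicit parentheses.
  Expr(Printer.TypeReprShortCode.show(TypeRepr.of[T]))

// Given `import scala.compiletime.ops.int.*` and `val a = 1; val b = 2`
// in the calling file:
//   printTypeShort[3 + a.type * b.type]   // "(3 + (a * b))"
//   printTypeShort[(3 + a.type) * b.type] // "((3 + a) * b)"
```
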
--- .../runtime/impl/printers/SourceCode.scala | 15 ++++++++-- tests/run-macros/type-print.check | 12 ++++++++ tests/run-macros/type-print/Macro_1.scala | 29 +++++++++++++++++++ tests/run-macros/type-print/Test_2.scala | 15 ++++++++++ 4 files changed, 69 insertions(+), 2 deletions(-) create mode 100644 tests/run-macros/type-print.check create mode 100644 tests/run-macros/type-print/Macro_1.scala create mode 100644 tests/run-macros/type-print/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index a1f54c5a2069..64a0ff9db9ec 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -1150,8 +1150,19 @@ object SourceCode { case tp: TypeRef if tp.typeSymbol == Symbol.requiredClass("scala.") => this += "_*" case _ => - printType(tp) - inSquare(printTypesOrBounds(args, ", ")) + if !fullNames && args.lengthCompare(2) == 0 && tp.typeSymbol.flags.is(Flags.Infix) then + val lhs = args(0) + val rhs = args(1) + this += "(" + printType(lhs) + this += " " + printType(tp) + this += " " + printType(rhs) + this += ")" + else + printType(tp) + inSquare(printTypesOrBounds(args, ", ")) } case AnnotatedType(tp, annot) => diff --git a/tests/run-macros/type-print.check b/tests/run-macros/type-print.check new file mode 100644 index 000000000000..5eae94d4a1bf --- /dev/null +++ b/tests/run-macros/type-print.check @@ -0,0 +1,12 @@ +List[Int] +scala.collection.immutable.List[scala.Int] +scala.collection.immutable.List[scala.Int] +AppliedType(TypeRef(ThisType(TypeRef(NoPrefix(), "immutable")), "List"), List(TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "Int"))) +(3 + (a * b)) +scala.compiletime.ops.int.+[3, scala.compiletime.ops.int.*[a, b]] +scala.compiletime.ops.int.+[3, scala.compiletime.ops.int.*[a, b]] +AppliedType(TypeRef(TermRef(TermRef(TermRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "compiletime"), "ops"), "int"), "+"), List(ConstantType(IntConstant(3)), AppliedType(TypeRef(TermRef(TermRef(TermRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "compiletime"), "ops"), "int"), "*"), List(TermRef(NoPrefix(), "a"), TermRef(NoPrefix(), "b"))))) +((3 + a) * b) +scala.compiletime.ops.int.*[scala.compiletime.ops.int.+[3, a], b] +scala.compiletime.ops.int.*[scala.compiletime.ops.int.+[3, a], b] +AppliedType(TypeRef(TermRef(TermRef(TermRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "compiletime"), "ops"), "int"), "*"), List(AppliedType(TypeRef(TermRef(TermRef(TermRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "compiletime"), "ops"), "int"), "+"), List(ConstantType(IntConstant(3)), TermRef(NoPrefix(), "a"))), TermRef(NoPrefix(), "b"))) diff --git a/tests/run-macros/type-print/Macro_1.scala b/tests/run-macros/type-print/Macro_1.scala new file mode 100644 index 000000000000..c0dd57e33018 --- /dev/null +++ b/tests/run-macros/type-print/Macro_1.scala @@ -0,0 +1,29 @@ +import scala.quoted.* + +inline def printTypeShort[T]: String = + ${ printTypeShortImpl[T] } + +inline def printType[T]: String = + ${ printTypeImpl[T] } + +inline def printTypeAnsi[T]: String = + ${ printTypeAnsiImpl[T] } + +inline def printTypeStructure[T]: String = + ${ printTypeStructureImpl[T] } + +def printTypeShortImpl[T: Type](using Quotes): Expr[String] = + import quotes.reflect.* + Expr(Printer.TypeReprShortCode.show(TypeRepr.of[T])) + +def printTypeImpl[T: Type](using Quotes): Expr[String] = + import 
quotes.reflect.* + Expr(Printer.TypeReprCode.show(TypeRepr.of[T])) + +def printTypeAnsiImpl[T: Type](using Quotes): Expr[String] = + import quotes.reflect.* + Expr(Printer.TypeReprAnsiCode.show(TypeRepr.of[T])) + +def printTypeStructureImpl[T: Type](using Quotes): Expr[String] = + import quotes.reflect.* + Expr(Printer.TypeReprStructure.show(TypeRepr.of[T])) diff --git a/tests/run-macros/type-print/Test_2.scala b/tests/run-macros/type-print/Test_2.scala new file mode 100644 index 000000000000..f2ea6c3ba8b1 --- /dev/null +++ b/tests/run-macros/type-print/Test_2.scala @@ -0,0 +1,15 @@ +import scala.compiletime.ops.int.* + +inline def printAll[T]: Unit = + println(printTypeShort[T]) + println(printType[T]) + println(printTypeAnsi[T]) + println(printTypeStructure[T]) + +@main +def Test: Unit = + printAll[List[Int]] + val a = 1 + val b = 2 + printAll[3 + a.type * b.type] + printAll[(3 + a.type) * b.type] From c529ac28bd03c9deacb1daff0279a90e3dc95b2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 4 Oct 2024 10:00:40 +0200 Subject: [PATCH 635/827] Fix #21402: Always allow type member extraction for stable scrutinees in match types. Previously, through the various code paths, we basically allowed type member extraction for stable scrutinees if the type member was an alias or a class member. In the alias case, we took the alias, whereas in the class case, we recreated a selection on the stable scrutinee. We did not allow that on abstract type members. We now uniformly do it for all kinds of type members. If the scrutinee is a (non-skolem) stable type, we do not even look at the info of the type member. We directly create a selection to it, which corresponds to what we did before for class members. We only try to dealias type members if the scrutinee type is not a stable type. --- .../dotty/tools/dotc/core/TypeComparer.scala | 40 +++++++++++++----- tests/pos/i21402.scala | 41 +++++++++++++++++++ .../match-type-extract-path-dependent.scala | 27 ++++++++++++ 3 files changed, 97 insertions(+), 11 deletions(-) create mode 100644 tests/pos/i21402.scala create mode 100644 tests/pos/match-type-extract-path-dependent.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 8fc6307c426c..145a038dd856 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3681,19 +3681,37 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { stableScrut.member(typeMemberName) match case denot: SingleDenotation if denot.exists => - val info = denot.info match - case alias: AliasingBounds => alias.alias // Extract the alias - case ClassInfo(prefix, cls, _, _, _) => prefix.select(cls) // Re-select the class from the prefix - case info => info // Notably, RealTypeBounds, which will eventually give a MatchResult.NoInstances - val info1 = stableScrut match + val info = stableScrut match case skolem: SkolemType => - dropSkolem(info, skolem).orElse: - info match - case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances - case _ => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances - case _ => info - rec(capture, info1, variance = 0, scrutIsWidenedAbstract) + /* If it is a skolem type, we cannot have class selections nor + * abstract type selections. If it is an alias, we try to remove + * any reference to the skolem from the right-hand-side. 
If that + * succeeds, we take the result, otherwise we fail as not-specific. + */ + + def adaptToTriggerNotSpecific(info: Type): Type = info match + case info: TypeBounds => info + case _ => RealTypeBounds(info, info) + + denot.info match + case denotInfo: AliasingBounds => + val alias = denotInfo.alias + dropSkolem(alias, skolem).orElse(adaptToTriggerNotSpecific(alias)) + case ClassInfo(prefix, cls, _, _, _) => + // for clean error messages + adaptToTriggerNotSpecific(prefix.select(cls)) + case denotInfo => + adaptToTriggerNotSpecific(denotInfo) + + case _ => + // The scrutinee type is truly stable. We select the type member directly on it. + stableScrut.select(typeMemberName) + end info + + rec(capture, info, variance = 0, scrutIsWidenedAbstract) + case _ => + // The type member was not found; no match false end rec diff --git a/tests/pos/i21402.scala b/tests/pos/i21402.scala new file mode 100644 index 000000000000..4ddf201ef8b4 --- /dev/null +++ b/tests/pos/i21402.scala @@ -0,0 +1,41 @@ +abstract class AbstractServiceKey: + type Protocol + +abstract class ServiceKey[T] extends AbstractServiceKey: + type Protocol = T + +type Aux[P] = AbstractServiceKey { type Protocol = P } +type Service[K <: Aux[?]] = K match + case Aux[t] => ActorRef[t] +type Subscriber[K <: Aux[?]] = K match + case Aux[t] => ActorRef[ReceptionistMessages.Listing[t]] + +trait ActorRef[-T] + +object ReceptionistMessages: + final case class Listing[T](key: ServiceKey[T]) + +class TypedMultiMap[T <: AnyRef, K[_ <: T]]: + def get(key: T): Set[K[key.type]] = ??? + transparent inline def getInlined(key: T): Set[K[key.type]] = ??? + inline def inserted(key: T, value: K[key.type]): TypedMultiMap[T, K] = ??? + +object LocalReceptionist { + final case class State( + services: TypedMultiMap[AbstractServiceKey, Service], + subscriptions: TypedMultiMap[AbstractServiceKey, Subscriber] + ): + def testInsert(key: AbstractServiceKey)(serviceInstance: ActorRef[key.Protocol]): State = { + val fails = services.inserted(key, serviceInstance) // error + ??? + } + + def testGet[T](key: AbstractServiceKey): Unit = { + val newState: State = ??? + val fails: Set[ActorRef[key.Protocol]] = newState.services.get(key) // error + val works: Set[ActorRef[key.Protocol]] = newState.services.getInlined(key) // workaround + + val fails2: Set[ActorRef[ReceptionistMessages.Listing[key.Protocol]]] = newState.subscriptions.get(key) // error + val works2: Set[ActorRef[ReceptionistMessages.Listing[key.Protocol]]] = newState.subscriptions.getInlined(key) // workaround + } +} diff --git a/tests/pos/match-type-extract-path-dependent.scala b/tests/pos/match-type-extract-path-dependent.scala new file mode 100644 index 000000000000..68c902ef0b8c --- /dev/null +++ b/tests/pos/match-type-extract-path-dependent.scala @@ -0,0 +1,27 @@ +// Test that match types can extract path-dependent abstract types out of singleton types + +trait Base: + type Value + + def getValue(): Value + def setValue(v: Value): Unit +end Base + +object Extractor: + type Helper[X] = Base { type Value = X } + + type Extract[B <: Base] = B match + case Helper[x] => x +end Extractor + +object Test: + import Extractor.Extract + + /* As is, this is a bit silly, since we could use `b.Value` instead. However, + * in larger examples with more indirections, it is not always possible to + * directly use the path-dependent version. See i21402 for a real-world use + * case. 
+ */ + def foo(b: Base): Extract[b.type] = b.getValue() + def bar(b: Base, v: Extract[b.type]): Unit = b.setValue(v) +end Test From 5d06f9676517f18d7827316e58fc4b78dcab538c Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Tue, 8 Oct 2024 01:29:29 +0200 Subject: [PATCH 636/827] Address some review comments --- .../quoted/runtime/impl/QuotesImpl.scala | 4 +- .../quote-sym-newtype/Macro_1.scala | 47 +++++++++++++++++++ .../neg-macros/quote-sym-newtype/Test_2.scala | 6 +++ .../quote-sym-newboundedtype/Macro_1.scala | 39 ++++++++++++--- .../quote-sym-newtype-in-trait/Macro_1.scala | 25 ++++++++-- 5 files changed, 109 insertions(+), 12 deletions(-) create mode 100644 tests/neg-macros/quote-sym-newtype/Macro_1.scala create mode 100644 tests/neg-macros/quote-sym-newtype/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 901e0038efd5..e8524a193e5a 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2653,11 +2653,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def newTypeAlias(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol = checkValidFlags(flags.toTypeFlags, Flags.validTypeAliasFlags) assert(!tpe.isInstanceOf[Types.TypeBounds], "Passed `tpe` into newTypeAlias should not represent TypeBounds") - dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags | dotc.core.Flags.Deferred, dotc.core.Types.TypeAlias(tpe), privateWithin) + dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags, dotc.core.Types.TypeAlias(tpe), privateWithin) def newBoundedType(owner: Symbol, name: String, flags: Flags, tpe: TypeBounds, privateWithin: Symbol): Symbol = checkValidFlags(flags.toTypeFlags, Flags.validBoundedTypeFlags) - dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags, tpe, privateWithin) + dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags | dotc.core.Flags.Deferred, tpe, privateWithin) def noSymbol: Symbol = dotc.core.Symbols.NoSymbol diff --git a/tests/neg-macros/quote-sym-newtype/Macro_1.scala b/tests/neg-macros/quote-sym-newtype/Macro_1.scala new file mode 100644 index 000000000000..953be0d5497b --- /dev/null +++ b/tests/neg-macros/quote-sym-newtype/Macro_1.scala @@ -0,0 +1,47 @@ +//> using options -experimental -Yno-experimental +import scala.quoted.* + +inline def testConflictingBounds = ${ testConflictingBoundsImpl } +inline def testConflictingBoundsWithTypeLambda = ${ testConflictingBoundsWithTypeLambdaImpl } + +transparent inline def transparentTestConflictingBounds = ${ testConflictingBoundsImpl } +transparent inline def transparentTestConflictingBoundsWithTypeLambda = ${ testConflictingBoundsWithTypeLambdaImpl } + + +def testConflictingBoundsImpl(using Quotes): Expr[Object] = { + import quotes.reflect.* + + def makeType(owner: Symbol): Symbol = + // type Foo >: Int <: String + Symbol.newBoundedType( + owner, + "Foo", + Flags.EmptyFlags, + TypeBounds(TypeRepr.of[Int], TypeRepr.of[String]), + Symbol.noSymbol + ) + makeClass(makeType) +} + +def testConflictingBoundsWithTypeLambdaImpl(using Quotes): Expr[Object] = { + import quotes.reflect.* + def makeType(owner: Symbol): Symbol = + // type Foo >: [X] =>> Int <: Any + Symbol.newBoundedType( + owner, + "Foo", + Flags.EmptyFlags, + TypeBounds(TypeLambda.apply(List("X"), _ => List(TypeBounds.empty), _ => TypeRepr.of[Int]), TypeRepr.of[Any]), + Symbol.noSymbol + ) + makeClass(makeType) +} + 
+def makeClass(using quotes: Quotes)(typeCons: quotes.reflect.Symbol => quotes.reflect.Symbol) = { + import quotes.reflect.* + val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => List(typeCons(sym)), None) + val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List(TypeDef(clsSymbol.typeMember("Foo")))) + + Block(List(classDef), Apply(Select(New(TypeIdent(clsSymbol)), clsSymbol.primaryConstructor), List.empty)).asExprOf[Object] +} + diff --git a/tests/neg-macros/quote-sym-newtype/Test_2.scala b/tests/neg-macros/quote-sym-newtype/Test_2.scala new file mode 100644 index 000000000000..60fef3cb7322 --- /dev/null +++ b/tests/neg-macros/quote-sym-newtype/Test_2.scala @@ -0,0 +1,6 @@ +//> using options -experimental -Yno-experimental +def test = + transparentTestConflictingBounds // error + transparentTestConflictingBoundsWithTypeLambda // error + // testConflictingBounds // should throw an error here also, to be implemented before stabilisation + // testConflictingBoundsWithTypeLambda // should throw an error here also, to be implemented before stabilisation diff --git a/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala b/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala index b38a4304b9d2..97b7d7566e9a 100644 --- a/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala +++ b/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala @@ -8,15 +8,42 @@ transparent inline def transparentTestMacro = ${ testImpl } def testImpl(using Quotes): Expr[Object] = { import quotes.reflect.* - def makeType(owner: Symbol): Symbol = - Symbol.newBoundedType(owner, "mytype", Flags.EmptyFlags, TypeBounds.lower(TypeRepr.of[String]), Symbol.noSymbol) + def makeBasicType(owner: Symbol): Symbol = + Symbol.newBoundedType(owner, "tpe", Flags.EmptyFlags, TypeBounds.lower(TypeRepr.of[String]), Symbol.noSymbol) - val typeDef = TypeDef(makeType(Symbol.spliceOwner)) + def makeTypesForClass(owner: Symbol): List[Symbol] = + val typeLambda = TypeLambda.apply(List("X"), _ => List(TypeBounds.empty), _ => TypeRepr.of[Int]) + List( + makeBasicType(owner), + // type Bla >: Nothing <: [X] =>> Int + Symbol.newBoundedType( + owner, + "tpe1", + Flags.EmptyFlags, + TypeBounds.upper(typeLambda), + Symbol.noSymbol + ), + // type Bar >: [X] =>> Int <: [X] =>> Int + Symbol.newBoundedType( + owner, + "tpe2", + Flags.EmptyFlags, + TypeBounds(typeLambda, typeLambda), + Symbol.noSymbol + ) + ) + + val typeDef = TypeDef(makeBasicType(Symbol.spliceOwner)) // Expr printer does not work here, see comment: // https://github.com/scala/scala3/pull/20347#issuecomment-2096824617 - assert(typeDef.toString == "TypeDef(mytype,TypeTree[TypeBounds(TypeRef(TermRef(ThisType(TypeRef(NoPrefix,module class java)),object lang),String),TypeRef(ThisType(TypeRef(NoPrefix,module class scala)),class Any))])") + println(typeDef.toString) + assert(typeDef.toString == "TypeDef(tpe,TypeTree[TypeBounds(TypeRef(TermRef(ThisType(TypeRef(NoPrefix,module class java)),object lang),String),TypeRef(ThisType(TypeRef(NoPrefix,module class scala)),class Any))])") - val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => List(makeType(sym)), None) - val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List(TypeDef(clsSymbol.typeMember("mytype")))) + val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => makeTypesForClass(sym), None) + val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List( + 
TypeDef(clsSymbol.typeMember("tpe")), + TypeDef(clsSymbol.typeMember("tpe1")), + TypeDef(clsSymbol.typeMember("tpe2")), + )) Block(List(classDef), Apply(Select(New(TypeIdent(clsSymbol)), clsSymbol.primaryConstructor), List.empty)).asExprOf[Object] } diff --git a/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala b/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala index 1d07c5080e26..60f0587b85a7 100644 --- a/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala +++ b/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala @@ -8,11 +8,28 @@ transparent inline def transparentTestMacro = ${ testImpl } def testImpl(using Quotes): Expr[Object] = { import quotes.reflect.* - def makeType(owner: Symbol): Symbol = - Symbol.newTypeAlias(owner, "mytype", Flags.EmptyFlags, TypeRepr.of[String], Symbol.noSymbol) + def makeBasicType(owner: Symbol): Symbol = + Symbol.newTypeAlias(owner, "tpe", Flags.EmptyFlags, TypeRepr.of[String], Symbol.noSymbol) - val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => List(makeType(sym)), None) - val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List(TypeDef(clsSymbol.typeMember("mytype")))) + def makeTypesForClass(owner: Symbol): List[Symbol] = + val typeLambda = TypeLambda.apply(List("X"), _ => List(TypeBounds.empty), _ => TypeRepr.of[Int]) + List( + makeBasicType(owner), + // type Foo = [X] =>> Int + Symbol.newTypeAlias( + owner, + "tpe1", + Flags.EmptyFlags, + typeLambda, + Symbol.noSymbol + ), + ) + + val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => makeTypesForClass(sym), None) + val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List( + TypeDef(clsSymbol.typeMember("tpe")), + TypeDef(clsSymbol.typeMember("tpe1")), + )) Block(List(classDef), Apply(Select(New(TypeIdent(clsSymbol)), clsSymbol.primaryConstructor), List.empty)).asExprOf[Object] } From 8258bb0e327a4c2fabaac23e39c3c860e0fb4038 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Tue, 8 Oct 2024 15:57:11 +0200 Subject: [PATCH 637/827] Drop support for old experimental in community-build --- .../communitybuild/CommunityBuildRunner.scala | 13 ++++++------- .../src/scala/dotty/communitybuild/Main.scala | 5 +---- .../scala/dotty/communitybuild/projects.scala | 17 ++--------------- 3 files changed, 9 insertions(+), 26 deletions(-) diff --git a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala index 6a0c54c4b00b..b3065fefe87f 100644 --- a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala +++ b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala @@ -16,13 +16,12 @@ object CommunityBuildRunner: * and avoid network overhead. See https://github.com/lampepfl/dotty-drone * for more infrastructural details. 
*/ - extension (self: CommunityProject) def run()(using suite: CommunityBuildRunner): Unit = - if self.requiresExperimental && !compilerSupportExperimental then - log(s"Skipping ${self.project} - it needs experimental features unsupported in this build.") - return - self.dependencies.foreach(_.publish()) - self.testOnlyDependencies().foreach(_.publish()) - suite.runProject(self) + extension (self: CommunityProject) + def run()(using suite: CommunityBuildRunner): Unit = + self.dependencies.foreach(_.publish()) + self.testOnlyDependencies().foreach(_.publish()) + suite.runProject(self) + end extension trait CommunityBuildRunner: diff --git a/community-build/src/scala/dotty/communitybuild/Main.scala b/community-build/src/scala/dotty/communitybuild/Main.scala index 852cee46af22..c813f5ff684b 100644 --- a/community-build/src/scala/dotty/communitybuild/Main.scala +++ b/community-build/src/scala/dotty/communitybuild/Main.scala @@ -55,10 +55,7 @@ object Main: Seq("rm", "-rf", destStr).! Files.createDirectory(dest) val (toRun, ignored) = - allProjects.partition( p => - p.docCommand != null - && (!p.requiresExperimental || compilerSupportExperimental) - ) + allProjects.partition(_.docCommand != null) val paths = toRun.map { project => val name = project.project diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index a0444505801a..e8d6c3d2894d 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -10,9 +10,6 @@ lazy val compilerVersion: String = val file = communitybuildDir.resolve("scala3-bootstrapped.version") new String(Files.readAllBytes(file), UTF_8) -lazy val compilerSupportExperimental: Boolean = - compilerVersion.contains("SNAPSHOT") || compilerVersion.contains("NIGHTLY") - lazy val sbtPluginFilePath: String = // Workaround for https://github.com/sbt/sbt/issues/4395 new File(sys.props("user.home") + "/.sbt/1.0/plugins").mkdirs() @@ -43,7 +40,6 @@ sealed trait CommunityProject: val testOnlyDependencies: () => List[CommunityProject] val binaryName: String val runCommandsArgs: List[String] = Nil - val requiresExperimental: Boolean val environment: Map[String, String] = Map.empty final val projectDir = communitybuildDir.resolve("community-projects").resolve(project) @@ -53,7 +49,6 @@ sealed trait CommunityProject: /** Publish this project to the local Maven repository */ final def publish(): Unit = - // TODO what should this do with .requiresExperimental? if !published then publishDependencies() log(s"Publishing $project") @@ -65,11 +60,6 @@ sealed trait CommunityProject: published = true final def doc(): Unit = - if this.requiresExperimental && !compilerSupportExperimental then - log( - s"Skipping ${this.project} - it needs experimental features unsupported in this build." 
- ) - return publishDependencies() log(s"Documenting $project") if docCommand eq null then @@ -89,8 +79,7 @@ final case class MillCommunityProject( baseCommand: String, dependencies: List[CommunityProject] = Nil, testOnlyDependencies: () => List[CommunityProject] = () => Nil, - ignoreDocs: Boolean = false, - requiresExperimental: Boolean = false, + ignoreDocs: Boolean = false ) extends CommunityProject: override val binaryName: String = "./mill" override val testCommand = s"$baseCommand.test" @@ -109,8 +98,7 @@ final case class SbtCommunityProject( testOnlyDependencies: () => List[CommunityProject] = () => Nil, sbtPublishCommand: String = null, sbtDocCommand: String = null, - scalacOptions: List[String] = SbtCommunityProject.scalacOptions, - requiresExperimental: Boolean = false, + scalacOptions: List[String] = SbtCommunityProject.scalacOptions ) extends CommunityProject: override val binaryName: String = "sbt" @@ -260,7 +248,6 @@ object projects: project = "intent", sbtTestCommand = "test", sbtDocCommand = "doc", - requiresExperimental = true, ) lazy val scalacheck = SbtCommunityProject( From 21a7cbd431920f3846c920a5291f39f8a89e7e3a Mon Sep 17 00:00:00 2001 From: Jan Chyb <48855024+jchyb@users.noreply.github.com> Date: Tue, 8 Oct 2024 19:07:59 +0200 Subject: [PATCH 638/827] Fix scaladoc TastyInspector regressions (#21716) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds an `inspectAllTastyFilesInContext` method in scaladoc-only version of TastyInspector in an effort to fix regressions introduced by the switch to the new TastyInspector. Notably, those issues are not being fixed in the stable TastyInspector - this is scaladoc only. Also the scaladoc copy of the TastyInspector had to be renamed, as otherwise we run into classpath issues with the stable tasty inspector taking precedence. I really wanted to include the test for both issues, but both required non-standard specification, so I could not use `scaladoc-testcases` and ended up having to use sbt scripted tests unfortunately. 
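For reference, a minimal sketch of the new scaladoc-internal entry point as it is wired up in
TastyParser (the method name, parameters and `ctx.compilerContext` are taken from the diff below;
the surrounding setup is assumed to be scaladoc's existing inspector plumbing):

    // Unlike the stable TastyInspector entry points, this scaladoc-only overload takes a
    // `using Context`, so the TASTy inspection runs with the caller's compiler Context
    // (here scaladoc's own) rather than one created by the inspector itself.
    // `tastyPaths`, `jarPaths`, `classpath` and `inspector` come from the existing TastyParser code.
    ScaladocInternalTastyInspector
      .inspectAllTastyFilesInContext(tastyPaths, jarPaths, classpath)(inspector)(using ctx.compilerContext)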
--------- Co-authored-by: Mikołaj Fornal <24961583+Florian3k@users.noreply.github.com> --- .../sbt-dotty/scaladoc-regressions/build.sbt | 9 ++++ .../i18231/src/main/scala/main.scala | 4 ++ .../i20476/src/main/scala/main.scala | 5 +++ .../scaladoc-regressions/project/plugins.sbt | 1 + sbt-test/sbt-dotty/scaladoc-regressions/test | 2 + .../tools/scaladoc/tasty/TastyParser.scala | 4 +- .../tasty/inspector/TastyInspector.scala | 43 ++++++++++++++----- .../no-link-warnings/LinkWarningTest.scala | 3 +- 8 files changed, 57 insertions(+), 14 deletions(-) create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/build.sbt create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/test diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt b/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt new file mode 100644 index 000000000000..bfdadb5ee038 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt @@ -0,0 +1,9 @@ +ThisBuild / scalaVersion := sys.props("plugin.scalaVersion") + +lazy val i20476 = project + .in(file("i20476")) + .enablePlugins(ScalaJSPlugin) + +lazy val i18231 = project + .in(file("i18231")) + .settings(scalacOptions += "-release:8") diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala b/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala new file mode 100644 index 000000000000..82788aa829f0 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala @@ -0,0 +1,4 @@ +object Foo { + @Deprecated + def foo(): Unit = ??? 
+} diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala b/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala new file mode 100644 index 000000000000..31eb78c816cd --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala @@ -0,0 +1,5 @@ +package demo + +import scala.scalajs.js + +def bar: js.Promise[Int] = js.Promise.resolve(()).`then`(_ => 1) diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt b/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt new file mode 100644 index 000000000000..b9ebfd07bf1f --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("org.scala-js" % "sbt-scalajs" % sys.props("plugin.scalaJSVersion")) diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/test b/sbt-test/sbt-dotty/scaladoc-regressions/test new file mode 100644 index 000000000000..816c0be96141 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/test @@ -0,0 +1,2 @@ +> i18231/doc +> i20476/doc diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala index f55451fdc636..1a8337e0c6b7 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala @@ -5,7 +5,7 @@ package tasty import java.util.regex.Pattern import scala.util.{Try, Success, Failure} -import scala.tasty.inspector.{TastyInspector, Inspector, Tasty} +import scala.tasty.inspector.{ScaladocInternalTastyInspector, Inspector, Tasty} import scala.quoted._ import dotty.tools.dotc @@ -160,7 +160,7 @@ object ScaladocTastyInspector: report.error("File extension is not `tasty` or `jar`: " + invalidPath) if tastyPaths.nonEmpty then - TastyInspector.inspectAllTastyFiles(tastyPaths, jarPaths, classpath)(inspector) + ScaladocInternalTastyInspector.inspectAllTastyFilesInContext(tastyPaths, jarPaths, classpath)(inspector)(using ctx.compilerContext) val all = inspector.topLevels.result() all.groupBy(_._1).map { case (pckName, members) => diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala index 906578c9d405..190be6a588a1 100644 --- a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -1,5 +1,7 @@ -// Copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +// Renamed copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala // FIXME remove this copy of the file +// Since copying, an inspectAllTastyFilesInContext method was added for scaladoc only +// to fix regressions introduced by the switch from old to a new TastyInspector package scala.tasty.inspector @@ -21,7 +23,7 @@ import dotty.tools.dotc.report import java.io.File.pathSeparator -object TastyInspector: +object ScaladocInternalTastyInspector: /** Load and process TASTy files using TASTy reflect * @@ -41,6 +43,32 @@ object TastyInspector: def inspectTastyFilesInJar(jar: String)(inspector: Inspector): Boolean = inspectAllTastyFiles(Nil, List(jar), Nil)(inspector) + private def checkFiles(tastyFiles: List[String], jars: List[String]): Unit = + def checkFile(fileName: String, ext: String): Unit = + val file = dotty.tools.io.Path(fileName) + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then + throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") + else if !file.exists then + throw new 
IllegalArgumentException(s"File not found: ${file.toAbsolute}") + tastyFiles.foreach(checkFile(_, "tasty")) + jars.foreach(checkFile(_, "jar")) + + /** + * Added for Scaladoc-only. + * Meant to fix regressions introduces by the switch from old to new TastyInspector: + * https://github.com/scala/scala3/issues/18231 + * https://github.com/scala/scala3/issues/20476 + * Stable TastyInspector API does not support passing compiler context. + */ + def inspectAllTastyFilesInContext(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector)(using Context): Boolean = + checkFiles(tastyFiles, jars) + val classes = tastyFiles ::: jars + classes match + case Nil => true + case _ => + val reporter = inspectorDriver(inspector).process(inspectorArgs(dependenciesClasspath, classes), summon[Context]) + !reporter.hasErrors + /** Load and process TASTy files using TASTy reflect * * @param tastyFiles List of paths of `.tasty` files @@ -50,14 +78,7 @@ object TastyInspector: * @return boolean value indicating whether the process succeeded */ def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = - def checkFile(fileName: String, ext: String): Unit = - val file = dotty.tools.io.Path(fileName) - if !file.ext.toLowerCase.equalsIgnoreCase(ext) then - throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") - else if !file.exists then - throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") - tastyFiles.foreach(checkFile(_, "tasty")) - jars.foreach(checkFile(_, "jar")) + checkFiles(tastyFiles, jars) val files = tastyFiles ::: jars inspectFiles(dependenciesClasspath, files)(inspector) @@ -124,4 +145,4 @@ object TastyInspector: end inspectFiles -end TastyInspector +end ScaladocInternalTastyInspector diff --git a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala index 1d140315cc10..bcaee696b65c 100644 --- a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala @@ -14,6 +14,7 @@ class LinkWarningsTest extends ScaladocTest("noLinkWarnings"): override def runTest = afterRendering { val diagnostics = summon[DocContext].compilerContext.reportedDiagnostics - assertEquals("There should be exactly one warning", 1, diagnostics.warningMsgs.size) + val filteredWarnings = diagnostics.warningMsgs.filter(_ != "1 warning found") + assertEquals("There should be exactly one warning", 1, filteredWarnings.size) assertNoErrors(diagnostics) } From b3fe9bba9a34680c187a18ea5bcb069b699a53cb Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 9 Oct 2024 11:32:42 +0200 Subject: [PATCH 639/827] Revert "Fix scaladoc TastyInspector regressions (#21716)" This reverts commit 21a7cbd431920f3846c920a5291f39f8a89e7e3a. 
--- .../sbt-dotty/scaladoc-regressions/build.sbt | 9 ---- .../i18231/src/main/scala/main.scala | 4 -- .../i20476/src/main/scala/main.scala | 5 --- .../scaladoc-regressions/project/plugins.sbt | 1 - sbt-test/sbt-dotty/scaladoc-regressions/test | 2 - .../tools/scaladoc/tasty/TastyParser.scala | 4 +- .../tasty/inspector/TastyInspector.scala | 43 +++++-------------- .../no-link-warnings/LinkWarningTest.scala | 3 +- 8 files changed, 14 insertions(+), 57 deletions(-) delete mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/build.sbt delete mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala delete mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala delete mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt delete mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/test diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt b/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt deleted file mode 100644 index bfdadb5ee038..000000000000 --- a/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt +++ /dev/null @@ -1,9 +0,0 @@ -ThisBuild / scalaVersion := sys.props("plugin.scalaVersion") - -lazy val i20476 = project - .in(file("i20476")) - .enablePlugins(ScalaJSPlugin) - -lazy val i18231 = project - .in(file("i18231")) - .settings(scalacOptions += "-release:8") diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala b/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala deleted file mode 100644 index 82788aa829f0..000000000000 --- a/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala +++ /dev/null @@ -1,4 +0,0 @@ -object Foo { - @Deprecated - def foo(): Unit = ??? -} diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala b/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala deleted file mode 100644 index 31eb78c816cd..000000000000 --- a/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala +++ /dev/null @@ -1,5 +0,0 @@ -package demo - -import scala.scalajs.js - -def bar: js.Promise[Int] = js.Promise.resolve(()).`then`(_ => 1) diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt b/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt deleted file mode 100644 index b9ebfd07bf1f..000000000000 --- a/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt +++ /dev/null @@ -1 +0,0 @@ -addSbtPlugin("org.scala-js" % "sbt-scalajs" % sys.props("plugin.scalaJSVersion")) diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/test b/sbt-test/sbt-dotty/scaladoc-regressions/test deleted file mode 100644 index 816c0be96141..000000000000 --- a/sbt-test/sbt-dotty/scaladoc-regressions/test +++ /dev/null @@ -1,2 +0,0 @@ -> i18231/doc -> i20476/doc diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala index 1a8337e0c6b7..f55451fdc636 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala @@ -5,7 +5,7 @@ package tasty import java.util.regex.Pattern import scala.util.{Try, Success, Failure} -import scala.tasty.inspector.{ScaladocInternalTastyInspector, Inspector, Tasty} +import scala.tasty.inspector.{TastyInspector, Inspector, Tasty} import scala.quoted._ import dotty.tools.dotc @@ -160,7 +160,7 @@ object ScaladocTastyInspector: report.error("File extension is not `tasty` or `jar`: " + invalidPath) if 
tastyPaths.nonEmpty then - ScaladocInternalTastyInspector.inspectAllTastyFilesInContext(tastyPaths, jarPaths, classpath)(inspector)(using ctx.compilerContext) + TastyInspector.inspectAllTastyFiles(tastyPaths, jarPaths, classpath)(inspector) val all = inspector.topLevels.result() all.groupBy(_._1).map { case (pckName, members) => diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala index 190be6a588a1..906578c9d405 100644 --- a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -1,7 +1,5 @@ -// Renamed copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +// Copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala // FIXME remove this copy of the file -// Since copying, an inspectAllTastyFilesInContext method was added for scaladoc only -// to fix regressions introduced by the switch from old to a new TastyInspector package scala.tasty.inspector @@ -23,7 +21,7 @@ import dotty.tools.dotc.report import java.io.File.pathSeparator -object ScaladocInternalTastyInspector: +object TastyInspector: /** Load and process TASTy files using TASTy reflect * @@ -43,32 +41,6 @@ object ScaladocInternalTastyInspector: def inspectTastyFilesInJar(jar: String)(inspector: Inspector): Boolean = inspectAllTastyFiles(Nil, List(jar), Nil)(inspector) - private def checkFiles(tastyFiles: List[String], jars: List[String]): Unit = - def checkFile(fileName: String, ext: String): Unit = - val file = dotty.tools.io.Path(fileName) - if !file.ext.toLowerCase.equalsIgnoreCase(ext) then - throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") - else if !file.exists then - throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") - tastyFiles.foreach(checkFile(_, "tasty")) - jars.foreach(checkFile(_, "jar")) - - /** - * Added for Scaladoc-only. - * Meant to fix regressions introduces by the switch from old to new TastyInspector: - * https://github.com/scala/scala3/issues/18231 - * https://github.com/scala/scala3/issues/20476 - * Stable TastyInspector API does not support passing compiler context. 
- */ - def inspectAllTastyFilesInContext(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector)(using Context): Boolean = - checkFiles(tastyFiles, jars) - val classes = tastyFiles ::: jars - classes match - case Nil => true - case _ => - val reporter = inspectorDriver(inspector).process(inspectorArgs(dependenciesClasspath, classes), summon[Context]) - !reporter.hasErrors - /** Load and process TASTy files using TASTy reflect * * @param tastyFiles List of paths of `.tasty` files @@ -78,7 +50,14 @@ object ScaladocInternalTastyInspector: * @return boolean value indicating whether the process succeeded */ def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = - checkFiles(tastyFiles, jars) + def checkFile(fileName: String, ext: String): Unit = + val file = dotty.tools.io.Path(fileName) + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then + throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") + else if !file.exists then + throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") + tastyFiles.foreach(checkFile(_, "tasty")) + jars.foreach(checkFile(_, "jar")) val files = tastyFiles ::: jars inspectFiles(dependenciesClasspath, files)(inspector) @@ -145,4 +124,4 @@ object ScaladocInternalTastyInspector: end inspectFiles -end ScaladocInternalTastyInspector +end TastyInspector diff --git a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala index bcaee696b65c..1d140315cc10 100644 --- a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala @@ -14,7 +14,6 @@ class LinkWarningsTest extends ScaladocTest("noLinkWarnings"): override def runTest = afterRendering { val diagnostics = summon[DocContext].compilerContext.reportedDiagnostics - val filteredWarnings = diagnostics.warningMsgs.filter(_ != "1 warning found") - assertEquals("There should be exactly one warning", 1, filteredWarnings.size) + assertEquals("There should be exactly one warning", 1, diagnostics.warningMsgs.size) assertNoErrors(diagnostics) } From cd3bd1dc5f84ad15659b8f143c32beca2a74643d Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 9 Oct 2024 16:51:19 +0200 Subject: [PATCH 640/827] [chore] Set base version to 3.6.1 (#21736) --- project/Build.scala | 2 +- project/MiMaFilters.scala | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 8993edc45ede..84ce00d11577 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -93,7 +93,7 @@ object Build { val referenceVersion = "3.5.2-RC1" - val baseVersion = "3.6.0" + val baseVersion = "3.6.1" // Will be required by some automation later val prereleaseVersion = s"$baseVersion-RC1" diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index a04f4fae91aa..00e7153bcb83 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -10,10 +10,17 @@ object MiMaFilters { Build.mimaPreviousDottyVersion -> Seq( ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.betterFors"), ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$betterFors$"), + 
ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.quotedPatternsWithPolymorphicFunctions"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$quotedPatternsWithPolymorphicFunctions$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.runtime.Patterns.higherOrderHoleWithTypes"), ), // Additions since last LTS Build.mimaPreviousLTSDottyVersion -> Seq( + ProblemFilters.exclude[MissingClassProblem]("scala.NamedTuple"), + ProblemFilters.exclude[MissingClassProblem]("scala.NamedTuple$"), + ProblemFilters.exclude[MissingClassProblem]("scala.NamedTupleDecomposition"), + ProblemFilters.exclude[MissingClassProblem]("scala.NamedTupleDecomposition$"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefMethods"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefTypeTest"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#defnModule.FunctionClass"), From bd88a0f510a7c4c910e33bee2bf2ca8626ff78ea Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Wed, 9 Oct 2024 17:03:47 +0200 Subject: [PATCH 641/827] Add an explicit type instantiation in `NamedTuple.map` To avoid triggering a deep subtype comparison needed for the inference of the higher-kinded type parameter. --- library/src-bootstrapped/scala/NamedTuple.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src-bootstrapped/scala/NamedTuple.scala b/library/src-bootstrapped/scala/NamedTuple.scala index f8a821dc45ef..d105cf042f37 100644 --- a/library/src-bootstrapped/scala/NamedTuple.scala +++ b/library/src-bootstrapped/scala/NamedTuple.scala @@ -181,7 +181,7 @@ object NamedTupleDecomposition: * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. 
*/ inline def map[F[_]](f: [t] => t => F[t]): Map[NamedTuple[N, V], F] = - x.toTuple.map(f) + x.toTuple.map[F](f) /** The named tuple consisting of all elements of this tuple in reverse */ inline def reverse: Reverse[NamedTuple[N, V]] = x.toTuple.reverse From 632278e9c78a3bd4067d8caf649d8d07fd96f2b9 Mon Sep 17 00:00:00 2001 From: friendseeker <66892505+Friendseeker@users.noreply.github.com> Date: Mon, 25 Dec 2023 15:27:28 -0800 Subject: [PATCH 642/827] Flag class file collision as error --- compiler/src/dotty/tools/backend/jvm/PostProcessor.scala | 2 +- tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala index 06c3c7f1cb4f..9f172806a3b5 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -73,7 +73,7 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: else s" (defined in ${pos2.source.file.name})" def nicify(name: String): String = name.replace('/', '.').nn if name1 == name2 then - backendReporting.warning( + backendReporting.error( em"${nicify(name1)} and ${nicify(name2)} produce classes that overwrite one another", pos1) else backendReporting.warning( diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala index 1af7e5dd705a..3bc9fb5592ee 100644 --- a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala +++ b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala @@ -214,7 +214,7 @@ class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrim val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString atPhase(typerPhase) { if (same) - report.warning( // FIXME: This should really be an error, but then FromTasty tests fail + report.error( em"$cl1 and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) else report.warning( From ac05bd5950ac9af26533930ef1d88724aca95f99 Mon Sep 17 00:00:00 2001 From: Friendseeker <66892505+Friendseeker@users.noreply.github.com> Date: Wed, 9 Oct 2024 11:57:15 -0700 Subject: [PATCH 643/827] Add test case neg/i19248 --- tests/neg/i19248/Foo.scala | 7 +++++++ tests/neg/i19248/Main.scala | 3 +++ tests/neg/i19248/Scope.scala | 4 ++++ tests/neg/i19248/empty.scala | 1 + 4 files changed, 15 insertions(+) create mode 100644 tests/neg/i19248/Foo.scala create mode 100644 tests/neg/i19248/Main.scala create mode 100644 tests/neg/i19248/Scope.scala create mode 100644 tests/neg/i19248/empty.scala diff --git a/tests/neg/i19248/Foo.scala b/tests/neg/i19248/Foo.scala new file mode 100644 index 000000000000..f24651234eb9 --- /dev/null +++ b/tests/neg/i19248/Foo.scala @@ -0,0 +1,7 @@ +trait Foo { // error + class Bar + + type T = Foo.this.Bar + + inline def f: Int = ??? 
+} diff --git a/tests/neg/i19248/Main.scala b/tests/neg/i19248/Main.scala new file mode 100644 index 000000000000..bf4e3a48b279 --- /dev/null +++ b/tests/neg/i19248/Main.scala @@ -0,0 +1,3 @@ +@main +def Main(args: String*): Unit = + () diff --git a/tests/neg/i19248/Scope.scala b/tests/neg/i19248/Scope.scala new file mode 100644 index 000000000000..a3135d93084f --- /dev/null +++ b/tests/neg/i19248/Scope.scala @@ -0,0 +1,4 @@ +object Scope { +} +object Foo { +} diff --git a/tests/neg/i19248/empty.scala b/tests/neg/i19248/empty.scala new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/tests/neg/i19248/empty.scala @@ -0,0 +1 @@ + From 41e8a126443e85bebfc8e9de84fb7949e845f981 Mon Sep 17 00:00:00 2001 From: Friendseeker <66892505+Friendseeker@users.noreply.github.com> Date: Wed, 9 Oct 2024 15:40:43 -0700 Subject: [PATCH 644/827] Reclassify main-functions-nameclash test --- tests/neg/main-functions-nameclash.scala | 3 +++ tests/warn/main-functions-nameclash.scala | 5 ----- 2 files changed, 3 insertions(+), 5 deletions(-) create mode 100644 tests/neg/main-functions-nameclash.scala delete mode 100644 tests/warn/main-functions-nameclash.scala diff --git a/tests/neg/main-functions-nameclash.scala b/tests/neg/main-functions-nameclash.scala new file mode 100644 index 000000000000..23a530e28271 --- /dev/null +++ b/tests/neg/main-functions-nameclash.scala @@ -0,0 +1,3 @@ +object foo { + @main def foo(x: Int) = () // error: class foo and object foo produce classes that overwrite one another +} diff --git a/tests/warn/main-functions-nameclash.scala b/tests/warn/main-functions-nameclash.scala deleted file mode 100644 index bc0fe64379d4..000000000000 --- a/tests/warn/main-functions-nameclash.scala +++ /dev/null @@ -1,5 +0,0 @@ - - -object foo { - @main def foo(x: Int) = () // warn: class foo and object foo produce classes that overwrite one another -} From 69f5f730c9b887ccaf8436729de2a2f52fed871c Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 10 Oct 2024 12:43:42 +0100 Subject: [PATCH 645/827] Fix ctx implicits under case unapplySeq A case class with a varargs has a unapplySeq extractor instead of unapply. When we type an unapply, in typedUnapply, we first look for unapply methods before unapplySeq methods. But when searching for unapply, if a class method isn't found, then an extension method is looked for, which causes context implicits to be cached. The bindings from a pattern (such as from an unapply or unapplySeq extractor) are added to the context in indexPattern. But Context's `implicitCache` doesn't account for the scope changing. I opted for giving the body its own scope context, rather than making indexPattern reset the context implicits cache. --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 4 ++-- tests/pos/i21742.1.scala | 5 +++++ tests/pos/i21742.2.scala | 5 +++++ 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i21742.1.scala create mode 100644 tests/pos/i21742.2.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index f20c2d313ec7..ed0ff5e0bd2f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2198,7 +2198,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Type a case. 
*/ def typedCase(tree: untpd.CaseDef, sel: Tree, wideSelType: Type, pt: Type)(using Context): CaseDef = { val originalCtx = ctx - val gadtCtx: Context = ctx.fresh.setFreshGADTBounds.setNewScope + val gadtCtx: Context = ctx.fresh.setFreshGADTBounds def caseRest(pat: Tree)(using Context) = { val pt1 = instantiateMatchTypeProto(pat, pt) match { @@ -2228,7 +2228,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val pat1 = typedPattern(tree.pat, wideSelType)(using gadtCtx) caseRest(pat1)( using Nullables.caseContext(sel, pat1)( - using gadtCtx)) + using gadtCtx.fresh.setNewScope)) } def typedLabeled(tree: untpd.Labeled)(using Context): Labeled = { diff --git a/tests/pos/i21742.1.scala b/tests/pos/i21742.1.scala new file mode 100644 index 000000000000..c9c1a94b222c --- /dev/null +++ b/tests/pos/i21742.1.scala @@ -0,0 +1,5 @@ +case class C(n: Int, ds: Double*) +class Test: + def m(using n: Int): Int = n + 1 + def t(): Unit = + C(1, 2, 3, 4) match { case C(given Int, ds*) => m } diff --git a/tests/pos/i21742.2.scala b/tests/pos/i21742.2.scala new file mode 100644 index 000000000000..83dc4b6ad46f --- /dev/null +++ b/tests/pos/i21742.2.scala @@ -0,0 +1,5 @@ +case class C(n: Int, ds: Seq[Double]) +class Test: + def m(using n: Int): Int = n + 1 + def t(): Unit = + C(1, Seq(2, 3, 4)) match { case C(given Int, ds) => m } From e0428038159bff79774fd49c889f824f302dba9d Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 11 Oct 2024 01:00:36 +0200 Subject: [PATCH 646/827] fix code rendering error in givens.md an extra code fence was added, so the bottom of the page is incorrectly rendered --- docs/_docs/reference/contextual/givens.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index 088ded2e8db4..9c074da0da6e 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -90,8 +90,6 @@ time it is accessed. If the given is a mere alias to some immutable value, the g Here is the full syntax for given instances. Some of these forms of givens are explained in a separate page: [Other Forms of Givens](../more-givens.md). ```ebnf -Here is the complete context-free syntax for all proposed features. -``` TmplDef ::= ... 
| 'given' GivenDef GivenDef ::= [id ':'] GivenSig GivenSig ::= GivenImpl From 9de4b7c3b7a0d35f75a5dd050e2eca58b9fd5152 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 10 Oct 2024 18:24:04 -0700 Subject: [PATCH 647/827] REPL: JLine 3.27.0 (was 3.25.1) --- project/Build.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 84ce00d11577..60511f648b57 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -694,9 +694,9 @@ object Build { libraryDependencies ++= Seq( "org.scala-lang.modules" % "scala-asm" % "9.7.0-scala-2", // used by the backend Dependencies.compilerInterface, - "org.jline" % "jline-reader" % "3.25.1", // used by the REPL - "org.jline" % "jline-terminal" % "3.25.1", - "org.jline" % "jline-terminal-jna" % "3.25.1", // needed for Windows + "org.jline" % "jline-reader" % "3.27.0", // used by the REPL + "org.jline" % "jline-terminal" % "3.27.0", + "org.jline" % "jline-terminal-jna" % "3.27.0", // needed for Windows ("io.get-coursier" %% "coursier" % "2.0.16" % Test).cross(CrossVersion.for3Use2_13), ), From aee3230df033af599847d460ecd16064af9c702c Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Fri, 11 Oct 2024 18:43:25 +0200 Subject: [PATCH 648/827] backport: include inline when printing param names --- .../tools/pc/printer/ShortenedTypePrinter.scala | 3 ++- .../pc/tests/completion/CompletionArgSuite.scala | 12 ++++++------ .../pc/tests/completion/CompletionCancelSuite.scala | 4 ++-- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala index a738440c585d..d9c11a5ada8c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala @@ -527,7 +527,8 @@ class ShortenedTypePrinter( else if includeDefaultParam == ShortenedTypePrinter.IncludeDefaultParam.ResolveLater && isDefaultParam then " = ..." 
else "" // includeDefaultParam == Never or !isDefaultParam - s"$keywordName: ${paramTypeString}$default" + val inline = if(param.is(Flags.Inline)) "inline " else "" + s"$inline$keywordName: ${paramTypeString}$default" end if end paramLabel diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala index 17f21b16d6e8..dc81d2596c6f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala @@ -796,7 +796,7 @@ class CompletionArgSuite extends BaseCompletionSuite: | def k: Int = m(1, a@@) |""".stripMargin, """|aaa = : Int - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(2), ) @@ -810,7 +810,7 @@ class CompletionArgSuite extends BaseCompletionSuite: | def k: Int = m(inn = 1, a@@) |""".stripMargin, """|aaa = : Int - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(2), ) @@ -912,7 +912,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -927,7 +927,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -945,7 +945,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -963,7 +963,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|abb = : Option[Int] |acc = : List[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala index 4746eb93f25d..c1d0e017def7 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala @@ -90,8 +90,8 @@ class CompletionCancelSuite extends BaseCompletionSuite: | val x = asser@@ |} """.stripMargin, - """|assert(assertion: Boolean): Unit - |assert(assertion: Boolean, message: => Any): Unit + """|assert(inline assertion: Boolean): Unit + |assert(inline assertion: Boolean, inline message: => Any): Unit |""".stripMargin ) From 6429b4b05bfc6904ef1fe76589a3bfe833383ef9 Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Fri, 11 Oct 2024 18:45:20 +0200 Subject: [PATCH 649/827] backport: use uri from params in pc for location --- .../src/main/dotty/tools/pc/PcDefinitionProvider.scala | 8 ++++---- .../dotty/tools/pc/utils/InteractiveEnrichments.scala | 6 ------ 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala index fc97dd1f1176..6f2f4cd5f34e 100644 --- 
a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala @@ -131,13 +131,13 @@ class PcDefinitionProvider( otherDefs.headOption.orElse(exportedDefs.headOption) match case Some(srcTree) => val pos = srcTree.namePos - pos.toLocation match - case None => DefinitionResultImpl.empty - case Some(loc) => + if pos.exists then + val loc = new Location(params.uri().toString(), pos.toLsp) DefinitionResultImpl( SemanticdbSymbols.symbolName(sym), - List(loc).asJava + List(loc).asJava, ) + else DefinitionResultImpl.empty case None => DefinitionResultImpl.empty else diff --git a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala index 8ff11694ff1c..66080a363d51 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala @@ -99,12 +99,6 @@ object InteractiveEnrichments extends CommonMtagsEnrichments: def focusAt(point: Int): SourcePosition = pos.withSpan(pos.span.withPoint(point).focus) - def toLocation: Option[l.Location] = - for - uri <- InteractiveDriver.toUriOption(pos.source) - range <- if pos.exists then Some(pos.toLsp) else None - yield new l.Location(uri.toString(), range) - def encloses(other: SourcePosition): Boolean = pos.start <= other.start && pos.end >= other.end From 87db0da58d70ace996b65c179baa48929526ab22 Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Fri, 11 Oct 2024 19:02:02 +0200 Subject: [PATCH 650/827] backport: Finer grained accessibility check for auto-imports --- .../tools/pc/CompilerSearchVisitor.scala | 2 +- .../dotty/tools/pc/PcDefinitionProvider.scala | 2 +- .../completion/CompletionSnippetSuite.scala | 23 +++++++++++++++++++ .../pc/tests/completion/CompletionSuite.scala | 13 +++++++++++ 4 files changed, 38 insertions(+), 2 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala index 035c1062a3e3..9fb84ee1f513 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala @@ -28,7 +28,7 @@ class CompilerSearchVisitor( owner.isStatic && owner.isPublic private def isAccessible(sym: Symbol): Boolean = try - sym != NoSymbol && sym.isPublic && sym.isStatic || isAccessibleImplicitClass(sym) + (sym != NoSymbol && sym.isAccessibleFrom(ctx.owner.info) && sym.isStatic) || isAccessibleImplicitClass(sym) catch case err: AssertionError => logger.log(Level.WARNING, err.getMessage()) diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala index 6f2f4cd5f34e..3b2284bef1d0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala @@ -137,7 +137,7 @@ class PcDefinitionProvider( SemanticdbSymbols.symbolName(sym), List(loc).asJava, ) - else DefinitionResultImpl.empty + else DefinitionResultImpl.empty case None => DefinitionResultImpl.empty else diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala index 
381375c65131..a002e722f1f0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala @@ -385,6 +385,29 @@ class CompletionSnippetSuite extends BaseCompletionSuite: ) @Test def `no-apply` = + checkSnippet( + s"""|package example + | + |object Widget{} + |object Main { + | Wi@@ + |} + |""".stripMargin, + """|Widget - example + |Window - java.awt + |WindowPeer - java.awt.peer + |WithFilter - [A](p: A => Boolean, xs: Array[A]): WithFilter[A] + |WithFilter - [A, CC[_$$2]](self: IterableOps[A, CC, ?], p: A => Boolean): WithFilter[A, CC] + |WithFilter - [K, V, IterableCC[_$$3], CC[_$$4,_$$5] <: IterableOps[?, AnyConstr, ?]](self: MapOps[K, V, CC, ?] & IterableOps[(K, V), IterableCC, ?], p: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC] + |WithFilter - [K, V, IterableCC[_$$1], MapCC[X,Y] <: scala.collection.Map[X, Y], CC[X,Y] <: scala.collection.Map[X, Y] & SortedMapOps[X, Y, CC, ?]](self: SortedMapOps[K, V, CC, ?] & MapOps[K, V, MapCC, ?] & IterableOps[(K, V), IterableCC, ?], p: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] + |WithFilter - [A, IterableCC[_$$1], CC[X] <: SortedSet[X]](self: SortedSetOps[A, CC, ?] & IterableOps[A, IterableCC, ?], p: A => Boolean): WithFilter[A, IterableCC, CC] + |WithFilter - (p: Char => Boolean, s: String): WithFilter + |WithFilter - [A](l: Stream[A] @uncheckedVariance, p: A => Boolean): WithFilter[A] + |""".stripMargin, + includeDetail = true, + ) + + @Test def `no-apply2` = checkSnippet( s"""|package example | diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 57975d2c8e98..1d525c17bdf1 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -2107,6 +2107,19 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin ) + @Test def `shadowing` = + check( + """|package pkg + |object Main { + | val x = ListBuff@@ + |} + |""".stripMargin, + """|ListBuffer[A](elems: A*): ListBuffer[A] - scala.collection.mutable + |new ListBuffer[A]: ListBuffer[A] - scala.collection.mutable + |ListBuffer - scala.collection.mutable + |""".stripMargin + ) + @Test def `conflict-edit-2` = checkEdit( """|package a From d459140ef14fe66644f90b9cc3b359e3cb3741ce Mon Sep 17 00:00:00 2001 From: Alexander Date: Thu, 10 Oct 2024 22:18:26 +0300 Subject: [PATCH 651/827] Do not consider uninhabited constructors when performing exhaustive match checking --- .../tools/dotc/transform/patmat/Space.scala | 35 ++++++++++++++----- tests/init-global/pos/i18629.scala | 2 +- tests/patmat/i13931.scala | 2 +- tests/warn/patmat-nothing-exhaustive.scala | 10 ++++++ 4 files changed, 38 insertions(+), 11 deletions(-) create mode 100644 tests/warn/patmat-nothing-exhaustive.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 20b0099d82e2..9fb3c00c67c4 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -648,21 +648,38 @@ object SpaceEngine { // we get // <== refineUsingParent(NatT, class Succ, []) = Succ[NatT] // <== isSub(Succ[NatT] <:< Succ[Succ[]]) = false - def getAppliedClass(tp: Type): Type = tp match - case 
tp @ AppliedType(_: HKTypeLambda, _) => tp - case tp @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => tp + def getAppliedClass(tp: Type): (Type, List[Type]) = tp match + case tp @ AppliedType(_: HKTypeLambda, _) => (tp, Nil) + case tp @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => (tp, tp.args) case tp @ AppliedType(tycon: TypeProxy, _) => getAppliedClass(tycon.superType.applyIfParameterized(tp.args)) - case tp => tp - val tp = getAppliedClass(tpOriginal) - def getChildren(sym: Symbol): List[Symbol] = + case tp => (tp, Nil) + val (tp, typeArgs) = getAppliedClass(tpOriginal) + // This function is needed to get the arguments of the types that will be applied to the class. + // This is necessary because if the arguments of the types contain Nothing, + // then this can affect whether the class will be taken into account during the exhaustiveness check + def getTypeArgs(parent: Symbol, child: Symbol, typeArgs: List[Type]): List[Type] = + val superType = child.typeRef.superType + if typeArgs.exists(_.isBottomType) && superType.isInstanceOf[ClassInfo] then + val parentClass = superType.asInstanceOf[ClassInfo].declaredParents.find(_.classSymbol == parent).get + val paramTypeMap = Map.from(parentClass.argTypes.map(_.typeSymbol).zip(typeArgs)) + val substArgs = child.typeRef.typeParamSymbols.map(param => paramTypeMap.getOrElse(param, WildcardType)) + substArgs + else Nil + def getChildren(sym: Symbol, typeArgs: List[Type]): List[Symbol] = sym.children.flatMap { child => if child eq sym then List(sym) // i3145: sealed trait Baz, val x = new Baz {}, Baz.children returns Baz... else if tp.classSymbol == defn.TupleClass || tp.classSymbol == defn.NonEmptyTupleClass then List(child) // TupleN and TupleXXL classes are used for Tuple, but they aren't Tuple's children - else if (child.is(Private) || child.is(Sealed)) && child.isOneOf(AbstractOrTrait) then getChildren(child) - else List(child) + else if (child.is(Private) || child.is(Sealed)) && child.isOneOf(AbstractOrTrait) then + getChildren(child, getTypeArgs(sym, child, typeArgs)) + else + val childSubstTypes = child.typeRef.applyIfParameterized(getTypeArgs(sym, child, typeArgs)) + // if a class contains a field of type Nothing, + // then it can be ignored in pattern matching, because it is impossible to obtain an instance of it + val existFieldWithBottomType = childSubstTypes.fields.exists(_.info.isBottomType) + if existFieldWithBottomType then Nil else List(child) } - val children = trace(i"getChildren($tp)")(getChildren(tp.classSymbol)) + val children = trace(i"getChildren($tp)")(getChildren(tp.classSymbol, typeArgs)) val parts = children.map { sym => val sym1 = if (sym.is(ModuleClass)) sym.sourceModule else sym diff --git a/tests/init-global/pos/i18629.scala b/tests/init-global/pos/i18629.scala index f97c21ee918d..03f1f5d5cda4 100644 --- a/tests/init-global/pos/i18629.scala +++ b/tests/init-global/pos/i18629.scala @@ -1,6 +1,6 @@ object Foo { val bar = List() match { case List() => ??? - case _ => ??? + case null => ??? 
} } diff --git a/tests/patmat/i13931.scala b/tests/patmat/i13931.scala index 0d8d9eb9dcd3..562f059771c1 100644 --- a/tests/patmat/i13931.scala +++ b/tests/patmat/i13931.scala @@ -3,5 +3,5 @@ class Test: case Seq() => println("empty") case _ => println("non-empty") - def test2 = IndexedSeq() match { case IndexedSeq() => case _ => } + def test2 = IndexedSeq() match { case IndexedSeq() => case null => } def test3 = IndexedSeq() match { case IndexedSeq(1) => case _ => } diff --git a/tests/warn/patmat-nothing-exhaustive.scala b/tests/warn/patmat-nothing-exhaustive.scala new file mode 100644 index 000000000000..4e9181256fda --- /dev/null +++ b/tests/warn/patmat-nothing-exhaustive.scala @@ -0,0 +1,10 @@ +enum TestAdt: + case Inhabited + case Uninhabited(no: Nothing) + +def test1(t: TestAdt): Int = t match + case TestAdt.Inhabited => 1 + +def test2(o: Option[Option[Nothing]]): Int = o match + case Some(None) => 1 + case None => 2 From c6b23cc2944c183a36bc607e1eed9361cc9e3b83 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 14 Oct 2024 15:20:12 +0200 Subject: [PATCH 652/827] backend computes line number from source of position --- .../src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala | 8 +++++++- compiler/src/dotty/tools/dotc/util/SourceFile.scala | 3 ++- compiler/src/dotty/tools/dotc/util/SourcePosition.scala | 4 ++++ 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 394700c2898e..38e5617fc3d7 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -623,7 +623,13 @@ trait BCodeSkelBuilder extends BCodeHelpers { } if (emitLines && tree.span.exists && !tree.hasAttachment(SyntheticUnit)) { - val nr = ctx.source.offsetToLine(tree.span.point) + 1 + val nr = + val sourcePos = tree.sourcePos + ( + if sourcePos.exists then sourcePos.finalPosition.line + else ctx.source.offsetToLine(tree.span.point) // fallback + ) + 1 + if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr getNonLabelNode(lastInsn) match { diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index 9da4f58f2deb..3ea43d16a7c8 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -119,7 +119,8 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends * For regular source files, simply return the argument. 
*/ def positionInUltimateSource(position: SourcePosition): SourcePosition = - SourcePosition(underlying, position.span shift start) + if isSelfContained then position // return the argument + else SourcePosition(underlying, position.span shift start) private def calculateLineIndicesFromContents() = { val cs = content() diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala index 904704b2349c..384d2f1fb2f3 100644 --- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala +++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala @@ -79,6 +79,10 @@ extends SrcPos, interfaces.SourcePosition, Showable { rec(this) } + def finalPosition: SourcePosition = { + source.positionInUltimateSource(this) + } + override def toString: String = s"${if (source.exists) source.file.toString else "(no source)"}:$span" From 731f61e567fde22cf463bc544b6aea1e8b159910 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Mon, 14 Oct 2024 16:47:53 +0200 Subject: [PATCH 653/827] Shade scalajs.ir under dotty.tools --- .../dotty/tools/backend/sjs/JSCodeGen.scala | 14 ++++---- .../dotty/tools/backend/sjs/JSEncoding.scala | 12 +++---- .../tools/backend/sjs/JSExportsGen.scala | 14 ++++---- .../dotty/tools/backend/sjs/JSPositions.scala | 2 +- .../tools/dotc/transform/CheckReentrant.scala | 2 +- .../tools/dotc/transform/sjs/JSSymUtils.scala | 2 +- .../dotc/transform/sjs/PrepJSExports.scala | 4 +-- .../dotc/transform/sjs/PrepJSInterop.scala | 2 +- project/Build.scala | 35 +++++++++++++++---- .../backend/sjs/JSCodeGen.scala | 12 +++---- .../backend/sjs/JSEncoding.scala | 12 +++---- .../backend/sjs/JSExportsGen.scala | 12 +++---- .../backend/sjs/JSPositions.scala | 2 +- 13 files changed, 73 insertions(+), 52 deletions(-) diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 6e2449b5c299..7ba39768871b 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -25,13 +25,13 @@ import dotty.tools.dotc.transform.{Erasure, ValueClasses} import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.report -import org.scalajs.ir -import org.scalajs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Trees.OptimizerHints -import org.scalajs.ir.Version.Unversioned +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{ClassName, MethodName, SimpleMethodName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSSymUtils.* diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala index 098f592daa30..9a7753680bc3 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala @@ -15,12 +15,12 @@ import StdNames.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* -import org.scalajs.ir -import org.scalajs.ir.{Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, 
SimpleMethodName, MethodName, ClassName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.UTF8String +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, SimpleMethodName, MethodName, ClassName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.UTF8String import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index b5f9446758a9..e6c73357aa4c 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -22,12 +22,12 @@ import TypeErasure.ErasedValueType import dotty.tools.dotc.util.{SourcePosition, SrcPos} import dotty.tools.dotc.report -import org.scalajs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Position.NoPosition -import org.scalajs.ir.Trees.OptimizerHints -import org.scalajs.ir.Version.Unversioned +import dotty.tools.sjs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Position.NoPosition +import dotty.tools.sjs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSExportUtils.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* @@ -932,7 +932,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) case _ => - import org.scalajs.ir.Names + import dotty.tools.sjs.ir.Names (toIRType(tpe): @unchecked) match { case jstpe.AnyType => NoTypeTest diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala index 3b25187b0acd..a229c9ea0e58 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.report import dotty.tools.dotc.util.{SourceFile, SourcePosition} import dotty.tools.dotc.util.Spans.Span -import org.scalajs.ir +import dotty.tools.sjs.ir /** Conversion utilities from dotty Positions to IR Positions. 
*/ class JSPositions()(using Context) { diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index 073086ac5e2c..e8a402068bfc 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -43,7 +43,7 @@ class CheckReentrant extends MiniPhase { requiredClass("scala.annotation.internal.unshared")) private val scalaJSIRPackageClass = new CtxLazy( - getPackageClassIfDefined("org.scalajs.ir")) + getPackageClassIfDefined("dotty.tools.sjs.ir")) def isIgnored(sym: Symbol)(using Context): Boolean = sym.hasAnnotation(sharableAnnot()) || diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala index 936b6958fb33..87ee2be91465 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala @@ -17,7 +17,7 @@ import Types.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn -import org.scalajs.ir.{Trees => js} +import dotty.tools.sjs.ir.{Trees => js} /** Additional extensions for `Symbol`s that are only relevant for Scala.js. */ object JSSymUtils { diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index f66141bff8ad..5aa35a277cb5 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -21,8 +21,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn import JSExportUtils.* import JSSymUtils.* -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName +import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName object PrepJSExports { import tpd.* diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index 1b8fdd268ece..c7316482c193 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -23,7 +23,7 @@ import Types.* import JSSymUtils.* -import org.scalajs.ir.Trees.JSGlobalRef +import dotty.tools.sjs.ir.Trees.JSGlobalRef import dotty.tools.backend.sjs.JSDefinitions.jsdefn diff --git a/project/Build.scala b/project/Build.scala index 84ce00d11577..0cf4e3abcaaf 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -616,18 +616,36 @@ object Build { def findArtifactPath(classpath: Def.Classpath, name: String): String = findArtifact(classpath, name).getAbsolutePath + /** Replace package names in package definitions, for shading. + * It assumes the full package def is written on a single line. + * It does not adapt the imports accordingly. 
+ */ + def replacePackage(lines: List[String])(replace: PartialFunction[String, String]): List[String] = { + def recur(lines: List[String]): List[String] = + lines match { + case head :: tail => + if (head.startsWith("package ")) { + val packageName = head.stripPrefix("package ").trim + val newPackageName = replace.applyOrElse(packageName, (_: String) => packageName) + s"package $newPackageName" :: tail + } else head :: recur(tail) + case _ => lines + } + recur(lines) + } + /** Insert UnsafeNulls Import after package */ - def insertUnsafeNullsImport(lines: Seq[String]): Seq[String] = { - def recur(ls: Seq[String], foundPackage: Boolean): Seq[String] = ls match { - case Seq(l, rest @ _*) => + def insertUnsafeNullsImport(lines: List[String]): List[String] = { + def recur(ls: List[String], foundPackage: Boolean): List[String] = ls match { + case l :: rest => val lt = l.trim() if (foundPackage) { if (!(lt.isEmpty || lt.startsWith("package "))) - "import scala.language.unsafeNulls" +: ls - else l +: recur(rest, foundPackage) + "import scala.language.unsafeNulls" :: ls + else l :: recur(rest, foundPackage) } else { if (lt.startsWith("package ")) l +: recur(rest, true) - else l +: recur(rest, foundPackage) + else l :: recur(rest, foundPackage) } case _ => ls } @@ -928,7 +946,10 @@ object Build { val sjsSources = (trgDir ** "*.scala").get.toSet sjsSources.foreach(f => { val lines = IO.readLines(f) - IO.writeLines(f, insertUnsafeNullsImport(lines)) + val linesWithPackage = replacePackage(lines) { + case "org.scalajs.ir" => "dotty.tools.sjs.ir" + } + IO.writeLines(f, insertUnsafeNullsImport(linesWithPackage)) }) sjsSources } (Set(scalaJSIRSourcesJar)).toSeq diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala index c670b2de97b1..81f09b082850 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala @@ -25,12 +25,12 @@ import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.report -import org.scalajs.ir -import org.scalajs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{ClassName, MethodName, SimpleMethodName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Trees.OptimizerHints import dotty.tools.dotc.transform.sjs.JSSymUtils._ diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala b/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala index 73a150c60290..518295543610 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala @@ -15,12 +15,12 @@ import StdNames._ import dotty.tools.dotc.transform.sjs.JSSymUtils._ -import org.scalajs.ir -import org.scalajs.ir.{Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{LocalName, LabelName, FieldName, SimpleMethodName, MethodName, ClassName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.UTF8String +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{Trees 
=> js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{LocalName, LabelName, FieldName, SimpleMethodName, MethodName, ClassName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.UTF8String import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala index 78412999bb34..82b69e6a16a7 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala @@ -22,11 +22,11 @@ import TypeErasure.ErasedValueType import dotty.tools.dotc.util.{SourcePosition, SrcPos} import dotty.tools.dotc.report -import org.scalajs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Position.NoPosition -import org.scalajs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Position.NoPosition +import dotty.tools.sjs.ir.Trees.OptimizerHints import dotty.tools.dotc.transform.sjs.JSExportUtils._ import dotty.tools.dotc.transform.sjs.JSSymUtils._ @@ -924,7 +924,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) case _ => - import org.scalajs.ir.Names + import dotty.tools.sjs.ir.Names (toIRType(tpe): @unchecked) match { case jstpe.AnyType => NoTypeTest diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala b/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala index 2fd007165952..620e76ab4bab 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.report import dotty.tools.dotc.util.{SourceFile, SourcePosition} import dotty.tools.dotc.util.Spans.Span -import org.scalajs.ir +import dotty.tools.sjs.ir /** Conversion utilities from dotty Positions to IR Positions. 
*/ class JSPositions()(using Context) { From 54ddb427898fa1a245a4bed320c113b518ca6081 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 14 Oct 2024 17:43:32 +0200 Subject: [PATCH 654/827] add demonstration of plugin adjusting positions --- .../BootstrappedOnlyCompilationTests.scala | 8 ++- .../run/scriptWrapper/Framework_1.scala | 3 + .../scriptWrapper/LineNumberPlugin_1.scala | 68 +++++++++++++++++++ tests/plugins/run/scriptWrapper/Test_3.scala | 25 +++++++ tests/plugins/run/scriptWrapper/foo_2.scala | 18 +++++ .../run/scriptWrapper/foo_original_2.scala | 8 +++ .../run/scriptWrapper/plugin.properties | 1 + 7 files changed, 129 insertions(+), 2 deletions(-) create mode 100644 tests/plugins/run/scriptWrapper/Framework_1.scala create mode 100644 tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala create mode 100644 tests/plugins/run/scriptWrapper/Test_3.scala create mode 100644 tests/plugins/run/scriptWrapper/foo_2.scala create mode 100644 tests/plugins/run/scriptWrapper/foo_original_2.scala create mode 100644 tests/plugins/run/scriptWrapper/plugin.properties diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index a40c1ec1e5b2..5cd4f837b823 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -193,7 +193,7 @@ class BootstrappedOnlyCompilationTests { // 1. hack with absolute path for -Xplugin // 2. copy `pluginFile` to destination - def compileFilesInDir(dir: String): CompilationTest = { + def compileFilesInDir(dir: String, run: Boolean = false): CompilationTest = { val outDir = defaultOutputDir + "testPlugins/" val sourceDir = new java.io.File(dir) @@ -201,7 +201,10 @@ class BootstrappedOnlyCompilationTests { val targets = dirs.map { dir => val compileDir = createOutputDirsForDir(dir, sourceDir, outDir) Files.copy(dir.toPath.resolve(pluginFile), compileDir.toPath.resolve(pluginFile), StandardCopyOption.REPLACE_EXISTING) - val flags = TestFlags(withCompilerClasspath, noCheckOptions).and("-Xplugin:" + compileDir.getAbsolutePath) + val flags = { + val base = TestFlags(withCompilerClasspath, noCheckOptions).and("-Xplugin:" + compileDir.getAbsolutePath) + if run then base.withRunClasspath(withCompilerClasspath) else base + } SeparateCompilationSource("testPlugins", dir, flags, compileDir) } @@ -210,6 +213,7 @@ class BootstrappedOnlyCompilationTests { compileFilesInDir("tests/plugins/neg").checkExpectedErrors() compileDir("tests/plugins/custom/analyzer", withCompilerOptions.and("-Yretain-trees")).checkCompile() + compileFilesInDir("tests/plugins/run", run = true).checkRuns() } } diff --git a/tests/plugins/run/scriptWrapper/Framework_1.scala b/tests/plugins/run/scriptWrapper/Framework_1.scala new file mode 100644 index 000000000000..c8a15de8342b --- /dev/null +++ b/tests/plugins/run/scriptWrapper/Framework_1.scala @@ -0,0 +1,3 @@ +package framework + +class entrypoint extends scala.annotation.Annotation diff --git a/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala b/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala new file mode 100644 index 000000000000..888d5f95838d --- /dev/null +++ b/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala @@ -0,0 +1,68 @@ +package scriptWrapper + +import dotty.tools.dotc.* +import core.* +import Contexts.Context +import Contexts.ctx +import plugins.* +import ast.tpd +import util.SourceFile + +class LineNumberPlugin extends 
StandardPlugin { + val name: String = "linenumbers" + val description: String = "adjusts line numbers of script files" + + override def initialize(options: List[String])(using Context): List[PluginPhase] = FixLineNumbers() :: Nil +} + +// Loosely follows Mill linenumbers plugin (scan for marker with "original" source, adjust line numbers to match) +class FixLineNumbers extends PluginPhase { + + val codeMarker = "//USER_CODE_HERE" + + def phaseName: String = "fixLineNumbers" + override def runsAfter: Set[String] = Set("posttyper") + override def runsBefore: Set[String] = Set("pickler") + + override def transformUnit(tree: tpd.Tree)(using Context): tpd.Tree = { + val sourceContent = ctx.source.content() + val lines = new String(sourceContent).linesWithSeparators.toVector + val codeMarkerLine = lines.indexWhere(_.startsWith(codeMarker)) + + if codeMarkerLine < 0 then + tree + else + val adjustedFile = lines.collectFirst { + case s"//USER_SRC_FILE:./$file" => file.trim + }.getOrElse("") + + val adjustedSrc = ctx.source.file.container.lookupName(adjustedFile, directory = false) match + case null => + report.error(s"could not find file $adjustedFile", tree.sourcePos) + return tree + case file => + SourceFile(file, scala.io.Codec.UTF8) + + val userCodeOffset = ctx.source.lineToOffset(codeMarkerLine + 1) // lines.take(codeMarkerLine).map(_.length).sum + val lineMapper = LineMapper(codeMarkerLine, userCodeOffset, adjustedSrc) + lineMapper.transform(tree) + } + +} + +class LineMapper(markerLine: Int, userCodeOffset: Int, adjustedSrc: SourceFile) extends tpd.TreeMapWithPreciseStatContexts() { + + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = { + val tree0 = super.transform(tree) + val pos = tree0.sourcePos + if pos.exists && pos.start >= userCodeOffset then + val tree1 = tree0.cloneIn(adjustedSrc).withSpan(pos.span.shift(-userCodeOffset)) + // if tree1.show.toString == "???" then + // val pos1 = tree1.sourcePos + // sys.error(s"rewrote ??? 
at ${pos1.source}:${pos1.line + 1}:${pos1.column + 1} (sourced from ${markerLine + 2})") + tree1 + else + tree0 + } + +} diff --git a/tests/plugins/run/scriptWrapper/Test_3.scala b/tests/plugins/run/scriptWrapper/Test_3.scala new file mode 100644 index 000000000000..341af27ee433 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/Test_3.scala @@ -0,0 +1,25 @@ +@main def Test: Unit = { + val mainCls = Class.forName("foo_sc") + val mainMethod = mainCls.getMethod("main", classOf[Array[String]]) + val stackTrace: Array[String] = { + try + mainMethod.invoke(null, Array.empty[String]) + sys.error("Expected an exception") + catch + case e: java.lang.reflect.InvocationTargetException => + val cause = e.getCause + if cause != null then + cause.getStackTrace.map(_.toString) + else + throw e + } + + val expected = Set( + "foo_sc$.getRandom(foo_2.scala:3)", // adjusted line number (11 -> 3) + "foo_sc$.brokenRandom(foo_2.scala:5)", // adjusted line number (13 -> 5) + "foo_sc$.run(foo_2.scala:8)", // adjusted line number (16 -> 8) + ) + + val missing = expected -- stackTrace + assert(missing.isEmpty, s"Missing: $missing") +} diff --git a/tests/plugins/run/scriptWrapper/foo_2.scala b/tests/plugins/run/scriptWrapper/foo_2.scala new file mode 100644 index 000000000000..02e3f034e757 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/foo_2.scala @@ -0,0 +1,18 @@ +// generated code +// script: foo.sc +object foo_sc { +def main(args: Array[String]): Unit = { + run // assume some macro generates this by scanning for @entrypoint +} +//USER_SRC_FILE:./foo_original_2.scala +//USER_CODE_HERE +import framework.* + +def getRandom: Int = brokenRandom // LINE 3; + +def brokenRandom: Int = ??? // LINE 5; + +@entrypoint +def run = println("Hello, here is a random number: " + getRandom) // LINE 8; +//END_USER_CODE_HERE +} diff --git a/tests/plugins/run/scriptWrapper/foo_original_2.scala b/tests/plugins/run/scriptWrapper/foo_original_2.scala new file mode 100644 index 000000000000..162ddd1724a1 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/foo_original_2.scala @@ -0,0 +1,8 @@ +import framework.* + +def getRandom: Int = brokenRandom // LINE 3; + +def brokenRandom: Int = ??? 
// LINE 5; + +@entrypoint +def run = println("Hello, here is a random number: " + getRandom) // LINE 8; diff --git a/tests/plugins/run/scriptWrapper/plugin.properties b/tests/plugins/run/scriptWrapper/plugin.properties new file mode 100644 index 000000000000..f1fc6067e611 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/plugin.properties @@ -0,0 +1 @@ +pluginClass=scriptWrapper.LineNumberPlugin From 44ecf4beb7b7b748ccf58d2bb3329ec87eb61d2b Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 14 Oct 2024 18:12:41 +0200 Subject: [PATCH 655/827] dont introduce new API --- compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala | 2 +- compiler/src/dotty/tools/dotc/util/SourcePosition.scala | 5 ----- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 38e5617fc3d7..4f4caf36d92a 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -626,7 +626,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { val nr = val sourcePos = tree.sourcePos ( - if sourcePos.exists then sourcePos.finalPosition.line + if sourcePos.exists then sourcePos.source.positionInUltimateSource(sourcePos).line else ctx.source.offsetToLine(tree.span.point) // fallback ) + 1 diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala index 384d2f1fb2f3..a7358755043c 100644 --- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala +++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala @@ -79,11 +79,6 @@ extends SrcPos, interfaces.SourcePosition, Showable { rec(this) } - def finalPosition: SourcePosition = { - source.positionInUltimateSource(this) - } - - override def toString: String = s"${if (source.exists) source.file.toString else "(no source)"}:$span" From b94160bceeec20950875644f17593f169208e07e Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Tue, 15 Oct 2024 16:47:43 +0200 Subject: [PATCH 656/827] Fix relative docs in `givens.md` (#21774) Previously were referring to non-exisitng file in the outer directory. [skip ci] --- docs/_docs/reference/contextual/givens.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index 9c074da0da6e..2b360dfc7af0 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -87,7 +87,7 @@ time it is accessed. If the given is a mere alias to some immutable value, the g ## Syntax -Here is the full syntax for given instances. Some of these forms of givens are explained in a separate page: [Other Forms of Givens](../more-givens.md). +Here is the full syntax for given instances. Some of these forms of givens are explained in a separate page: [Other Forms of Givens](./more-givens.md). ```ebnf TmplDef ::= ... | 'given' GivenDef @@ -113,4 +113,4 @@ A given instance starts with the reserved keyword `given`, which is followed by - A _structural given_ implements one or more class constructors with a list of member definitions in a template body. -**Note** Parts of the given syntax have changed in Scala 3.6. The original syntax from Scala 3.0 on is described in a separate page [Previous Given Syntax](../previous-givens.md). The original syntax is still supported for now but will be deprecated and phased out over time. 
+**Note** Parts of the given syntax have changed in Scala 3.6. The original syntax from Scala 3.0 on is described in a separate page [Previous Given Syntax](./previous-givens.md). The original syntax is still supported for now but will be deprecated and phased out over time. From e546f0779905e8a2ca8df68139b808ec89213008 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Tue, 15 Oct 2024 16:58:30 +0200 Subject: [PATCH 657/827] Add so68877939.scala --- tests/pos/so68877939.scala | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 tests/pos/so68877939.scala diff --git a/tests/pos/so68877939.scala b/tests/pos/so68877939.scala new file mode 100644 index 000000000000..41c5ee8ac659 --- /dev/null +++ b/tests/pos/so68877939.scala @@ -0,0 +1,15 @@ +abstract class Quantity[A <: Quantity[A]] +sealed trait UnitOfMeasure[A <: Quantity[A]] + +class Time extends Quantity[Time] +object Minutes extends UnitOfMeasure[Time] + +class PowerRamp extends Quantity[PowerRamp] +object KilowattsPerHour extends UnitOfMeasure[PowerRamp] + +type Test[X <: UnitOfMeasure[?]] = X match + case UnitOfMeasure[t] => t + +@main def main = + summon[Test[Minutes.type] =:= Time] + summon[Test[KilowattsPerHour.type] =:= PowerRamp] From f6bfa0afddd35bc965e5930f1e050e293cf9dfe1 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 15 Oct 2024 11:09:50 -0700 Subject: [PATCH 658/827] Scala 2.13.15 (was .14) (#21648) Co-authored-by: Wojciech Mazur --- .../community-projects/scala-collection-compat | 2 +- community-build/community-projects/stdLib213 | 2 +- .../src/scala/dotty/communitybuild/projects.scala | 4 ++-- project/Build.scala | 10 +++++----- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/community-build/community-projects/scala-collection-compat b/community-build/community-projects/scala-collection-compat index 2bf3fea914b2..c9d3a8b160a3 160000 --- a/community-build/community-projects/scala-collection-compat +++ b/community-build/community-projects/scala-collection-compat @@ -1 +1 @@ -Subproject commit 2bf3fea914b2f13e4805b3e7b519bdf0e595e4c9 +Subproject commit c9d3a8b160a35c9915816dd84a1063e18db4a84a diff --git a/community-build/community-projects/stdLib213 b/community-build/community-projects/stdLib213 index fcc67cd56c67..b6f70d2347f2 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit fcc67cd56c67851bf31019ec25ccb09d08b9561b +Subproject commit b6f70d2347f2857695e5c0fe544b0f921544b02a diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index e8d6c3d2894d..31c1bb95743c 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -476,8 +476,8 @@ object projects: lazy val scalaCollectionCompat = SbtCommunityProject( project = "scala-collection-compat", - sbtTestCommand = "compat30/test", - sbtPublishCommand = "compat30/publishLocal", + sbtTestCommand = "compat3/test", + sbtPublishCommand = "compat3/publishLocal", ) lazy val scalaJava8Compat = SbtCommunityProject( diff --git a/project/Build.scala b/project/Build.scala index 9b4e24f37c75..5b04d623f122 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -149,8 +149,8 @@ object Build { * scala-library. 
*/ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.14" - case Bootstrapped => "2.13.14" + case NonBootstrapped => "2.13.15" + case Bootstrapped => "2.13.15" } /** Version of the scala-library for which we will generate TASTy. @@ -160,7 +160,7 @@ object Build { * We can use nightly versions to tests the future compatibility in development. * Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang */ - val stdlibBootstrappedVersion = "2.13.14" + val stdlibBootstrappedVersion = "2.13.15" val dottyOrganization = "org.scala-lang" val dottyGithubUrl = "https://github.com/scala/scala3" @@ -1426,7 +1426,7 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings lazy val presentationCompilerSettings = { - val mtagsVersion = "1.3.4" + val mtagsVersion = "1.3.5" Seq( libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", @@ -1436,7 +1436,7 @@ object Build { .exclude("org.eclipse.lsp4j","org.eclipse.lsp4j.jsonrpc"), "org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.20.1", ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.14" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.15" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings From af9ee8ea8f232e08d5248d088fae00949f608f36 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Wed, 16 Oct 2024 10:40:02 +0200 Subject: [PATCH 659/827] Fix tupleTypeFromSeq for XXL tuples --- library/src/scala/quoted/Expr.scala | 2 +- tests/pos/i21779/Macro_1.scala | 36 +++++++++++++++++++++++++++++ tests/pos/i21779/Test_2.scala | 3 +++ 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21779/Macro_1.scala create mode 100644 tests/pos/i21779/Test_2.scala diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala index f1045e5bdaca..d1385a0193d6 100644 --- a/library/src/scala/quoted/Expr.scala +++ b/library/src/scala/quoted/Expr.scala @@ -256,7 +256,7 @@ object Expr { private def tupleTypeFromSeq(seq: Seq[Expr[Any]])(using Quotes): quotes.reflect.TypeRepr = import quotes.reflect.* val consRef = Symbol.classSymbol("scala.*:").typeRef - seq.foldLeft(TypeRepr.of[EmptyTuple]) { (ts, expr) => + seq.foldRight(TypeRepr.of[EmptyTuple]) { (expr, ts) => AppliedType(consRef, expr.asTerm.tpe :: ts :: Nil) } diff --git a/tests/pos/i21779/Macro_1.scala b/tests/pos/i21779/Macro_1.scala new file mode 100644 index 000000000000..d40f5e28de5e --- /dev/null +++ b/tests/pos/i21779/Macro_1.scala @@ -0,0 +1,36 @@ +import scala.quoted.* + +object Macro: + transparent inline def tupleXxl: Tuple = + ${tupleXxlExpr} + + def tupleXxlExpr(using Quotes) = + import quotes.reflect.* + Expr.ofTupleFromSeq( + Seq( + Expr("a"), + Expr(2), + Expr(3), + Expr(4), + Expr(5), + Expr(6), + Expr(7), + Expr(8), + Expr(9), + Expr(10), + Expr(11), + Expr(12), + Expr(13), + Expr(14), + Expr(15), + Expr(16), + Expr(17), + Expr(18), + Expr(19), + Expr(20), + Expr(21), + Expr(22), + Expr(23), + ) + ) + diff --git a/tests/pos/i21779/Test_2.scala b/tests/pos/i21779/Test_2.scala new file mode 100644 index 000000000000..e614e3ca564d --- /dev/null +++ b/tests/pos/i21779/Test_2.scala @@ -0,0 +1,3 @@ +object Test: + val result: ("a", 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) = + Macro.tupleXxl From 5a68b16cc6632504ceed509772ef0e4aad8d5025 Mon Sep 17 00:00:00 
2001
From: Kacper Korban
Date: Wed, 16 Oct 2024 14:20:13 +0200
Subject: [PATCH 660/827] Add a docs page for betterFors experimental feature

---
 .../reference/experimental/better-fors.md | 79 +++++++++++++++++++
 1 file changed, 79 insertions(+)
 create mode 100644 docs/_docs/reference/experimental/better-fors.md

diff --git a/docs/_docs/reference/experimental/better-fors.md b/docs/_docs/reference/experimental/better-fors.md
new file mode 100644
index 000000000000..7add425caf51
--- /dev/null
+++ b/docs/_docs/reference/experimental/better-fors.md
@@ -0,0 +1,79 @@
+---
+layout: doc-page
+title: "Better fors"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/better-fors.html
+---
+
+The `betterFors` language extension improves the usability of `for`-comprehensions.
+
+The extension is enabled by the language import `import scala.language.experimental.betterFors` or by setting the command line option `-language:experimental.betterFors`.
+
+The biggest user-facing change is the new ability to start `for`-comprehensions with aliases. This means that the following previously invalid code is now valid:
+
+```scala
+for
+  as = List(1, 2, 3)
+  bs = List(4, 5, 6)
+  a <- as
+  b <- bs
+yield a + b
+```
+
+The desugaring of this code is the same as if the aliases were introduced with `val`:
+
+```scala
+val as = List(1, 2, 3)
+val bs = List(4, 5, 6)
+for
+  a <- as
+  b <- bs
+yield a + b
+```
+
+Additionally, this extension changes the way `for`-comprehensions are desugared. The desugaring is now done in a more intuitive way and the desugared code can be more efficient, because it avoids some unnecessary method calls. There are two main changes in the desugaring:
+
+1. **Simpler Desugaring for Pure Aliases**:
+   When an alias is not followed by a guard, the desugaring is simplified. The last generator and the aliases don't have to be wrapped in a tuple, and instead the aliases are simply introduced as local variables in a block with the next generator.
+   **Current Desugaring**:
+   ```scala
+   for {
+     a <- doSth(arg)
+     b = a
+   } yield a + b
+   ```
+   Desugars to:
+   ```scala
+   doSth(arg).map { a =>
+     val b = a
+     (a, b)
+   }.map { case (a, b) =>
+     a + b
+   }
+   ```
+   **New Desugaring**:
+   ```scala
+   doSth(arg).map { a =>
+     val b = a
+     a + b
+   }
+   ```
+   This change makes the desugaring more intuitive and avoids unnecessary `map` calls when an alias is not followed by a guard.
+
+2. **Avoiding Redundant `map` Calls**:
+   When the result of the `for`-comprehension is the same expression as the last generator pattern, the desugaring avoids an unnecessary `map` call, but the equality of the last pattern and the result has to be checkable syntactically, so it is either a variable or a tuple of variables.
+   **Current Desugaring**:
+   ```scala
+   for {
+     a <- List(1, 2, 3)
+   } yield a
+   ```
+   Desugars to:
+   ```scala
+   List(1, 2, 3).map(a => a)
+   ```
+   **New Desugaring**:
+   ```scala
+   List(1, 2, 3)
+   ```
+
+For more details on the desugaring scheme see the comment in [`Desugar.scala#makeFor`](https://github.com/scala/scala3/blob/main/compiler/src/dotty/tools/dotc/ast/Desugar.scala#L1928).
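As a further sketch combining both rules (illustrative only, not taken from the patch above, and assuming the extension is enabled), a comprehension that starts with aliases and ends in a pure `yield` of the last generator pattern desugars to a single `flatMap` with no trailing `map`:

```scala
import scala.language.experimental.betterFors

// Leading aliases are allowed, and `yield b` is syntactically equal to the
// last generator pattern, so no closing `.map(b => b)` is generated.
def example: List[Int] =
  for
    as = List(1, 2, 3)
    bs = List(4, 5, 6)
    a <- as
    b <- bs
  yield b

// Expected desugaring under the two rules described above (sketch):
//   val as = List(1, 2, 3)
//   val bs = List(4, 5, 6)
//   as.flatMap(a => bs)
```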
\ No newline at end of file From a4387dbc8383f76a7be66b03c4598fcc16741727 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pa=C5=82ka?= Date: Wed, 16 Oct 2024 15:55:59 +0200 Subject: [PATCH 661/827] Apply implicit conversion from derived Conversion instance defined as implicit rather than given --- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- tests/pos/i21757.scala | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21757.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 3c2f025dc095..c42b196b8dfb 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1176,7 +1176,7 @@ trait Implicits: case _ => info.derivesFrom(defn.ConversionClass) def tryConversion(using Context) = { val untpdConv = - if ref.symbol.is(Given) && producesConversion(ref.symbol.info) then + if ref.symbol.isOneOf(GivenOrImplicit) && producesConversion(ref.symbol.info) then untpd.Select( untpd.TypedSplice( adapt(generated, diff --git a/tests/pos/i21757.scala b/tests/pos/i21757.scala new file mode 100644 index 000000000000..7595540c4b58 --- /dev/null +++ b/tests/pos/i21757.scala @@ -0,0 +1,33 @@ +object ConversionChain { + + class X(val value: Int) + + class Y(val x: X) + + class Z(val y: Y) + + trait Conv[A, B] extends Conversion[A, B] + + given xy: Conv[X, Y] = { (x: X) => new Y(x) } + + given yz: Conv[Y, Z] = { (y: Y) => new Z(y) } + + object ConvUtils { + implicit def hypotheticalSyllogism[A, B, C]( // implicit def instead of given + using + ab: Conv[A, B], + bc: Conv[B, C] + ): Conv[A, C] = { + + new Conv[A, C] { + def apply(a: A): C = bc(ab(a)) + } + } + } + import ConvUtils.hypotheticalSyllogism + + def test(): Unit = { + val x = new X(42) + val z: Z = x + } +} From 7f70ed3a3d100b697d18b541a0ec09ca788b826d Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Wed, 16 Oct 2024 18:42:12 +0200 Subject: [PATCH 662/827] Add the macro type-print test to an run-macros-scala2-library-tasty.blacklist --- compiler/test/dotc/run-macros-scala2-library-tasty.blacklist | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist b/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist index 63a6e2cee345..6fdfccf7646c 100644 --- a/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist +++ b/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist @@ -2,3 +2,4 @@ tasty-extractors-1 tasty-extractors-2 tasty-extractors-types +type-print From 116f5fe7dab5e5088fd43ad64208e0f0b01c146b Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 16 Oct 2024 20:35:00 +0100 Subject: [PATCH 663/827] Add jzon repro --- tests/pos/jzon/SealedTrait.scala | 132 ++ tests/pos/jzon/encoders.scala | 2932 ++++++++++++++++++++++++++++++ 2 files changed, 3064 insertions(+) create mode 100644 tests/pos/jzon/SealedTrait.scala create mode 100644 tests/pos/jzon/encoders.scala diff --git a/tests/pos/jzon/SealedTrait.scala b/tests/pos/jzon/SealedTrait.scala new file mode 100644 index 000000000000..e83a68a331ef --- /dev/null +++ b/tests/pos/jzon/SealedTrait.scala @@ -0,0 +1,132 @@ +trait Shape[T] +sealed trait SealedTrait[T] extends Shape[T]: + def value: T +sealed trait SealedTrait1[A, A1 <: A] extends SealedTrait[A] +sealed trait SealedTrait2[A, A1 <: A, A2 <: A] extends SealedTrait[A] +sealed trait SealedTrait3[A, A1 <: A, A2 <: A, A3 <: A] extends SealedTrait[A] +sealed trait SealedTrait4[A, A1 
<: A, A2 <: A, A3 <: A, A4 <: A] extends SealedTrait[A] +sealed trait SealedTrait5[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A] extends SealedTrait[A] +sealed trait SealedTrait6[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A] extends SealedTrait[A] +sealed trait SealedTrait7[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A] extends SealedTrait[A] +sealed trait SealedTrait8[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A] extends SealedTrait[A] +sealed trait SealedTrait9[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A] extends SealedTrait[A] +sealed trait SealedTrait10[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A] extends SealedTrait[A] +sealed trait SealedTrait11[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A] extends SealedTrait[A] +sealed trait SealedTrait12[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A] extends SealedTrait[A] +sealed trait SealedTrait13[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A] extends SealedTrait[A] +sealed trait SealedTrait14[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A] extends SealedTrait[A] +sealed trait SealedTrait15[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A] extends SealedTrait[A] +sealed trait SealedTrait16[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A] extends SealedTrait[A] +sealed trait SealedTrait17[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A] extends SealedTrait[A] +sealed trait SealedTrait18[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A] extends SealedTrait[A] +sealed trait SealedTrait19[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A] extends SealedTrait[A] +sealed trait SealedTrait20[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A] extends SealedTrait[A] +sealed trait SealedTrait21[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A] extends SealedTrait[A] +sealed trait SealedTrait22[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A] extends SealedTrait[A] +sealed trait SealedTrait23[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A] 
extends SealedTrait[A] +sealed trait SealedTrait24[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A] extends SealedTrait[A] +sealed trait SealedTrait25[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A] extends SealedTrait[A] +sealed trait SealedTrait26[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A] extends SealedTrait[A] +sealed trait SealedTrait27[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A] extends SealedTrait[A] +sealed trait SealedTrait28[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A] extends SealedTrait[A] +sealed trait SealedTrait29[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A] extends SealedTrait[A] +sealed trait SealedTrait30[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A] extends SealedTrait[A] +sealed trait SealedTrait31[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A] extends SealedTrait[A] +sealed trait SealedTrait32[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A] extends SealedTrait[A] +sealed trait SealedTrait33[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A] extends SealedTrait[A] +sealed trait SealedTrait34[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, 
A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A] extends SealedTrait[A] +sealed trait SealedTrait35[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A] extends SealedTrait[A] +sealed trait SealedTrait36[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A] extends SealedTrait[A] +sealed trait SealedTrait37[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A] extends SealedTrait[A] +sealed trait SealedTrait38[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A] extends SealedTrait[A] +sealed trait SealedTrait39[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A] extends SealedTrait[A] +sealed trait SealedTrait40[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A] extends SealedTrait[A] +sealed trait SealedTrait41[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A] extends SealedTrait[A] +sealed trait SealedTrait42[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 
<: A, A42 <: A] extends SealedTrait[A] +sealed trait SealedTrait43[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A] extends SealedTrait[A] +sealed trait SealedTrait44[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A] extends SealedTrait[A] +sealed trait SealedTrait45[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A] extends SealedTrait[A] +sealed trait SealedTrait46[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A] extends SealedTrait[A] +sealed trait SealedTrait47[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A] extends SealedTrait[A] +sealed trait SealedTrait48[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A] extends SealedTrait[A] +sealed trait SealedTrait49[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A] 
extends SealedTrait[A] +sealed trait SealedTrait50[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A] extends SealedTrait[A] +sealed trait SealedTrait51[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A] extends SealedTrait[A] +sealed trait SealedTrait52[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A] extends SealedTrait[A] +sealed trait SealedTrait53[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A] extends SealedTrait[A] +sealed trait SealedTrait54[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A] extends SealedTrait[A] +sealed trait SealedTrait55[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A] extends SealedTrait[A] +sealed trait SealedTrait56[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, 
A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A] extends SealedTrait[A] +sealed trait SealedTrait57[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A] extends SealedTrait[A] +sealed trait SealedTrait58[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A] extends SealedTrait[A] +sealed trait SealedTrait59[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A] extends SealedTrait[A] +sealed trait SealedTrait60[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A] extends SealedTrait[A] +sealed trait SealedTrait61[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, 
A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A] extends SealedTrait[A] +sealed trait SealedTrait62[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A] extends SealedTrait[A] +sealed trait SealedTrait63[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A] extends SealedTrait[A] +sealed trait SealedTrait64[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A] extends SealedTrait[A] +object SealedTrait: + final case class _1[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A1) extends SealedTrait1[A, A1], SealedTrait2[A, A1, A2], SealedTrait3[A, A1, A2, A3], SealedTrait4[A, A1, A2, A3, A4], SealedTrait5[A, A1, A2, A3, A4, A5], SealedTrait6[A, A1, A2, A3, A4, A5, A6], SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, 
A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, 
A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, 
A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _2[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 
<: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A2) extends SealedTrait2[A, A1, A2], SealedTrait3[A, A1, A2, A3], SealedTrait4[A, A1, A2, A3, A4], SealedTrait5[A, A1, A2, A3, A4, A5], SealedTrait6[A, A1, A2, A3, A4, A5, A6], SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, 
A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, 
A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, 
A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _3[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A3) extends SealedTrait3[A, A1, A2, A3], SealedTrait4[A, A1, A2, A3, A4], SealedTrait5[A, A1, A2, A3, A4, A5], SealedTrait6[A, A1, A2, A3, A4, A5, A6], SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, 
A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], 
SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, 
A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _4[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A4) extends SealedTrait4[A, A1, A2, A3, A4], SealedTrait5[A, A1, A2, A3, A4, A5], SealedTrait6[A, A1, A2, A3, A4, A5, A6], SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, 
A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, 
A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, 
A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _5[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A5) extends SealedTrait5[A, A1, A2, A3, A4, A5], SealedTrait6[A, A1, A2, A3, A4, A5, A6], SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, 
A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, 
A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, 
A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _6[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 
<: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A6) extends SealedTrait6[A, A1, A2, A3, A4, A5, A6], SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, 
A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, 
A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _7[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A7) extends SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, 
A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], 
SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, 
A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _8[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A8) extends SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], 
SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, 
A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _9[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A9) extends SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, 
A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, 
A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, 
A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _10[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, 
A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A10) extends SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, 
A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, 
A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, 
A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _11[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A11) extends SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, 
A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, 
A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, 
A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _12[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A12) extends SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, 
A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, 
A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, 
A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _13[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A13) extends SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, 
A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, 
A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, 
A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _14[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A14) extends SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, 
A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, 
A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, 
A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _15[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A15) extends SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], 
SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], 
SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, 
A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _16[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A16) extends SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], SealedTrait17[A, A1, A2, A3, A4, A5, A6, 
A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, 
A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, 
A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _17[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: 
A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A17) extends SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], 
SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, 
A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _18[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: 
A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A18) extends SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, 
A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, 
A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _19[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: 
A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A19) extends SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, 
A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, 
A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _20[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, 
A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A20) extends SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, 
A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _21[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: 
A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A21) extends SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, 
A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, 
A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _22[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A22) extends SealedTrait22[A, A1, 
A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, 
A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, 
A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _23[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A23) extends SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, 
A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, 
A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, 
A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _24[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A24) extends SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], 
SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, 
A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, 
A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _25[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A25) extends SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, 
A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, 
A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, 
A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _26[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A26) extends SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, 
A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, 
A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _27[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: 
A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A27) extends SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, 
A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, 
A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _28[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A28) extends SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], 
SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], 
SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _29[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A29) extends SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, 
A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, 
A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + 
final case class _30[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A30) extends SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, 
A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, 
A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _31[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A31) extends SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, 
A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, 
A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _32[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A32) extends SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], 
SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, 
A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _33[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A33) extends SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, 
A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, 
A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, 
A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _34[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A34) extends SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, 
A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _35[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A35) extends SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, 
A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, 
A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _36[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: 
A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A36) extends SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, 
A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, 
A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _37[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A37) extends SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, 
A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, 
A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _38[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A38) extends SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, 
A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], 
SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _39[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A39) extends SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, 
A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, 
A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _40[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A40) extends SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, 
A35, A36, A37, A38, A39, A40], SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, 
A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _41[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: 
A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A41) extends SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, 
A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _42[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, 
A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A42) extends SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, 
A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _43[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, 
A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A43) extends SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, 
A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _44[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A44) extends SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, 
A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, 
A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _45[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A45) extends SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, 
A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, 
A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _46[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A46) extends SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, 
A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, 
A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _47[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A47) extends SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], 
SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _48[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A48) extends SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, 
A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], 
SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _49[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A49) extends SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, 
A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _50[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A50) extends SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, 
A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, 
A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _51[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A51) extends SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, 
A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _52[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A52) extends SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, 
A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _53[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 
<: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A53) extends SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, 
A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _54[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A54) extends SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, 
A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _55[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A55) extends SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, 
A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _56[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A56) extends SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, 
A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _57[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A57) extends SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, 
A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _58[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A58) extends SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _59[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 
<: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A59) extends SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _60[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A60) extends SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, 
A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _61[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A61) extends SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _62[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 
<: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A62) extends SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _63[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A63) extends SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] + final case class _64[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](value: A64) extends SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64] diff --git a/tests/pos/jzon/encoders.scala b/tests/pos/jzon/encoders.scala new file mode 100644 index 000000000000..eded37314d50 --- /dev/null +++ 
b/tests/pos/jzon/encoders.scala @@ -0,0 +1,2932 @@ +trait Meta[T] +trait Lazy[T]{ + lazy val value: T = ??? +} +trait Encoder[A] { self => + def unsafeEncode(a: A, indent: Option[Int], out: java.io.Writer): Unit = ??? +} +abstract class SealedTraitEncoder[A, ST <: SealedTrait[A]](subs: Array[Meta[?]]) extends Encoder[ST] { + def unsafeEncodeValue(st: ST, indent: Option[Int], out: java.io.Writer): Unit + final override def unsafeEncode(st: ST, indent: Option[Int], out: java.io.Writer): Unit = ??? +} +abstract class SealedTraitDiscrimEncoder[A, ST <: SealedTrait[A]]( + subs: Array[Meta[?]], + hintfield: String +) extends Encoder[ST] { + def unsafeEncodeValue(st: ST, indent: Option[Int], out: java.io.Writer): Unit + final override def unsafeEncode(st: ST, indent: Option[Int], out: java.io.Writer): Unit = ??? +} + +object Encoders{ + implicit def sealedtrait1[A, A1 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]]): Encoder[SealedTrait1[A, A1]] = { + def work(st: SealedTrait1[A, A1], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait1[A, A1]](Array(M1)) { + override def unsafeEncodeValue(st: SealedTrait1[A, A1], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait1[A, A1]](Array(M1), hintfield) { + override def unsafeEncodeValue(st: SealedTrait1[A, A1], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait2[A, A1 <: A, A2 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]]): Encoder[SealedTrait2[A, A1, A2]] = { + def work(st: SealedTrait2[A, A1, A2], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait2[A, A1, A2]](Array(M1, M2)) { + override def unsafeEncodeValue(st: SealedTrait2[A, A1, A2], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait2[A, A1, A2]](Array(M1, M2), hintfield) { + override def unsafeEncodeValue(st: SealedTrait2[A, A1, A2], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait3[A, A1 <: A, A2 <: A, A3 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]]): Encoder[SealedTrait3[A, A1, A2, A3]] = { + def work(st: SealedTrait3[A, A1, A2, A3], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait3[A, A1, A2, A3]](Array(M1, M2, M3)) { + override def unsafeEncodeValue(st: SealedTrait3[A, A1, A2, A3], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait3[A, A1, A2, A3]](Array(M1, M2, M3), hintfield) { + override def unsafeEncodeValue(st: SealedTrait3[A, A1, A2, A3], indent: 
Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait4[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]]): Encoder[SealedTrait4[A, A1, A2, A3, A4]] = { + def work(st: SealedTrait4[A, A1, A2, A3, A4], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait4[A, A1, A2, A3, A4]](Array(M1, M2, M3, M4)) { + override def unsafeEncodeValue(st: SealedTrait4[A, A1, A2, A3, A4], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait4[A, A1, A2, A3, A4]](Array(M1, M2, M3, M4), hintfield) { + override def unsafeEncodeValue(st: SealedTrait4[A, A1, A2, A3, A4], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait5[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]]): Encoder[SealedTrait5[A, A1, A2, A3, A4, A5]] = { + def work(st: SealedTrait5[A, A1, A2, A3, A4, A5], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait5[A, A1, A2, A3, A4, A5]](Array(M1, M2, M3, M4, M5)) { + override def unsafeEncodeValue(st: SealedTrait5[A, A1, A2, A3, A4, A5], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait5[A, A1, A2, A3, A4, A5]](Array(M1, M2, M3, M4, M5), hintfield) { + override def unsafeEncodeValue(st: SealedTrait5[A, A1, A2, A3, A4, A5], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait6[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]]): Encoder[SealedTrait6[A, A1, A2, A3, A4, A5, A6]] = { + def work(st: SealedTrait6[A, A1, A2, A3, A4, A5, A6], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + } 
+ Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait6[A, A1, A2, A3, A4, A5, A6]](Array(M1, M2, M3, M4, M5, M6)) { + override def unsafeEncodeValue(st: SealedTrait6[A, A1, A2, A3, A4, A5, A6], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait6[A, A1, A2, A3, A4, A5, A6]](Array(M1, M2, M3, M4, M5, M6), hintfield) { + override def unsafeEncodeValue(st: SealedTrait6[A, A1, A2, A3, A4, A5, A6], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait7[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]]): Encoder[SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7]] = { + def work(st: SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7]](Array(M1, M2, M3, M4, M5, M6, M7)) { + override def unsafeEncodeValue(st: SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7]](Array(M1, M2, M3, M4, M5, M6, M7), hintfield) { + override def unsafeEncodeValue(st: SealedTrait7[A, A1, A2, A3, A4, A5, A6, A7], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait8[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]]): Encoder[SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8]] = { + def work(st: SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8]](Array(M1, M2, M3, M4, M5, M6, M7, M8)) { 
+ override def unsafeEncodeValue(st: SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8]](Array(M1, M2, M3, M4, M5, M6, M7, M8), hintfield) { + override def unsafeEncodeValue(st: SealedTrait8[A, A1, A2, A3, A4, A5, A6, A7, A8], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait9[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]]): Encoder[SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9]] = { + def work(st: SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9)) { + override def unsafeEncodeValue(st: SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9), hintfield) { + override def unsafeEncodeValue(st: SealedTrait9[A, A1, A2, A3, A4, A5, A6, A7, A8, A9], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait10[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]]): Encoder[SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10]] = { + def work(st: SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + 
case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10)) { + override def unsafeEncodeValue(st: SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10), hintfield) { + override def unsafeEncodeValue(st: SealedTrait10[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait11[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]]): Encoder[SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11]] = { + def work(st: SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11)) { + override def unsafeEncodeValue(st: SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11), hintfield) { + override def unsafeEncodeValue(st: SealedTrait11[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait12[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], 
M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]]): Encoder[SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12]] = { + def work(st: SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12)) { + override def unsafeEncodeValue(st: SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12), hintfield) { + override def unsafeEncodeValue(st: SealedTrait12[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait13[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]]): Encoder[SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13]] = { + def work(st: SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case 
SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13)) { + override def unsafeEncodeValue(st: SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13), hintfield) { + override def unsafeEncodeValue(st: SealedTrait13[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait14[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]]): Encoder[SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14]] = { + def work(st: SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14)) { + override def unsafeEncodeValue(st: SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait14[A, A1, A2, A3, A4, A5, A6, 
A7, A8, A9, A10, A11, A12, A13, A14]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14), hintfield) { + override def unsafeEncodeValue(st: SealedTrait14[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait15[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]]): Encoder[SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15]] = { + def work(st: SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15)) { + override def unsafeEncodeValue(st: SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15), hintfield) { + override def unsafeEncodeValue(st: SealedTrait15[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait16[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: 
Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]]): Encoder[SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16]] = { + def work(st: SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16)) { + override def unsafeEncodeValue(st: SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16), hintfield) { + override def unsafeEncodeValue(st: SealedTrait16[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait17[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: 
Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]]): Encoder[SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17]] = { + def work(st: SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17)) { + override def unsafeEncodeValue(st: SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17), hintfield) { + override def unsafeEncodeValue(st: SealedTrait17[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait18[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]]): Encoder[SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18]] = { + def work(st: SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], 
indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18)) { + override def unsafeEncodeValue(st: SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18), hintfield) { + override def unsafeEncodeValue(st: SealedTrait18[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait19[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]]): Encoder[SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19]] = { + def work(st: SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, 
indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19)) { + override def unsafeEncodeValue(st: SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19), hintfield) { + override def unsafeEncodeValue(st: SealedTrait19[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait20[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]]): Encoder[SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20]] = { + def work(st: SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], indent: Option[Int], out: java.io.Writer): Unit = st match { + case 
SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20)) { + override def unsafeEncodeValue(st: SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20), hintfield) { + override def unsafeEncodeValue(st: SealedTrait20[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait21[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]]): Encoder[SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21]] = 
{ + def work(st: SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21)) { + override def unsafeEncodeValue(st: SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21), hintfield) { + override def unsafeEncodeValue(st: SealedTrait21[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait22[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: 
Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]]): Encoder[SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22]] = { + def work(st: SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22)) { + override def unsafeEncodeValue(st: SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22), hintfield) { + override def unsafeEncodeValue(st: SealedTrait22[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait23[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: 
Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]]): Encoder[SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23]] = { + def work(st: SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23)) { + override def unsafeEncodeValue(st: SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait23[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23), hintfield) { + override def unsafeEncodeValue(st: SealedTrait23[A, A1, A2, A3, A4, 
A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait24[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]]): Encoder[SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24]] = { + def work(st: SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, 
A21, A22, A23, A24]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24)) { + override def unsafeEncodeValue(st: SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24), hintfield) { + override def unsafeEncodeValue(st: SealedTrait24[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait25[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]]): Encoder[SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25]] = { + def work(st: SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) 
=> A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25)) { + override def unsafeEncodeValue(st: SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25), hintfield) { + override def unsafeEncodeValue(st: SealedTrait25[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait26[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]]): Encoder[SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26]] = { + def work(st: SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], indent: Option[Int], out: 
java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26)) { + override def unsafeEncodeValue(st: SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26), hintfield) { + override def unsafeEncodeValue(st: SealedTrait26[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait27[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], 
M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]]): Encoder[SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27]] = { + def work(st: SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27)) { + override def unsafeEncodeValue(st: SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], indent: 
Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27), hintfield) { + override def unsafeEncodeValue(st: SealedTrait27[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait28[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]]): Encoder[SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28]] = { + def work(st: SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => 
A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28)) { + override def unsafeEncodeValue(st: SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28), hintfield) { + override def unsafeEncodeValue(st: SealedTrait28[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait29[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: 
Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]]): Encoder[SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29]] = { + def work(st: SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29)) { + override def unsafeEncodeValue(st: SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29), hintfield) { + override def unsafeEncodeValue(st: SealedTrait29[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, 
A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait30[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]]): Encoder[SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30]] = { + def work(st: SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => 
A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30)) { + override def unsafeEncodeValue(st: SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30), hintfield) { + override def unsafeEncodeValue(st: SealedTrait30[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait31[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]]): Encoder[SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, 
A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31]] = { + def work(st: SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31)) { + override def unsafeEncodeValue(st: SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait31[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31), hintfield) { + override def unsafeEncodeValue(st: SealedTrait31[A, 
A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait32[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]]): Encoder[SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32]] = { + def work(st: SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => 
A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32)) { + override def unsafeEncodeValue(st: SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32), hintfield) { + override def unsafeEncodeValue(st: SealedTrait32[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait33[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], 
A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]]): Encoder[SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33]] = { + def work(st: SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33)) { + override def unsafeEncodeValue(st: 
SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33), hintfield) { + override def unsafeEncodeValue(st: SealedTrait33[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait34[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]]): Encoder[SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34]] = { + def work(st: SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case 
SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34)) { + override def unsafeEncodeValue(st: SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34), hintfield) { + override def unsafeEncodeValue(st: SealedTrait34[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait35[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 
<: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]]): Encoder[SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35]] = { + def work(st: SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => 
A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35)) { + override def unsafeEncodeValue(st: SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35), hintfield) { + override def unsafeEncodeValue(st: SealedTrait35[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait36[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: 
Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]]): Encoder[SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36]] = { + def work(st: SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => 
new SealedTraitEncoder[A, SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36)) { + override def unsafeEncodeValue(st: SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36), hintfield) { + override def unsafeEncodeValue(st: SealedTrait36[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait37[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]]): Encoder[SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37]] = { + 
def work(st: SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37)) { + override def unsafeEncodeValue(st: SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => 
new SealedTraitDiscrimEncoder[A, SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37), hintfield) { + override def unsafeEncodeValue(st: SealedTrait37[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait38[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]]): Encoder[SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38]] = { + def work(st: SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => 
A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38)) { + override def unsafeEncodeValue(st: SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38), hintfield) { + override def unsafeEncodeValue(st: SealedTrait38[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, 
A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait39[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]]): Encoder[SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39]] = { + def work(st: SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => 
A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39)) { + override def unsafeEncodeValue(st: SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39), hintfield) { + override def unsafeEncodeValue(st: SealedTrait39[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait40[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: 
A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]]): Encoder[SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40]] = { + def work(st: SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case 
SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40)) { + override def unsafeEncodeValue(st: SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40), hintfield) { + override def unsafeEncodeValue(st: SealedTrait40[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait41[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: 
A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]]): Encoder[SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41]] = { + def work(st: SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => 
A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41)) { + override def unsafeEncodeValue(st: SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41), hintfield) { + override def unsafeEncodeValue(st: SealedTrait41[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait42[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: 
A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]]): Encoder[SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42]] = { + def work(st: SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => 
A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42)) { + override def unsafeEncodeValue(st: SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42), hintfield) { + override def unsafeEncodeValue(st: SealedTrait42[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait43[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, 
A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]]): Encoder[SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43]] = { + def work(st: SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case 
SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43)) { + override def unsafeEncodeValue(st: SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43), hintfield) { + override def unsafeEncodeValue(st: SealedTrait43[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait44[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, 
A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]]): Encoder[SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44]] = { + def work(st: SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => 
A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44)) { + override def unsafeEncodeValue(st: SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44), hintfield) { + override def 
unsafeEncodeValue(st: SealedTrait44[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait45[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]]): Encoder[SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45]] = { + def work(st: SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => 
A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45)) { + override def unsafeEncodeValue(st: SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, 
out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45), hintfield) { + override def unsafeEncodeValue(st: SealedTrait45[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait46[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]]): Encoder[SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46]] = { + def work(st: SealedTrait46[A, A1, A2, A3, A4, A5, 
A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, 
A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46)) { + override def unsafeEncodeValue(st: SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46), hintfield) { + override def unsafeEncodeValue(st: SealedTrait46[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait47[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: 
Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]]): Encoder[SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47]] = { + def work(st: SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case 
SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47)) { + override def unsafeEncodeValue(st: SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47), hintfield) { + override def unsafeEncodeValue(st: SealedTrait47[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait48[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: 
Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]]): Encoder[SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48]] = { + def work(st: SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case 
SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48)) { + override def unsafeEncodeValue(st: SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48), hintfield) { + override def unsafeEncodeValue(st: SealedTrait48[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + 
implicit def sealedtrait49[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]]): Encoder[SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49]] = { + def work(st: SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, 
indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49)) { + override def unsafeEncodeValue(st: SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, 
A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49), hintfield) { + override def unsafeEncodeValue(st: SealedTrait49[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait50[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], 
M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]]): Encoder[SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50]] = { + def work(st: SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => 
A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50)) { + override def unsafeEncodeValue(st: SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50), hintfield) { + override def unsafeEncodeValue(st: SealedTrait50[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait51[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], 
A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]]): Encoder[SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51]] = { + def work(st: SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => 
A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51)) { + override def unsafeEncodeValue(st: SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, 
M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51), hintfield) { + override def unsafeEncodeValue(st: SealedTrait51[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait52[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]]): Encoder[SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52]] = { + def work(st: SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, 
A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => 
A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52)) { + override def unsafeEncodeValue(st: SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52), hintfield) { + override def unsafeEncodeValue(st: SealedTrait52[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait53[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], 
A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]]): Encoder[SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53]] = { + def work(st: SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, 
indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53)) { + override def unsafeEncodeValue(st: SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, 
M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53), hintfield) { + override def unsafeEncodeValue(st: SealedTrait53[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait54[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]]): Encoder[SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, 
A54]] = { + def work(st: SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case 
SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54)) { + override def unsafeEncodeValue(st: SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54), hintfield) { + override def unsafeEncodeValue(st: SealedTrait54[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait55[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], 
M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]]): Encoder[SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55]] = { + def work(st: SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, 
out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55)) { + override def unsafeEncodeValue(st: SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], indent: 
Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55), hintfield) { + override def unsafeEncodeValue(st: SealedTrait55[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait56[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: 
Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]]): Encoder[SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56]] = { + def work(st: SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) 
=> A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56)) { + override def unsafeEncodeValue(st: SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56), hintfield) { + override def unsafeEncodeValue(st: SealedTrait56[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait57[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, 
A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]], M57: Meta[A57], A57: Lazy[Encoder[A57]]): Encoder[SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57]] = { + def work(st: SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case 
SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => 
A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, indent, out) + case SealedTrait._57(v) => A57.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57)) { + override def unsafeEncodeValue(st: SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57), hintfield) { + override def unsafeEncodeValue(st: SealedTrait57[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait58[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: 
Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]], M57: Meta[A57], A57: Lazy[Encoder[A57]], M58: Meta[A58], A58: Lazy[Encoder[A58]]): Encoder[SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58]] = { + def work(st: SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case 
SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, indent, out) + case SealedTrait._57(v) => A57.value.unsafeEncode(v, indent, out) + case SealedTrait._58(v) => A58.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58)) { + override def unsafeEncodeValue(st: 
SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58), hintfield) { + override def unsafeEncodeValue(st: SealedTrait58[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait59[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: 
Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]], M57: Meta[A57], A57: Lazy[Encoder[A57]], M58: Meta[A58], A58: Lazy[Encoder[A58]], M59: Meta[A59], A59: Lazy[Encoder[A59]]): Encoder[SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59]] = { + def work(st: SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => 
A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, indent, out) + case SealedTrait._57(v) => A57.value.unsafeEncode(v, indent, out) + case SealedTrait._58(v) => A58.value.unsafeEncode(v, indent, out) + case SealedTrait._59(v) => A59.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59)) { + override def unsafeEncodeValue(st: SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59]](Array(M1, M2, M3, M4, 
M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59), hintfield) { + override def unsafeEncodeValue(st: SealedTrait59[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait60[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: 
Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]], M57: Meta[A57], A57: Lazy[Encoder[A57]], M58: Meta[A58], A58: Lazy[Encoder[A58]], M59: Meta[A59], A59: Lazy[Encoder[A59]], M60: Meta[A60], A60: Lazy[Encoder[A60]]): Encoder[SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60]] = { + def work(st: SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, 
indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, indent, out) + case SealedTrait._57(v) => A57.value.unsafeEncode(v, indent, out) + case SealedTrait._58(v) => A58.value.unsafeEncode(v, indent, out) + case SealedTrait._59(v) => A59.value.unsafeEncode(v, indent, out) + case SealedTrait._60(v) => A60.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60)) { + override def unsafeEncodeValue(st: SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60), hintfield) { + override def unsafeEncodeValue(st: SealedTrait60[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, 
A58, A59, A60], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait61[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]], M57: Meta[A57], A57: Lazy[Encoder[A57]], M58: Meta[A58], A58: Lazy[Encoder[A58]], M59: Meta[A59], A59: Lazy[Encoder[A59]], M60: Meta[A60], A60: Lazy[Encoder[A60]], M61: Meta[A61], A61: Lazy[Encoder[A61]]): Encoder[SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61]] = { + def 
work(st: SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, 
indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, indent, out) + case SealedTrait._57(v) => A57.value.unsafeEncode(v, indent, out) + case SealedTrait._58(v) => A58.value.unsafeEncode(v, indent, out) + case SealedTrait._59(v) => A59.value.unsafeEncode(v, indent, out) + case SealedTrait._60(v) => A60.value.unsafeEncode(v, indent, out) + case SealedTrait._61(v) => A61.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60, M61)) { + override def unsafeEncodeValue(st: SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60, M61), hintfield) { + override def unsafeEncodeValue(st: SealedTrait61[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait62[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, 
A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]], M57: Meta[A57], A57: Lazy[Encoder[A57]], M58: Meta[A58], A58: Lazy[Encoder[A58]], M59: Meta[A59], A59: Lazy[Encoder[A59]], M60: Meta[A60], A60: Lazy[Encoder[A60]], M61: Meta[A61], A61: Lazy[Encoder[A61]], M62: Meta[A62], A62: Lazy[Encoder[A62]]): Encoder[SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62]] = { + def work(st: SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], indent: Option[Int], out: java.io.Writer): Unit = st match { 
+ case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => 
A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, indent, out) + case SealedTrait._57(v) => A57.value.unsafeEncode(v, indent, out) + case SealedTrait._58(v) => A58.value.unsafeEncode(v, indent, out) + case SealedTrait._59(v) => A59.value.unsafeEncode(v, indent, out) + case SealedTrait._60(v) => A60.value.unsafeEncode(v, indent, out) + case SealedTrait._61(v) => A61.value.unsafeEncode(v, indent, out) + case SealedTrait._62(v) => A62.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60, M61, M62)) { + override def unsafeEncodeValue(st: SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60, M61, M62), hintfield) { + override def unsafeEncodeValue(st: SealedTrait62[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait63[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 
<: A, A62 <: A, A63 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]], M57: Meta[A57], A57: Lazy[Encoder[A57]], M58: Meta[A58], A58: Lazy[Encoder[A58]], M59: Meta[A59], A59: Lazy[Encoder[A59]], M60: Meta[A60], A60: Lazy[Encoder[A60]], M61: Meta[A61], A61: Lazy[Encoder[A61]], M62: Meta[A62], A62: Lazy[Encoder[A62]], M63: Meta[A63], A63: Lazy[Encoder[A63]]): Encoder[SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63]] = { + def work(st: SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) 
=> A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, 
indent, out) + case SealedTrait._57(v) => A57.value.unsafeEncode(v, indent, out) + case SealedTrait._58(v) => A58.value.unsafeEncode(v, indent, out) + case SealedTrait._59(v) => A59.value.unsafeEncode(v, indent, out) + case SealedTrait._60(v) => A60.value.unsafeEncode(v, indent, out) + case SealedTrait._61(v) => A61.value.unsafeEncode(v, indent, out) + case SealedTrait._62(v) => A62.value.unsafeEncode(v, indent, out) + case SealedTrait._63(v) => A63.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60, M61, M62, M63)) { + override def unsafeEncodeValue(st: SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60, M61, M62, M63), hintfield) { + override def unsafeEncodeValue(st: SealedTrait63[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } + + implicit def sealedtrait64[A, A1 <: A, A2 <: A, A3 <: A, A4 <: A, A5 <: A, A6 <: A, A7 <: A, A8 <: A, A9 <: A, A10 <: A, A11 <: A, A12 <: A, A13 <: A, A14 <: A, A15 <: A, A16 <: A, A17 <: A, A18 <: A, A19 <: A, A20 <: A, A21 <: A, A22 <: A, A23 <: A, A24 <: A, A25 <: A, A26 <: A, A27 <: A, A28 <: A, A29 <: A, A30 <: A, A31 <: A, A32 <: A, A33 <: A, A34 <: A, A35 <: A, A36 <: A, A37 <: A, A38 <: A, A39 <: A, A40 <: A, A41 <: A, A42 <: A, A43 <: A, A44 <: A, A45 <: A, A46 <: A, A47 <: A, A48 <: A, A49 <: A, A50 <: A, A51 <: A, A52 <: A, A53 <: A, A54 <: A, A55 <: A, A56 <: A, A57 <: A, A58 <: A, A59 <: A, A60 <: A, A61 <: A, A62 <: A, A63 <: A, A64 <: A](implicit M: Meta[A], M1: Meta[A1], A1: Lazy[Encoder[A1]], M2: Meta[A2], A2: Lazy[Encoder[A2]], 
M3: Meta[A3], A3: Lazy[Encoder[A3]], M4: Meta[A4], A4: Lazy[Encoder[A4]], M5: Meta[A5], A5: Lazy[Encoder[A5]], M6: Meta[A6], A6: Lazy[Encoder[A6]], M7: Meta[A7], A7: Lazy[Encoder[A7]], M8: Meta[A8], A8: Lazy[Encoder[A8]], M9: Meta[A9], A9: Lazy[Encoder[A9]], M10: Meta[A10], A10: Lazy[Encoder[A10]], M11: Meta[A11], A11: Lazy[Encoder[A11]], M12: Meta[A12], A12: Lazy[Encoder[A12]], M13: Meta[A13], A13: Lazy[Encoder[A13]], M14: Meta[A14], A14: Lazy[Encoder[A14]], M15: Meta[A15], A15: Lazy[Encoder[A15]], M16: Meta[A16], A16: Lazy[Encoder[A16]], M17: Meta[A17], A17: Lazy[Encoder[A17]], M18: Meta[A18], A18: Lazy[Encoder[A18]], M19: Meta[A19], A19: Lazy[Encoder[A19]], M20: Meta[A20], A20: Lazy[Encoder[A20]], M21: Meta[A21], A21: Lazy[Encoder[A21]], M22: Meta[A22], A22: Lazy[Encoder[A22]], M23: Meta[A23], A23: Lazy[Encoder[A23]], M24: Meta[A24], A24: Lazy[Encoder[A24]], M25: Meta[A25], A25: Lazy[Encoder[A25]], M26: Meta[A26], A26: Lazy[Encoder[A26]], M27: Meta[A27], A27: Lazy[Encoder[A27]], M28: Meta[A28], A28: Lazy[Encoder[A28]], M29: Meta[A29], A29: Lazy[Encoder[A29]], M30: Meta[A30], A30: Lazy[Encoder[A30]], M31: Meta[A31], A31: Lazy[Encoder[A31]], M32: Meta[A32], A32: Lazy[Encoder[A32]], M33: Meta[A33], A33: Lazy[Encoder[A33]], M34: Meta[A34], A34: Lazy[Encoder[A34]], M35: Meta[A35], A35: Lazy[Encoder[A35]], M36: Meta[A36], A36: Lazy[Encoder[A36]], M37: Meta[A37], A37: Lazy[Encoder[A37]], M38: Meta[A38], A38: Lazy[Encoder[A38]], M39: Meta[A39], A39: Lazy[Encoder[A39]], M40: Meta[A40], A40: Lazy[Encoder[A40]], M41: Meta[A41], A41: Lazy[Encoder[A41]], M42: Meta[A42], A42: Lazy[Encoder[A42]], M43: Meta[A43], A43: Lazy[Encoder[A43]], M44: Meta[A44], A44: Lazy[Encoder[A44]], M45: Meta[A45], A45: Lazy[Encoder[A45]], M46: Meta[A46], A46: Lazy[Encoder[A46]], M47: Meta[A47], A47: Lazy[Encoder[A47]], M48: Meta[A48], A48: Lazy[Encoder[A48]], M49: Meta[A49], A49: Lazy[Encoder[A49]], M50: Meta[A50], A50: Lazy[Encoder[A50]], M51: Meta[A51], A51: Lazy[Encoder[A51]], M52: Meta[A52], A52: Lazy[Encoder[A52]], M53: Meta[A53], A53: Lazy[Encoder[A53]], M54: Meta[A54], A54: Lazy[Encoder[A54]], M55: Meta[A55], A55: Lazy[Encoder[A55]], M56: Meta[A56], A56: Lazy[Encoder[A56]], M57: Meta[A57], A57: Lazy[Encoder[A57]], M58: Meta[A58], A58: Lazy[Encoder[A58]], M59: Meta[A59], A59: Lazy[Encoder[A59]], M60: Meta[A60], A60: Lazy[Encoder[A60]], M61: Meta[A61], A61: Lazy[Encoder[A61]], M62: Meta[A62], A62: Lazy[Encoder[A62]], M63: Meta[A63], A63: Lazy[Encoder[A63]], M64: Meta[A64], A64: Lazy[Encoder[A64]]): Encoder[SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64]] = { + def work(st: SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64], indent: Option[Int], out: java.io.Writer): Unit = st match { + case SealedTrait._1(v) => A1.value.unsafeEncode(v, indent, out) + case SealedTrait._2(v) => A2.value.unsafeEncode(v, indent, out) + case SealedTrait._3(v) => A3.value.unsafeEncode(v, indent, out) + case SealedTrait._4(v) => A4.value.unsafeEncode(v, indent, out) + case SealedTrait._5(v) => 
A5.value.unsafeEncode(v, indent, out) + case SealedTrait._6(v) => A6.value.unsafeEncode(v, indent, out) + case SealedTrait._7(v) => A7.value.unsafeEncode(v, indent, out) + case SealedTrait._8(v) => A8.value.unsafeEncode(v, indent, out) + case SealedTrait._9(v) => A9.value.unsafeEncode(v, indent, out) + case SealedTrait._10(v) => A10.value.unsafeEncode(v, indent, out) + case SealedTrait._11(v) => A11.value.unsafeEncode(v, indent, out) + case SealedTrait._12(v) => A12.value.unsafeEncode(v, indent, out) + case SealedTrait._13(v) => A13.value.unsafeEncode(v, indent, out) + case SealedTrait._14(v) => A14.value.unsafeEncode(v, indent, out) + case SealedTrait._15(v) => A15.value.unsafeEncode(v, indent, out) + case SealedTrait._16(v) => A16.value.unsafeEncode(v, indent, out) + case SealedTrait._17(v) => A17.value.unsafeEncode(v, indent, out) + case SealedTrait._18(v) => A18.value.unsafeEncode(v, indent, out) + case SealedTrait._19(v) => A19.value.unsafeEncode(v, indent, out) + case SealedTrait._20(v) => A20.value.unsafeEncode(v, indent, out) + case SealedTrait._21(v) => A21.value.unsafeEncode(v, indent, out) + case SealedTrait._22(v) => A22.value.unsafeEncode(v, indent, out) + case SealedTrait._23(v) => A23.value.unsafeEncode(v, indent, out) + case SealedTrait._24(v) => A24.value.unsafeEncode(v, indent, out) + case SealedTrait._25(v) => A25.value.unsafeEncode(v, indent, out) + case SealedTrait._26(v) => A26.value.unsafeEncode(v, indent, out) + case SealedTrait._27(v) => A27.value.unsafeEncode(v, indent, out) + case SealedTrait._28(v) => A28.value.unsafeEncode(v, indent, out) + case SealedTrait._29(v) => A29.value.unsafeEncode(v, indent, out) + case SealedTrait._30(v) => A30.value.unsafeEncode(v, indent, out) + case SealedTrait._31(v) => A31.value.unsafeEncode(v, indent, out) + case SealedTrait._32(v) => A32.value.unsafeEncode(v, indent, out) + case SealedTrait._33(v) => A33.value.unsafeEncode(v, indent, out) + case SealedTrait._34(v) => A34.value.unsafeEncode(v, indent, out) + case SealedTrait._35(v) => A35.value.unsafeEncode(v, indent, out) + case SealedTrait._36(v) => A36.value.unsafeEncode(v, indent, out) + case SealedTrait._37(v) => A37.value.unsafeEncode(v, indent, out) + case SealedTrait._38(v) => A38.value.unsafeEncode(v, indent, out) + case SealedTrait._39(v) => A39.value.unsafeEncode(v, indent, out) + case SealedTrait._40(v) => A40.value.unsafeEncode(v, indent, out) + case SealedTrait._41(v) => A41.value.unsafeEncode(v, indent, out) + case SealedTrait._42(v) => A42.value.unsafeEncode(v, indent, out) + case SealedTrait._43(v) => A43.value.unsafeEncode(v, indent, out) + case SealedTrait._44(v) => A44.value.unsafeEncode(v, indent, out) + case SealedTrait._45(v) => A45.value.unsafeEncode(v, indent, out) + case SealedTrait._46(v) => A46.value.unsafeEncode(v, indent, out) + case SealedTrait._47(v) => A47.value.unsafeEncode(v, indent, out) + case SealedTrait._48(v) => A48.value.unsafeEncode(v, indent, out) + case SealedTrait._49(v) => A49.value.unsafeEncode(v, indent, out) + case SealedTrait._50(v) => A50.value.unsafeEncode(v, indent, out) + case SealedTrait._51(v) => A51.value.unsafeEncode(v, indent, out) + case SealedTrait._52(v) => A52.value.unsafeEncode(v, indent, out) + case SealedTrait._53(v) => A53.value.unsafeEncode(v, indent, out) + case SealedTrait._54(v) => A54.value.unsafeEncode(v, indent, out) + case SealedTrait._55(v) => A55.value.unsafeEncode(v, indent, out) + case SealedTrait._56(v) => A56.value.unsafeEncode(v, indent, out) + case SealedTrait._57(v) => A57.value.unsafeEncode(v, 
indent, out) + case SealedTrait._58(v) => A58.value.unsafeEncode(v, indent, out) + case SealedTrait._59(v) => A59.value.unsafeEncode(v, indent, out) + case SealedTrait._60(v) => A60.value.unsafeEncode(v, indent, out) + case SealedTrait._61(v) => A61.value.unsafeEncode(v, indent, out) + case SealedTrait._62(v) => A62.value.unsafeEncode(v, indent, out) + case SealedTrait._63(v) => A63.value.unsafeEncode(v, indent, out) + case SealedTrait._64(v) => A64.value.unsafeEncode(v, indent, out) + } + Option("hintname") match { + case None => new SealedTraitEncoder[A, SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60, M61, M62, M63, M64)) { + override def unsafeEncodeValue(st: SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + case Some(hintfield) => new SealedTraitDiscrimEncoder[A, SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64]](Array(M1, M2, M3, M4, M5, M6, M7, M8, M9, M10, M11, M12, M13, M14, M15, M16, M17, M18, M19, M20, M21, M22, M23, M24, M25, M26, M27, M28, M29, M30, M31, M32, M33, M34, M35, M36, M37, M38, M39, M40, M41, M42, M43, M44, M45, M46, M47, M48, M49, M50, M51, M52, M53, M54, M55, M56, M57, M58, M59, M60, M61, M62, M63, M64), hintfield) { + override def unsafeEncodeValue(st: SealedTrait64[A, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64], indent: Option[Int], out: java.io.Writer): Unit = work(st, indent, out) + } + } + } +} From 3458da724bd35b226b39c670be76d55ee13b4033 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 16 Oct 2024 20:35:17 +0100 Subject: [PATCH 664/827] Add Show[Long] & Show[Unit] --- compiler/src/dotty/tools/dotc/printing/Formatting.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index a36e6f48533a..bf3ca23c407b 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -115,6 +115,7 @@ object Formatting { given Show[Char] = ShowAny given Show[Boolean] = ShowAny given Show[Integer] = ShowAny + given Show[Long] = ShowAny 
given Show[String] = ShowAny given Show[Class[?]] = ShowAny given Show[Throwable] = ShowAny @@ -122,6 +123,7 @@ object Formatting { given Show[CompilationUnit] = ShowAny given Show[Phases.Phase] = ShowAny given Show[TyperState] = ShowAny + given Show[Unit] = ShowAny given Show[config.ScalaVersion] = ShowAny given Show[io.AbstractFile] = ShowAny given Show[parsing.Scanners.Scanner] = ShowAny From 8d612de7b0e9a07711db86e6cbc1a7e04194b847 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 16 Oct 2024 20:35:54 +0100 Subject: [PATCH 665/827] Add a >0.1s timer to trace --- compiler/src/dotty/tools/dotc/reporting/trace.scala | 9 ++++++++- .../src/dotty/tools/dotc/typer/ImportSuggestions.scala | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index 732e779e9bf7..7f05cffb422a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -96,6 +96,7 @@ trait TraceSyntax: (op: => T)(using Context): T = if ctx.mode.is(Mode.Printing) || !isForced && (printer eq Printers.noPrinter) then op else + val start = System.nanoTime // Avoid evaluating question multiple time, since each evaluation // may cause some extra logging output. val q = question @@ -109,7 +110,13 @@ trait TraceSyntax: def finalize(msg: String) = if !finalized then ctx.base.indent -= 1 - doLog(s"$margin$msg") + val stop = System.nanoTime + val diffNs = stop - start + val diffS = (diffNs / 1000 / 1000).toInt / 1000.0 + if diffS > 0.1 then + doLog(s"$margin$msg (${"%.2f".format(diffS)} s)") + else + doLog(s"$margin$msg") finalized = true try doLog(s"$margin$leading") diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 8f8c51e2f566..3ae533d58b2e 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -264,7 +264,7 @@ trait ImportSuggestions: end importSuggestions /** Reduce next timeout for import suggestions by the amount of time it took - * for current search, but but never less than to half of the previous budget. + * for current search, but never less than to half of the previous budget. */ private def reduceTimeBudget(used: Int)(using Context) = val run = ctx.run.nn From 28b8c55595dd86351aaac234a270c878f8236730 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 16 Oct 2024 13:03:49 +0100 Subject: [PATCH 666/827] Optimise refineUsingParent to use constrained Using `constrained` on a TypeLambda means adding one TypeLambda for all the type parameters in tp1, while newTypeVar creates a TypeLambda for each type parameter. 
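In short, the change below swaps per-parameter type variables for a single constrained type lambda. A condensed before/after sketch of the diff that follows (all names are taken from the change itself; these are compiler-internal APIs that need an implicit `Context`, so this is an outline rather than standalone runnable code):

```scala
// Before: one fresh type variable, and hence one single-parameter binder,
// is allocated for every type parameter of tp1
val tvars = tp1.typeParams.map { tparam =>
  newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName))
}

// After: eta-expand tp1 once and let `constrained` create all the type
// variables under a single shared TypeLambda
val tvars = tp1.etaExpand match
  case eta: TypeLambda => constrained(eta)
  case _               => Nil // tp1 takes no type parameters
```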
--- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 4 +++- compiler/src/dotty/tools/dotc/core/Types.scala | 10 +++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index bfda613d0586..5b5086f065e9 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -902,7 +902,9 @@ object TypeOps: } val inferThisMap = new InferPrefixMap - val tvars = tp1.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } + val tvars = tp1.etaExpand match + case eta: TypeLambda => constrained(eta) + case _ => Nil val protoTp1 = inferThisMap.apply(tp1).appliedTo(tvars) if gadtSyms.nonEmpty then diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index cf73bda0d131..2eecc717e940 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4395,9 +4395,11 @@ object Types extends TypeUtils { /** Distributes Lambda inside type bounds. Examples: * - * type T[X] = U becomes type T = [X] -> U - * type T[X] <: U becomes type T >: Nothing <: ([X] -> U) - * type T[X] >: L <: U becomes type T >: ([X] -> L) <: ([X] -> U) + * {{{ + * type T[X] = U becomes type T = [X] =>> U + * type T[X] <: U becomes type T >: Nothing <: ([X] =>> U) + * type T[X] >: L <: U becomes type T >: ([X] =>> L) <: ([X] =>> U) + * }}} * * The variances of regular TypeBounds types, as well as of match aliases * and of opaque aliases are always determined from the given parameters @@ -4409,6 +4411,7 @@ object Types extends TypeUtils { * * Examples: * + * {{{ * type T[X] >: A // X is invariant * type T[X] <: List[X] // X is invariant * type T[X] = List[X] // X is covariant (determined structurally) @@ -4416,6 +4419,7 @@ object Types extends TypeUtils { * opaque type T[+X] = List[X] // X is covariant * type T[A, B] = A => B // A is contravariant, B is covariant (determined structurally) * type T[A, +B] = A => B // A is invariant, B is covariant + * }}} */ def boundsFromParams[PI <: ParamInfo.Of[TypeName]](params: List[PI], bounds: TypeBounds)(using Context): TypeBounds = { def expand(tp: Type, useVariances: Boolean) = From bdafee6a0e2de44b58839c1eb9c357eee809d4ad Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 16 Oct 2024 15:15:03 +0100 Subject: [PATCH 667/827] Optimise SpaceEngine.signature for synthetic unapplies Instead of creating type vars, constraining against them, then instantiating them, just instantiate the PolyType with the scrutinee type args (or the lo/hi bound or bounded wildcard from the param). 
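Condensed from the diff below (again compiler-internal APIs, shown as an outline rather than runnable code; `vs` is the variance map built by a `TypeAccumulator` in the full change): for a synthetic unapply the method type is now obtained by instantiating the `PolyType` directly, with no type variables or constraint solving.

```scala
// Synthetic unapply: instantiate the PolyType directly, no TypeVars needed
val mt = pt.resultType.asInstanceOf[MethodType]
val unapplyArgType = mt.paramInfos.head
val targs = scrutineeTp.baseType(unapplyArgType.classSymbol) match
  case AppliedType(_, targs) => targs // the scrutinee supplies the type arguments
  case _ => // e.g. the scrutinee is Null or Nothing (i5067, i5067b)
    // `vs` holds the variance of each of pt's parameters in unapplyArgType,
    // computed in the full diff without allocating TypeVars
    pt.paramRefs.map: p =>
      vs.computedVariance(p).uncheckedNN match
        case -1 => p.paramInfo.lo            // contravariant: lower bound
        case 1  => p.paramInfo.hi            // covariant: upper bound
        case _  => WildcardType(p.paramInfo) // otherwise: a bounded wildcard
pt.instantiate(targs).asInstanceOf[MethodType]
```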
--- .../src/dotty/tools/dotc/core/Types.scala | 1 + .../tools/dotc/transform/patmat/Space.scala | 32 ++++++++++++++++--- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 2eecc717e940..e1f253941a45 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4706,6 +4706,7 @@ object Types extends TypeUtils { type BT <: LambdaType def paramNum: Int def paramName: binder.ThisName = binder.paramNames(paramNum) + def paramInfo: binder.PInfo = binder.paramInfos(paramNum) override def underlying(using Context): Type = { // TODO: update paramInfos's type to nullable diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 20b0099d82e2..af37135e5196 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -7,7 +7,7 @@ import core.* import Constants.*, Contexts.*, Decorators.*, Flags.*, NullOpsDecorator.*, Symbols.*, Types.* import Names.*, NameOps.*, StdNames.* import ast.*, tpd.* -import config.Printers.* +import config.Printers.exhaustivity import printing.{ Printer, * }, Texts.* import reporting.* import typer.*, Applications.*, Inferencing.*, ProtoTypes.* @@ -524,14 +524,36 @@ object SpaceEngine { val mt: MethodType = unapp.widen match { case mt: MethodType => mt case pt: PolyType => + if unappSym.is(Synthetic) then + val mt = pt.resultType.asInstanceOf[MethodType] + val unapplyArgType = mt.paramInfos.head + val targs = scrutineeTp.baseType(unapplyArgType.classSymbol) match + case AppliedType(_, targs) => targs + case _ => + // Typically when the scrutinee is Null or Nothing (see i5067 and i5067b) + // For performance, do `variances(unapplyArgType)` but without using TypeVars + // so just find the variance, so we know if to min/max to the LB/UB or use a wildcard. + object accu extends TypeAccumulator[VarianceMap[TypeParamRef]]: + def apply(vmap: VarianceMap[TypeParamRef], tp: Type) = tp match + case tp: TypeParamRef if tp.binder eq pt => vmap.recordLocalVariance(tp, variance) + case _ => foldOver(vmap, tp) + val vs = accu(VarianceMap.empty[TypeParamRef], unapplyArgType) + pt.paramRefs.map: p => + vs.computedVariance(p).uncheckedNN match + case -1 => p.paramInfo.lo + case 1 => p.paramInfo.hi + case _ => WildcardType(p.paramInfo) + pt.instantiate(targs).asInstanceOf[MethodType] + else val locked = ctx.typerState.ownedVars val tvars = constrained(pt) val mt = pt.instantiate(tvars).asInstanceOf[MethodType] - scrutineeTp <:< mt.paramInfos(0) + val unapplyArgType = mt.paramInfos.head + scrutineeTp <:< unapplyArgType // force type inference to infer a narrower type: could be singleton // see tests/patmat/i4227.scala - mt.paramInfos(0) <:< scrutineeTp - maximizeType(mt.paramInfos(0), Spans.NoSpan) + unapplyArgType <:< scrutineeTp + maximizeType(unapplyArgType, Spans.NoSpan) if !(ctx.typerState.ownedVars -- locked).isEmpty then // constraining can create type vars out of wildcard types // (in legalBound, by using a LevelAvoidMap) @@ -543,7 +565,7 @@ object SpaceEngine { // but I'd rather have an unassigned new-new type var, than an infinite loop. // After all, there's nothing strictly "wrong" with unassigned type vars, // it just fails TreeChecker's linting. 
- maximizeType(mt.paramInfos(0), Spans.NoSpan) + maximizeType(unapplyArgType, Spans.NoSpan) mt } From f478d7e2c7260997a7ea3df6fbaf01a06e4052a6 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 17 Oct 2024 16:25:29 +0100 Subject: [PATCH 668/827] Drop inaccessible subclasses from refineUsingParent --- .../src/dotty/tools/dotc/core/Decorators.scala | 2 +- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 6 +++++- tests/pos/i21790.scala | 14 ++++++++++++++ 3 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i21790.scala diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 29d4b3fa4052..96a2d45db80d 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -292,7 +292,7 @@ object Decorators { case _ => String.valueOf(x).nn /** Returns the simple class name of `x`. */ - def className: String = x.getClass.getSimpleName.nn + def className: String = if x == null then "" else x.getClass.getSimpleName.nn extension [T](x: T) def assertingErrorsReported(using Context): T = { diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index bfda613d0586..846e4091c617 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -921,7 +921,11 @@ object TypeOps: for tp <- mixins.reverseIterator do protoTp1 <:< tp maximizeType(protoTp1, NoSpan) - wildApprox(protoTp1) + val inst = wildApprox(protoTp1) + if !inst.classSymbol.exists then + // E.g. i21790, can't instantiate S#CA as a subtype of O.A, because O.CA isn't accessible + NoType + else inst } if (protoTp1 <:< tp2) instantiate() diff --git a/tests/pos/i21790.scala b/tests/pos/i21790.scala new file mode 100644 index 000000000000..0cc7db935ac7 --- /dev/null +++ b/tests/pos/i21790.scala @@ -0,0 +1,14 @@ +package p + +trait S: + sealed trait A + private class CA() extends A + +object O extends S + +trait T + +class Test: + def f(e: T) = e match + case _: O.A => + case _ => From 6d9ee4dcc21a1f6bd644883c099d57f6c7fbbeab Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 17 Oct 2024 19:29:59 +0200 Subject: [PATCH 669/827] Fix CI workflows for publishing releases. (#21789) Reuse SDKs generated in `build-sdk-package` job. 
Store .sha256 for each of the files, instead of creating sha256.txt with shas for the reach directory to ease validation --- .github/workflows/ci.yaml | 280 ++++++++++++++++++++++++++------------ 1 file changed, 194 insertions(+), 86 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 95a6ed24df13..5baa8d3dea81 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -729,7 +729,7 @@ jobs: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8] + needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8, build-sdk-package] if: "github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/')" @@ -765,31 +765,51 @@ jobs: # Extract the release tag - name: Extract the release tag run : echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV - # BUILD THE SDKs - - name: Build and pack the SDK (universal) - run : | - ./project/scripts/sbt dist/packArchive - sha256sum dist/target/scala3-* > dist/target/sha256sum.txt - - name: Build and pack the SDK (linux x86-64) - run : | - ./project/scripts/sbt dist-linux-x86_64/packArchive - sha256sum dist/linux-x86_64/target/scala3-* > dist/linux-x86_64/target/sha256sum.txt - - name: Build and pack the SDK (linux aarch64) - run : | - ./project/scripts/sbt dist-linux-aarch64/packArchive - sha256sum dist/linux-aarch64/target/scala3-* > dist/linux-aarch64/target/sha256sum.txt - - name: Build and pack the SDK (mac x86-64) - run : | - ./project/scripts/sbt dist-mac-x86_64/packArchive - sha256sum dist/mac-x86_64/target/scala3-* > dist/mac-x86_64/target/sha256sum.txt - - name: Build and pack the SDK (mac aarch64) - run : | - ./project/scripts/sbt dist-mac-aarch64/packArchive - sha256sum dist/mac-aarch64/target/scala3-* > dist/mac-aarch64/target/sha256sum.txt - - name: Build and pack the SDK (win x86-64) + + - name: Prepare the SDKs run : | - ./project/scripts/sbt dist-win-x86_64/packArchive - sha256sum dist/win-x86_64/target/scala3-* > dist/win-x86_64/target/sha256sum.txt + function prepareSDK() { + distroSuffix="$1" + artifactId="$2" + artifactName="scala3-${{ env.RELEASE_TAG }}${distroSuffix}" + + downloadedArchive="./artifact.zip" + if [[ -f "${downloadedArchive}" ]]; then + rm "${downloadedArchive}" + fi + + # Download previously prepared SDK bundle + curl -L \ + -H "Authorization: token ${{secrets.GITHUB_TOKEN}}" \ + -H "Accept: application/vnd.github+json" \ + -o "${downloadedArchive}" \ + --retry 5 --retry-delay 10 --retry-connrefused \ + --max-time 600 --connect-timeout 60 \ + https://api.github.com/repos/scala/scala3/actions/artifacts/${artifactId}/zip + + # Repackage content of .zip to .tar.gz and prepare digest + tmpDir="./archive-tmp-dir" + if [[ -d "${tmpDir}" ]]; then + rm -r "${tmpDir}" + fi + mkdir "${tmpDir}" + unzip "${downloadedArchive}" -d "${tmpDir}" + + mv "${downloadedArchive}" "./${artifactName}.zip" + tar -czf "${artifactName}.tar.gz" -C "${tmpDir}" . 
+ + # Caluclate SHA for each of archive files + for file in "${artifactName}.zip" "${artifactName}.tar.gz"; do + sha256sum "${file}" > "${file}.sha256" + done + } + prepareSDK "" ${{needs.build-sdk-package.outputs.universal-id}} + prepareSDK "-aarch64-pc-linux" ${{needs.build-sdk-package.outputs.linux-aarch64-id}} + prepareSDK "-x86_64-pc-linux" ${{needs.build-sdk-package.outputs.linux-x86_64-id}} + prepareSDK "-aarch64-apple-darwin" ${{needs.build-sdk-package.outputs.mac-aarch64-id}} + prepareSDK "-x86_64-apple-darwin" ${{needs.build-sdk-package.outputs.mac-x86_64-id}} + prepareSDK "-x86_64-pc-win32" ${{needs.build-sdk-package.outputs.win-x86_64-id}} + # Create the GitHub release - name: Create GitHub Release id: create_gh_release @@ -803,179 +823,267 @@ jobs: draft: true prerelease: ${{ contains(env.RELEASE_TAG, '-') }} - - name: Upload zip archive to GitHub Release (universal) + # The following steps are generated using template: + # def template(distribution: String, suffix: String) = + # def upload(kind: String, path: String, contentType: String) = + # s"""- name: Upload $kind to GitHub Release ($distribution) + # uses: actions/upload-release-asset@v1 + # env: + # GITHUB_TOKEN: $${{ secrets.GITHUB_TOKEN }} + # with: + # upload_url: $${{ steps.create_gh_release.outputs.upload_url }} + # asset_path: ./${path} + # asset_name: ${path} + # asset_content_type: ${contentType}""" + # val filename = s"scala3-$${{ env.RELEASE_TAG }}${suffix}" + # s""" + # # $distribution + # ${upload("zip archive", s"$filename.zip", "application/zip")} + # ${upload("zip archive SHA", s"$filename.zip.sha256", "text/plain")} + # ${upload("tar.gz archive", s"$filename.tar.gz", "application/gzip")} + # ${upload("tar.gz archive SHA", s"$filename.tar.gz.sha256", "text/plain")} + # """ + + # @main def gen = + # Seq( + # template("Universal", ""), + # template("Linux x86-64", "-x86_64-pc-linux"), + # template("Linux aarch64", "-aarch64-pc-linux"), + # template("Mac x86-64", "-x86_64-apple-darwin"), + # template("Mac aarch64", "-aarcb64-apple-darwin"), + # template("Windows x86_64", "-x86_64-pc-win32") + # ).foreach(println) + # Universal + - name: Upload zip archive to GitHub Release (Universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/scala3-${{ env.RELEASE_TAG }}.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}.zip asset_name: scala3-${{ env.RELEASE_TAG }}.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (universal) + - name: Upload zip archive SHA to GitHub Release (Universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/scala3-${{ env.RELEASE_TAG }}.tar.gz + asset_path: ./scala3-${{ env.RELEASE_TAG }}.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Universal) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}.tar.gz asset_name: scala3-${{ env.RELEASE_TAG }}.tar.gz asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Universal) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}.tar.gz.sha256 + asset_content_type: text/plain + - - name: Upload zip archive to GitHub Release (linux x86-64) + # Linux x86-64 + - name: Upload zip archive to GitHub Release (Linux x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (linux x86-64) + - name: Upload zip archive SHA to GitHub Release (Linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Linux x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz.sha256 + asset_content_type: text/plain + - - name: Upload zip archive to GitHub Release (linux aarch64) + # Linux aarch64 + - name: Upload zip archive to GitHub Release (Linux aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (linux aarch64) + - name: Upload zip archive SHA to GitHub Release (Linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Linux aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz + asset_path: ./scala3-${{ 
env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz.sha256 + asset_content_type: text/plain - - name: Upload zip archive to GitHub Release (mac x86-64) + + # Mac x86-64 + - name: Upload zip archive to GitHub Release (Mac x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (mac x86-64) + - name: Upload zip archive SHA to GitHub Release (Mac x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz - asset_content_type: application/gzip - - - name: Upload zip archive to GitHub Release (mac aarch64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Mac x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip - asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip - asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (mac aarch64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Mac x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz - asset_content_type: application/gzip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz.sha256 + asset_content_type: text/plain + - - name: Upload zip archive to GitHub Release (win x86-64) + # Mac aarch64 + - name: Upload zip archive to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip - asset_name: scala3-${{ 
env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (win x86-64) + - name: Upload zip archive SHA to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz - asset_content_type: application/gzip - - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (universal) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/sha256sum.txt - asset_name: sha256sum.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux x86-64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.tar.gz + asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-pc-linux.txt + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.tar.gz.sha256 asset_content_type: text/plain - - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux aarch64) + + # Windows x86_64 + - name: Upload zip archive to GitHub Release (Windows x86_64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/sha256sum.txt - asset_name: sha256sum-aarch64-pc-linux.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac x86-64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_content_type: application/zip + - name: Upload zip archive SHA to GitHub Release (Windows x86_64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-apple-darwin.txt + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip.sha256 asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac aarch64) + - name: Upload tar.gz archive to GitHub Release (Windows x86_64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ 
steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/sha256sum.txt - asset_name: sha256sum-aarch64-apple-darwin.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (win x86-64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Windows x86_64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-pc-win32.txt + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz.sha256 asset_content_type: text/plain - name: Publish Release From a0db7c1ebf476cfdd1c0396b0ba1bd622e2d36b4 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 26 Sep 2024 21:06:14 +0200 Subject: [PATCH 670/827] Allow opaque type def in repl --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 5a3be6505715..4172914c0fb0 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -4696,6 +4696,8 @@ object Parsers { stats += closure(in.offset, Location.InBlock, modifiers(BitSet(IMPLICIT))) else if isIdent(nme.extension) && followingIsExtension() then stats += extension() + else if ctx.mode.is(Mode.Interactive) && isDefIntro(localModifierTokens) then + stats +++= localDef(in.offset) else if isDefIntro(localModifierTokens, excludedSoftModifiers = Set(nme.`opaque`)) then stats +++= localDef(in.offset) else From 16ebbea25de6074ead7388c09434758c424700a6 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Fri, 4 Oct 2024 03:42:03 +0200 Subject: [PATCH 671/827] Only allow opaque type def at outermost --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 4 ++-- compiler/src/dotty/tools/repl/ParseResult.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 4172914c0fb0..6d35aeb9ac82 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -4684,7 +4684,7 @@ object Parsers { * | Expr1 * | */ - def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders { + def blockStatSeq(outermost: Boolean = false): List[Tree] = checkNoEscapingPlaceholders { val stats = new ListBuffer[Tree] while var empty = false @@ -4696,7 +4696,7 @@ object Parsers { stats += closure(in.offset, Location.InBlock, modifiers(BitSet(IMPLICIT))) else if isIdent(nme.extension) && followingIsExtension() then stats += extension() - else if ctx.mode.is(Mode.Interactive) && isDefIntro(localModifierTokens) then + else if outermost && ctx.mode.is(Mode.Interactive) && isDefIntro(localModifierTokens) then stats +++= localDef(in.offset) else if isDefIntro(localModifierTokens, excludedSoftModifiers = Set(nme.`opaque`)) then stats +++= localDef(in.offset) diff --git a/compiler/src/dotty/tools/repl/ParseResult.scala b/compiler/src/dotty/tools/repl/ParseResult.scala index b9139343bca1..24a624173050 100644 --- 
a/compiler/src/dotty/tools/repl/ParseResult.scala +++ b/compiler/src/dotty/tools/repl/ParseResult.scala @@ -122,7 +122,7 @@ object ParseResult { private def parseStats(using Context): List[untpd.Tree] = { val parser = new Parser(ctx.source) - val stats = parser.blockStatSeq() + val stats = parser.blockStatSeq(outermost = true) parser.accept(Tokens.EOF) stats } From 634d9b79beae0e4098ef93affd25a5d761268e14 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Fri, 11 Oct 2024 05:50:21 +0200 Subject: [PATCH 672/827] Add REPL test --- .../dotty/tools/repl/ReplCompilerTests.scala | 28 ++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index 374f53dbd011..221eb8acb9de 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -462,10 +462,36 @@ class ReplCompilerTests extends ReplTest: .andThen: run("0") // check for crash val last = lines() - println(last) assertTrue(last(0), last(0) == ("Options incompatible with repl will be ignored: -Ybest-effort, -Ywith-best-effort-tasty")) assertTrue(last(1), last(1) == ("val res0: Int = 0")) + @Test def `i9879`: Unit = initially: + run { + """|opaque type A = Int; def getA: A = 0 + |object Wrapper { opaque type A = Int; def getA: A = 1 } + |val x = getA + |val y = Wrapper.getA""".stripMargin + } + val expected = List( + "def getA: A", + "// defined object Wrapper", + "val x: A = 0", + "val y: Wrapper.A = 1" + ) + assertEquals(expected, lines()) + + @Test def `i9879b`: Unit = initially: + run { + """|def test = + | type A = Int + | opaque type B = String + | object Wrapper { opaque type C = Int } + | ()""".stripMargin + } + val all = lines() + assertEquals(6, all.length) + assertTrue(all.head.startsWith("-- [E103] Syntax Error")) + assertTrue(all.exists(_.trim().startsWith("| Illegal start of statement: this modifier is not allowed here"))) object ReplCompilerTests: From 3a98a1cd3efd52fdb7e92f03d7d9ddc7f8ed3282 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Fri, 18 Oct 2024 10:44:58 +0200 Subject: [PATCH 673/827] Remove extra if --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 6d35aeb9ac82..6390d8d32d3f 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -4696,9 +4696,11 @@ object Parsers { stats += closure(in.offset, Location.InBlock, modifiers(BitSet(IMPLICIT))) else if isIdent(nme.extension) && followingIsExtension() then stats += extension() - else if outermost && ctx.mode.is(Mode.Interactive) && isDefIntro(localModifierTokens) then - stats +++= localDef(in.offset) - else if isDefIntro(localModifierTokens, excludedSoftModifiers = Set(nme.`opaque`)) then + else if isDefIntro(localModifierTokens, + excludedSoftModifiers = + // Allow opaque definitions at outermost level in REPL. 
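+             // (e.g. `opaque type A = Int` entered directly at the prompt is accepted,
+             //  while the same definition nested inside a block still reports an error;
+             //  see the i9879 / i9879b REPL tests added above.)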
+ if outermost && ctx.mode.is(Mode.Interactive) + then Set.empty else Set(nme.`opaque`)) then stats +++= localDef(in.offset) else empty = true From dd47185afa1a2ef0e0fa6f99ed7c40d0e87b49ee Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Fri, 18 Oct 2024 12:22:18 +0200 Subject: [PATCH 674/827] Add zip and unzip to the CI --- .github/Dockerfile | 3 ++- .github/workflows/ci.yaml | 26 +++++++++++++------------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/.github/Dockerfile b/.github/Dockerfile index d56ec6a59f2d..59d46fd169d9 100644 --- a/.github/Dockerfile +++ b/.github/Dockerfile @@ -15,7 +15,8 @@ RUN apt-get update && \ openjdk-17-jdk-headless \ openjdk-21-jdk-headless && \ (curl -fsSL https://deb.nodesource.com/setup_18.x | bash -) && \ - apt-get install -y nodejs + apt-get install -y nodejs && \ + apt-get install -y zip unzip # Install sbt diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 95a6ed24df13..8d91e22c8ee8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -48,7 +48,7 @@ jobs: test_non_bootstrapped: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -100,7 +100,7 @@ jobs: test: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -160,7 +160,7 @@ jobs: test_scala2_library_tasty: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -287,7 +287,7 @@ jobs: name: MiMa runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -338,7 +338,7 @@ jobs: community_build_a: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -395,7 +395,7 @@ jobs: community_build_b: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -452,7 +452,7 @@ jobs: community_build_c: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -509,7 +509,7 @@ jobs: test_sbt: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -554,7 +554,7 @@ jobs: test_java8: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -614,7 +614,7 @@ jobs: publish_nightly: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -677,7 +677,7 @@ jobs: nightly_documentation: runs-on: [self-hosted, Linux] 
container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -723,7 +723,7 @@ jobs: contents: write # for actions/create-release to create a release runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -985,7 +985,7 @@ jobs: open_issue_on_failure: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 needs: [nightly_documentation, test_windows_full] # The `failure()` expression is true iff at least one of the dependencies # of this job (including transitive dependencies) has failed. From 3161e06b3509d93d39ae05f52f95ea407e53fb84 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 18 Oct 2024 13:14:05 +0200 Subject: [PATCH 675/827] Add batter-fors doc link to sidebar --- docs/sidebar.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/sidebar.yml b/docs/sidebar.yml index f12e732f1c6f..74aee3dfc668 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -162,6 +162,7 @@ subsection: - page: reference/experimental/modularity.md - page: reference/experimental/typeclasses.md - page: reference/experimental/runtimeChecked.md + - page: reference/experimental/better-fors.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md From 6d040c517682856fad4b4969e08c4eb405da551e Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 18 Oct 2024 17:47:33 +0200 Subject: [PATCH 676/827] fix: Drop copied parent refinements before generating bytecode (#21733) Refinements are copied over from parents, because they might be needed for tracked members that should have more specific types in the child. These members are generated without an implementation and should not be used in runtime. 
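A rough sketch of the shape involved, adapted from the i21213 tests below (the
refinement shown is approximate):

    import scala.language.experimental.modularity
    sealed abstract class Foo(tracked val discriminator: String)
    class Bar extends Foo("bar")
    // Bar's parent type is roughly Foo { val discriminator: "bar" }, and the copied
    // `discriminator` refinement in Bar's template has no implementation of its own,
    // so it must be dropped before bytecode generation.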
possible fix for #21213 --------- Co-authored-by: Dale Wijnand Co-authored-by: Matt Bovel --- compiler/src/dotty/tools/dotc/Compiler.scala | 1 + .../transform/DropParentRefinements.scala | 35 +++++++++++++++++++ .../dotty/tools/dotc/transform/Getters.scala | 2 +- .../src/dotty/tools/dotc/typer/Namer.scala | 8 ++--- .../src/dotty/tools/dotc/typer/Typer.scala | 5 ++- .../transformed/lazy-vals-legacy.check | 2 +- .../printing/transformed/lazy-vals-new.check | 2 +- tests/run/i21213-min.check | 1 + tests/run/i21213-min.scala | 9 +++++ tests/run/i21213.check | 1 + tests/run/i21213.scala | 10 ++++++ 11 files changed, 68 insertions(+), 8 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala create mode 100644 tests/run/i21213-min.check create mode 100644 tests/run/i21213-min.scala create mode 100644 tests/run/i21213.check create mode 100644 tests/run/i21213.scala diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 9b130e7d7804..f9503e4f8554 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -132,6 +132,7 @@ class Compiler { new ElimStaticThis, // Replace `this` references to static objects by global identifiers new CountOuterAccesses) :: // Identify outer accessors that can be dropped List(new DropOuterAccessors, // Drop unused outer accessors + new DropParentRefinements, // Drop parent refinements from a template new CheckNoSuperThis, // Check that supercalls don't contain references to `this` new Flatten, // Lift all inner classes to package scope new TransformWildcards, // Replace wildcards with default values diff --git a/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala b/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala new file mode 100644 index 000000000000..1960568dc505 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala @@ -0,0 +1,35 @@ +package dotty.tools.dotc.transform + +import dotty.tools.dotc.transform.MegaPhase.MiniPhase +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer +import dotty.tools.dotc.typer.Typer + +object DropParentRefinements: + val name: String = "dropParentRefinements" + val description: String = "drop parent refinements from a template" + +/** Drop parent refinements from a template, as they are generated without + * an implementation. These refinements are unusally required for tracked + * members with more specific types. 
+ */ +class DropParentRefinements extends MiniPhase with IdentityDenotTransformer: + thisPhase => + import tpd.* + + override def phaseName: String = DropParentRefinements.name + + override def description: String = DropParentRefinements.description + + override def runsAfterGroupsOf: Set[String] = Set(CountOuterAccesses.name) + + override def changesMembers: Boolean = true // the phase drops parent refinements + + override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = + val newBody = tree.body.filter(!_.hasAttachment(Typer.RefinementFromParent)) + tree.body.foreach { member => + if member.hasAttachment(Typer.RefinementFromParent) then + member.symbol.dropAfter(thisPhase) + } + cpy.Template(tree)(body = newBody) diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index 43289209d146..a58dffa04223 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -103,7 +103,7 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => override def transformValDef(tree: ValDef)(using Context): Tree = val sym = tree.symbol if !sym.is(Method) then return tree - val getterDef = DefDef(sym.asTerm, tree.rhs).withSpan(tree.span) + val getterDef = DefDef(sym.asTerm, tree.rhs).withSpan(tree.span).withAttachmentsFrom(tree) if !sym.is(Mutable) then return getterDef ensureSetter(sym.asTerm) if !newSetters.contains(sym.setter) then return getterDef diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 6167db62fbe0..0849e57b8c7d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -877,16 +877,16 @@ class Namer { typer: Typer => protected def addAnnotations(sym: Symbol): Unit = original match { case original: untpd.MemberDef => lazy val annotCtx = annotContext(original, sym) - original.setMods: + original.setMods: original.mods.withAnnotations : - original.mods.annotations.mapConserve: annotTree => + original.mods.annotations.mapConserve: annotTree => val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) if (cls eq sym) report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) annotTree else - val ann = - if cls.is(JavaDefined) then Checking.checkNamedArgumentForJavaAnnotation(annotTree, cls.asClass) + val ann = + if cls.is(JavaDefined) then Checking.checkNamedArgumentForJavaAnnotation(annotTree, cls.asClass) else annotTree val ann1 = Annotation.deferred(cls)(typedAheadExpr(ann)(using annotCtx)) sym.addAnnotation(ann1) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ce72aac596c0..89ce01231fed 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -78,6 +78,9 @@ object Typer { /** An attachment for GADT constraints that were inferred for a pattern. */ val InferredGadtConstraints = new Property.StickyKey[core.GadtConstraint] + /** Indicates that a definition was copied over from the parent refinements */ + val RefinementFromParent = new Property.StickyKey[Unit] + /** An attachment on a Select node with an `apply` field indicating that the `apply` * was inserted by the Typer. 
*/ @@ -3071,7 +3074,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ( if sym.isType then TypeDef(sym.asType) else if sym.is(Method) then DefDef(sym.asTerm) else ValDef(sym.asTerm) - ).withSpan(impl.span.startPos) + ).withSpan(impl.span.startPos).withAttachment(RefinementFromParent, ()) body ++ refinements case None => body diff --git a/tests/printing/transformed/lazy-vals-legacy.check b/tests/printing/transformed/lazy-vals-legacy.check index 2768a89b9c9e..d8969619d2b2 100644 --- a/tests/printing/transformed/lazy-vals-legacy.check +++ b/tests/printing/transformed/lazy-vals-legacy.check @@ -1,4 +1,4 @@ -[[syntax trees at end of MegaPhase{dropOuterAccessors, checkNoSuperThis, flatten, transformWildcards, moveStatic, expandPrivate, restoreScopes, selectStatic, Collect entry points, collectSuperCalls, repeatableAnnotations}]] // tests/printing/transformed/lazy-vals-legacy.scala +[[syntax trees at end of MegaPhase{dropOuterAccessors, dropParentRefinements, checkNoSuperThis, flatten, transformWildcards, moveStatic, expandPrivate, restoreScopes, selectStatic, Collect entry points, collectSuperCalls, repeatableAnnotations}]] // tests/printing/transformed/lazy-vals-legacy.scala package { @SourceFile("tests/printing/transformed/lazy-vals-legacy.scala") final module class A extends Object { diff --git a/tests/printing/transformed/lazy-vals-new.check b/tests/printing/transformed/lazy-vals-new.check index 05471e5677dc..8997f0109e79 100644 --- a/tests/printing/transformed/lazy-vals-new.check +++ b/tests/printing/transformed/lazy-vals-new.check @@ -1,4 +1,4 @@ -[[syntax trees at end of MegaPhase{dropOuterAccessors, checkNoSuperThis, flatten, transformWildcards, moveStatic, expandPrivate, restoreScopes, selectStatic, Collect entry points, collectSuperCalls, repeatableAnnotations}]] // tests/printing/transformed/lazy-vals-new.scala +[[syntax trees at end of MegaPhase{dropOuterAccessors, dropParentRefinements, checkNoSuperThis, flatten, transformWildcards, moveStatic, expandPrivate, restoreScopes, selectStatic, Collect entry points, collectSuperCalls, repeatableAnnotations}]] // tests/printing/transformed/lazy-vals-new.scala package { @SourceFile("tests/printing/transformed/lazy-vals-new.scala") final module class A extends Object { diff --git a/tests/run/i21213-min.check b/tests/run/i21213-min.check new file mode 100644 index 000000000000..5716ca5987cb --- /dev/null +++ b/tests/run/i21213-min.check @@ -0,0 +1 @@ +bar diff --git a/tests/run/i21213-min.scala b/tests/run/i21213-min.scala new file mode 100644 index 000000000000..0f6aa6f8ddd5 --- /dev/null +++ b/tests/run/i21213-min.scala @@ -0,0 +1,9 @@ +import scala.language.experimental.modularity +import scala.language.future + +sealed abstract class Foo(tracked val discriminator: String) +class Bar extends Foo("bar") + +val bar: Foo = Bar() +object Test extends App: + println(bar.discriminator) diff --git a/tests/run/i21213.check b/tests/run/i21213.check new file mode 100644 index 000000000000..5716ca5987cb --- /dev/null +++ b/tests/run/i21213.check @@ -0,0 +1 @@ +bar diff --git a/tests/run/i21213.scala b/tests/run/i21213.scala new file mode 100644 index 000000000000..ec609afd7da3 --- /dev/null +++ b/tests/run/i21213.scala @@ -0,0 +1,10 @@ +import scala.language.experimental.modularity +import scala.language.future + +enum Foo(tracked val discriminator: String): + case Bar() extends Foo("bar") + case Baz() extends Foo("baz") + +val bar: Foo = Foo.Bar() +object Test extends App: + println(bar.discriminator) From 
34f1c546b968f2d14443e48e268a93c3f19923d1 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Sat, 19 Oct 2024 20:04:40 +0200 Subject: [PATCH 677/827] Fix and future-proof the CI release scripts (#21810) - Forward-ports changes to `publish_release` CI job from `release-3.6.1` branch. * Fix building SDK archives, don't use `build-sdk-packages` job that produced artifacts in SNAPSHOT version * Fix typos introduced in template (forward port from `release-3.6.0`) - Detect and check version used to publish artifacts: * For release builds ensure that `version` is always matching tag used to trigger CI job * For nightly builds ensure that `version` is following the expected pattern --- .github/workflows/ci.yaml | 89 +++++++++++++++++++++------------------ 1 file changed, 47 insertions(+), 42 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d64c02c55042..72f4f5559fd7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -663,6 +663,13 @@ jobs: echo "This build version: $ver" echo "THISBUILD_VERSION=$ver" >> $GITHUB_ENV + - name: Check is version matching pattern + run: | + if ! grep -Eo "3\.[0-9]+\.[0-9]+-RC[0-9]+-bin-[0-9]{8}-[a-zA-Z0-9]{7}-NIGHTLY" <<< "${{ env.THISBUILD_VERSION }}"; then + echo "Version used by compiler to publish nightly release does not match expected pattern" + exit 1 + fi + - name: Check whether not yet published id: not_yet_published continue-on-error: true @@ -766,49 +773,47 @@ jobs: - name: Extract the release tag run : echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV + - name: Check compiler version + shell: bash + run : | + version=$(./project/scripts/sbt "print scala3-compiler-bootstrapped/version" | tail -n1) + echo "This build version: ${version}" + if [ "${version}" != "${{ env.RELEASE_TAG }}" ]; then + echo "Compiler version for this build '${version}', does not match tag: ${{ env.RELEASE_TAG }}" + exit 1 + fi + - name: Prepare the SDKs + shell: bash run : | - function prepareSDK() { + prepareSDK() { distroSuffix="$1" - artifactId="$2" + sbtProject="$2" + distDir="$3" + + # Build binaries + ./project/scripts/sbt "${sbtProject}/Universal/stage" + + outputPath="${distDir}/target/universal/stage" artifactName="scala3-${{ env.RELEASE_TAG }}${distroSuffix}" + zipArchive="${artifactName}.zip" + tarGzArchive="${artifactName}.tar.gz" - downloadedArchive="./artifact.zip" - if [[ -f "${downloadedArchive}" ]]; then - rm "${downloadedArchive}" - fi - - # Download previously prepared SDK bundle - curl -L \ - -H "Authorization: token ${{secrets.GITHUB_TOKEN}}" \ - -H "Accept: application/vnd.github+json" \ - -o "${downloadedArchive}" \ - --retry 5 --retry-delay 10 --retry-connrefused \ - --max-time 600 --connect-timeout 60 \ - https://api.github.com/repos/scala/scala3/actions/artifacts/${artifactId}/zip - - # Repackage content of .zip to .tar.gz and prepare digest - tmpDir="./archive-tmp-dir" - if [[ -d "${tmpDir}" ]]; then - rm -r "${tmpDir}" - fi - mkdir "${tmpDir}" - unzip "${downloadedArchive}" -d "${tmpDir}" - - mv "${downloadedArchive}" "./${artifactName}.zip" - tar -czf "${artifactName}.tar.gz" -C "${tmpDir}" . + cwd=$(pwd) + (cd $outputPath && zip -r ${zipArchive} . && mv ${zipArchive} "${cwd}/") + tar -czf ${tarGzArchive} -C "$outputPath" . 
# Caluclate SHA for each of archive files - for file in "${artifactName}.zip" "${artifactName}.tar.gz"; do + for file in "${zipArchive}" "${tarGzArchive}"; do sha256sum "${file}" > "${file}.sha256" done } - prepareSDK "" ${{needs.build-sdk-package.outputs.universal-id}} - prepareSDK "-aarch64-pc-linux" ${{needs.build-sdk-package.outputs.linux-aarch64-id}} - prepareSDK "-x86_64-pc-linux" ${{needs.build-sdk-package.outputs.linux-x86_64-id}} - prepareSDK "-aarch64-apple-darwin" ${{needs.build-sdk-package.outputs.mac-aarch64-id}} - prepareSDK "-x86_64-apple-darwin" ${{needs.build-sdk-package.outputs.mac-x86_64-id}} - prepareSDK "-x86_64-pc-win32" ${{needs.build-sdk-package.outputs.win-x86_64-id}} + prepareSDK "" "dist" "./dist/" + prepareSDK "-aarch64-pc-linux" "dist-linux-aarch64" "./dist/linux-aarch64/" + prepareSDK "-x86_64-pc-linux" "dist-linux-x86_64" "./dist/linux-x86_64/" + prepareSDK "-aarch64-apple-darwin" "dist-mac-aarch64" "./dist/mac-aarch64/" + prepareSDK "-x86_64-apple-darwin" "dist-mac-x86_64" "./dist/mac-x86_64/" + prepareSDK "-x86_64-pc-win32" "dist-win-x86_64" "./dist/win-x86_64/" # Create the GitHub release - name: Create GitHub Release @@ -850,7 +855,7 @@ jobs: # template("Linux x86-64", "-x86_64-pc-linux"), # template("Linux aarch64", "-aarch64-pc-linux"), # template("Mac x86-64", "-x86_64-apple-darwin"), - # template("Mac aarch64", "-aarcb64-apple-darwin"), + # template("Mac aarch64", "-aarch64-apple-darwin"), # template("Windows x86_64", "-x86_64-pc-win32") # ).foreach(println) # Universal @@ -1016,8 +1021,8 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.zip - asset_name: scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip asset_content_type: application/zip - name: Upload zip archive SHA to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 @@ -1025,8 +1030,8 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.zip.sha256 - asset_name: scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.zip.sha256 + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip.sha256 asset_content_type: text/plain - name: Upload tar.gz archive to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 @@ -1034,8 +1039,8 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.tar.gz + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz asset_content_type: application/gzip - name: Upload tar.gz archive SHA to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 @@ -1043,8 +1048,8 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.tar.gz.sha256 - asset_name: scala3-${{ env.RELEASE_TAG }}-aarcb64-apple-darwin.tar.gz.sha256 + asset_path: 
./scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz.sha256 asset_content_type: text/plain From bcbdb5ef52c93dd34b93294a9774fdd57d9fb2aa Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Sat, 19 Oct 2024 20:04:59 +0200 Subject: [PATCH 678/827] Prepare development setup for 3.6.2 (#21811) - Set baseVersiont o 3.6.2 - Set referenceVersion to 3.6.1 and move `src/boostrapped` files to `src` - Set mimaPreviousArtifactVersion to 3.6.1 (exception from the rule due to broken 3.6.0 release) --- .../{src-bootstrapped => src}/scala/NamedTuple.scala | 0 project/Build.scala | 10 ++++++---- 2 files changed, 6 insertions(+), 4 deletions(-) rename library/{src-bootstrapped => src}/scala/NamedTuple.scala (100%) diff --git a/library/src-bootstrapped/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala similarity index 100% rename from library/src-bootstrapped/scala/NamedTuple.scala rename to library/src/scala/NamedTuple.scala diff --git a/project/Build.scala b/project/Build.scala index 5b04d623f122..3433053629f7 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -91,11 +91,12 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.5.2-RC1" + val referenceVersion = "3.6.1" - val baseVersion = "3.6.1" + val baseVersion = "3.6.2" // Will be required by some automation later - val prereleaseVersion = s"$baseVersion-RC1" + // TODO: Introduce automation and handling for RC versions before 3.6.2-RC1 + // val prereleaseVersion = s"$baseVersion-RC1" // LTS or Next val versionLine = "Next" @@ -113,8 +114,9 @@ object Build { * For a baseVersion `3.M.P` the mimaPreviousDottyVersion should be set to: * - `3.M.0` if `P > 0` * - `3.(M-1).0` if `P = 0` + * 3.6.1 is an exception from this rule - 3.6.0 was a broken release */ - val mimaPreviousDottyVersion = "3.5.0" + val mimaPreviousDottyVersion = "3.6.1" /** LTS version against which we check binary compatibility. * From 40956e15df0048d76d8f4e5bebac920743e00cdc Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Sat, 19 Oct 2024 23:10:00 +0200 Subject: [PATCH 679/827] Downgrade referenceVersion to 3.6.0 and add notes how referenceVersion should be set (#21813) The main branch should always use a compiler with experimental tasty - it's required for non_boostrapped tests to pass. It's required because we cannot consume stable version of tasty from it's experimental subversion (it's assumed that stable version > experimental version for the same major/minor pair) We pick 3.6.0 (released by mistake during 3.6.0-RC1) because that's the last non-stable version pushed to Maven. Typically we would use the last RC version [test_non_bootstrapped] --- project/Build.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 3433053629f7..464ba4a86411 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -91,7 +91,13 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.6.1" + /** Version of the Scala compiler used to build the artifacts. 
+ * Reference version should track the latest version pushed to Maven: + * - In main branch it should be the last RC version (using experimental TASTy required for non-bootstrapped tests) + * - In release branch it should be the last stable release + * 3.6.0-RC1 was released as 3.6.0 - it's having and experimental TASTy version + */ + val referenceVersion = "3.6.0" val baseVersion = "3.6.2" // Will be required by some automation later From 654bc6c889251a9bb895d077ac43850ab234ffe4 Mon Sep 17 00:00:00 2001 From: Jentsch Date: Sat, 19 Oct 2024 22:19:46 +0000 Subject: [PATCH 680/827] Fix scaladoc graph highlight background color in dark mode Also fix two typos --- scaladoc/resources/dotty_res/scripts/ux.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scaladoc/resources/dotty_res/scripts/ux.js b/scaladoc/resources/dotty_res/scripts/ux.js index 7b875fbcef8e..97f9bf14939d 100644 --- a/scaladoc/resources/dotty_res/scripts/ux.js +++ b/scaladoc/resources/dotty_res/scripts/ux.js @@ -10,7 +10,7 @@ const attrsToCopy = [ /** * @typedef {Object} SavedPageState - * @property {Strign} mainDiv + * @property {String} mainDiv * @property {String} leftColumn * @property {String} title * @property {Record} attrs @@ -322,7 +322,7 @@ function attachAllListeners() { if (location.hash) { var target = location.hash.substring(1); - // setting the 'expand' class on the top-level container causes undesireable styles + // setting the 'expand' class on the top-level container causes undesirable styles // to apply to the top-level docs, so we avoid this logic for that element. if (target != "container") { var selected = document.getElementById(location.hash.substring(1)); @@ -568,7 +568,7 @@ function showGraph() { .attr("offset", "30%"); radialGradient .append("stop") - .attr("stop-color", "var(--background-default)") + .attr("stop-color", "var(--background-main)") .attr("offset", "100%"); var inner = svg.append("g"); From 022b1ffa21631fb96e424942ba12b49f6996ec77 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Sun, 20 Oct 2024 08:45:12 +0200 Subject: [PATCH 681/827] Use bash shell when using here-strings (#21817) Screenshot 2024-10-20 at 07 14 49 [positive](https://github.com/WojciechMazur/dotty/actions/runs/11416876799/job/31768568868) test in #21810 also uses `bash` instead of `sh` Closes #21815 --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 72f4f5559fd7..91086858c514 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -664,6 +664,7 @@ jobs: echo "THISBUILD_VERSION=$ver" >> $GITHUB_ENV - name: Check is version matching pattern + shell: bash run: | if ! 
grep -Eo "3\.[0-9]+\.[0-9]+-RC[0-9]+-bin-[0-9]{8}-[a-zA-Z0-9]{7}-NIGHTLY" <<< "${{ env.THISBUILD_VERSION }}"; then echo "Version used by compiler to publish nightly release does not match expected pattern" From e8136b724e74c8c3661a92f2489b52a83b4aed22 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 22 Oct 2024 10:01:21 +0100 Subject: [PATCH 682/827] Avoid orphan param from default arg --- compiler/src/dotty/tools/dotc/typer/Namer.scala | 5 +++++ tests/pos/i21558.orig.scala | 10 ++++++++++ tests/pos/i21558.scala | 8 ++++++++ 3 files changed, 23 insertions(+) create mode 100644 tests/pos/i21558.orig.scala create mode 100644 tests/pos/i21558.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 6167db62fbe0..b1b94288bcbf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -2130,6 +2130,11 @@ class Namer { typer: Typer => val pt = inherited.orElse(expectedDefaultArgType).orElse(fallbackProto).widenExpr val tp = typedAheadRhs(pt).tpe if (defaultTp eq pt) && (tp frozen_<:< defaultTp) then + // See i21558, the default argument new A(1.0) is of type A[?T] + // With an uninterpolated, invariant ?T type variable. + // So before we return the default getter parameter type (A[? <: Double]) + // we want to force ?T to instantiate, so it's poly is removed from the constraint + isFullyDefined(tp, ForceDegree.all) // When possible, widen to the default getter parameter type to permit a // larger choice of overrides (see `default-getter.scala`). // For justification on the use of `@uncheckedVariance`, see diff --git a/tests/pos/i21558.orig.scala b/tests/pos/i21558.orig.scala new file mode 100644 index 000000000000..3a955920a1c7 --- /dev/null +++ b/tests/pos/i21558.orig.scala @@ -0,0 +1,10 @@ +class Base +class A[T <: Float](val f: T) extends Base + +def test() = { + m1(new A(m2())); + +} + +def m1(x: Base) = {} +def m2(p: A[? <: Float] = new A(1.0f)): Int = 1 diff --git a/tests/pos/i21558.scala b/tests/pos/i21558.scala new file mode 100644 index 000000000000..80168992cea9 --- /dev/null +++ b/tests/pos/i21558.scala @@ -0,0 +1,8 @@ +class Base +class A[T <: Double](val f: T) extends Base + +class Test: + def test() = m1(new A(m2())) + + def m1(x: Base): Unit = {} + def m2(p: A[? 
<: Double] = new A(1.0)): Int = 2 From 959f68c1259aa12734432c990d71d3fa2d99427e Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 21 Oct 2024 18:22:43 +0200 Subject: [PATCH 683/827] Warn when named tuples resemble assignments MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Nicolas Stucki <3648029+nicolasstucki@users.noreply.github.com> Co-Authored-By: Oliver Bračevac --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 9 ++++++--- .../src/dotty/tools/dotc/reporting/ErrorMessageID.scala | 1 + compiler/src/dotty/tools/dotc/reporting/messages.scala | 9 +++++++++ tests/warn/21681.check | 7 +++++++ tests/warn/21681.scala | 3 +++ tests/warn/21770.check | 7 +++++++ tests/warn/21770.scala | 5 +++++ 7 files changed, 38 insertions(+), 3 deletions(-) create mode 100644 tests/warn/21681.check create mode 100644 tests/warn/21681.scala create mode 100644 tests/warn/21770.check create mode 100644 tests/warn/21770.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index e1a6b97fc7d3..482210845fea 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1605,9 +1605,10 @@ object desugar { /** Translate tuple expressions * - * () ==> () - * (t) ==> t - * (t1, ..., tN) ==> TupleN(t1, ..., tN) + * () ==> () + * (t) ==> t + * (t1, ..., tN) ==> TupleN(t1, ..., tN) + * (n1 = t1, ..., nN = tN) ==> NamedTuple.build[(n1, ..., nN)]()(TupleN(t1, ..., tN)) */ def tuple(tree: Tuple, pt: Type)(using Context): Tree = var elems = checkWellFormedTupleElems(tree.trees) @@ -1638,6 +1639,8 @@ object desugar { if ctx.mode.is(Mode.Type) then AppliedTypeTree(ref(defn.NamedTupleTypeRef), namesTuple :: tup :: Nil) else + if names.length == 1 && ctx.scope.lookup(names.head).is(Flags.Mutable) then + report.migrationWarning(AmbiguousNamedTupleAssignment(names.head, elemValues.head), tree.srcPos) Apply( Apply( TypeApply( diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index db523c879ea2..6d0a85b3ef0f 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -216,6 +216,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case FinalLocalDefID // errorNumber: 200 case NonNamedArgumentInJavaAnnotationID // errorNumber: 201 case QuotedTypeMissingID // errorNumber: 202 + case AmbiguousNamedTupleAssignmentID // errorNumber: 203 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 97cd70113c2e..3b7fba1cb52d 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3343,3 +3343,12 @@ final class QuotedTypeMissing(tpe: Type)(using Context) extends StagingMessage(Q |""" end QuotedTypeMissing + +final class AmbiguousNamedTupleAssignment(key: Name, value: untpd.Tree)(using Context) extends SyntaxMsg(AmbiguousNamedTupleAssignmentID): + override protected def msg(using Context): String = + i"""Ambiguous syntax: this is interpreted as a named tuple with one element, + |not as an assignment. 
+ | + |To assign a value, use curly braces: `{${key} = ${value}}`.""" + + override protected def explain(using Context): String = "" diff --git a/tests/warn/21681.check b/tests/warn/21681.check new file mode 100644 index 000000000000..e86ce4e36134 --- /dev/null +++ b/tests/warn/21681.check @@ -0,0 +1,7 @@ +-- [E203] Syntax Migration Warning: tests/warn/21681.scala:3:2 --------------------------------------------------------- +3 | (age = 29) // warn + | ^^^^^^^^^^ + | Ambiguous syntax: this is interpreted as a named tuple with one element, + | not as an assignment. + | + | To assign a value, use curly braces: `{age = 29}`. diff --git a/tests/warn/21681.scala b/tests/warn/21681.scala new file mode 100644 index 000000000000..76a19c96e1cb --- /dev/null +++ b/tests/warn/21681.scala @@ -0,0 +1,3 @@ +def main() = + var age: Int = 28 + (age = 29) // warn diff --git a/tests/warn/21770.check b/tests/warn/21770.check new file mode 100644 index 000000000000..0899f11d6ca5 --- /dev/null +++ b/tests/warn/21770.check @@ -0,0 +1,7 @@ +-- [E203] Syntax Migration Warning: tests/warn/21770.scala:5:9 --------------------------------------------------------- +5 | f(i => (cache = Some(i))) // warn + | ^^^^^^^^^^^^^^^^^ + | Ambiguous syntax: this is interpreted as a named tuple with one element, + | not as an assignment. + | + | To assign a value, use curly braces: `{cache = Some(i)}`. diff --git a/tests/warn/21770.scala b/tests/warn/21770.scala new file mode 100644 index 000000000000..9696a31d6ba8 --- /dev/null +++ b/tests/warn/21770.scala @@ -0,0 +1,5 @@ +def f(g: Int => Unit) = g(0) + +def test = + var cache: Option[Int] = None + f(i => (cache = Some(i))) // warn From a00a806361666822c2a8098c2f45f8b93df15483 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Mon, 21 Oct 2024 19:51:53 +0200 Subject: [PATCH 684/827] Move AmbiguousNamedTupleAssignment check to Typer MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Nicolas Stucki <3648029+nicolasstucki@users.noreply.github.com> Co-Authored-By: Oliver Bračevac --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 2 -- compiler/src/dotty/tools/dotc/typer/Typer.scala | 13 +++++++++++++ tests/pos/21681d.scala | 16 ++++++++++++++++ tests/warn/21681b.check | 7 +++++++ tests/warn/21681b.scala | 3 +++ 5 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 tests/pos/21681d.scala create mode 100644 tests/warn/21681b.check create mode 100644 tests/warn/21681b.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 482210845fea..e66c71731b4f 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1639,8 +1639,6 @@ object desugar { if ctx.mode.is(Mode.Type) then AppliedTypeTree(ref(defn.NamedTupleTypeRef), namesTuple :: tup :: Nil) else - if names.length == 1 && ctx.scope.lookup(names.head).is(Flags.Mutable) then - report.migrationWarning(AmbiguousNamedTupleAssignment(names.head, elemValues.head), tree.srcPos) Apply( Apply( TypeApply( diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 89ce01231fed..e7bd7d874634 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3398,6 +3398,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Translate tuples of all arities */ def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = val 
tree1 = desugar.tuple(tree, pt) + checkAmbiguousNamedTupleAssignment(tree) if tree1 ne tree then typed(tree1, pt) else val arity = tree.trees.length @@ -3423,6 +3424,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) + /** Checks if `tree` is a named tuple with one element that could be + * interpreted as an assignment, such as `(x = 1)`. If so, issues a warning. + */ + def checkAmbiguousNamedTupleAssignment(tree: untpd.Tuple)(using Context): Unit = + tree.trees match + case List(NamedArg(name, value)) => + val typedName = typedIdent(untpd.Ident(name), WildcardType) + val sym = typedName.symbol + if sym.exists && (sym.is(Flags.Mutable) || sym.setter.exists) then + report.migrationWarning(AmbiguousNamedTupleAssignment(name, value), tree.srcPos) + case _ => () + /** Retrieve symbol attached to given tree */ protected def retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.removeAttachment(SymOfTree) match { case Some(sym) => diff --git a/tests/pos/21681d.scala b/tests/pos/21681d.scala new file mode 100644 index 000000000000..97a01dec74aa --- /dev/null +++ b/tests/pos/21681d.scala @@ -0,0 +1,16 @@ +def test1() = + class Person: + def age: Int = ??? + def age_=(x: Int): Unit = ??? + + val person = Person() + + (person.age = 29) // no warn (interpreted as `person.age_=(29)`) + +def test2() = + class Person: + var age: Int = 28 + + val person = Person() + + (person.age = 29) // no warn (interpreted as `person.age_=(29)`) diff --git a/tests/warn/21681b.check b/tests/warn/21681b.check new file mode 100644 index 000000000000..32760e00ebb6 --- /dev/null +++ b/tests/warn/21681b.check @@ -0,0 +1,7 @@ +-- [E203] Syntax Migration Warning: tests/warn/21681b.scala:3:2 -------------------------------------------------------- +3 | (age = 29) // warn + | ^^^^^^^^^^ + | Ambiguous syntax: this is interpreted as a named tuple with one element, + | not as an assignment. + | + | To assign a value, use curly braces: `{age = 29}`. 
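
For readers following these commits, the behaviour exercised by the test fixtures above can be summarised in a minimal user-level sketch. The file and names below are illustrative only (not part of the patch), and on compiler versions where named tuples are still experimental the language import shown is assumed to be required:

    // demo.scala -- hypothetical example, not part of these commits
    import scala.language.experimental.namedTuples

    def demo() =
      var age: Int = 28
      (age = 29)                      // warns: interpreted as a one-element named tuple, not an assignment
      {age = 29}                      // no warning: ordinary assignment, as the migration message suggests
      val record = (age = 30, id = 7) // no warning: a two-element named tuple is unambiguous
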
diff --git a/tests/warn/21681b.scala b/tests/warn/21681b.scala new file mode 100644 index 000000000000..710d69b0dd23 --- /dev/null +++ b/tests/warn/21681b.scala @@ -0,0 +1,3 @@ +object Test: + var age: Int = 28 + (age = 29) // warn From d1e68f19c99d4171f0c4a6f17cef0ceb72a20bd8 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Tue, 22 Oct 2024 14:09:45 +0200 Subject: [PATCH 685/827] Try to type as an `Assign` --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 8 +++++--- tests/warn/21681c.check | 7 +++++++ tests/warn/21681c.scala | 5 +++++ 3 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 tests/warn/21681c.check create mode 100644 tests/warn/21681c.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index e7bd7d874634..23855455e67b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3430,9 +3430,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def checkAmbiguousNamedTupleAssignment(tree: untpd.Tuple)(using Context): Unit = tree.trees match case List(NamedArg(name, value)) => - val typedName = typedIdent(untpd.Ident(name), WildcardType) - val sym = typedName.symbol - if sym.exists && (sym.is(Flags.Mutable) || sym.setter.exists) then + val tmpCtx = ctx.fresh.setNewTyperState() + typedAssign(untpd.Assign(untpd.Ident(name), value), WildcardType)(using tmpCtx) + if !tmpCtx.reporter.hasErrors then + // If there are no errors typing the above, then the named tuple is + // ambiguous and we issue a warning. report.migrationWarning(AmbiguousNamedTupleAssignment(name, value), tree.srcPos) case _ => () diff --git a/tests/warn/21681c.check b/tests/warn/21681c.check new file mode 100644 index 000000000000..11c427f87cfe --- /dev/null +++ b/tests/warn/21681c.check @@ -0,0 +1,7 @@ +-- [E203] Syntax Migration Warning: tests/warn/21681c.scala:5:2 -------------------------------------------------------- +5 | (age = 29) // warn + | ^^^^^^^^^^ + | Ambiguous syntax: this is interpreted as a named tuple with one element, + | not as an assignment. + | + | To assign a value, use curly braces: `{age = 29}`. diff --git a/tests/warn/21681c.scala b/tests/warn/21681c.scala new file mode 100644 index 000000000000..5e2eae11708c --- /dev/null +++ b/tests/warn/21681c.scala @@ -0,0 +1,5 @@ +object Test: + def age: Int = ??? 
+ def age_=(x: Int): Unit = () + age = 29 + (age = 29) // warn From ecc332fe6feccb893226c5705038772fb36b9f9b Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Tue, 22 Oct 2024 18:56:56 +0200 Subject: [PATCH 686/827] Scala 2 forwardport: `-Yprofile-trace` (#19897) * Scala 2 tracing profiler backport from https://github.com/scala/scala/pull/7364 extended with more Scala 3 idiomatic syntax based on inlined methods * Fixes the `context.profiler` which could have been `null`, now it's initially a NoOp Profiler * Check dependencies of `-Yprofile-enabled` dependent tasks, now we get an error if `-Yprofile-trace` is set without `-Yprofile-enabled` --- compiler/src/dotty/tools/dotc/Compiler.scala | 5 +- compiler/src/dotty/tools/dotc/Run.scala | 7 +- .../tools/dotc/config/ScalaSettings.scala | 10 +- .../dotty/tools/dotc/config/Settings.scala | 15 +- .../src/dotty/tools/dotc/core/Contexts.scala | 1 + .../src/dotty/tools/dotc/core/Phases.scala | 2 +- .../dotty/tools/dotc/core/SymbolLoaders.scala | 12 +- .../tools/dotc/profile/ChromeTrace.scala | 190 +++++++++++++ .../dotty/tools/dotc/profile/FileUtils.scala | 204 ++++++++++++++ .../dotty/tools/dotc/profile/Profiler.scala | 256 ++++++++++++++---- .../dotc/profile/ThreadPoolFactory.scala | 4 +- .../dotty/tools/dotc/transform/Splicer.scala | 5 +- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 8 +- .../dotty/tools/dotc/typer/TyperPhase.scala | 2 +- .../tools/dotc/profile/ChromeTraceTest.scala | 93 +++++++ .../tools/dotc/profile/FileUtilsTest.scala | 91 +++++++ .../pc/completions/CompletionProvider.scala | 6 +- 18 files changed, 835 insertions(+), 78 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala create mode 100644 compiler/src/dotty/tools/dotc/profile/FileUtils.scala create mode 100644 compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala create mode 100644 compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index f9503e4f8554..d8ba1ab5dc2e 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -152,7 +152,10 @@ class Compiler { List(new GenBCode) :: // Generate JVM bytecode Nil - var runId: Int = 1 + // TODO: Initially 0, so that the first nextRunId call would return InitialRunId == 1 + // Changing the initial runId from 1 to 0 makes the scala2-library-bootstrap fail to compile, + // when the underlying issue is fixed, please update dotc.profiler.RealProfiler.chromeTrace logic + private var runId: Int = 1 def nextRunId: Int = { runId += 1; runId } diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 11a0430480d9..50fd668c7696 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -339,10 +339,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if phaseWillRun then Stats.trackTime(s"phase time ms/$phase") { val start = System.currentTimeMillis - val profileBefore = profiler.beforePhase(phase) - try units = phase.runOn(units) - catch case _: InterruptedException => cancelInterrupted() - profiler.afterPhase(phase, profileBefore) + profiler.onPhase(phase): + try units = phase.runOn(units) + catch case _: InterruptedException => cancelInterrupted() if (ctx.settings.Xprint.value.containsPhase(phase)) for (unit <- units) def printCtx(unit: CompilationUnit) = 
phase.printingContext( diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 72a051ea8154..6ef33d24f8be 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -441,12 +441,10 @@ private sealed trait YSettings: val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") val YprofileEnabled: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprofile-enabled", "Enable profiling.") - val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") - //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "", depends = List(YprofileEnabled -> true)) + val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer", depends = List(YprofileEnabled -> true)) + val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_", depends = List(YprofileEnabled -> true)) + val YprofileTrace: Setting[String] = StringSetting(ForkSetting, "Yprofile-trace", "file", s"Capture trace of compilation in JSON Chrome Trace format to the specified file. This option requires ${YprofileEnabled.name}. The output file can be visualized using https://ui.perfetto.dev/.", "", depends = List(YprofileEnabled -> true)) val YbestEffort: Setting[Boolean] = BooleanSetting(ForkSetting, "Ybest-effort", "Enable best-effort compilation attempting to produce betasty to the META-INF/best-effort directory, regardless of errors, as part of the pickler phase.") val YwithBestEffortTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Ywith-best-effort-tasty", "Allow to compile using best-effort tasty files. 
If such file is used, the compiler will stop after the pickler phase.") diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index a5fc6a64aa45..f85f2cc57de4 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -73,6 +73,11 @@ object Settings: def validateSettingString(name: String): Unit = assert(settingCharacters.matches(name), s"Setting string $name contains invalid characters") + /** List of setting-value pairs that are required for another setting to be valid. + * For example, `s = Setting(..., depends = List(YprofileEnabled -> true))` + * means that `s` requires `YprofileEnabled` to be set to `true`. + */ + type SettingDependencies = List[(Setting[?], Any)] case class Setting[T: ClassTag] private[Settings] ( category: SettingCategory, @@ -83,7 +88,7 @@ object Settings: choices: Option[Seq[?]] = None, prefix: Option[String] = None, aliases: List[String] = Nil, - depends: List[(Setting[?], Any)] = Nil, + depends: SettingDependencies = Nil, ignoreInvalidArgs: Boolean = false, preferPrevious: Boolean = false, propertyClass: Option[Class[?]] = None, @@ -385,8 +390,8 @@ object Settings: def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false, deprecation: Option[Deprecation] = None, ignoreInvalidArgs: Boolean = false): Setting[Boolean] = publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious, deprecation = deprecation, ignoreInvalidArgs = ignoreInvalidArgs)) - def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation)) + def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None, depends: SettingDependencies = Nil): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation, depends = depends)) def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs, deprecation = deprecation)) @@ -412,8 +417,8 @@ object Settings: def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) - def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation)) + def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", 
aliases: List[String] = Nil, deprecation: Option[Deprecation] = None, depends: SettingDependencies = Nil): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation, depends = depends)) def PrefixSetting(category: SettingCategory, name0: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = val name = prependName(name0) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 388720e7f3f4..d69c7408d0b1 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -769,6 +769,7 @@ object Contexts { .updated(settingsStateLoc, settingsGroup.defaultState) .updated(notNullInfosLoc, Nil) .updated(compilationUnitLoc, NoCompilationUnit) + .updated(profilerLoc, Profiler.NoOp) c._searchHistory = new SearchRoot c._gadtState = GadtState(GadtConstraint.empty) c diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 85df3f9f2c18..015cf6fc0f2c 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -370,7 +370,7 @@ object Phases { // Test that we are in a state where we need to check if the phase should be skipped for a java file, // this prevents checking the expensive `unit.typedAsJava` unnecessarily. val doCheckJava = skipIfJava && !isAfterLastJavaPhase - for unit <- units do + for unit <- units do ctx.profiler.onUnit(this, unit): given unitCtx: Context = runCtx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports if ctx.run.enterUnit(unit) then try diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 51e6a5e6138a..5690720a1b3f 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -8,7 +8,7 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal import dotty.tools.dotc.classpath.FileUtils.{hasTastyExtension, hasBetastyExtension} -import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } +import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile, NoAbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions import Contexts.*, Symbols.*, Flags.*, SymDenotations.*, Types.*, Scopes.*, Names.* @@ -333,7 +333,15 @@ abstract class SymbolLoader extends LazyType { self => def description(using Context): String = s"proxy to ${self.description}" } - override def complete(root: SymDenotation)(using Context): Unit = { + private inline def profileCompletion[T](root: SymDenotation)(inline body: T)(using Context): T = { + val sym = root.symbol + def associatedFile = root.symbol.associatedFile match + case file: AbstractFile => file + case _ => NoAbstractFile + ctx.profiler.onCompletion(sym, associatedFile)(body) + } + + override def complete(root: SymDenotation)(using Context): Unit = profileCompletion(root) { def signalError(ex: Exception): Unit = { if (ctx.debug) ex.printStackTrace() val msg = ex.getMessage() diff --git a/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala new file mode 100644 index 000000000000..c33039f46398 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala @@ -0,0 +1,190 @@ +// 
Scala 2 compiler backport of https://github.com/scala/scala/pull/7364 +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.io.Closeable +import java.lang.management.ManagementFactory +import java.nio.file.{Files, Path} +import java.util +import java.util.concurrent.TimeUnit + +import scala.collection.mutable + +object ChromeTrace { + private object EventType { + final val Start = "B" + final val Instant = "I" + final val End = "E" + final val Complete = "X" + + final val Counter = "C" + + final val AsyncStart = "b" + final val AsyncInstant = "n" + final val AsyncEnd = "e" + } +} + +/** Allows writing a subset of captrue traces based on https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview# + * Can be visualized using https://ui.perfetto.dev/, Chrome's about://tracing (outdated) or the tooling in https://www.google.com.au/search?q=catapult+tracing&oq=catapult+tracing+&aqs=chrome..69i57.3974j0j4&sourceid=chrome&ie=UTF-8 */ +final class ChromeTrace(f: Path) extends Closeable { + import ChromeTrace.EventType + private val traceWriter = FileUtils.newAsyncBufferedWriter(f) + private val context = mutable.Stack[JsonContext](TopContext) + private val tidCache = new ThreadLocal[String]() { + override def initialValue(): String = "%05d".format(Thread.currentThread().getId()) + } + objStart() + fld("traceEvents") + context.push(ValueContext) + arrStart() + traceWriter.newLine() + + private val pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "") + + override def close(): Unit = { + arrEnd() + objEnd() + context.pop() + tidCache.remove() + traceWriter.close() + } + + def traceDurationEvent(name: String, startNanos: Long, durationNanos: Long, tid: String = this.tid(), pidSuffix: String = ""): Unit = { + val durationMicros = nanosToMicros(durationNanos) + val startMicros = nanosToMicros(startNanos) + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Complete) + str("tid", tid) + writePid(pidSuffix) + lng("ts", startMicros) + lng("dur", durationMicros) + objEnd() + traceWriter.newLine() + } + + private def writePid(pidSuffix: String) = { + if (pidSuffix == "") + str("pid", pid) + else + str2("pid", pid, "-", pidSuffix) + } + + def traceCounterEvent(name: String, counterName: String, count: Long, processWide: Boolean): Unit = { + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Counter) + str("tid", tid()) + writePid(pidSuffix = if (processWide) "" else tid()) + lng("ts", microTime()) + fld("args") + objStart() + lng(counterName, count) + objEnd() + objEnd() + traceWriter.newLine() + } + + def traceDurationEventStart(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.Start, cat, name, colour, pidSuffix) + def traceDurationEventEnd(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.End, cat, name, colour, pidSuffix) + + private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String = ""): Unit = { + objStart() + str("cat", cat) + str("name", name) + str("ph", eventType) + 
writePid(pidSuffix) + str("tid", tid()) + lng("ts", microTime()) + if (colour != "") { + str("cname", colour) + } + objEnd() + traceWriter.newLine() + } + + private def tid(): String = tidCache.get() + + private def nanosToMicros(t: Long): Long = TimeUnit.NANOSECONDS.toMicros(t) + + private def microTime(): Long = nanosToMicros(System.nanoTime()) + + private sealed abstract class JsonContext + private case class ArrayContext(var first: Boolean) extends JsonContext + private case class ObjectContext(var first: Boolean) extends JsonContext + private case object ValueContext extends JsonContext + private case object TopContext extends JsonContext + + private def str(name: String, value: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def str2(name: String, value: String, valueContinued1: String, valueContinued2: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write(valueContinued1) // This assumes no escaping is needed + traceWriter.write(valueContinued2) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def lng(name: String, value: Long): Unit = { + fld(name) + traceWriter.write(String.valueOf(value)) + traceWriter.write("") + } + private def objStart(): Unit = { + context.top match { + case ac @ ArrayContext(first) => + if (first) ac.first = false + else traceWriter.write(",") + case _ => + } + context.push(ObjectContext(true)) + traceWriter.write("{") + } + private def objEnd(): Unit = { + traceWriter.write("}") + context.pop() + } + private def arrStart(): Unit = { + traceWriter.write("[") + context.push(ArrayContext(true)) + } + private def arrEnd(): Unit = { + traceWriter.write("]") + context.pop() + } + + private def fld(name: String) = { + val topContext = context.top + topContext match { + case oc @ ObjectContext(first) => + if (first) oc.first = false + else traceWriter.write(",") + case context => + throw new IllegalStateException("Wrong context: " + context) + } + traceWriter.write("\"") + traceWriter.write(name) + traceWriter.write("\"") + traceWriter.write(":") + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/FileUtils.scala b/compiler/src/dotty/tools/dotc/profile/FileUtils.scala new file mode 100644 index 000000000000..4aec428c05bf --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/FileUtils.scala @@ -0,0 +1,204 @@ +// Scala 2 compiler backport of https://github.com/scala/scala/pull/7364 + +/* +* Scala (https://www.scala-lang.org) +* +* Copyright EPFL and Lightbend, Inc. +* +* Licensed under Apache License 2.0 +* (http://www.apache.org/licenses/LICENSE-2.0). +* +* See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.io.{BufferedWriter, IOException, OutputStreamWriter, Writer} +import java.nio.CharBuffer +import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets} +import java.nio.file.{Files, OpenOption, Path} +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.atomic.AtomicBoolean + + +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Promise} +import scala.util.{Failure, Success} +import scala.annotation.internal.sharable + +object FileUtils { + def newAsyncBufferedWriter(path: Path, charset: Charset = StandardCharsets.UTF_8.nn, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { + val encoder: CharsetEncoder = charset.newEncoder + val writer = new OutputStreamWriter(Files.newOutputStream(path, options: _*), encoder) + newAsyncBufferedWriter(new BufferedWriter(writer), threadsafe) + } + def newAsyncBufferedWriter(underlying: Writer, threadsafe: Boolean): LineWriter = { + val async = new AsyncBufferedWriter(underlying) + if (threadsafe) new ThreadsafeWriter(async) else async + } + private val NO_OPTIONS = new Array[OpenOption](0) + + sealed abstract class LineWriter extends Writer { + def newLine(): Unit + } + private class ThreadsafeWriter(val underlying: AsyncBufferedWriter) extends LineWriter { + lock = underlying + override def write(c: Int): Unit = + lock.synchronized (underlying.write(c)) + + override def write(cbuf: Array[Char]): Unit = + lock.synchronized (underlying.write(cbuf)) + + override def write(cbuf: Array[Char], off: Int, len: Int): Unit = + lock.synchronized (underlying.write(cbuf, off, len)) + + override def write(str: String): Unit = + lock.synchronized (underlying.write(str)) + + override def write(str: String, off: Int, len: Int): Unit = + lock.synchronized (underlying.write(str, off, len)) + + override def flush(): Unit = + lock.synchronized (underlying.flush()) + + override def close(): Unit = + lock.synchronized (underlying.close()) + + override def newLine(): Unit = + lock.synchronized (underlying.newLine()) + + } + + private object AsyncBufferedWriter { + @sharable private val Close = CharBuffer.allocate(0) + @sharable private val Flush = CharBuffer.allocate(0) + } + private class AsyncBufferedWriter(val underlying: Writer, bufferSize : Int = 4096) extends LineWriter { + private var current: CharBuffer = allocate + override def write(c: Int): Unit = super.write(c) + private def flushAsync(): Unit = { + background.ensureProcessed(current) + current = allocate + } +// allocate or reuse a CharArray which is guaranteed to have a backing array + private def allocate: CharBuffer = { + val reused = background.reuseBuffer + if (reused eq null) CharBuffer.allocate(bufferSize) + else { + //we don't care about race conditions + background.reuseBuffer = null + reused.clear() + reused + } + } + + override def write(cbuf: Array[Char], initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= capacity) { + current.put(cbuf, offset, length) + length = 0 + } else { + current.put(cbuf, offset, capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + override def write(s: String, initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= 
capacity) { + current.put(s, offset, offset + length) + length = 0 + } else { + current.put(s, offset, offset + capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + def newLine(): Unit = write(scala.util.Properties.lineSeparator) + + /** slightly breaks the flush contract in that the flush is not complete when the method returns */ + override def flush(): Unit = { + flushAsync() + } + + override def close(): Unit = { + background.ensureProcessed(current) + background.ensureProcessed(AsyncBufferedWriter.Close) + current = null + Await.result(background.asyncStatus.future, Duration.Inf) + underlying.close() + } + private object background extends Runnable{ + + import scala.concurrent.ExecutionContext.Implicits.global + + private val pending = new LinkedBlockingQueue[CharBuffer] + //a failure detected will case an Failure, Success indicates a close + val asyncStatus = Promise[Unit]() + private val scheduled = new AtomicBoolean + @volatile var reuseBuffer: CharBuffer = _ + + def ensureProcessed(buffer: CharBuffer): Unit = { + if (asyncStatus.isCompleted) { + asyncStatus.future.value.get match { + case Success(()) => throw new IllegalStateException("closed") + case Failure(t) => throw new IOException("async failure", t) + } + } + + //order is essential - add to the queue before the CAS + pending.add(buffer) + if (scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + + def run(): Unit = { + try { + while (!pending.isEmpty) { + val next = pending.poll() + if (next eq AsyncBufferedWriter.Flush) { + underlying.flush() + } else if (next eq AsyncBufferedWriter.Close) { + underlying.flush() + underlying.close() + asyncStatus.trySuccess(()) + } else { + val array = next.array() + next.flip() + underlying.write(array, next.arrayOffset() + next.position(), next.limit()) + reuseBuffer = next + } + } + } catch { + case t: Throwable => + asyncStatus.tryFailure(t) + throw t + } + finally scheduled.set(false) + + //we are not scheduled any more + //as a last check ensure that we didnt race with an addition to the queue + //order is essential - queue is checked before CAS + if ((!pending.isEmpty) && scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + } + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index a13c9d41b529..69a806215ddd 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -4,6 +4,7 @@ import scala.annotation.* import scala.language.unsafeNulls import java.io.{FileWriter, PrintWriter} +import java.nio.file.Paths import java.lang.management.{ManagementFactory, GarbageCollectorMXBean, RuntimeMXBean, MemoryMXBean, ClassLoadingMXBean, CompilationMXBean} import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger @@ -12,8 +13,15 @@ import javax.management.{Notification, NotificationEmitter, NotificationListener import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Symbols.{Symbol, NoSymbol} +import dotty.tools.dotc.core.Flags +import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions import dotty.tools.io.AbstractFile import annotation.internal.sharable +import dotty.tools.dotc.core.Periods.InitialRunId +import scala.collection.mutable.UnrolledBuffer object Profiler { def 
apply()(using Context): Profiler = @@ -25,14 +33,19 @@ object Profiler { new RealProfiler(reporter) } - private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) + final def NoOp: Profiler = NoOpProfiler + + private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0, 0, 0) } -case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) +case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, durationMillis: Long, name:String, action:String, cause:String, threads:Long){ + val endNanos = System.nanoTime() +} case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, heapBytes:Long) { + allocatedBytes:Long, heapBytes:Long, + totalClassesLoaded: Long, totalJITCompilationTime: Long) { def updateHeap(heapBytes:Long): ProfileSnap = copy(heapBytes = heapBytes) } @@ -66,22 +79,63 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpos def retainedHeapMB: Double = toMegaBytes(end.heapBytes - start.heapBytes) } +private opaque type TracedEventId <: String = String +private object TracedEventId: + def apply(stringValue: String): TracedEventId = stringValue + final val Empty: TracedEventId = "" + sealed trait Profiler { def finished(): Unit - def beforePhase(phase: Phase): ProfileSnap - - def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit + inline def onPhase[T](phase: Phase)(inline body: T): T = + val (event, snapshot) = beforePhase(phase) + try body + finally afterPhase(event, phase, snapshot) + protected final val EmptyPhaseEvent = (TracedEventId.Empty, Profiler.emptySnap) + protected def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = EmptyPhaseEvent + protected def afterPhase(event: TracedEventId, phase: Phase, profileBefore: ProfileSnap): Unit = () + + inline def onUnit[T](phase: Phase, unit: CompilationUnit)(inline body: T): T = + val event = beforeUnit(phase, unit) + try body + finally afterUnit(event) + protected def beforeUnit(phase: Phase, unit: CompilationUnit): TracedEventId = TracedEventId.Empty + protected def afterUnit(event: TracedEventId): Unit = () + + inline def onTypedDef[T](sym: Symbol)(inline body: T): T = + val event = beforeTypedDef(sym) + try body + finally afterTypedDef(event) + protected def beforeTypedDef(sym: Symbol): TracedEventId = TracedEventId.Empty + protected def afterTypedDef(token: TracedEventId): Unit = () + + inline def onImplicitSearch[T](pt: Type)(inline body: T): T = + val event = beforeImplicitSearch(pt) + try body + finally afterImplicitSearch(event) + protected def beforeImplicitSearch(pt: Type): TracedEventId = TracedEventId.Empty + protected def afterImplicitSearch(event: TracedEventId): Unit = () + + inline def onMacroSplice[T](macroSym: Symbol)(inline body: T): T = + val event = beforeMacroSplice(macroSym) + try body + finally afterMacroSplice(event) + protected def beforeMacroSplice(macroSym: Symbol): TracedEventId = TracedEventId.Empty + protected def afterMacroSplice(event: TracedEventId): Unit = () + + inline def onCompletion[T](root: Symbol, associatedFile: => AbstractFile)(inline body: T): T = + val (event, completionName) = beforeCompletion(root, associatedFile) + try body + finally afterCompletion(event, completionName) + protected final val EmptyCompletionEvent = (TracedEventId.Empty, "") + protected def 
beforeCompletion(root: Symbol, associatedFile: => AbstractFile): (TracedEventId, String) = EmptyCompletionEvent + protected def afterCompletion(event: TracedEventId, completionName: String): Unit = () } private [profile] object NoOpProfiler extends Profiler { - - override def beforePhase(phase: Phase): ProfileSnap = Profiler.emptySnap - - override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () - override def finished(): Unit = () } + private [profile] object RealProfiler { import scala.jdk.CollectionConverters.* val runtimeMx: RuntimeMXBean = ManagementFactory.getRuntimeMXBean @@ -92,17 +146,6 @@ private [profile] object RealProfiler { val threadMx: ExtendedThreadMxBean = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() -} - -private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { - def completeBackground(threadRange: ProfileRange): Unit = - reporter.reportBackground(this, threadRange) - - def outDir: AbstractFile = ctx.settings.outputDir.value - - val id: Int = RealProfiler.idGen.incrementAndGet() - - private val mainThread = Thread.currentThread() @nowarn("cat=deprecation") private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { @@ -117,13 +160,47 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) cpuTimeNanos = threadMx.getCurrentThreadCpuTime, userTimeNanos = threadMx.getCurrentThreadUserTime, allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - heapBytes = readHeapUsage() + heapBytes = readHeapUsage(), + totalClassesLoaded = classLoaderMx.getTotalLoadedClassCount, + totalJITCompilationTime = compileMx.getTotalCompilationTime ) } private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed +} + +private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { + val id: Int = RealProfiler.idGen.incrementAndGet() + private val mainThread = Thread.currentThread() + private val gcEvents = UnrolledBuffer[GcEventData]() + private var nextAfterUnitSnap: Long = System.nanoTime() + + private final val GcThreadId = "GC" + + enum Category: + def name: String = this.toString().toLowerCase() + case Run, Phase, File, TypeCheck, Implicit, Macro, Completion + private [profile] val chromeTrace = + if ctx.settings.YprofileTrace.isDefault + then null + else + val filename = ctx.settings.YprofileTrace.value + // Compilation units requiring multi-stage compilation (macros) would create a new profiler instances + // We need to store the traces in the seperate file to prevent overriding its content. + // Alternatives: sharing ChromeTrace instance between all runs / manual concatation after all runs are done + // FIXME: The first assigned runId is equal to 2 instead of 1 (InitialRunId). 
+ // Fix me when bug described in Compiler.runId is resolved by removing +/- 1 adjustments + val suffix = if ctx.runId > InitialRunId + 1 then s".${ctx.runId - 1}" else "" + ChromeTrace(Paths.get(s"$filename$suffix")) + + private val compilerRunEvent: TracedEventId = traceDurationStart(Category.Run, s"scalac-$id") + + def completeBackground(threadRange: ProfileRange): Unit = + reporter.reportBackground(this, threadRange) + + def outDir: AbstractFile = ctx.settings.outputDir.value @nowarn - private def doGC: Unit = { + private def doGC(): Unit = { System.gc() System.runFinalization() } @@ -142,6 +219,15 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) case gc => } reporter.close(this) + if chromeTrace != null then + traceDurationEnd(Category.Run, compilerRunEvent) + for gcEvent <- gcEvents + do { + val durationNanos = TimeUnit.MILLISECONDS.toNanos(gcEvent.durationMillis) + val startNanos = gcEvent.endNanos - durationNanos + chromeTrace.traceDurationEvent(gcEvent.name, startNanos, durationNanos, tid = GcThreadId) + } + chromeTrace.close() } @@ -150,10 +236,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) import java.lang.{Integer => jInt} val reportNs = System.nanoTime() val data = notification.getUserData - val seq = notification.getSequenceNumber - val message = notification.getMessage val tpe = notification.getType - val time= notification.getTimeStamp data match { case cd: CompositeData if tpe == "com.sun.management.gc.notification" => val name = cd.get("gcName").toString @@ -164,49 +247,127 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() - reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) + val gcEvent = GcEventData("", reportNs, startTime, endTime, duration, name, action, cause, threads) + synchronized { gcEvents += gcEvent } + reporter.reportGc(gcEvent) } } - override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { + override def afterPhase(event: TracedEventId, phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snapThread(0) + val initialSnap = RealProfiler.snapThread(0) if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { println("Profile hook stop") ExternalToolHook.after() } val finalSnap = if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) { - doGC - initialSnap.updateHeap(readHeapUsage()) + doGC() + initialSnap.updateHeap(RealProfiler.readHeapUsage()) } else initialSnap - + traceDurationEnd(Category.Phase, event) + traceThreadSnapshotCounters() reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } - override def beforePhase(phase: Phase): ProfileSnap = { + override def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = { assert(mainThread eq Thread.currentThread()) + traceThreadSnapshotCounters() + val eventId = traceDurationStart(Category.Phase, phase.phaseName) if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) - doGC + doGC() if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { println("Profile hook start") ExternalToolHook.before() } - snapThread(0) + (eventId, RealProfiler.snapThread(0)) + } + + 
override def beforeUnit(phase: Phase, unit: CompilationUnit): TracedEventId = { + assert(mainThread eq Thread.currentThread()) + if chromeTrace != null then + traceThreadSnapshotCounters() + traceDurationStart(Category.File, unit.source.name) + else TracedEventId.Empty + } + + override def afterUnit(event: TracedEventId): Unit = { + assert(mainThread eq Thread.currentThread()) + if chromeTrace != null then + traceDurationEnd(Category.File, event) + traceThreadSnapshotCounters() } -} + private def traceThreadSnapshotCounters(initialSnap: => ProfileSnap = RealProfiler.snapThread(0)) = + if chromeTrace != null && System.nanoTime() > nextAfterUnitSnap then { + val snap = initialSnap + chromeTrace.traceCounterEvent("allocBytes", "allocBytes", snap.allocatedBytes, processWide = false) + chromeTrace.traceCounterEvent("heapBytes", "heapBytes", snap.heapBytes, processWide = true) + chromeTrace.traceCounterEvent("classesLoaded", "classesLoaded", snap.totalClassesLoaded, processWide = true) + chromeTrace.traceCounterEvent("jitCompilationTime", "jitCompilationTime", snap.totalJITCompilationTime, processWide = true) + chromeTrace.traceCounterEvent("userTime", "userTime", snap.userTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("cpuTime", "cpuTime", snap.cpuTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("idleTime", "idleTime", snap.idleTimeNanos, processWide = false) + nextAfterUnitSnap = System.nanoTime() + 10 * 1000 * 1000 + } -case class EventType(name: String) -object EventType { - //main thread with other tasks - val MAIN: EventType = EventType("main") - //other task ( background thread) - val BACKGROUND: EventType = EventType("background") - //total for compile - val GC: EventType = EventType("GC") + override def beforeTypedDef(sym: Symbol): TracedEventId = traceDurationStart(Category.TypeCheck, symbolName(sym)) + override def afterTypedDef(event: TracedEventId): Unit = traceDurationEnd(Category.TypeCheck, event) + + override def beforeImplicitSearch(pt: Type): TracedEventId = traceDurationStart(Category.Implicit, s"?[${symbolName(pt.typeSymbol)}]", colour = "yellow") + override def afterImplicitSearch(event: TracedEventId): Unit = traceDurationEnd(Category.Implicit, event, colour = "yellow") + + override def beforeMacroSplice(macroSym: Symbol): TracedEventId = traceDurationStart(Category.Macro, s"«${symbolName(macroSym)}»", colour = "olive") + override def afterMacroSplice(event: TracedEventId): Unit = traceDurationEnd(Category.Macro, event, colour = "olive") + + override def beforeCompletion(root: Symbol, associatedFile: => AbstractFile): (TracedEventId, String) = + if chromeTrace == null + then EmptyCompletionEvent + else + val completionName = this.completionName(root, associatedFile) + val event = TracedEventId(associatedFile.name) + chromeTrace.traceDurationEventStart(Category.Completion.name, "↯", colour = "thread_state_sleeping") + chromeTrace.traceDurationEventStart(Category.File.name, event) + chromeTrace.traceDurationEventStart(Category.Completion.name, completionName) + (event, completionName) + + override def afterCompletion(event: TracedEventId, completionName: String): Unit = + if chromeTrace != null + then + chromeTrace.traceDurationEventEnd(Category.Completion.name, completionName) + chromeTrace.traceDurationEventEnd(Category.File.name, event) + chromeTrace.traceDurationEventEnd(Category.Completion.name, "↯", colour = "thread_state_sleeping") + + private inline def traceDurationStart(category: Category, inline eventName: String, colour: String = 
""): TracedEventId = + if chromeTrace == null + then TracedEventId.Empty + else + val event = TracedEventId(eventName) + chromeTrace.traceDurationEventStart(category.name, event, colour) + event + + private inline def traceDurationEnd(category: Category, event: TracedEventId, colour: String = ""): Unit = + if chromeTrace != null then + chromeTrace.traceDurationEventEnd(category.name, event, colour) + + private def symbolName(sym: Symbol): String = s"${sym.showKind} ${sym.showName}" + private def completionName(root: Symbol, associatedFile: AbstractFile): String = + def isTopLevel = root.owner != NoSymbol && root.owner.is(Flags.Package) + if root.is(Flags.Package) || isTopLevel + then root.javaBinaryName + else + val enclosing = root.enclosingClass + s"${enclosing.javaBinaryName}::${root.name}" } +enum EventType(name: String): + // main thread with other tasks + case MAIN extends EventType("main") + // other task ( background thread) + case BACKGROUND extends EventType("background") + // total for compile + case GC extends EventType("GC") + sealed trait ProfileReporter { def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit @@ -259,9 +420,8 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } - override def close(profiler: RealProfiler): Unit = { - out.flush - out.close + out.flush() + out.close() } } diff --git a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala index e3ea69d9be06..1a81153b9b08 100644 --- a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala +++ b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala @@ -94,9 +94,9 @@ object ThreadPoolFactory { val data = new ThreadProfileData localData.set(data) - val profileStart = profiler.snapThread(0) + val profileStart = RealProfiler.snapThread(0) try worker.run finally { - val snap = profiler.snapThread(data.idleNs) + val snap = RealProfiler.snapThread(data.idleNs) val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) } diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index e42f997e7265..b5386d5bd1df 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -47,13 +47,14 @@ object Splicer { def splice(tree: Tree, splicePos: SrcPos, spliceExpansionPos: SrcPos, classLoader: ClassLoader)(using Context): Tree = tree match { case Quote(quotedTree, Nil) => quotedTree case _ => - val macroOwner = newSymbol(ctx.owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) + val owner = ctx.owner + val macroOwner = newSymbol(owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) try val sliceContext = SpliceScope.contextWithNewSpliceScope(splicePos.sourcePos).withOwner(macroOwner) inContext(sliceContext) { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) - try { + try ctx.profiler.onMacroSplice(owner){ val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling 
performed in quotedExprToTree diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index c42b196b8dfb..0727c83d8469 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1082,7 +1082,7 @@ trait Implicits: * it should be applied, EmptyTree otherwise. * @param span The position where errors should be reported. */ - def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = + def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = ctx.profiler.onImplicitSearch(pt): trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { record("inferImplicit") assert(ctx.phase.allowsImplicitSearch, diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 23855455e67b..3810bc66841e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2823,7 +2823,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => typed(rhs) - def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) @@ -2851,7 +2851,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer sym.owner.info.decls.openForMutations.unlink(sym) EmptyTree - def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else { + def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else ctx.profiler.onTypedDef(sym) { // TODO: - Remove this when `scala.language.experimental.erasedDefinitions` is no longer experimental. 
// - Modify signature to `erased def erasedValue[T]: T` @@ -2963,7 +2963,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if !sym.is(Module) && !sym.isConstructor && sym.info.finalResultType.isErasedClass then sym.setFlag(Erased) - def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val TypeDef(name, rhs) = tdef completeAnnotations(tdef, sym) val rhs1 = tdef.rhs match @@ -2977,7 +2977,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } - def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { + def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = ctx.profiler.onTypedDef(cls) { if (!cls.info.isInstanceOf[ClassInfo]) return EmptyTree.assertingErrorsReported val TypeDef(name, impl @ Template(constr, _, self, _)) = cdef: @unchecked diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index 0c63f5b4ecb1..264d0f170769 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -43,7 +43,7 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { def typeCheck(using Context)(using subphase: SubPhase): Boolean = monitor(subphase.name) { val unit = ctx.compilationUnit try - if !unit.suspended then + if !unit.suspended then ctx.profiler.onUnit(ctx.phase, unit): unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree) typr.println("typed: " + unit.source) record("retained untyped trees", unit.untpdTree.treeSize) diff --git a/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala new file mode 100644 index 000000000000..07dc53da1f83 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala @@ -0,0 +1,93 @@ +package dotty.tools.dotc.profile + +import java.io.* + +import org.junit.Assert.* +import org.junit.* +import java.nio.file.Files +import java.nio.charset.StandardCharsets +import java.util.concurrent.locks.LockSupport +import scala.concurrent.duration.* + +class ChromeTraceTest: + private def testTraceOutputs(generator: ChromeTrace => Unit)(checkContent: PartialFunction[List[String], Unit]): Unit = { + val outfile = Files.createTempFile("trace-", ".json").nn + val tracer = new ChromeTrace(outfile) + try generator(tracer) + finally tracer.close() + val contentLines = scala.io.Source.fromFile(outfile.toFile().nn).getLines().toList + checkContent.applyOrElse( + contentLines, + content => fail(s"Invalid output lines: ${content.mkString(System.lineSeparator().nn)}") + ) + } + + @Test def traceCounterEvent(): Unit = testTraceOutputs{ tracer => + tracer.traceCounterEvent("foo", "counter1", 42, processWide = true) + tracer.traceCounterEvent("bar", "counter2", 21, processWide = false) + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"scalac","name":"foo","ph":"C","tid":"${tid1}","pid":"${pid1}","ts":${ts1},"args":{"counter1":42}}""" :: + s""",{"cat":"scalac","name":"bar","ph":"C","tid":"${tid2}","pid":"${pid2}","ts":${ts2},"args":{"counter2":21}}""" :: + "]}" :: Nil => + assertEquals(tid1, tid2) + assertTrue(tid1.toIntOption.isDefined) + assertNotEquals(pid1, pid2) + assertTrue(pid1.toIntOption.isDefined) + assertEquals(s"$pid1-$tid1", pid2) + assertTrue(ts1.toLong < ts2.toLong) + } + + @Test def 
traceDurationEvent(): Unit = testTraceOutputs{ tracer => + tracer.traceDurationEvent(name = "name1", startNanos = 1000L, durationNanos = 2500L, tid = "this-thread") + tracer.traceDurationEvent(name = "name2", startNanos = 1000L, durationNanos = 5000L, tid = "this-thread", pidSuffix = "pidSuffix") + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"scalac","name":"name1","ph":"X","tid":"this-thread","pid":"${pid1}","ts":1,"dur":2}""" :: + s""",{"cat":"scalac","name":"name2","ph":"X","tid":"this-thread","pid":"${pid2}","ts":1,"dur":5}""" :: + "]}" :: Nil => + assertTrue(pid1.toIntOption.isDefined) + assertEquals(s"$pid1-pidSuffix", pid2) + } + + @Test def traceDurationEvents(): Unit = { + val testStart = System.nanoTime() + testTraceOutputs{ tracer => + tracer.traceDurationEventStart(cat = "test1", name = "event1") + LockSupport.parkNanos(2.millis.toNanos) + tracer.traceDurationEventStart(cat = "test2", name = "event2", colour = "RED", pidSuffix = "pid-suffix") + LockSupport.parkNanos(4.millis.toNanos) + tracer.traceDurationEventEnd(cat = "test2", name = "event2") + LockSupport.parkNanos(8.millis.toNanos) + tracer.traceDurationEventEnd(cat = "test1", name = "event1", colour = "RED", pidSuffix = "pid-suffix") + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"test1","name":"event1","ph":"B","pid":"${pid1}","tid":"${tid1}","ts":${ts1}}""" :: + s""",{"cat":"test2","name":"event2","ph":"B","pid":"${pid2}","tid":"${tid2}","ts":${ts2},"cname":"RED"}""" :: + s""",{"cat":"test2","name":"event2","ph":"E","pid":"${pid3}","tid":"${tid3}","ts":${ts3}}""" :: + s""",{"cat":"test1","name":"event1","ph":"E","pid":"${pid4}","tid":"${tid4}","ts":${ts4},"cname":"RED"}""" :: + "]}" :: Nil => + val traceEnd = System.nanoTime() + assertTrue(tid1.toIntOption.isDefined) + assertEquals(pid1, pid3) + assertTrue(pid1.endsWith(s"-$tid1")) + assertEquals(pid2, pid4) + assertTrue(pid2.endsWith("-pid-suffix")) + List(tid1, tid2, tid3).foreach: tid => + assertEquals(tid4, tid) + List(pid1, pid2, pid3, pid4).foreach: pid => + assertTrue(pid.takeWhile(_ != '-').toIntOption.isDefined) + + List(ts1, ts2, ts3, ts4).map(_.toLong) match { + case all @ List(ts1, ts2, ts3, ts4) => + all.foreach: ts => + // Timestamps are presented using Epoch microsecondos + assertTrue(ts >= testStart / 1000) + assertTrue(ts <= traceEnd / 1000) + assertTrue(ts2 >= ts1 + 2.millis.toMicros) + assertTrue(ts3 >= ts2 + 4.millis.toMicros) + assertTrue(ts4 >= ts3 + 8.millis.toMicros) + case _ => fail("unreachable") + } + } +} diff --git a/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala b/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala new file mode 100644 index 000000000000..3253cff52057 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala @@ -0,0 +1,91 @@ +package dotty.tools.dotc.profile + +import java.io.* + +import org.junit.Assert.* +import org.junit.* + +class FileUtilsTest { + + @Test def writeIsSame(): Unit = { + val fileTest = File.createTempFile("FileUtilsTest", "t1").nn + val fileExpected = File.createTempFile("FileUtilsTest", "t2").nn + + val sTest = FileUtils.newAsyncBufferedWriter(new FileWriter(fileTest), threadsafe = false) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + def writeBoth(s:String, asChars: Boolean) = { + if (asChars) { + sTest.write(s.toCharArray) + sExpected.write(s.toCharArray) + } else { + sTest.write(s) + sExpected.write(s) + } + } + + for (i <- 1 to 2000) { + writeBoth(s"line $i text;", asChars = true) + writeBoth(s"line $i chars", asChars = false) + 
sTest.newLine() + sExpected.newLine() + } + sTest.close() + sExpected.close() + + assertEquals(fileExpected.length(),fileTest.length()) + + val expIn = new BufferedReader(new FileReader(fileExpected)) + val testIn = new BufferedReader(new FileReader(fileTest)) + + var exp = expIn.readLine() + while (exp ne null) { + val actual = testIn.readLine() + assertEquals(exp, actual) + exp = expIn.readLine() + } + expIn.close() + testIn.close() + fileTest.delete() + fileExpected.delete() + } + + @Ignore + @Test def showPerformance(): Unit = { + //warmup + for (i <- 1 to 1000) { + writeIsSame() + } + + val fileTest = File.createTempFile("FileUtilsTest", "t1").nn + val fileExpected = File.createTempFile("FileUtilsTest", "t2").nn + + for (i <- 1 to 10) { + val sTest = FileUtils.newAsyncBufferedWriter(fileTest.toPath.nn) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + val t1 = System.nanoTime() + List.tabulate(10000) {i => + sTest.write(s"line $i text;") + sTest.newLine() + } + val t2 = System.nanoTime() + sTest.close() + val t3 = System.nanoTime() + List.tabulate(10000) {i => + sExpected.write(s"line $i text;") + sExpected.newLine() + } + val t4 = System.nanoTime() + sExpected.close() + + println(s"async took ${t2 - t1} ns") + println(s"buffered took ${t4 - t3} ns") + + fileTest.delete() + fileExpected.delete() + } + } + +} + diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 5578fab412d1..adaeadb12978 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -21,6 +21,7 @@ import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.parsing.Tokens +import dotty.tools.dotc.profile.Profiler import dotty.tools.dotc.util.SourceFile import dotty.tools.pc.AutoImports.AutoImportEdits import dotty.tools.pc.AutoImports.AutoImportsGenerator @@ -75,7 +76,10 @@ class CompletionProvider( val pos = driver.sourcePosition(params) val (items, isIncomplete) = driver.compilationUnits.get(uri) match case Some(unit) => - val newctx = ctx.fresh.setCompilationUnit(unit).withPhase(Phases.typerPhase(using ctx)) + val newctx = ctx.fresh + .setCompilationUnit(unit) + .setProfiler(Profiler()(using ctx)) + .withPhase(Phases.typerPhase(using ctx)) val tpdPath0 = Interactive.pathTo(unit.tpdTree, pos.span)(using newctx) val adjustedPath = Interactive.resolveTypedOrUntypedPath(tpdPath0, pos)(using newctx) From f7f51edb424b281f751a9a47ea2397d5cf5b0343 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 23 Oct 2024 10:46:09 +0200 Subject: [PATCH 687/827] Don't warn for deprecated Thread.getId() calls in `ChromeTrace` (#21831) --- compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala index c33039f46398..4950f439640f 100644 --- a/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala +++ b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala @@ -45,6 +45,7 @@ final class ChromeTrace(f: Path) extends Closeable { private val traceWriter = FileUtils.newAsyncBufferedWriter(f) private val context = mutable.Stack[JsonContext](TopContext) private val tidCache = new 
ThreadLocal[String]() { + @annotation.nowarn("cat=deprecation") override def initialValue(): String = "%05d".format(Thread.currentThread().getId()) } objStart() From aa9115dbfa25f18a8abb5d555be77fbb3dc3b8bc Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Wed, 23 Oct 2024 18:20:20 +0200 Subject: [PATCH 688/827] Fix typo in allTermArguments --- compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 23610a0fcfeb..09c855847fac 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -142,9 +142,9 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => /** All term arguments of an application in a single flattened list */ def allTermArguments(tree: Tree): List[Tree] = unsplice(tree) match { - case Apply(fn, args) => allArguments(fn) ::: args - case TypeApply(fn, args) => allArguments(fn) - case Block(_, expr) => allArguments(expr) + case Apply(fn, args) => allTermArguments(fn) ::: args + case TypeApply(fn, args) => allTermArguments(fn) + case Block(_, expr) => allTermArguments(expr) case _ => Nil } From b00649a5a3ab914e999d7c4d7c08e5f23cd41a70 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 23 Oct 2024 17:40:15 +0100 Subject: [PATCH 689/827] Drop the extra Nothing/Null handling in SpaceEngine.signature --- .../tools/dotc/transform/patmat/Space.scala | 29 ++++++------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index af37135e5196..a1b34c11a859 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -524,27 +524,16 @@ object SpaceEngine { val mt: MethodType = unapp.widen match { case mt: MethodType => mt case pt: PolyType => - if unappSym.is(Synthetic) then - val mt = pt.resultType.asInstanceOf[MethodType] - val unapplyArgType = mt.paramInfos.head - val targs = scrutineeTp.baseType(unapplyArgType.classSymbol) match - case AppliedType(_, targs) => targs - case _ => - // Typically when the scrutinee is Null or Nothing (see i5067 and i5067b) - // For performance, do `variances(unapplyArgType)` but without using TypeVars - // so just find the variance, so we know if to min/max to the LB/UB or use a wildcard. 
- object accu extends TypeAccumulator[VarianceMap[TypeParamRef]]: - def apply(vmap: VarianceMap[TypeParamRef], tp: Type) = tp match - case tp: TypeParamRef if tp.binder eq pt => vmap.recordLocalVariance(tp, variance) - case _ => foldOver(vmap, tp) - val vs = accu(VarianceMap.empty[TypeParamRef], unapplyArgType) - pt.paramRefs.map: p => - vs.computedVariance(p).uncheckedNN match - case -1 => p.paramInfo.lo - case 1 => p.paramInfo.hi - case _ => WildcardType(p.paramInfo) + scrutineeTp match + case AppliedType(tycon, targs) + if unappSym.is(Synthetic) + && (pt.resultType.asInstanceOf[MethodType].paramInfos.head.typeConstructor eq tycon) => + // Special case synthetic unapply/unapplySeq's + // Provided the shapes of the types match: + // the scrutinee type being unapplied and + // the unapply parameter type pt.instantiate(targs).asInstanceOf[MethodType] - else + case _ => val locked = ctx.typerState.ownedVars val tvars = constrained(pt) val mt = pt.instantiate(tvars).asInstanceOf[MethodType] From 15922f93c59dc0ae60ae1b26362962b0b5c04921 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 21 Aug 2024 16:06:44 +0100 Subject: [PATCH 690/827] Fix enclosingClass from returning refinement classes Looking at how i20952 is compiled, the call ctx.owner.enclosingClass.derivesFrom(sym.owner) in SuperAccessors (that I moved to ProtectedAccessors) accidentally fails, because the refinement class returned as the enclosing class of `val prog` doesn't derive from SuperClass. But if the enclosing class of `prog` returned is `trait Child` then no super accessor call is necessary. --- .../tools/dotc/core/SymDenotations.scala | 6 +++--- .../dotc/transform/ProtectedAccessors.scala | 5 ++++- .../tools/dotc/transform/SuperAccessors.scala | 2 +- tests/pos/i20952.scala | 20 +++++++++++++++++++ 4 files changed, 28 insertions(+), 5 deletions(-) create mode 100644 tests/pos/i20952.scala diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 906e74735097..f54b8a62fa25 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1162,10 +1162,10 @@ object SymDenotations { final def enclosingClass(using Context): Symbol = { def enclClass(sym: Symbol, skip: Boolean): Symbol = { def newSkip = sym.is(JavaStaticTerm) - if (!sym.exists) + if !sym.exists then NoSymbol - else if (sym.isClass) - if (skip) enclClass(sym.owner, newSkip) else sym + else if sym.isClass then + if skip || sym.isRefinementClass then enclClass(sym.owner, newSkip) else sym else enclClass(sym.owner, skip || newSkip) } diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 482da0edb82b..359ec701d164 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -44,7 +44,10 @@ object ProtectedAccessors { /** Do we need a protected accessor for accessing sym from the current context's owner? 
*/ def needsAccessor(sym: Symbol)(using Context): Boolean = needsAccessorIfNotInSubclass(sym) && - !ctx.owner.enclosingClass.derivesFrom(sym.owner) + !needsAccessorIsSubclass(sym) + + def needsAccessorIsSubclass(sym: Symbol)(using Context): Boolean = + ctx.owner.enclosingClass.derivesFrom(sym.owner) } class ProtectedAccessors extends MiniPhase { diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index ce2b8fa591d8..5a63235fc3c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -195,7 +195,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { * Otherwise, we need to go through an accessor, * which the implementing class will provide an implementation for. */ - if ctx.owner.enclosingClass.derivesFrom(sym.owner) then + if ProtectedAccessors.needsAccessorIsSubclass(sym) then if sym.is(JavaDefined) then report.error(em"${ctx.owner} accesses protected $sym inside a concrete trait method: use super.${sel.name} instead", sel.srcPos) sel diff --git a/tests/pos/i20952.scala b/tests/pos/i20952.scala new file mode 100644 index 000000000000..68344fdab672 --- /dev/null +++ b/tests/pos/i20952.scala @@ -0,0 +1,20 @@ +package object packer: // the super class needs to be in a different package + class SuperClass(): + protected val problem: Any = ??? // needs to be protected + +class SuperClass(): + protected val problem: Any = ??? // needs to be protected + +// type Target = SuperClass // passes +type Target = packer.SuperClass // error + +trait Child extends Target: + + val aliased: problem.type = problem + type Alias = problem.type + + val newProblem: Any {val prog: problem.type} = ??? // error + val newProblem2: Any {val prog: Alias} = ??? // passes + val newProblem3: Any {val prog: aliased.type} = ??? // passes + +class ChildImpl extends Target with Child // concrete implementation is needed From 0cf41d07341a1dc27f8fe4a71b91641612375f90 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Fri, 20 Sep 2024 23:23:39 -0400 Subject: [PATCH 691/827] Improve warning for wildcard matching only null under explicit nulls (scala#21577) Adds a more detailed warning message when a wildcard case is only reachable by null under explict nulls flag. 
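For illustration, this is the shape of code the sharper diagnostic targets, mirroring the added tests/explicit-nulls/warn/i21577.scala and its .check file (shown here only as a sketch):

    def f(s: String) =
      val s2 = s.trim()
      s2 match
        case s3: String => println(1)
        case _ => println(2) // warns: Unreachable case except for null
                             // (if this is intentional, consider writing `case null =>` instead)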
Fixes scala#21577 --- .../tools/dotc/transform/patmat/Space.scala | 16 +++++++++---- .../dotty/tools/dotc/CompilationTests.scala | 5 ++++ tests/explicit-nulls/warn/i21577.check | 12 ++++++++++ tests/explicit-nulls/warn/i21577.scala | 24 +++++++++++++++++++ 4 files changed, 52 insertions(+), 5 deletions(-) create mode 100644 tests/explicit-nulls/warn/i21577.check create mode 100644 tests/explicit-nulls/warn/i21577.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 9fb3c00c67c4..deaf285c79cc 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -920,7 +920,7 @@ object SpaceEngine { then project(OrType(selTyp, ConstantType(Constant(null)), soft = false)) else project(selTyp) ) - + var i = 0 val len = cases.length var prevs = List.empty[Space] @@ -942,11 +942,17 @@ object SpaceEngine { report.warning(MatchCaseUnreachable(), pat.srcPos) if pat != EmptyTree // rethrow case of catch uses EmptyTree && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase - && isSubspace(covered, prev) + && isSubspace(covered, Or(List(prev, Typ(defn.NullType)))) // for when Null is not subtype of AnyRef under explicit nulls then { - val nullOnly = isNullable && i == len - 1 && isWildcardArg(pat) - val msg = if nullOnly then MatchCaseOnlyNullWarning() else MatchCaseUnreachable() - report.warning(msg, pat.srcPos) + val nullOnly = + (isNullable || (defn.NullType <:< selTyp)) + && i == len - 1 + && isWildcardArg(pat) + if nullOnly then { + report.warning(MatchCaseOnlyNullWarning(), pat.srcPos) + } else if isSubspace(covered, prev) then { + report.warning(MatchCaseUnreachable(), pat.srcPos) + } } deferred = Nil } diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index d7ef7f6f6085..de00faa86406 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -213,6 +213,11 @@ class CompilationTests { ) }.checkCompile() + @Test def explicitNullsWarn: Unit = { + implicit val testGroup: TestGroup = TestGroup("explicitNullsWarn") + compileFilesInDir("tests/explicit-nulls/warn", explicitNullsOptions) + }.checkWarnings() + @Test def explicitNullsRun: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsRun") compileFilesInDir("tests/explicit-nulls/run", explicitNullsOptions) diff --git a/tests/explicit-nulls/warn/i21577.check b/tests/explicit-nulls/warn/i21577.check new file mode 100644 index 000000000000..944b2ccab3c9 --- /dev/null +++ b/tests/explicit-nulls/warn/i21577.check @@ -0,0 +1,12 @@ +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:5:9 -------------------------------------------- +5 | case _ => println(2) // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:12:11 ------------------------------------------ +12 | case _ => println(2) // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). 
+-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:18:11 ------------------------------------------ +18 | case _ => println(2) // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). diff --git a/tests/explicit-nulls/warn/i21577.scala b/tests/explicit-nulls/warn/i21577.scala new file mode 100644 index 000000000000..d5da1403deea --- /dev/null +++ b/tests/explicit-nulls/warn/i21577.scala @@ -0,0 +1,24 @@ +def f(s: String) = + val s2 = s.trim() + s2 match + case s3: String => println(1) + case _ => println(2) // warn + + +def f2(s: String | Null) = + val s2 = s.nn.trim() + s2 match + case s3: String => println(1) + case _ => println(2) // warn + +def f3(s: String | Null) = + val s2 = s + s2 match + case s3: String => println(1) + case _ => println(2) // warn + +def f4(s: String | Int) = + val s2 = s + s2 match + case s3: String => println(1) + case _ => println(2) \ No newline at end of file From 4f775fab0f16a27d6be276dde0834cd286d8b506 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Thu, 17 Oct 2024 15:00:04 -0400 Subject: [PATCH 692/827] Implement change and add test cases --- .../tools/dotc/transform/patmat/Space.scala | 82 ++++++++----------- .../pos/interop-constructor.scala | 2 +- tests/explicit-nulls/warn/i21577.check | 34 ++++++-- tests/explicit-nulls/warn/i21577.scala | 48 +++++++---- tests/explicit-nulls/warn/interop.check | 8 ++ tests/explicit-nulls/warn/interop/J.java | 6 ++ tests/explicit-nulls/warn/interop/S.scala | 10 +++ 7 files changed, 114 insertions(+), 76 deletions(-) create mode 100644 tests/explicit-nulls/warn/interop.check create mode 100644 tests/explicit-nulls/warn/interop/J.java create mode 100644 tests/explicit-nulls/warn/interop/S.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index deaf285c79cc..c7b304e6caf6 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -14,6 +14,7 @@ import typer.*, Applications.*, Inferencing.*, ProtoTypes.* import util.* import scala.annotation.internal.sharable +import scala.annotation.tailrec import scala.collection.mutable import SpaceEngine.* @@ -696,7 +697,7 @@ object SpaceEngine { else NoType }.filter(_.exists) parts - + case tp: FlexibleType => List(tp.underlying, ConstantType(Constant(null))) case _ => ListOfNoType end rec @@ -876,6 +877,7 @@ object SpaceEngine { case tp: SingletonType => toUnderlying(tp.underlying) case tp: ExprType => toUnderlying(tp.resultType) case AnnotatedType(tp, annot) => AnnotatedType(toUnderlying(tp), annot) + case tp: FlexibleType => tp.derivedFlexibleType(toUnderlying(tp.underlying)) case _ => tp }) @@ -910,58 +912,40 @@ object SpaceEngine { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) - def checkReachability(m: Match)(using Context): Unit = trace(i"checkReachability($m)") { - val cases = m.cases.toIndexedSeq - + def checkReachability(m: Match)(using Context): Unit = trace(i"checkReachability($m)"): val selTyp = toUnderlying(m.selector.tpe).dealias - - val isNullable = selTyp.classSymbol.isNullableClass - val targetSpace = trace(i"targetSpace($selTyp)")(if isNullable + val isNullable = selTyp.isInstanceOf[FlexibleType] || selTyp.classSymbol.isNullableClass + val targetSpace = trace(i"targetSpace($selTyp)"): + if isNullable && !ctx.mode.is(Mode.SafeNulls) then project(OrType(selTyp, 
ConstantType(Constant(null)), soft = false)) else project(selTyp) - ) - - var i = 0 - val len = cases.length - var prevs = List.empty[Space] - var deferred = List.empty[Tree] - - while (i < len) { - val CaseDef(pat, guard, _) = cases(i) - val curr = trace(i"project($pat)")(project(pat)) - - val covered = trace("covered")(simplify(intersect(curr, targetSpace))) - - val prev = trace("prev")(simplify(Or(prevs))) - - if prev == Empty && covered == Empty then // defer until a case is reachable - deferred ::= pat - else { - for (pat <- deferred.reverseIterator) - report.warning(MatchCaseUnreachable(), pat.srcPos) - if pat != EmptyTree // rethrow case of catch uses EmptyTree - && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase - && isSubspace(covered, Or(List(prev, Typ(defn.NullType)))) // for when Null is not subtype of AnyRef under explicit nulls - then { - val nullOnly = - (isNullable || (defn.NullType <:< selTyp)) - && i == len - 1 - && isWildcardArg(pat) - if nullOnly then { - report.warning(MatchCaseOnlyNullWarning(), pat.srcPos) - } else if isSubspace(covered, prev) then { - report.warning(MatchCaseUnreachable(), pat.srcPos) - } - } - deferred = Nil - } - - // in redundancy check, take guard as false in order to soundly approximate - prevs ::= (if guard.isEmpty then covered else Empty) - i += 1 - } - } + @tailrec def recur(cases: List[CaseDef], prevs: List[Space], deferred: List[Tree]): Unit = + cases match + case Nil => + case CaseDef(pat, guard, _) :: rest => + val curr = trace(i"project($pat)")(project(pat)) + val covered = trace("covered")(simplify(intersect(curr, targetSpace))) + val prev = trace("prev")(simplify(Or(prevs))) + if prev == Empty && covered == Empty then // defer until a case is reachable + recur(rest, prevs, pat :: deferred) + else + for pat <- deferred.reverseIterator + do report.warning(MatchCaseUnreachable(), pat.srcPos) + + if pat != EmptyTree // rethrow case of catch uses EmptyTree + && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase + && isSubspace(covered, prev) + then + val nullOnly = isNullable && rest.isEmpty && isWildcardArg(pat) + val msg = if nullOnly then MatchCaseOnlyNullWarning() else MatchCaseUnreachable() + report.warning(msg, pat.srcPos) + + val newPrev = if guard.isEmpty then covered :: prevs else prevs + recur(rest, newPrev, Nil) + + recur(m.cases, Nil, Nil) + end checkReachability def checkMatch(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then checkExhaustivity(m) diff --git a/tests/explicit-nulls/pos/interop-constructor.scala b/tests/explicit-nulls/pos/interop-constructor.scala index f222d24b0919..4ebfaa752b3a 100644 --- a/tests/explicit-nulls/pos/interop-constructor.scala +++ b/tests/explicit-nulls/pos/interop-constructor.scala @@ -1,4 +1,4 @@ -// Test that constructors have a non-nullab.e return type. +// Test that constructors have a non-nullable return type. 
class Foo { val x: java.lang.String = new java.lang.String() diff --git a/tests/explicit-nulls/warn/i21577.check b/tests/explicit-nulls/warn/i21577.check index 944b2ccab3c9..acedd7a9c713 100644 --- a/tests/explicit-nulls/warn/i21577.check +++ b/tests/explicit-nulls/warn/i21577.check @@ -1,12 +1,28 @@ -- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:5:9 -------------------------------------------- -5 | case _ => println(2) // warn +5 | case _ => // warn | ^ | Unreachable case except for null (if this is intentional, consider writing case null => instead). --- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:12:11 ------------------------------------------ -12 | case _ => println(2) // warn - | ^ - | Unreachable case except for null (if this is intentional, consider writing case null => instead). --- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:18:11 ------------------------------------------ -18 | case _ => println(2) // warn - | ^ - | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:12:9 ------------------------------------------- +12 | case _ => // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E030] Match case Unreachable Warning: tests/explicit-nulls/warn/i21577.scala:20:7 ---------------------------------- +20 | case _ => // warn + | ^ + | Unreachable case +-- [E029] Pattern Match Exhaustivity Warning: tests/explicit-nulls/warn/i21577.scala:29:27 ----------------------------- +29 |def f7(s: String | Null) = s match // warn + | ^ + | match may not be exhaustive. + | + | It would fail on pattern case: _: Null + | + | longer explanation available when compiling with `-explain` +-- [E029] Pattern Match Exhaustivity Warning: tests/explicit-nulls/warn/i21577.scala:36:33 ----------------------------- +36 |def f9(s: String | Int | Null) = s match // warn + | ^ + | match may not be exhaustive. 
+ | + | It would fail on pattern case: _: Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/explicit-nulls/warn/i21577.scala b/tests/explicit-nulls/warn/i21577.scala index d5da1403deea..67da6068f22c 100644 --- a/tests/explicit-nulls/warn/i21577.scala +++ b/tests/explicit-nulls/warn/i21577.scala @@ -1,24 +1,38 @@ def f(s: String) = val s2 = s.trim() s2 match - case s3: String => println(1) - case _ => println(2) // warn + case s3: String => + case _ => // warn def f2(s: String | Null) = val s2 = s.nn.trim() - s2 match - case s3: String => println(1) - case _ => println(2) // warn - -def f3(s: String | Null) = - val s2 = s - s2 match - case s3: String => println(1) - case _ => println(2) // warn - -def f4(s: String | Int) = - val s2 = s - s2 match - case s3: String => println(1) - case _ => println(2) \ No newline at end of file + s2 match + case s3: String => + case _ => // warn + +def f3(s: String | Null) = s match + case s2: String => + case _ => + +def f5(s: String) = s match + case _: String => + case _ => // warn + +def f6(s: String) = s.trim() match + case _: String => + case null => + +def f61(s: String) = s.trim() match + case _: String => + +def f7(s: String | Null) = s match // warn + case _: String => + +def f8(s: String | Null) = s match + case _: String => + case null => + +def f9(s: String | Int | Null) = s match // warn + case _: String => + case null => \ No newline at end of file diff --git a/tests/explicit-nulls/warn/interop.check b/tests/explicit-nulls/warn/interop.check new file mode 100644 index 000000000000..0afc1dc0a3cb --- /dev/null +++ b/tests/explicit-nulls/warn/interop.check @@ -0,0 +1,8 @@ +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/interop/S.scala:8:11 ---------------------------------------- +8 | case _ => // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/interop/S.scala:9:9 ----------------------------------------- +9 | case _ => println(2) // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). diff --git a/tests/explicit-nulls/warn/interop/J.java b/tests/explicit-nulls/warn/interop/J.java new file mode 100644 index 000000000000..f81cf685b9a9 --- /dev/null +++ b/tests/explicit-nulls/warn/interop/J.java @@ -0,0 +1,6 @@ +import java.util.ArrayList; + +class J { + ArrayList> foo(String x) { return null; } + static String fooStatic(String x) { return null; } +} diff --git a/tests/explicit-nulls/warn/interop/S.scala b/tests/explicit-nulls/warn/interop/S.scala new file mode 100644 index 000000000000..57beebe4eb76 --- /dev/null +++ b/tests/explicit-nulls/warn/interop/S.scala @@ -0,0 +1,10 @@ +import java.util.ArrayList +def f() = + val j = new J() + val s2 = j.foo(null) + s2 match + case s3: ArrayList[ArrayList[String]] => s3.get(0) match + case _: ArrayList[_] => + case _ => // warn + case _ => println(2) // warn + From 641b67a0ce50ae52cc401ea3855bd4bac9ab4622 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 23 Oct 2024 23:22:44 +0200 Subject: [PATCH 693/827] Add minimal set of tests for runner installed from .msi file (#21835) Add minimal set of tests to check installation of Scala runner from .msi file. 
--- .github/workflows/ci.yaml | 10 ++++- .github/workflows/test-msi.yml | 77 ++++++++++++++++++++++++++++++++++ 2 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/test-msi.yml diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 91086858c514..ffa7e515b926 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1119,7 +1119,15 @@ jobs: build-msi-package: uses: ./.github/workflows/build-msi.yml if : github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]') - # TODO: ADD A JOB THAT DEPENDS ON THIS TO TEST THE MSI + + test-msi-package: + uses: ./.github/workflows/test-msi.yml + needs: [build-msi-package] + with: + # Ensure that version starts with prefix 3. + # In the future it can be adapted to compare with with git tag or version set in the build.s + version: "3." + java-version: 8 build-sdk-package: uses: ./.github/workflows/build-sdk.yml diff --git a/.github/workflows/test-msi.yml b/.github/workflows/test-msi.yml new file mode 100644 index 000000000000..1299c3d55061 --- /dev/null +++ b/.github/workflows/test-msi.yml @@ -0,0 +1,77 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO TEST SCALA WITH MSI RUNNER ### +### HOW TO USE: ### +### Provide optional `version` to test if installed binaries are installed with ### +### correct Scala version. ### +### NOTE: Requires `scala.msi` artifact uploaded within the same run ### +### ### +################################################################################################### + +name: Test 'scala' MSI Package +run-name: Test 'scala' (${{ inputs.version }}) MSI Package + +on: + workflow_call: + inputs: + version: + required: true + type: string + java-version: + required: true + type : string + +jobs: + test: + runs-on: windows-latest + steps: + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ inputs.java-version }} + - name: Download MSI artifact + uses: actions/download-artifact@v4 + with: + name: scala.msi + path: . + + # Run the MSI installer + # During normal installation msiexec would modify the PATH automatically. + # However, it seems not to work in GH Actions. Append the PATH manually instead. 
+ - name: Install Scala Runner + shell: pwsh + run: | + Start-Process 'msiexec.exe' -ArgumentList '/I "scala.msi" /L*V "install.log" /qb' -Wait + Get-Content 'install.log' + Add-Content $env:GITHUB_PATH "C:\Program Files (x86)\scala\bin" + + # Run tests to ensure the Scala Runner was installed and works + - name: Test Scala Runner + shell: pwsh + run: | + scala --version + if (-not (scala --version | Select-String "Scala version \(default\): ${{ inputs.version }}")) { + Write-Host "Invalid Scala version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Test the `scalac` command + shell: pwsh + run: | + scalac --version + if (-not (scalac --version | Select-String "Scala compiler version ${{ inputs.version }}")) { + Write-Host "Invalid scalac version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Test the `scaladoc` command + shell: pwsh + run: | + scaladoc --version + if (-not (scaladoc --version | Select-String "Scaladoc version ${{ inputs.version }}")) { + Write-Host "Invalid scaladoc version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Uninstall the `scala` package + shell: pwsh + run: | + Start-Process 'msiexec.exe' -ArgumentList '/X "scala.msi" /L*V "uninstall.log" /qb' -Wait + Get-Content 'uninstall.log' + \ No newline at end of file From e53cb5bfa6d361124ab5b60f41efdd8708253ff6 Mon Sep 17 00:00:00 2001 From: tgodzik Date: Thu, 10 Oct 2024 18:19:03 +0200 Subject: [PATCH 694/827] improvement: Remove workaround for Bloop and update docs --- .../_docs/contributing/setting-up-your-ide.md | 24 +++++++------- project/Build.scala | 4 +-- project/NoBloopExport.scala | 31 ------------------- 3 files changed, 14 insertions(+), 45 deletions(-) delete mode 100644 project/NoBloopExport.scala diff --git a/docs/_docs/contributing/setting-up-your-ide.md b/docs/_docs/contributing/setting-up-your-ide.md index a02c1dee63cb..a15bf651ef74 100644 --- a/docs/_docs/contributing/setting-up-your-ide.md +++ b/docs/_docs/contributing/setting-up-your-ide.md @@ -3,16 +3,15 @@ layout: doc-page title: Setting up your IDE --- -You can use either Metals with your favorite editor (VS Code, Neovim, Sublime) -or [IntelliJ IDEA for -Scala](https://www.jetbrains.com/help/idea/discover-intellij-idea-for-scala.html) +You can use either Metals with your favorite editor or +[IntelliJ IDEA for Scala](https://www.jetbrains.com/help/idea/discover-intellij-idea-for-scala.html) to work on the Scala 3 codebase. There are however a few additional considerations to take into account. ## Bootstrapping Projects -The sbt build for dotty implements bootstrapping within the same build, so each component has -two projects: +The sbt build for dotty implements bootstrapping within the same build, so each +component has two projects: ``` sbt:scala3> projects @@ -33,9 +32,9 @@ you'll actually want these modules exported. In order to achieve this you'll want to make sure you do two things: 1. You'll want to find and change the following under - `commonBootstrappedSettings` which is found in the - [`Build.scala`](https://github.com/scala/scala3/blob/main/project/Build.scala) - file. + `commonBootstrappedSettings` which is found in the + [`Build.scala`](https://github.com/scala/scala3/blob/main/project/Build.scala) + file. ```diff @@ -43,12 +42,13 @@ want to make sure you do two things: + bspEnabled := true, ``` -2. Set `sbt` as your build server instead of the default, Bloop. 
You can achieve - this with the `Metals: Switch Build Server` command and then choosing sbt. In - VSCode, this looks like this: +2. Run `sbt publishLocal` to get the needed presentation compiler jars. -![bsp-switch](https://user-images.githubusercontent.com/777748/241986423-0724ae74-0ebd-42ef-a1b7-4d17678992b4.png) +By default Metals uses Bloop build server, however you can also use sbt +directly. You can achieve this with the `Metals: Switch Build Server` command +and then choosing sbt. In VSCode, this looks like this: +![bsp-switch](https://user-images.githubusercontent.com/777748/241986423-0724ae74-0ebd-42ef-a1b7-4d17678992b4.png) ### IntelliJ diff --git a/project/Build.scala b/project/Build.scala index 84ce00d11577..6ec933a599b3 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -469,8 +469,8 @@ object Build { } // Settings used when compiling dotty with a non-bootstrapped dotty - lazy val commonBootstrappedSettings = commonDottySettings ++ NoBloopExport.settings ++ Seq( - // To enable support of scaladoc and language-server projects you need to change this to true and use sbt as your build server + lazy val commonBootstrappedSettings = commonDottySettings ++ Seq( + // To enable support of scaladoc and language-server projects you need to change this to true bspEnabled := false, (Compile / unmanagedSourceDirectories) += baseDirectory.value / "src-bootstrapped", diff --git a/project/NoBloopExport.scala b/project/NoBloopExport.scala deleted file mode 100644 index 7a088a405781..000000000000 --- a/project/NoBloopExport.scala +++ /dev/null @@ -1,31 +0,0 @@ -import sbt._ -import Keys._ - -/* With <3 from scala-js */ -object NoBloopExport { - private lazy val bloopGenerateKey: Option[TaskKey[Result[Option[File]]]] = { - val optBloopKeysClass: Option[Class[_]] = try { - Some(Class.forName("bloop.integrations.sbt.BloopKeys")) - } catch { - case _: ClassNotFoundException => None - } - - optBloopKeysClass.map { bloopKeysClass => - val bloopGenerateGetter = bloopKeysClass.getMethod("bloopGenerate") - bloopGenerateGetter.invoke(null).asInstanceOf[TaskKey[Result[Option[File]]]] - } - } - - /** Settings to prevent the project from being exported to IDEs. */ - lazy val settings: Seq[Setting[_]] = { - bloopGenerateKey match { - case None => - Nil - case Some(key) => - Seq( - Compile / key := Value(None), - Test / key := Value(None), - ) - } - } -} From e6b7e3d06faf4b833fd03fe73b5610362b8f4b8c Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 24 Oct 2024 23:47:16 +0200 Subject: [PATCH 695/827] Better error message for polytypes wrapping capturing types A type like [X] -> A ->{c} B is currently not allowed since it expands to a PolyType wrapping a CapturingType and we have an implementation restriction that requires PolyTypes to wrap only FunctionTypes. It would be great if we could lift that implementation restriction. Until we do so, we should have a better error message, which is commit implements. 
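For reference, the case this message covers is exercised by the added tests/neg-custom-args/captures/polyCaptures.scala; a sketch of the offending shape and the resulting diagnostic:

    class Box[X](val elem: X)

    val runOps = [C^] => (b: Box[() ->{C^} Unit]) => b.elem()
    val runOpsCheck: [C^] -> (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps
      // error: Implementation restriction: polymorphic function types cannot
      //        wrap function types that have capture sets

Previously such a declaration fell through to the generic "polymorphic function types must have a value parameter" message.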
--- .../dotty/tools/dotc/parsing/Parsers.scala | 38 +++++++++---------- .../captures/polyCaptures.check | 8 ++++ .../captures/polyCaptures.scala | 7 ++++ 3 files changed, 33 insertions(+), 20 deletions(-) create mode 100644 tests/neg-custom-args/captures/polyCaptures.check create mode 100644 tests/neg-custom-args/captures/polyCaptures.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index d933e55a9823..293b5590fec6 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -518,6 +518,22 @@ object Parsers { tree } + def makePolyFunction(tparams: List[Tree], body: Tree, + kind: String, errorTree: => Tree, + start: Offset, arrowOffset: Offset): Tree = + atSpan(start, arrowOffset): + getFunction(body) match + case None => + syntaxError(em"Implementation restriction: polymorphic function ${kind}s must have a value parameter", arrowOffset) + errorTree + case Some(Function(_, _: CapturesAndResult)) => + // A function tree like this will be desugared + // into a capturing type in the typer. + syntaxError(em"Implementation restriction: polymorphic function types cannot wrap function types that have capture sets", arrowOffset) + errorTree + case Some(f) => + PolyFunction(tparams, body) + /* --------------- PLACEHOLDERS ------------------------------------------- */ /** The implicit parameters introduced by `_` in the current expression. @@ -1539,11 +1555,6 @@ object Parsers { private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) case Block(Nil, tree1) => getFunction(tree1) - case Function(_, _: CapturesAndResult) => - // A function tree like this will be desugared - // into a capturing type in the typer, - // so None is returned. 
- None case t: Function => Some(t) case _ => None } @@ -1757,13 +1768,7 @@ object Parsers { else if in.token == ARROW || isPureArrow(nme.PUREARROW) then val arrowOffset = in.skipToken() val body = toplevelTyp(nestedIntoOK(in.token)) - atSpan(start, arrowOffset): - getFunction(body) match - case Some(f) => - PolyFunction(tparams, body) - case None => - syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) - Ident(nme.ERROR.toTypeName) + makePolyFunction(tparams, body, "type", Ident(nme.ERROR.toTypeName), start, arrowOffset) else accept(TLARROW) typ() @@ -2360,14 +2365,7 @@ object Parsers { val tparams = typeParamClause(ParamOwner.Type) val arrowOffset = accept(ARROW) val body = expr(location) - atSpan(start, arrowOffset) { - getFunction(body) match - case Some(f) => - PolyFunction(tparams, f) - case None => - syntaxError(em"Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) - errorTermTree(arrowOffset) - } + makePolyFunction(tparams, body, "literal", errorTermTree(arrowOffset), start, arrowOffset) case _ => val saved = placeholderParams placeholderParams = Nil diff --git a/tests/neg-custom-args/captures/polyCaptures.check b/tests/neg-custom-args/captures/polyCaptures.check new file mode 100644 index 000000000000..8173828b7bc8 --- /dev/null +++ b/tests/neg-custom-args/captures/polyCaptures.check @@ -0,0 +1,8 @@ +-- Error: tests/neg-custom-args/captures/polyCaptures.scala:4:22 ------------------------------------------------------- +4 |val runOpsCheck: [C^] -> (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps // error + | ^ + | Implementation restriction: polymorphic function types cannot wrap function types that have capture sets +-- Error: tests/neg-custom-args/captures/polyCaptures.scala:5:23 ------------------------------------------------------- +5 |val runOpsCheck2: [C^] => (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps // error + | ^ + | Implementation restriction: polymorphic function types cannot wrap function types that have capture sets diff --git a/tests/neg-custom-args/captures/polyCaptures.scala b/tests/neg-custom-args/captures/polyCaptures.scala new file mode 100644 index 000000000000..776af95e5dcf --- /dev/null +++ b/tests/neg-custom-args/captures/polyCaptures.scala @@ -0,0 +1,7 @@ +class Box[X](val elem: X) + +val runOps = [C^] => (b: Box[() ->{C^} Unit]) => b.elem() +val runOpsCheck: [C^] -> (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps // error +val runOpsCheck2: [C^] => (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps // error + + From fcfa6b0a43a4e477867d5e27404946a9de0b9d24 Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Thu, 24 Oct 2024 16:10:53 +0200 Subject: [PATCH 696/827] Remove redundant `with Checking` `ReTyper` already extends `Typer` which extends `Checking` --- compiler/src/dotty/tools/dotc/transform/TreeChecker.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index a2b403fdae6c..c35dc80c04a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -236,7 +236,7 @@ object TreeChecker { private[TreeChecker] def isValidJVMMethodName(name: Name): Boolean = name.toString.forall(isValidJVMMethodChar) - class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { + class Checker(phasesToCheck: Seq[Phase]) extends ReTyper { 
import ast.tpd.* protected val nowDefinedSyms = util.HashSet[Symbol]() From eb12bfe3f20ca9ad0e395ba20e52d64f084b8c4b Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Fri, 25 Oct 2024 18:03:21 +0200 Subject: [PATCH 697/827] Pretty-print lambdas --- .../tools/dotc/printing/RefinedPrinter.scala | 4 +++ tests/printing/annot-19846b.check | 18 ++--------- tests/printing/lambdas.check | 30 +++++++++++++++++++ tests/printing/lambdas.scala | 14 +++++++++ 4 files changed, 50 insertions(+), 16 deletions(-) create mode 100644 tests/printing/lambdas.check create mode 100644 tests/printing/lambdas.scala diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index b229c7ec29d9..95df9f84c723 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -17,6 +17,7 @@ import Denotations.* import SymDenotations.* import StdNames.{nme, tpnme} import ast.{Trees, tpd, untpd} +import tpd.closureDef import typer.{Implicits, Namer, Applications} import typer.ProtoTypes.* import Trees.* @@ -510,6 +511,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toText(name) ~ (if name.isTermName && arg.isType then " : " else " = ") ~ toText(arg) case Assign(lhs, rhs) => changePrec(GlobalPrec) { toTextLocal(lhs) ~ " = " ~ toText(rhs) } + case closureDef(meth) if !printDebug => + withEnclosingDef(meth): + meth.paramss.map(paramsText).foldRight(toText(meth.rhs))(_ ~ " => " ~ _) case block: Block => blockToText(block) case If(cond, thenp, elsep) => diff --git a/tests/printing/annot-19846b.check b/tests/printing/annot-19846b.check index 3f63a46c4286..3633b1c9e9ad 100644 --- a/tests/printing/annot-19846b.check +++ b/tests/printing/annot-19846b.check @@ -7,27 +7,13 @@ package { final lazy module val Test: Test = new Test() final module class Test() extends Object() { this: Test.type => val y: Int = ??? 
- val z: - Int @lambdaAnnot( - { - def $anonfun(): Int = Test.y - closure($anonfun) - } - ) - = f(Test.y) + val z: Int @lambdaAnnot(() => Test.y) = f(Test.y) } final lazy module val annot-19846b$package: annot-19846b$package = new annot-19846b$package() final module class annot-19846b$package() extends Object() { this: annot-19846b$package.type => - def f(x: Int): - Int @lambdaAnnot( - { - def $anonfun(): Int = x - closure($anonfun) - } - ) - = x + def f(x: Int): Int @lambdaAnnot(() => x) = x } } diff --git a/tests/printing/lambdas.check b/tests/printing/lambdas.check new file mode 100644 index 000000000000..887a8f2ded35 --- /dev/null +++ b/tests/printing/lambdas.check @@ -0,0 +1,30 @@ +[[syntax trees at end of typer]] // tests/printing/lambdas.scala +package { + final lazy module val Main: Main = new Main() + final module class Main() extends Object() { this: Main.type => + val f1: Int => Int = (x: Int) => x.+(1) + val f2: (Int, Int) => Int = (x: Int, y: Int) => x.+(y) + val f3: Int => Int => Int = (x: Int) => (y: Int) => x.+(y) + val f4: [T] => (x: Int) => Int = [T >: Nothing <: Any] => (x: Int) => x.+(1) + val f5: [T] => (x: Int) => Int => Int = [T >: Nothing <: Any] => (x: Int) + => (y: Int) => x.+(y) + val f6: Int => Int = (x: Int) => + { + val x2: Int = x.+(1) + x2.+(1) + } + def f7(x: Int): Int = x.+(1) + val f8: Int => Int = (x: Int) => Main.f7(x) + val l: List[Int] = List.apply[Int]([1,2,3 : Int]*) + Main.l.map[Int]((_$1: Int) => _$1.+(1)) + Main.l.map[Int]((x: Int) => x.+(1)) + Main.l.map[Int]((x: Int) => + { + val x2: Int = x.+(1) + x2.+(1) + } + ) + Main.l.map[Int]((x: Int) => Main.f7(x)) + } +} + diff --git a/tests/printing/lambdas.scala b/tests/printing/lambdas.scala new file mode 100644 index 000000000000..8df7db3e6dda --- /dev/null +++ b/tests/printing/lambdas.scala @@ -0,0 +1,14 @@ +object Main: + val f1 = (x: Int) => x + 1 + val f2 = (x: Int, y: Int) => x + y + val f3 = (x: Int) => (y: Int) => x + y + val f4 = [T] => (x: Int) => x + 1 + val f5 = [T] => (x: Int) => (y: Int) => x + y + val f6 = (x: Int) => { val x2 = x + 1; x2 + 1 } + def f7(x: Int) = x + 1 + val f8 = f7 + val l = List(1,2,3) + l.map(_ + 1) + l.map(x => x + 1) + l.map(x => { val x2 = x + 1; x2 + 1 }) + l.map(f7) From 305cb9caae2285c1d1bd626f13cdaa42e432223e Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 18 Sep 2024 15:33:16 +0200 Subject: [PATCH 698/827] Add path support for cc --- compiler/src/dotty/tools/dotc/ast/untpd.scala | 4 +- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 3 +- .../src/dotty/tools/dotc/cc/CaptureRef.scala | 84 +++++++++----- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 8 +- .../dotty/tools/dotc/cc/CheckCaptures.scala | 106 ++++++++++-------- .../dotty/tools/dotc/parsing/Parsers.scala | 28 ++--- .../captures/class-contra.check | 13 +-- .../captures/class-contra.scala | 3 +- .../captures/explain-under-approx.check | 14 +-- .../captures/filevar-multi-ios.scala | 41 +++++++ tests/neg-custom-args/captures/i15116.check | 16 ++- tests/neg-custom-args/captures/path-box.scala | 20 ++++ .../captures/path-connection.scala | 46 ++++++++ .../captures/path-illigal.scala | 7 ++ .../captures/path-simple.scala | 27 +++++ .../neg-custom-args/captures/singletons.scala | 8 +- .../captures/filevar-expanded.scala | 3 +- tests/pos-custom-args/captures/filevar.scala | 3 +- 18 files changed, 314 insertions(+), 120 deletions(-) create mode 100644 tests/neg-custom-args/captures/filevar-multi-ios.scala create mode 100644 tests/neg-custom-args/captures/path-box.scala create mode 100644 
tests/neg-custom-args/captures/path-connection.scala create mode 100644 tests/neg-custom-args/captures/path-illigal.scala create mode 100644 tests/neg-custom-args/captures/path-simple.scala diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 935e42d5e05c..4684464d477f 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -524,8 +524,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def makeRetaining(parent: Tree, refs: List[Tree], annotName: TypeName)(using Context): Annotated = Annotated(parent, New(scalaAnnotationDot(annotName), List(refs))) - def makeCapsOf(id: Ident)(using Context): Tree = - TypeApply(Select(scalaDot(nme.caps), nme.capsOf), id :: Nil) + def makeCapsOf(tp: Tree)(using Context): Tree = + TypeApply(Select(scalaDot(nme.caps), nme.capsOf), tp :: Nil) def makeCapsBound()(using Context): Tree = makeRetaining( diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 29c6528e36de..79cc7d136e45 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -194,7 +194,8 @@ extension (tp: Type) true case tp: TermRef => ((tp.prefix eq NoPrefix) - || tp.symbol.is(ParamAccessor) && tp.prefix.isThisTypeOf(tp.symbol.owner) + || tp.symbol.isField && !tp.symbol.isStatic && ( + tp.prefix.isThisTypeOf(tp.symbol.owner) || tp.prefix.isTrackableRef) || tp.isRootCapability ) && !tp.symbol.isOneOf(UnstableValueFlags) case tp: TypeRef => diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala index f00c6869cd80..05162907b608 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -61,18 +61,19 @@ trait CaptureRef extends TypeProxy, ValueType: case tp: TermParamRef => tp.underlying.derivesFrom(defn.Caps_Exists) case _ => false - /** Normalize reference so that it can be compared with `eq` for equality */ - final def normalizedRef(using Context): CaptureRef = this match - case tp @ AnnotatedType(parent: CaptureRef, annot) if tp.isTrackableRef => - tp.derivedAnnotatedType(parent.normalizedRef, annot) - case tp: TermRef if tp.isTrackableRef => - tp.symbol.termRef - case _ => this + // With the support of pathes, we don't need to normalize the `TermRef`s anymore. 
+ // /** Normalize reference so that it can be compared with `eq` for equality */ + // final def normalizedRef(using Context): CaptureRef = this match + // case tp @ AnnotatedType(parent: CaptureRef, annot) if tp.isTrackableRef => + // tp.derivedAnnotatedType(parent.normalizedRef, annot) + // case tp: TermRef if tp.isTrackableRef => + // tp.symbol.termRef + // case _ => this /** The capture set consisting of exactly this reference */ final def singletonCaptureSet(using Context): CaptureSet.Const = if mySingletonCaptureSet == null then - mySingletonCaptureSet = CaptureSet(this.normalizedRef) + mySingletonCaptureSet = CaptureSet(this) mySingletonCaptureSet.uncheckedNN /** The capture set of the type underlying this reference */ @@ -99,25 +100,56 @@ trait CaptureRef extends TypeProxy, ValueType: * x: x1.type /\ x1 subsumes y ==> x subsumes y */ final def subsumes(y: CaptureRef)(using Context): Boolean = - (this eq y) - || this.isRootCapability - || y.match - case y: TermRef => - (y.prefix eq this) - || y.info.match - case y1: SingletonCaptureRef => this.subsumes(y1) - case _ => false - case MaybeCapability(y1) => this.stripMaybe.subsumes(y1) - case _ => false - || this.match - case ReachCapability(x1) => x1.subsumes(y.stripReach) - case x: TermRef => - x.info match - case x1: SingletonCaptureRef => x1.subsumes(y) + def compareCaptureRefs(x: Type, y: Type): Boolean = + (x eq y) + || y.match + case y: CaptureRef => x.match + case x: CaptureRef => x.subsumes(y) case _ => false - case x: TermParamRef => subsumesExistentially(x, y) - case x: TypeRef => assumedContainsOf(x).contains(y) - case _ => false + case _ => false + + def compareUndelying(x: Type): Boolean = x match + case x: SingletonCaptureRef => x.subsumes(y) + case x: AndType => compareUndelying(x.tp1) || compareUndelying(x.tp2) + case x: OrType => compareUndelying(x.tp1) && compareUndelying(x.tp2) + case _ => false + + if (this eq y) || this.isRootCapability then return true + + // similar to compareNamed in TypeComparer + y match + case y: TermRef => + this match + case x: TermRef => + val xSym = x.symbol + val ySym = y.symbol + + // check x.f and y.f + if (xSym ne NoSymbol) + && (xSym eq ySym) + && compareCaptureRefs(x.prefix, y.prefix) + || (x.name eq y.name) + && x.isPrefixDependentMemberRef + && compareCaptureRefs(x.prefix, y.prefix) + && x.signature == y.signature + && !(xSym.isClass && ySym.isClass) + then return true + case _ => + + // shorten + if compareCaptureRefs(this, y.prefix) then return true + // underlying + if compareCaptureRefs(this, y.info) then return true + case MaybeCapability(y1) => return this.stripMaybe.subsumes(y1) + case _ => + + return this.match + case ReachCapability(x1) => x1.subsumes(y.stripReach) + case x: TermRef => compareUndelying(x.info) + case CapturingType(x1, _) => compareUndelying(x1) + case x: TermParamRef => subsumesExistentially(x, y) + case x: TypeRef => assumedContainsOf(x).contains(y) + case _ => false def assumedContainsOf(x: TypeRef)(using Context): SimpleIdentitySet[CaptureRef] = CaptureSet.assumedContains.getOrElse(x, SimpleIdentitySet.empty) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 44d5e2cf4b88..81b4287961ba 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -374,7 +374,7 @@ object CaptureSet: def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = if elems.isEmpty then empty - else 
Const(SimpleIdentitySet(elems.map(_.normalizedRef.ensuring(_.isTrackableRef))*)) + else Const(SimpleIdentitySet(elems.map(_.ensuring(_.isTrackableRef))*)) def apply(elems: Refs)(using Context): CaptureSet.Const = if elems.isEmpty then empty else Const(elems) @@ -508,7 +508,11 @@ object CaptureSet: !noUniversal else elem match case elem: TermRef if level.isDefined => - elem.symbol.ccLevel <= level + elem.prefix match + case prefix: CaptureRef => + levelOK(prefix) + case _ => + elem.symbol.ccLevel <= level case elem: ThisType if level.isDefined => elem.cls.ccLevel.nextInner <= level case ReachCapability(elem1) => diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index b05ab8542137..ec1e63137311 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -122,10 +122,6 @@ object CheckCaptures: * This check is performed at Typer. */ def checkWellformed(parent: Tree, ann: Tree)(using Context): Unit = - parent.tpe match - case _: SingletonType => - report.error(em"Singleton type $parent cannot have capture set", parent.srcPos) - case _ => def check(elem: Tree, pos: SrcPos): Unit = elem.tpe match case ref: CaptureRef => if !ref.isTrackableRef then @@ -373,45 +369,54 @@ class CheckCaptures extends Recheck, SymTransformer: * the environment's owner */ def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = + // A captured reference with the symbol `sym` is visible from the environment + // if `sym` is not defined inside the owner of the environment. + inline def isVisibleFromEnv(sym: Symbol, env: Env) = + if env.kind == EnvKind.NestedInOwner then + !sym.isProperlyContainedIn(env.owner) + else + !sym.isContainedIn(env.owner) + + def checkSubsetEnv(cs: CaptureSet, env: Env)(using Context): Unit = + // Only captured references that are visible from the environment + // should be included. + val included = cs.filter: c => + c.stripReach match + case ref: NamedType => + val refSym = ref.symbol + val refOwner = refSym.owner + val isVisible = isVisibleFromEnv(refOwner, env) + if isVisible && !ref.isRootCapability then + ref match + case ref: TermRef if ref.prefix `ne` NoPrefix => + // If c is a path of a class defined outside the environment, + // we check the capture set of its info. + checkSubsetEnv(ref.captureSetOfInfo, env) + case _ => + if !isVisible + && (c.isReach || ref.isType) + && (!ccConfig.useSealed || refSym.is(Param)) + && refOwner == env.owner + then + if refSym.hasAnnotation(defn.UnboxAnnot) then + capt.println(i"exempt: $ref in $refOwner") + else + // Reach capabilities that go out of scope have to be approximated + // by their underlying capture set, which cannot be universal. + // Reach capabilities of @unboxed parameters are exempted. + val cs = CaptureSet.ofInfo(c) + cs.disallowRootCapability: () => + report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) + checkSubset(cs, env.captured, pos, provenance(env)) + isVisible + case ref: ThisType => isVisibleFromEnv(ref.cls, env) + case _ => false + checkSubset(included, env.captured, pos, provenance(env)) + capt.println(i"Include call or box capture $included from $cs in ${env.owner} --> ${env.captured}") + if !cs.isAlwaysEmpty then forallOuterEnvsUpTo(ctx.owner.topLevelClass): env => - // Whether a symbol is defined inside the owner of the environment? 
- inline def isContainedInEnv(sym: Symbol) = - if env.kind == EnvKind.NestedInOwner then - sym.isProperlyContainedIn(env.owner) - else - sym.isContainedIn(env.owner) - // A captured reference with the symbol `sym` is visible from the environment - // if `sym` is not defined inside the owner of the environment - inline def isVisibleFromEnv(sym: Symbol) = !isContainedInEnv(sym) - // Only captured references that are visible from the environment - // should be included. - val included = cs.filter: c => - c.stripReach match - case ref: NamedType => - val refSym = ref.symbol - val refOwner = refSym.owner - val isVisible = isVisibleFromEnv(refOwner) - if !isVisible - && (c.isReach || ref.isType) - && (!ccConfig.useSealed || refSym.is(Param)) - && refOwner == env.owner - then - if refSym.hasAnnotation(defn.UnboxAnnot) then - capt.println(i"exempt: $ref in $refOwner") - else - // Reach capabilities that go out of scope have to be approximated - // by their underlying capture set, which cannot be universal. - // Reach capabilities of @unboxed parameters are exempted. - val cs = CaptureSet.ofInfo(c) - cs.disallowRootCapability: () => - report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) - checkSubset(cs, env.captured, pos, provenance(env)) - isVisible - case ref: ThisType => isVisibleFromEnv(ref.cls) - case _ => false - checkSubset(included, env.captured, pos, provenance(env)) - capt.println(i"Include call or box capture $included from $cs in ${env.owner} --> ${env.captured}") + checkSubsetEnv(cs, env) end markFree /** Include references captured by the called method in the current environment stack */ @@ -488,21 +493,28 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => denot val selType = recheckSelection(tree, qualType, name, disambiguate) - val selCs = selType.widen.captureSet - if selCs.isAlwaysEmpty - || selType.widen.isBoxedCapturing + val selWiden = selType.widen + def isStableSel = selType match + case selType: NamedType => selType.symbol.isStableMember + case _ => false + + if pt == LhsProto || qualType.isBoxedCapturing - || pt == LhsProto + || selType.isTrackableRef + || selWiden.isBoxedCapturing + || selWiden.captureSet.isAlwaysEmpty then selType else val qualCs = qualType.captureSet - capt.println(i"pick one of $qualType, ${selType.widen}, $qualCs, $selCs in $tree") + val selCs = selType.captureSet + capt.println(i"pick one of $qualType, ${selType.widen}, $qualCs, $selCs ${selWiden.captureSet} in $tree") + if qualCs.mightSubcapture(selCs) && !selCs.mightSubcapture(qualCs) && !pt.stripCapturing.isInstanceOf[SingletonType] then - selType.widen.stripCapturing.capturing(qualCs) + selWiden.stripCapturing.capturing(qualCs) .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) else selType diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 47391a4114cf..dc3ae4cf7639 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1559,21 +1559,23 @@ object Parsers { case _ => None } - /** CaptureRef ::= ident [`*` | `^`] | `this` + /** CaptureRef ::= (ident | `this`) [`*` | `^`] */ def captureRef(): Tree = - if in.token == THIS then simpleRef() - else - val id = termIdent() - if isIdent(nme.raw.STAR) then - in.nextToken() - atSpan(startOffset(id)): - PostfixOp(id, Ident(nme.CC_REACH)) - else if isIdent(nme.UPARROW) then - in.nextToken() - atSpan(startOffset(id)): - 
makeCapsOf(cpy.Ident(id)(id.name.toTypeName)) - else id + val ref = singleton() + if isIdent(nme.raw.STAR) then + in.nextToken() + atSpan(startOffset(ref)): + PostfixOp(ref, Ident(nme.CC_REACH)) + else if isIdent(nme.UPARROW) then + in.nextToken() + def toTypeSel(r: Tree): Tree = r match + case id: Ident => cpy.Ident(id)(id.name.toTypeName) + case Select(qual, id) => Select(qual, id.toTypeName) + case _ => r + atSpan(startOffset(ref)): + makeCapsOf(toTypeSel(ref)) + else ref /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking */ diff --git a/tests/neg-custom-args/captures/class-contra.check b/tests/neg-custom-args/captures/class-contra.check index 9fc009ac3d48..808118bd1795 100644 --- a/tests/neg-custom-args/captures/class-contra.check +++ b/tests/neg-custom-args/captures/class-contra.check @@ -1,10 +1,7 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/class-contra.scala:12:39 --------------------------------- -12 | def fun(x: K{val f: T^{a}}) = x.setf(a) // error - | ^ - | Found: (a : T^{x, y}) - | Required: T^{} - | - | Note that a capability (K.this.f : T^) in a capture set appearing in contravariant position - | was mapped to (x.f : T^{a}) which is not a capability. Therefore, it was under-approximated to the empty set. +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/class-contra.scala:12:40 --------------------------------- +12 | def fun1(k: K{val f: T^{a}}) = k.setf(a) // error + | ^ + | Found: (a : T^{x, y}) + | Required: T^{k.f} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/class-contra.scala b/tests/neg-custom-args/captures/class-contra.scala index 210fd4e331f1..8ef8e7485a18 100644 --- a/tests/neg-custom-args/captures/class-contra.scala +++ b/tests/neg-custom-args/captures/class-contra.scala @@ -9,5 +9,6 @@ class T def test(x: Cap, y: Cap) = val a: T^{x, y} = ??? - def fun(x: K{val f: T^{a}}) = x.setf(a) // error + def fun1(k: K{val f: T^{a}}) = k.setf(a) // error + def fun2(k: K{val f: a.type}) = k.setf(a) () \ No newline at end of file diff --git a/tests/neg-custom-args/captures/explain-under-approx.check b/tests/neg-custom-args/captures/explain-under-approx.check index 2d2b05b4b95a..c186fc6adb11 100644 --- a/tests/neg-custom-args/captures/explain-under-approx.check +++ b/tests/neg-custom-args/captures/explain-under-approx.check @@ -1,20 +1,14 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/explain-under-approx.scala:12:10 ------------------------- 12 | col.add(Future(() => 25)) // error | ^^^^^^^^^^^^^^^^ - | Found: Future[Int]{val a: (async : Async^)}^{async} - | Required: Future[Int]^{} - | - | Note that a capability Collector.this.futs* in a capture set appearing in contravariant position - | was mapped to col.futs* which is not a capability. Therefore, it was under-approximated to the empty set. + | Found: Future[Int]{val a: (async : Async^)}^{async} + | Required: Future[Int]^{col.futs*} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/explain-under-approx.scala:15:11 ------------------------- 15 | col1.add(Future(() => 25)) // error | ^^^^^^^^^^^^^^^^ - | Found: Future[Int]{val a: (async : Async^)}^{async} - | Required: Future[Int]^{} - | - | Note that a capability Collector.this.futs* in a capture set appearing in contravariant position - | was mapped to col1.futs* which is not a capability. Therefore, it was under-approximated to the empty set. 
+ | Found: Future[Int]{val a: (async : Async^)}^{async} + | Required: Future[Int]^{col1.futs*} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/filevar-multi-ios.scala b/tests/neg-custom-args/captures/filevar-multi-ios.scala new file mode 100644 index 000000000000..8ffc8d8e299c --- /dev/null +++ b/tests/neg-custom-args/captures/filevar-multi-ios.scala @@ -0,0 +1,41 @@ +import language.experimental.modularity +import compiletime.uninitialized + +class IO extends caps.Capability + +class File: + def write(x: String): Unit = ??? + +object test1: + + class Service(val io: IO, val io2: IO): + var file: File^{io} = uninitialized + var file2: File^{io2} = uninitialized + def log = file.write("log") + + def withFile[T](io: IO)(op: File^{io} => T): T = + op(new File) + + def test(io3: IO, io4: IO) = + withFile(io3): f => + val o = Service(io3, io4) + o.file = f // error + o.file2 = f // error + o.log + +object test2: + + class Service(tracked val io: IO, tracked val io2: IO): + var file: File^{io} = uninitialized + var file2: File^{io2} = uninitialized + def log = file.write("log") + + def withFile[T](io: IO)(op: File^{io} => T): T = + op(new File) + + def test(io3: IO, io4: IO) = + withFile(io3): f => + val o = Service(io3, io4) + o.file = f + o.file2 = f // error + o.log diff --git a/tests/neg-custom-args/captures/i15116.check b/tests/neg-custom-args/captures/i15116.check index df05324866e1..0a16af9f6704 100644 --- a/tests/neg-custom-args/captures/i15116.check +++ b/tests/neg-custom-args/captures/i15116.check @@ -18,13 +18,17 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15116.scala:5:13 ---------------------------------------- 5 | val x = Foo(m) // error | ^^^^^^ - | Found: Foo{val m: String^{Baz.this}}^{Baz.this} + | Found: Foo{val m²: (Baz.this.m : String^)}^{Baz.this.m} | Required: Foo | + | where: m is a value in trait Baz + | m² is a value in class Foo + | + | | Note that the expected type Foo | is the previously inferred type of value x | which is also the type seen in separately compiled sources. - | The new inferred type Foo{val m: String^{Baz.this}}^{Baz.this} + | The new inferred type Foo{val m: (Baz.this.m : String^)}^{Baz.this.m} | must conform to this type. | | longer explanation available when compiling with `-explain` @@ -48,13 +52,17 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15116.scala:9:13 ---------------------------------------- 9 | val x = Foo(m) // error | ^^^^^^ - | Found: Foo{val m: String^{Baz2.this}}^{Baz2.this} + | Found: Foo{val m²: (Baz2.this.m : String^)}^{Baz2.this.m} | Required: Foo | + | where: m is a value in trait Baz2 + | m² is a value in class Foo + | + | | Note that the expected type Foo | is the previously inferred type of value x | which is also the type seen in separately compiled sources. - | The new inferred type Foo{val m: String^{Baz2.this}}^{Baz2.this} + | The new inferred type Foo{val m: (Baz2.this.m : String^)}^{Baz2.this.m} | must conform to this type. | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/path-box.scala b/tests/neg-custom-args/captures/path-box.scala new file mode 100644 index 000000000000..3213c236aaf5 --- /dev/null +++ b/tests/neg-custom-args/captures/path-box.scala @@ -0,0 +1,20 @@ +class A: + val m: A^ = ??? 
+ val self: this.type = this + +case class Box[+T](value: T) + +def testBox1(a: A^): Box[A^{a}] = + Box(a.m) + +def testBox2(a: A^): Box[A^{a.m}] = + Box(a.m) + +def testBox3(a: A^): Box[A^{a.m}] = + Box(a) // error + +def testBox4(a: A^): Box[A^{a.m}] = + Box(a.m.m.m) + +def testBox5(a: A^): Box[A^{a.m}] = + Box(a.m.m.self) \ No newline at end of file diff --git a/tests/neg-custom-args/captures/path-connection.scala b/tests/neg-custom-args/captures/path-connection.scala new file mode 100644 index 000000000000..3b3820488c8d --- /dev/null +++ b/tests/neg-custom-args/captures/path-connection.scala @@ -0,0 +1,46 @@ +import language.experimental.modularity + +trait Reader: + def read(): String + +trait Sender: + def send(msg: String): Unit + +class Connection extends Reader, Sender: + def read() = "hello" + def send(msg: String) = () + + val readOnly: Reader^ = new Reader: + def read() = Connection.this.read() + +class ReaderProxy(tracked val r: Reader^) extends Reader: + def read() = "(Proxy)" + r.read() + +class SenderProxy(tracked val s: Sender^) extends Sender: + def send(msg: String) = s.send("(Proxy) " + msg) + +def testConnection(c: Connection^)( + handle1: Reader^{c.readOnly} => String, + handle2: Sender^{c} => Unit, + handle3: Reader^{c} => String, + ) = + val m1 = c.read() + c.send("hello") + + val m2 = c.readOnly.read() + + val m3a = handle1(c.readOnly) + val m3b = handle3(c.readOnly) + + val m4a = handle1(c) // error + val m4b = handle3(c) + + val m5a = handle1(new ReaderProxy(c.readOnly)) + val m5b = handle3(new ReaderProxy(c.readOnly)) + + val m6a = handle1(new ReaderProxy(c)) // error + val m6b = handle3(new ReaderProxy(c)) + + handle2(c) + + handle2(new SenderProxy(c)) \ No newline at end of file diff --git a/tests/neg-custom-args/captures/path-illigal.scala b/tests/neg-custom-args/captures/path-illigal.scala new file mode 100644 index 000000000000..f09db0087ef7 --- /dev/null +++ b/tests/neg-custom-args/captures/path-illigal.scala @@ -0,0 +1,7 @@ +class A: + val m: A^ = ??? + var n: A^ = ??? + +def test1(a: A^) = + val c1: A^{a.m} = a.m + val f1: A^{a.n} = a.n // error \ No newline at end of file diff --git a/tests/neg-custom-args/captures/path-simple.scala b/tests/neg-custom-args/captures/path-simple.scala new file mode 100644 index 000000000000..93b6dacebe74 --- /dev/null +++ b/tests/neg-custom-args/captures/path-simple.scala @@ -0,0 +1,27 @@ + +class A: + val m: A^ = ??? 
+ val self: this.type = this + +case class C(ca: A^) + +def test1(a: A^, b: A^) = + val c1: A^{a} = a.m + val c2: A^{a.m} = a.m + val c3: A^{b} = a.m // error + + val d1: A^{a} = a.self + val d2: A^{a.self} = a.self + val d3: A^{a.self} = a + + val e1: A^{a.m} = a.self.m + val e2: A^{a.self.m} = a.self.m + val e3: A^{a.self.m} = a.m + +def test2(a: A^) = + val b: a.type = a + val c1: C^{a} = new C(a) + val c2: C^{a} = new C(a.m) + val c3: C^{a.m} = new C(a.m) + val c4: C^{b} = new C(a) + val c5: C^{a} = new C(b) \ No newline at end of file diff --git a/tests/neg-custom-args/captures/singletons.scala b/tests/neg-custom-args/captures/singletons.scala index 194e6e850dcd..be0ee67ab1bc 100644 --- a/tests/neg-custom-args/captures/singletons.scala +++ b/tests/neg-custom-args/captures/singletons.scala @@ -1,6 +1,6 @@ val x = () => () -val y1: x.type = x // ok -val y2: x.type^{} = x // error: singleton type cannot have capture set -val y3: x.type^{x} = x // error: singleton type cannot have capture set // error -val y4: x.type^ = x // error: singleton type cannot have capture set +val y1: x.type = x +val y2: x.type^{} = x +val y3: x.type^{x} = x // error +val y4: x.type^ = x diff --git a/tests/pos-custom-args/captures/filevar-expanded.scala b/tests/pos-custom-args/captures/filevar-expanded.scala index a883471e8d2e..58e7a0e67e0a 100644 --- a/tests/pos-custom-args/captures/filevar-expanded.scala +++ b/tests/pos-custom-args/captures/filevar-expanded.scala @@ -1,4 +1,5 @@ import language.experimental.captureChecking +import language.experimental.modularity import compiletime.uninitialized object test1: @@ -22,7 +23,7 @@ object test2: class File: def write(x: String): Unit = ??? - class Service(io: IO^): + class Service(tracked val io: IO^): var file: File^{io} = uninitialized def log = file.write("log") diff --git a/tests/pos-custom-args/captures/filevar.scala b/tests/pos-custom-args/captures/filevar.scala index 9ab34fe617b5..dc8d0b18908b 100644 --- a/tests/pos-custom-args/captures/filevar.scala +++ b/tests/pos-custom-args/captures/filevar.scala @@ -1,4 +1,5 @@ import language.experimental.captureChecking +import language.experimental.modularity import compiletime.uninitialized object test1: @@ -22,7 +23,7 @@ object test2: class File: def write(x: String): Unit = ??? 
- class Service(io: IO): + class Service(tracked val io: IO): var file: File^{io} = uninitialized def log = file.write("log") From 57dd0304822385d987fc780b5f86874161bcc722 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Thu, 19 Sep 2024 15:30:57 +0200 Subject: [PATCH 699/827] Add comment for path-dependent limitation --- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 5 +---- compiler/src/dotty/tools/dotc/cc/Setup.scala | 5 +++++ tests/neg-custom-args/captures/path-connection.scala | 2 ++ 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index ec1e63137311..b3a1ab44d6cd 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -494,10 +494,7 @@ class CheckCaptures extends Recheck, SymTransformer: val selType = recheckSelection(tree, qualType, name, disambiguate) val selWiden = selType.widen - def isStableSel = selType match - case selType: NamedType => selType.symbol.isStableMember - case _ => false - + if pt == LhsProto || qualType.isBoxedCapturing || selType.isTrackableRef diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 22e7899eeea1..76ae41649517 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -518,6 +518,11 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: info match case mt: MethodOrPoly => val psyms = psymss.head + // TODO: the substitution does not work for param-dependent method types. + // For example, `(x: T, y: x.f.type) => Unit`. In this case, when we + // substitute `x.f.type`, `x` becomes a `TermParamRef`. But the new method + // type is still under initialization and `paramInfos` is still `null`, + // so the new `NamedType` will not have a denoation. mt.companion(mt.paramNames)( mt1 => if !paramSignatureChanges && !mt.isParamDependent && prevLambdas.isEmpty then diff --git a/tests/neg-custom-args/captures/path-connection.scala b/tests/neg-custom-args/captures/path-connection.scala index 3b3820488c8d..c65aa75b1ed2 100644 --- a/tests/neg-custom-args/captures/path-connection.scala +++ b/tests/neg-custom-args/captures/path-connection.scala @@ -19,6 +19,8 @@ class ReaderProxy(tracked val r: Reader^) extends Reader: class SenderProxy(tracked val s: Sender^) extends Sender: def send(msg: String) = s.send("(Proxy) " + msg) +// TODO: We have to put `c` in the different argument list to make it work. +// See the comments in `integrateRT`. 
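[Editorial aside, not part of the recorded patch: a sketch of the limitation that the TODO above and the new comment in `integrateRT` describe. The uncurried signature below is hypothetical and is shown only to illustrate why `c` is kept in its own parameter list; it assumes the same `Connection` and `Reader` classes defined earlier in this test file.]
// Hypothetical single-parameter-list variant: the type of `handle1` refers to the
// preceding parameter `c`, so the resulting method type is param-dependent.
//   def testConnectionUncurried(c: Connection^, handle1: Reader^{c.readOnly} => String): String =
//     handle1(c.readOnly)
// When `integrateRT` rebuilds such a method type, the dependent reference `c.readOnly`
// is substituted while the new method type is still under initialization (its
// `paramInfos` is still null), so the new reference gets no denotation. Keeping `c`
// in a leading parameter list, as `testConnection` below does, sidesteps this.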
def testConnection(c: Connection^)( handle1: Reader^{c.readOnly} => String, handle2: Sender^{c} => Unit, From 2f9f4c4be97d64683a4154c3d4f6fb306f8d373c Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 24 Sep 2024 18:05:57 +0200 Subject: [PATCH 700/827] Add alternative subsumes implementations This is done for comparing old with new --- .../src/dotty/tools/dotc/cc/CaptureRef.scala | 46 +++++++++++++++++-- 1 file changed, 41 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala index 05162907b608..107b1a178069 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -93,23 +93,59 @@ trait CaptureRef extends TypeProxy, ValueType: final def invalidateCaches() = myCaptureSetRunId = NoRunId + final def subsumes(y: CaptureRef)(using Context): Boolean = + val was = subsumesOld(y) + val now = subsumesNew(y) + if was != now then + println(i"diff for $this subsumes $y, now: $now, ${this.getClass}, ${y.getClass}") + was + + final def subsumesOld(y: CaptureRef)(using Context): Boolean = + (this eq y) + || this.isRootCapability + || y.match + case y: TermRef => + y.prefix.match + case ypre: CaptureRef => + this.subsumesOld(ypre) + || this.match + case x @ TermRef(xpre: CaptureRef, _) => + x.symbol == y.symbol && xpre =:= ypre + case _ => + false + case _ => false + || y.info.match + case y1: SingletonCaptureRef => this.subsumesOld(y1) + case _ => false + case MaybeCapability(y1) => this.stripMaybe.subsumesOld(y1) + case _ => false + || this.match + case ReachCapability(x1) => x1.subsumesOld(y.stripReach) + case x: TermRef => + x.info match + case x1: SingletonCaptureRef => x1.subsumesOld(y) + case _ => false + case x: TermParamRef => subsumesExistentially(x, y) + case x: TypeRef => assumedContainsOf(x).contains(y) + case _ => false + /** x subsumes x * this subsumes this.f * x subsumes y ==> x* subsumes y, x subsumes y? * x subsumes y ==> x* subsumes y*, x? subsumes y? 
* x: x1.type /\ x1 subsumes y ==> x subsumes y */ - final def subsumes(y: CaptureRef)(using Context): Boolean = + final def subsumesNew(y: CaptureRef)(using Context): Boolean = def compareCaptureRefs(x: Type, y: Type): Boolean = (x eq y) || y.match case y: CaptureRef => x.match - case x: CaptureRef => x.subsumes(y) + case x: CaptureRef => x.subsumesNew(y) case _ => false case _ => false def compareUndelying(x: Type): Boolean = x match - case x: SingletonCaptureRef => x.subsumes(y) + case x: SingletonCaptureRef => x.subsumesNew(y) case x: AndType => compareUndelying(x.tp1) || compareUndelying(x.tp2) case x: OrType => compareUndelying(x.tp1) && compareUndelying(x.tp2) case _ => false @@ -140,11 +176,11 @@ trait CaptureRef extends TypeProxy, ValueType: if compareCaptureRefs(this, y.prefix) then return true // underlying if compareCaptureRefs(this, y.info) then return true - case MaybeCapability(y1) => return this.stripMaybe.subsumes(y1) + case MaybeCapability(y1) => return this.stripMaybe.subsumesNew(y1) case _ => return this.match - case ReachCapability(x1) => x1.subsumes(y.stripReach) + case ReachCapability(x1) => x1.subsumesNew(y.stripReach) case x: TermRef => compareUndelying(x.info) case CapturingType(x1, _) => compareUndelying(x1) case x: TermParamRef => subsumesExistentially(x, y) From 0c50a31003a9090d4e131ee6654be5a5e033433e Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 24 Sep 2024 18:07:50 +0200 Subject: [PATCH 701/827] Revert to previous subsumes scheme Add the path cases without changing the whole logic --- .../src/dotty/tools/dotc/cc/CaptureRef.scala | 82 +++---------------- 1 file changed, 12 insertions(+), 70 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala index 107b1a178069..195f07f778eb 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -93,21 +93,21 @@ trait CaptureRef extends TypeProxy, ValueType: final def invalidateCaches() = myCaptureSetRunId = NoRunId + /** x subsumes x + * this subsumes this.f + * x subsumes y ==> x* subsumes y, x subsumes y? + * x subsumes y ==> x* subsumes y*, x? subsumes y? 
+ * x: x1.type /\ x1 subsumes y ==> x subsumes y + * TODO: Document path cases + */ final def subsumes(y: CaptureRef)(using Context): Boolean = - val was = subsumesOld(y) - val now = subsumesNew(y) - if was != now then - println(i"diff for $this subsumes $y, now: $now, ${this.getClass}, ${y.getClass}") - was - - final def subsumesOld(y: CaptureRef)(using Context): Boolean = (this eq y) || this.isRootCapability || y.match case y: TermRef => y.prefix.match case ypre: CaptureRef => - this.subsumesOld(ypre) + this.subsumes(ypre) || this.match case x @ TermRef(xpre: CaptureRef, _) => x.symbol == y.symbol && xpre =:= ypre @@ -115,78 +115,20 @@ trait CaptureRef extends TypeProxy, ValueType: false case _ => false || y.info.match - case y1: SingletonCaptureRef => this.subsumesOld(y1) + case y1: SingletonCaptureRef => this.subsumes(y1) case _ => false - case MaybeCapability(y1) => this.stripMaybe.subsumesOld(y1) + case MaybeCapability(y1) => this.stripMaybe.subsumes(y1) case _ => false || this.match - case ReachCapability(x1) => x1.subsumesOld(y.stripReach) + case ReachCapability(x1) => x1.subsumes(y.stripReach) case x: TermRef => x.info match - case x1: SingletonCaptureRef => x1.subsumesOld(y) + case x1: SingletonCaptureRef => x1.subsumes(y) case _ => false case x: TermParamRef => subsumesExistentially(x, y) case x: TypeRef => assumedContainsOf(x).contains(y) case _ => false - /** x subsumes x - * this subsumes this.f - * x subsumes y ==> x* subsumes y, x subsumes y? - * x subsumes y ==> x* subsumes y*, x? subsumes y? - * x: x1.type /\ x1 subsumes y ==> x subsumes y - */ - final def subsumesNew(y: CaptureRef)(using Context): Boolean = - def compareCaptureRefs(x: Type, y: Type): Boolean = - (x eq y) - || y.match - case y: CaptureRef => x.match - case x: CaptureRef => x.subsumesNew(y) - case _ => false - case _ => false - - def compareUndelying(x: Type): Boolean = x match - case x: SingletonCaptureRef => x.subsumesNew(y) - case x: AndType => compareUndelying(x.tp1) || compareUndelying(x.tp2) - case x: OrType => compareUndelying(x.tp1) && compareUndelying(x.tp2) - case _ => false - - if (this eq y) || this.isRootCapability then return true - - // similar to compareNamed in TypeComparer - y match - case y: TermRef => - this match - case x: TermRef => - val xSym = x.symbol - val ySym = y.symbol - - // check x.f and y.f - if (xSym ne NoSymbol) - && (xSym eq ySym) - && compareCaptureRefs(x.prefix, y.prefix) - || (x.name eq y.name) - && x.isPrefixDependentMemberRef - && compareCaptureRefs(x.prefix, y.prefix) - && x.signature == y.signature - && !(xSym.isClass && ySym.isClass) - then return true - case _ => - - // shorten - if compareCaptureRefs(this, y.prefix) then return true - // underlying - if compareCaptureRefs(this, y.info) then return true - case MaybeCapability(y1) => return this.stripMaybe.subsumesNew(y1) - case _ => - - return this.match - case ReachCapability(x1) => x1.subsumesNew(y.stripReach) - case x: TermRef => compareUndelying(x.info) - case CapturingType(x1, _) => compareUndelying(x1) - case x: TermParamRef => subsumesExistentially(x, y) - case x: TypeRef => assumedContainsOf(x).contains(y) - case _ => false - def assumedContainsOf(x: TypeRef)(using Context): SimpleIdentitySet[CaptureRef] = CaptureSet.assumedContains.getOrElse(x, SimpleIdentitySet.empty) From 54c02426699b8095165f3f3df0bfa6dfa0375b1f Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 25 Sep 2024 11:57:39 +0200 Subject: [PATCH 702/827] Add logic to mark paths as used If we refer to a path `a.b`, we should mark `a.b` as used, 
which is better than marking `a`. --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 27 +++++++++++++------ .../dotty/tools/dotc/transform/Recheck.scala | 15 +++++------ .../dotty/tools/dotc/typer/ProtoTypes.scala | 6 +++-- tests/pos-custom-args/captures/path-use.scala | 19 +++++++++++++ 4 files changed, 49 insertions(+), 18 deletions(-) create mode 100644 tests/pos-custom-args/captures/path-use.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index b3a1ab44d6cd..05bcecf86067 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -13,7 +13,7 @@ import Trees.* import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPairsChecker} import typer.Checking.{checkBounds, checkAppliedTypesIn} import typer.ErrorReporting.{Addenda, NothingToAdd, err} -import typer.ProtoTypes.{AnySelectionProto, LhsProto} +import typer.ProtoTypes.{LhsProto, WildcardSelectionProto} import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.{Recheck, PreRecheck, CapturedVars} import Recheck.* @@ -183,6 +183,9 @@ object CheckCaptures: /** Attachment key for bodies of closures, provided they are values */ val ClosureBodyValue = Property.Key[Unit] + /** A prototype that indicates selection with an immutable value */ + class PathSelectionProto(val sym: Symbol, val pt: Type)(using Context) extends WildcardSelectionProto + class CheckCaptures extends Recheck, SymTransformer: thisPhase => @@ -357,12 +360,13 @@ class CheckCaptures extends Recheck, SymTransformer: * the environment in which `sym` is defined. */ def markFree(sym: Symbol, pos: SrcPos)(using Context): Unit = - if sym.exists then - val ref = sym.termRef - if ref.isTracked then - forallOuterEnvsUpTo(sym.enclosure): env => - capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") - checkElem(ref, env.captured, pos, provenance(env)) + markFree(sym, sym.termRef, pos) + + def markFree(sym: Symbol, ref: TermRef, pos: SrcPos)(using Context): Unit = + if sym.exists && ref.isTracked then + forallOuterEnvsUpTo(sym.enclosure): env => + capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") + checkElem(ref, env.captured, pos, provenance(env)) /** Make sure (projected) `cs` is a subset of the capture sets of all enclosing * environments. At each stage, only include references from `cs` that are outside @@ -464,9 +468,16 @@ class CheckCaptures extends Recheck, SymTransformer: includeCallCaptures(tree.symbol, tree.srcPos) else //debugShowEnvs() - markFree(tree.symbol, tree.srcPos) + def addSelects(ref: TermRef, pt: Type): TermRef = pt match + case pt: PathSelectionProto => addSelects(ref.select(pt.sym).asInstanceOf[TermRef], pt.pt) + case _ => ref + markFree(tree.symbol, addSelects(tree.symbol.termRef, pt), tree.srcPos) super.recheckIdent(tree, pt) + override def selectionProto(tree: Select, pt: Type)(using Context): Type = + if !tree.symbol.isOneOf(UnstableValueFlags) then PathSelectionProto(tree.symbol, pt) + else super.selectionProto(tree, pt) + /** A specialized implementation of the selection rule. 
* * E |- f: T{ m: R^Cr }^{f} diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 7520767c918c..8df9e5966920 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -12,7 +12,7 @@ import DenotTransformers.{DenotTransformer, IdentityDenotTransformer, SymTransfo import NamerOps.linkConstructorParams import NullOpsDecorator.stripNull import typer.ErrorReporting.err -import typer.ProtoTypes.* +import typer.ProtoTypes.{AnySelectionProto, LhsProto} import typer.TypeAssigner.seqLitType import typer.ConstFold import typer.ErrorReporting.{Addenda, NothingToAdd} @@ -206,13 +206,12 @@ abstract class Recheck extends Phase, SymTransformer: tree.tpe def recheckSelect(tree: Select, pt: Type)(using Context): Type = - recheckSelection(tree, recheckSelectQualifier(tree), tree.name, pt) + recheckSelection(tree, + recheck(tree.qualifier, selectionProto(tree, pt)).widenIfUnstable, + tree.name, pt) - def recheckSelectQualifier(tree: Select)(using Context): Type = - val proto = - if tree.symbol == defn.Any_asInstanceOf then WildcardType - else AnySelectionProto - recheck(tree.qualifier, proto).widenIfUnstable + def selectionProto(tree: Select, pt: Type)(using Context): Type = + if tree.symbol == defn.Any_asInstanceOf then WildcardType else AnySelectionProto def recheckSelection(tree: Select, qualType: Type, name: Name, sharpen: Denotation => Denotation)(using Context): Type = @@ -311,7 +310,7 @@ abstract class Recheck extends Phase, SymTransformer: def recheckApply(tree: Apply, pt: Type)(using Context): Type = val (funtpe0, qualType) = tree.fun match case fun: Select => - val qualType = recheckSelectQualifier(fun) + val qualType = recheck(fun.qualifier, selectionProto(fun, WildcardType)).widenIfUnstable (recheckSelection(fun, qualType, fun.name, WildcardType), qualType) case _ => (recheck(tree.fun), NoType) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index a69a63d1ceef..53e0b456ed9a 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -324,6 +324,8 @@ object ProtoTypes { case tp: UnapplyFunProto => new UnapplySelectionProto(name, nameSpan) case tp => SelectionProto(name, IgnoredProto(tp), typer, privateOK = true, nameSpan) + class WildcardSelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) + /** A prototype for expressions [] that are in some unspecified selection operation * * [].?: ? @@ -332,9 +334,9 @@ object ProtoTypes { * operation is further selection. In this case, the expression need not be a value. 
* @see checkValue */ - @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) + @sharable object AnySelectionProto extends WildcardSelectionProto - @sharable object SingletonTypeProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) + @sharable object SingletonTypeProto extends WildcardSelectionProto /** A prototype for selections in pattern constructors */ class UnapplySelectionProto(name: Name, nameSpan: Span) extends SelectionProto(name, WildcardType, NoViewsAllowed, true, nameSpan) diff --git a/tests/pos-custom-args/captures/path-use.scala b/tests/pos-custom-args/captures/path-use.scala new file mode 100644 index 000000000000..5eb2b60fd218 --- /dev/null +++ b/tests/pos-custom-args/captures/path-use.scala @@ -0,0 +1,19 @@ +import language.experimental.namedTuples + +class IO + +class C(val f: IO^): + val procs: List[Proc] = ??? + +type Proc = () => Unit + +def test(io: IO^) = + val c = C(io) + val f = () => println(c.f) + val _: () ->{c.f} Unit = f + + val x = c.procs + val _: List[() ->{c.procs*} Unit] = x + + val g = () => println(c.procs.head) + val _: () ->{c.procs*} Unit = g From 3224b2108832f25487e51a51a79c83645b02a8ec Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 25 Sep 2024 21:01:05 +0200 Subject: [PATCH 703/827] Tweaks to path checking and massage tests Needed to make stdlib2-cc go through. There were two errors. One in LayListIterable required a type annotation and a tweak to markFree. The other in Vieew.scala required a cast, but this could be fixed with better handling of pattern matching. path-patmat-should-be-pos.scala is a minimization. --- .../src/dotty/tools/dotc/cc/CaptureRef.scala | 27 ++++++++++++------- .../dotty/tools/dotc/cc/CheckCaptures.scala | 25 ++++++++++++----- .../src/scala/collection/View.scala | 5 +++- .../immutable/LazyListIterable.scala | 4 ++- .../captures/path-patmat-should-be-pos.scala | 26 ++++++++++++++++++ 5 files changed, 69 insertions(+), 18 deletions(-) create mode 100644 tests/neg-custom-args/captures/path-patmat-should-be-pos.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala index 195f07f778eb..bbaf0c7d2fa0 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -101,6 +101,19 @@ trait CaptureRef extends TypeProxy, ValueType: * TODO: Document path cases */ final def subsumes(y: CaptureRef)(using Context): Boolean = + + def subsumingRefs(x: Type, y: Type): Boolean = x match + case x: CaptureRef => y match + case y: CaptureRef => x.subsumes(y) + case _ => false + case _ => false + + def viaInfo(info: Type)(test: Type => Boolean): Boolean = info.match + case info: SingletonCaptureRef => test(info) + case info: AndType => test(info.tp1) || test(info.tp2) + case info: OrType => test(info.tp1) && test(info.tp2) + case _ => false + (this eq y) || this.isRootCapability || y.match @@ -109,25 +122,21 @@ trait CaptureRef extends TypeProxy, ValueType: case ypre: CaptureRef => this.subsumes(ypre) || this.match - case x @ TermRef(xpre: CaptureRef, _) => - x.symbol == y.symbol && xpre =:= ypre + case x @ TermRef(xpre: CaptureRef, _) if x.symbol == y.symbol => + subsumingRefs(xpre, ypre) && subsumingRefs(ypre, xpre) case _ => false case _ => false - || y.info.match - case y1: SingletonCaptureRef => this.subsumes(y1) - case _ => false + || viaInfo(y.info)(subsumingRefs(this, _)) case MaybeCapability(y1) => 
this.stripMaybe.subsumes(y1) case _ => false || this.match case ReachCapability(x1) => x1.subsumes(y.stripReach) - case x: TermRef => - x.info match - case x1: SingletonCaptureRef => x1.subsumes(y) - case _ => false + case x: TermRef => viaInfo(x.info)(subsumingRefs(_, y)) case x: TermParamRef => subsumesExistentially(x, y) case x: TypeRef => assumedContainsOf(x).contains(y) case _ => false + end subsumes def assumedContainsOf(x: TypeRef)(using Context): SimpleIdentitySet[CaptureRef] = CaptureSet.assumedContains.getOrElse(x, SimpleIdentitySet.empty) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 05bcecf86067..19acebde8651 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -466,16 +466,24 @@ class CheckCaptures extends Recheck, SymTransformer: if tree.symbol.info.isParameterless then // there won't be an apply; need to include call captures now includeCallCaptures(tree.symbol, tree.srcPos) - else + else if !tree.symbol.isStatic then //debugShowEnvs() def addSelects(ref: TermRef, pt: Type): TermRef = pt match - case pt: PathSelectionProto => addSelects(ref.select(pt.sym).asInstanceOf[TermRef], pt.pt) + case pt: PathSelectionProto if ref.isTracked => + // if `ref` is not tracked then the selection could not give anything new + // class SerializationProxy in stdlib-cc/../LazyListIterable.scala has an example where this matters. + addSelects(ref.select(pt.sym).asInstanceOf[TermRef], pt.pt) case _ => ref - markFree(tree.symbol, addSelects(tree.symbol.termRef, pt), tree.srcPos) + val ref = tree.symbol.termRef + val pathRef = addSelects(ref, pt) + //if pathRef ne ref then + // println(i"add selects $ref --> $pathRef") + markFree(tree.symbol, if false then ref else pathRef, tree.srcPos) super.recheckIdent(tree, pt) override def selectionProto(tree: Select, pt: Type)(using Context): Type = - if !tree.symbol.isOneOf(UnstableValueFlags) then PathSelectionProto(tree.symbol, pt) + val sym = tree.symbol + if !sym.isOneOf(UnstableValueFlags) && !sym.isStatic then PathSelectionProto(sym, pt) else super.selectionProto(tree, pt) /** A specialized implementation of the selection rule. 
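[Editorial aside, not part of the recorded patch: a condensed form of the `path-use.scala` test added by the previous commit, illustrating what the `PathSelectionProto` handling in the hunk above provides. It assumes capture checking is enabled, as in that test.]
class IO
class C(val f: IO^)

def test(io: IO^) =
  val c = C(io)
  val g = () => println(c.f)
  // the closure is charged with the path `c.f` rather than with all of `c`
  val _: () ->{c.f} Unit = g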
@@ -1141,11 +1149,14 @@ class CheckCaptures extends Recheck, SymTransformer: (erefs /: erefs.elems): (erefs, eref) => eref match case eref: ThisType if isPureContext(ctx.owner, eref.cls) => - erefs ++ arefs.filter { - case aref: TermRef => eref.cls.isProperlyContainedIn(aref.symbol.owner) + def isOuterRef(aref: Type): Boolean = aref match + case aref: TermRef => + val owner = aref.symbol.owner + if owner.isClass then isOuterRef(aref.prefix) + else eref.cls.isProperlyContainedIn(owner) case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) case _ => false - } + erefs ++ arefs.filter(isOuterRef) case _ => erefs diff --git a/scala2-library-cc/src/scala/collection/View.scala b/scala2-library-cc/src/scala/collection/View.scala index 31c544a46beb..132934dbe3bd 100644 --- a/scala2-library-cc/src/scala/collection/View.scala +++ b/scala2-library-cc/src/scala/collection/View.scala @@ -150,7 +150,10 @@ object View extends IterableFactory[View] { object Filter { def apply[A](underlying: Iterable[A]^, p: A => Boolean, isFlipped: Boolean): Filter[A]^{underlying, p} = underlying match { - case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) + case filter: Filter[A] if filter.isFlipped == isFlipped => + new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) + .asInstanceOf[Filter[A]^{underlying, p}] + // !!! asInstanceOf needed once paths were added, see path-patmat-should-be-pos.scala for minimization case _ => new Filter(underlying, p, isFlipped) } } diff --git a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala index 2f7b017a6729..28ce8da104aa 100644 --- a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala +++ b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala @@ -1366,7 +1366,9 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { case SerializeEnd => initRead = true case a => init += a.asInstanceOf[A] } - val tail = in.readObject().asInstanceOf[LazyListIterable[A]] + val tail: LazyListIterable[A] = in.readObject().asInstanceOf[LazyListIterable[A]] + // Explicit type annotation needed so that tail.state below is dropped from capture set. + // Before paths were added, it was tail that was added, and the `asSeenFrom` to a pure type made it work. // scala/scala#10118: caution that no code path can evaluate `tail.state` // before the resulting LazyListIterable is returned val it = init.toList.iterator diff --git a/tests/neg-custom-args/captures/path-patmat-should-be-pos.scala b/tests/neg-custom-args/captures/path-patmat-should-be-pos.scala new file mode 100644 index 000000000000..aca6102204a3 --- /dev/null +++ b/tests/neg-custom-args/captures/path-patmat-should-be-pos.scala @@ -0,0 +1,26 @@ +class It[A] + +class Filter[A](val underlying: It[A]^, val p: A => Boolean) extends It[A] +object Filter: + def apply[A](underlying: It[A]^, p: A => Boolean): Filter[A]^{underlying, p} = + underlying match + case filter: Filter[A]^ => + val x = new Filter(filter.underlying, a => filter.p(a) && p(a)) + x: Filter[A]^{underlying, p} // error + // !!! should work, it seems to be the case that the system does not recognize that + // underlying and filter are aliases. + + // On the other hand, the following works: + locally: + val filter: underlying.type & Filter[A] = ??? + val a: It[A]^{filter.underlying} = ??? 
+ val b: It[A]^{underlying} = a + val x = new Filter(filter.underlying, a => filter.p(a) && p(a)) + x: Filter[A]^{underlying, p} + + locally: + val filter: underlying.type & Filter[A]^ = ??? + val a: It[A]^{filter.underlying} = ??? + val b: It[A]^{underlying} = a + val x = new Filter(filter.underlying, a => filter.p(a) && p(a)) + x: Filter[A]^{underlying, p} From a0c7361669cc60228b097cbbed2bfbdd483f9c05 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 9 Oct 2024 15:33:11 +0200 Subject: [PATCH 704/827] Update comments for parser; remove unnecessary checks --- compiler/src/dotty/tools/dotc/ast/untpd.scala | 2 +- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 3 +-- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 12 +++++------- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 4684464d477f..e8e3646bd087 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -524,7 +524,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def makeRetaining(parent: Tree, refs: List[Tree], annotName: TypeName)(using Context): Annotated = Annotated(parent, New(scalaAnnotationDot(annotName), List(refs))) - def makeCapsOf(tp: Tree)(using Context): Tree = + def makeCapsOf(tp: RefTree)(using Context): Tree = TypeApply(Select(scalaDot(nme.caps), nme.capsOf), tp :: Nil) def makeCapsBound()(using Context): Tree = diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 79cc7d136e45..aad6ca8ddeac 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -194,8 +194,7 @@ extension (tp: Type) true case tp: TermRef => ((tp.prefix eq NoPrefix) - || tp.symbol.isField && !tp.symbol.isStatic && ( - tp.prefix.isThisTypeOf(tp.symbol.owner) || tp.prefix.isTrackableRef) + || tp.symbol.isField && !tp.symbol.isStatic && tp.prefix.isTrackableRef || tp.isRootCapability ) && !tp.symbol.isOneOf(UnstableValueFlags) case tp: TypeRef => diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index dc3ae4cf7639..8eb2a7a1e045 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1559,22 +1559,20 @@ object Parsers { case _ => None } - /** CaptureRef ::= (ident | `this`) [`*` | `^`] + /** CaptureRef ::= SimpleRef { `.` id } [`*` | `^`] */ def captureRef(): Tree = - val ref = singleton() + val ref = dotSelectors(simpleRef()) if isIdent(nme.raw.STAR) then in.nextToken() atSpan(startOffset(ref)): PostfixOp(ref, Ident(nme.CC_REACH)) else if isIdent(nme.UPARROW) then in.nextToken() - def toTypeSel(r: Tree): Tree = r match - case id: Ident => cpy.Ident(id)(id.name.toTypeName) - case Select(qual, id) => Select(qual, id.toTypeName) - case _ => r atSpan(startOffset(ref)): - makeCapsOf(toTypeSel(ref)) + convertToTypeId(ref) match + case ref: RefTree => makeCapsOf(ref) + case ref => ref else ref /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking From d930adf334e1f86ead4b31d212544155a8c2587f Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 9 Oct 2024 17:00:48 +0200 Subject: [PATCH 705/827] More robust check for subsumes --- compiler/src/dotty/tools/dotc/cc/CaptureRef.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala index bbaf0c7d2fa0..61f18008cbad 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -110,8 +110,8 @@ trait CaptureRef extends TypeProxy, ValueType: def viaInfo(info: Type)(test: Type => Boolean): Boolean = info.match case info: SingletonCaptureRef => test(info) - case info: AndType => test(info.tp1) || test(info.tp2) - case info: OrType => test(info.tp1) && test(info.tp2) + case info: AndType => viaInfo(info.tp1)(test) || viaInfo(info.tp2)(test) + case info: OrType => viaInfo(info.tp1)(test) && viaInfo(info.tp2)(test) case _ => false (this eq y) @@ -123,7 +123,7 @@ trait CaptureRef extends TypeProxy, ValueType: this.subsumes(ypre) || this.match case x @ TermRef(xpre: CaptureRef, _) if x.symbol == y.symbol => - subsumingRefs(xpre, ypre) && subsumingRefs(ypre, xpre) + withMode(Mode.IgnoreCaptures) {TypeComparer.isSameRef(xpre, ypre)} case _ => false case _ => false From 5bc20acfe93fb86d0d9557bcfa2166a01c1bb75e Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 9 Oct 2024 18:29:38 +0200 Subject: [PATCH 706/827] Update syntax comment --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 8eb2a7a1e045..aa62b664ba7c 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1559,7 +1559,8 @@ object Parsers { case _ => None } - /** CaptureRef ::= SimpleRef { `.` id } [`*` | `^`] + /** CaptureRef ::= { SimpleRef `.` } SimpleRef [`*`] + * | [ { SimpleRef `.` } SimpleRef `.` ] id `^` */ def captureRef(): Tree = val ref = dotSelectors(simpleRef()) From 1b0634a2b8d56e2693ef378f326f250bcab71d1b Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 27 Sep 2024 19:04:45 +0200 Subject: [PATCH 707/827] Account for added outer refs in the capture sets of classes --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 45 ++++++++++++++----- .../captures/outerRefsUses.scala | 10 +++++ 2 files changed, 45 insertions(+), 10 deletions(-) create mode 100644 tests/neg-custom-args/captures/outerRefsUses.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 19acebde8651..4d905a5df4ab 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1089,7 +1089,7 @@ class CheckCaptures extends Recheck, SymTransformer: if actualBoxed eq actual then // Only `addOuterRefs` when there is no box adaptation - expected1 = addOuterRefs(expected1, actual) + expected1 = addOuterRefs(expected1, actual, tree.srcPos) if isCompatible(actualBoxed, expected1) then if debugSuccesses then tree match case Ident(_) => @@ -1130,8 +1130,12 @@ class CheckCaptures extends Recheck, SymTransformer: * that are outside `Cls`. These are all accessed through `Cls.this`, * so we can assume they are already accounted for by `Ce` and adding * them explicitly to `Ce` changes nothing. + * - To make up for this, we also add these variables to the capture set of `Cls`, + * so that all instances of `Cls` will capture these outer references. + * So in a sense we use `{Cls.this}` as a placeholder for certain outer captures. + * that we needed to be subsumed by `Cls.this`. 
*/ - private def addOuterRefs(expected: Type, actual: Type)(using Context): Type = + private def addOuterRefs(expected: Type, actual: Type, pos: SrcPos)(using Context): Type = def isPure(info: Type): Boolean = info match case info: PolyType => isPure(info.resType) @@ -1144,19 +1148,40 @@ class CheckCaptures extends Recheck, SymTransformer: else isPure(owner.info) && isPureContext(owner.owner, limit) // Augment expeced capture set `erefs` by all references in actual capture - // set `arefs` that are outside some `this.type` reference in `erefs` + // set `arefs` that are outside some `C.this.type` reference in `erefs` for an enclosing + // class `C`. If an added reference is not a ThisType itself, add it to the capture set + // (i.e. use set) of the `C`. This makes sure that any outer reference implicitly subsumed + // by `C.this` becomes a capture reference of every instance of `C`. def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = (erefs /: erefs.elems): (erefs, eref) => eref match case eref: ThisType if isPureContext(ctx.owner, eref.cls) => - def isOuterRef(aref: Type): Boolean = aref match - case aref: TermRef => - val owner = aref.symbol.owner - if owner.isClass then isOuterRef(aref.prefix) - else eref.cls.isProperlyContainedIn(owner) + + def pathRoot(aref: Type): Type = aref match + case aref: NamedType if aref.symbol.owner.isClass => pathRoot(aref.prefix) + case _ => aref + + def isOuterRef(aref: Type): Boolean = pathRoot(aref) match + case aref: NamedType => eref.cls.isProperlyContainedIn(aref.symbol.owner) case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) case _ => false - erefs ++ arefs.filter(isOuterRef) + + val outerRefs = arefs.filter(isOuterRef) + + // Include implicitly added outer references in the capture set of the class of `eref`. + for outerRef <- outerRefs.elems do + if !erefs.elems.contains(outerRef) + && !pathRoot(outerRef).isInstanceOf[ThisType] + // we don't need to add outer ThisTypes as these are anyway added as path + // prefixes at the use site. And this exemption is required since capture sets + // of non-local classes are always empty, so we can't add an outer this to them. + then + def provenance = + i""" of the enclosing class ${eref.cls}. 
+ |The reference was included since we tried to establish that $arefs <: $erefs""" + checkElem(outerRef, capturedVars(eref.cls), pos, provenance) + + erefs ++ outerRefs case _ => erefs @@ -1341,7 +1366,7 @@ class CheckCaptures extends Recheck, SymTransformer: * @param sym symbol of the field definition that is being checked */ override def checkSubType(actual: Type, expected: Type)(using Context): Boolean = - val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) + val expected1 = alignDependentFunction(addOuterRefs(expected, actual, srcPos), actual.stripCapturing) val actual1 = val saved = curEnv try diff --git a/tests/neg-custom-args/captures/outerRefsUses.scala b/tests/neg-custom-args/captures/outerRefsUses.scala new file mode 100644 index 000000000000..cd03c8c41efd --- /dev/null +++ b/tests/neg-custom-args/captures/outerRefsUses.scala @@ -0,0 +1,10 @@ +class IO +def test(io: IO^) = + class C: + def foo() = () => + val x: IO^{this} = io + () + val c = new C + val _: C^{io} = c // ok + val _: C = c // error + () From 4d79459b6cf4b503e81869786ae6024afda5eb3e Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 28 Oct 2024 10:45:08 +0000 Subject: [PATCH 708/827] Fix use of class terms in match analysis When instantiating a subclass Outer.this.Bar, such that it's a subtype of Test.this.outer.Foo[X], make sure to infer the term `outer` (even if it's not a parameter). Also make sure to use those singletons when approximating the parent (to fix Outer.this.Qux instantiating). --- .../src/dotty/tools/dotc/core/TypeOps.scala | 131 +++++++++--------- .../tools/dotc/printing/RefinedPrinter.scala | 1 + tests/warn/i21845.orig.scala | 33 +++++ tests/warn/i21845.scala | 15 ++ 4 files changed, 118 insertions(+), 62 deletions(-) create mode 100644 tests/warn/i21845.orig.scala create mode 100644 tests/warn/i21845.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index bfda613d0586..fac5d262d426 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -767,6 +767,67 @@ object TypeOps: * Otherwise, return NoType. */ private def instantiateToSubType(tp1: NamedType, tp2: Type, mixins: List[Type])(using Context): Type = trace(i"instantiateToSubType($tp1, $tp2, $mixins)", typr) { + /** Gather GADT symbols and singletons found in `tp2`, ie. the scrutinee. 
*/ + object TraverseTp2 extends TypeTraverser: + val singletons = util.HashMap[Symbol, SingletonType]() + val gadtSyms = new mutable.ListBuffer[Symbol] + + def traverse(tp: Type) = try + val tpd = tp.dealias + if tpd ne tp then traverse(tpd) + else tp match + case tp: ThisType if !singletons.contains(tp.tref.symbol) && !tp.tref.symbol.isStaticOwner => + singletons(tp.tref.symbol) = tp + traverseChildren(tp.tref) + case tp: TermRef => + singletons(tp.typeSymbol) = tp + traverseChildren(tp) + case tp: TypeRef if !gadtSyms.contains(tp.symbol) && tp.symbol.isAbstractOrParamType => + gadtSyms += tp.symbol + traverseChildren(tp) + // traverse abstract type infos, to add any singletons + // for example, i16451.CanForward.scala, add `Namer.this`, from the info of the type parameter `A1` + // also, i19031.ci-reg2.scala, add `out`, from the info of the type parameter `A1` (from synthetic applyOrElse) + traverseChildren(tp.info) + case _ => + traverseChildren(tp) + catch case ex: Throwable => handleRecursive("traverseTp2", tp.show, ex) + TraverseTp2.traverse(tp2) + val singletons = TraverseTp2.singletons + val gadtSyms = TraverseTp2.gadtSyms.toList + + // Prefix inference, given `p.C.this.Child`: + // 1. return it as is, if `C.this` is found in `tp`, i.e. the scrutinee; or + // 2. replace it with `X.Child` where `X <: p.C`, stripping ThisType in `p` recursively. + // + // See tests/patmat/i3938.scala, tests/pos/i15029.more.scala, tests/pos/i16785.scala + class InferPrefixMap extends TypeMap { + var prefixTVar: Type | Null = null + def apply(tp: Type): Type = tp match { + case tp: TermRef if singletons.contains(tp.symbol) => + prefixTVar = singletons(tp.symbol) // e.g. tests/pos/i19031.ci-reg2.scala, keep out + prefixTVar.uncheckedNN + case ThisType(tref) if !tref.symbol.isStaticOwner => + val symbol = tref.symbol + if singletons.contains(symbol) then + prefixTVar = singletons(symbol) // e.g. tests/pos/i16785.scala, keep Outer.this + prefixTVar.uncheckedNN + else if symbol.is(Module) then + TermRef(this(tref.prefix), symbol.sourceModule) + else if (prefixTVar != null) + this(tref.applyIfParameterized(tref.typeParams.map(_ => WildcardType))) + else { + prefixTVar = WildcardType // prevent recursive call from assigning it + // e.g. tests/pos/i15029.more.scala, create a TypeVar for `Instances`' B, so we can disregard `Ints` + val tvars = tref.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } + val tref2 = this(tref.applyIfParameterized(tvars)) + prefixTVar = newTypeVar(TypeBounds.upper(tref2), DepParamName.fresh(tref.name)) + prefixTVar.uncheckedNN + } + case tp => mapOver(tp) + } + } + // In order for a child type S to qualify as a valid subtype of the parent // T, we need to test whether it is possible S <: T. // @@ -788,8 +849,15 @@ object TypeOps: // then to avoid it failing the <:< // we'll approximate by widening to its bounds + case tp: TermRef if singletons.contains(tp.symbol) => + singletons(tp.symbol) + case ThisType(tref: TypeRef) if !tref.symbol.isStaticOwner => - tref + val symbol = tref.symbol + if singletons.contains(symbol) then + singletons(symbol) + else + tref case tp: TypeRef if !tp.symbol.isClass => val lookup = boundTypeParams.lookup(tp) @@ -840,67 +908,6 @@ object TypeOps: } } - /** Gather GADT symbols and singletons found in `tp2`, ie. the scrutinee. 
*/ - object TraverseTp2 extends TypeTraverser: - val singletons = util.HashMap[Symbol, SingletonType]() - val gadtSyms = new mutable.ListBuffer[Symbol] - - def traverse(tp: Type) = try - val tpd = tp.dealias - if tpd ne tp then traverse(tpd) - else tp match - case tp: ThisType if !singletons.contains(tp.tref.symbol) && !tp.tref.symbol.isStaticOwner => - singletons(tp.tref.symbol) = tp - traverseChildren(tp.tref) - case tp: TermRef if tp.symbol.is(Param) => - singletons(tp.typeSymbol) = tp - traverseChildren(tp) - case tp: TypeRef if !gadtSyms.contains(tp.symbol) && tp.symbol.isAbstractOrParamType => - gadtSyms += tp.symbol - traverseChildren(tp) - // traverse abstract type infos, to add any singletons - // for example, i16451.CanForward.scala, add `Namer.this`, from the info of the type parameter `A1` - // also, i19031.ci-reg2.scala, add `out`, from the info of the type parameter `A1` (from synthetic applyOrElse) - traverseChildren(tp.info) - case _ => - traverseChildren(tp) - catch case ex: Throwable => handleRecursive("traverseTp2", tp.show, ex) - TraverseTp2.traverse(tp2) - val singletons = TraverseTp2.singletons - val gadtSyms = TraverseTp2.gadtSyms.toList - - // Prefix inference, given `p.C.this.Child`: - // 1. return it as is, if `C.this` is found in `tp`, i.e. the scrutinee; or - // 2. replace it with `X.Child` where `X <: p.C`, stripping ThisType in `p` recursively. - // - // See tests/patmat/i3938.scala, tests/pos/i15029.more.scala, tests/pos/i16785.scala - class InferPrefixMap extends TypeMap { - var prefixTVar: Type | Null = null - def apply(tp: Type): Type = tp match { - case tp: TermRef if singletons.contains(tp.symbol) => - prefixTVar = singletons(tp.symbol) // e.g. tests/pos/i19031.ci-reg2.scala, keep out - prefixTVar.uncheckedNN - case ThisType(tref) if !tref.symbol.isStaticOwner => - val symbol = tref.symbol - if singletons.contains(symbol) then - prefixTVar = singletons(symbol) // e.g. tests/pos/i16785.scala, keep Outer.this - prefixTVar.uncheckedNN - else if symbol.is(Module) then - TermRef(this(tref.prefix), symbol.sourceModule) - else if (prefixTVar != null) - this(tref.applyIfParameterized(tref.typeParams.map(_ => WildcardType))) - else { - prefixTVar = WildcardType // prevent recursive call from assigning it - // e.g. 
tests/pos/i15029.more.scala, create a TypeVar for `Instances`' B, so we can disregard `Ints` - val tvars = tref.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } - val tref2 = this(tref.applyIfParameterized(tvars)) - prefixTVar = newTypeVar(TypeBounds.upper(tref2), DepParamName.fresh(tref.name)) - prefixTVar.uncheckedNN - } - case tp => mapOver(tp) - } - } - val inferThisMap = new InferPrefixMap val tvars = tp1.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } val protoTp1 = inferThisMap.apply(tp1).appliedTo(tvars) diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index b229c7ec29d9..f22abbd2efcb 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -1121,6 +1121,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { recur(fn) ~ "(" ~ toTextGlobal(explicitArgs, ", ") ~ ")" case TypeApply(fn, args) => recur(fn) ~ "[" ~ toTextGlobal(args, ", ") ~ "]" case Select(qual, nme.CONSTRUCTOR) => recur(qual) + case id @ Ident(tpnme.BOUNDTYPE_ANNOT) => "@" ~ toText(id.symbol.name) case New(tpt) => recur(tpt) case _ => val annotSym = sym.orElse(tree.symbol.enclosingClass) diff --git a/tests/warn/i21845.orig.scala b/tests/warn/i21845.orig.scala new file mode 100644 index 000000000000..a8e0893ea5ce --- /dev/null +++ b/tests/warn/i21845.orig.scala @@ -0,0 +1,33 @@ +trait Init[ScopeType]: + sealed trait Initialize[A1] + final class Bind[S, A1](val f: S => Initialize[A1], val in: Initialize[S]) + extends Initialize[A1] + final class Value[A1](val value: () => A1) extends Initialize[A1] + final class ValidationCapture[A1](val key: ScopedKey[A1], val selfRefOk: Boolean) + extends Initialize[ScopedKey[A1]] + final class TransformCapture(val f: [x] => Initialize[x] => Initialize[x]) + extends Initialize[[x] => Initialize[x] => Initialize[x]] + final class Optional[S, A1](val a: Option[Initialize[S]], val f: Option[S] => A1) + extends Initialize[A1] + object StaticScopes extends Initialize[Set[ScopeType]] + + sealed trait Keyed[S, A1] extends Initialize[A1] + trait KeyedInitialize[A1] extends Keyed[A1, A1] + sealed case class ScopedKey[A](scope: ScopeType, key: AttributeKey[A]) extends KeyedInitialize[A] + sealed trait AttributeKey[A] + +abstract class EvaluateSettings[ScopeType]: + protected val init: Init[ScopeType] + import init._ + + val transform: [A] => Initialize[A] => Unit = [A] => + (fa: Initialize[A]) => + fa match + case k: Keyed[s, A] => ??? + case b: Bind[s, A] => ??? + case v: Value[A] => ??? + case v: ValidationCapture[a] => ??? // unrearchable warning + case t: TransformCapture => ??? // unrearchable warning + case o: Optional[s, A] => ??? // unrearchable warning + case StaticScopes => ??? // unrearchable warning + diff --git a/tests/warn/i21845.scala b/tests/warn/i21845.scala new file mode 100644 index 000000000000..58590c74e1d4 --- /dev/null +++ b/tests/warn/i21845.scala @@ -0,0 +1,15 @@ +trait Outer[O1]: + sealed trait Foo[A1] + final class Bar[A2] extends Foo[A2] + final class Baz[A4] extends Foo[Bar[A4]] + final class Qux extends Foo[[a5] => Foo[a5] => Foo[a5]] + +trait Test[O2]: + val outer: Outer[O2] + import outer.* + + def test[X](fa: Foo[X]): Unit = + fa match // was: inexhaustive: fail on _: (Outer[] & (Test#outer : Outer[Test#O2]))#Qux + case _: Bar[X] => ??? + case _: Baz[x] => ??? 
// was: unrearchable warning + case _: Qux => ??? // was: unrearchable warning From e7221c672030375a0bd2391befc6e1fb4938693e Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 25 Oct 2024 10:48:56 +0200 Subject: [PATCH 709/827] fix: Allow `as` as an infix type in non context bound types --- .../dotty/tools/dotc/parsing/Parsers.scala | 22 +++++++++++-------- tests/pos/i21769.scala | 19 ++++++++++++++++ 2 files changed, 32 insertions(+), 9 deletions(-) create mode 100644 tests/pos/i21769.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 47391a4114cf..1a5b270f2a48 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -80,6 +80,9 @@ object Parsers { enum IntoOK: case Yes, No, Nested + enum InContextBound: + case Yes, No + type StageKind = Int object StageKind { val None = 0 @@ -1550,7 +1553,8 @@ object Parsers { /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and * returns a tree for type `Any` instead. */ - def toplevelTyp(intoOK: IntoOK = IntoOK.No): Tree = rejectWildcardType(typ(intoOK)) + def toplevelTyp(intoOK: IntoOK = IntoOK.No, inContextBound: InContextBound = InContextBound.No): Tree = + rejectWildcardType(typ(intoOK, inContextBound)) private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) @@ -1605,7 +1609,7 @@ object Parsers { * IntoTargetType ::= Type * | FunTypeArgs (‘=>’ | ‘?=>’) IntoType */ - def typ(intoOK: IntoOK = IntoOK.No): Tree = + def typ(intoOK: IntoOK = IntoOK.No, inContextBound: InContextBound = InContextBound.No): Tree = val start = in.offset var imods = Modifiers() val erasedArgs: ListBuffer[Boolean] = ListBuffer() @@ -1754,7 +1758,7 @@ object Parsers { val tuple = atSpan(start): makeTupleOrParens(args.mapConserve(convertToElem)) typeRest: - infixTypeRest: + infixTypeRest(inContextBound): refinedTypeRest: withTypeRest: annotTypeRest: @@ -1777,7 +1781,7 @@ object Parsers { else if isIntoPrefix then PrefixOp(typeIdent(), typ(IntoOK.Nested)) else - typeRest(infixType()) + typeRest(infixType(inContextBound)) end typ private def makeKindProjectorTypeDef(name: TypeName): TypeDef = { @@ -1832,13 +1836,13 @@ object Parsers { /** InfixType ::= RefinedType {id [nl] RefinedType} * | RefinedType `^` // under capture checking */ - def infixType(): Tree = infixTypeRest(refinedType()) + def infixType(inContextBound: InContextBound = InContextBound.No): Tree = infixTypeRest(inContextBound)(refinedType()) - def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = + def infixTypeRest(inContextBound: InContextBound = InContextBound.No)(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, isOperator = !followingIsVararg() && !isPureArrow - && !(isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`)) + && !(isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`) && inContextBound == InContextBound.Yes) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -2229,7 +2233,7 @@ object Parsers { /** ContextBound ::= Type [`as` id] */ def contextBound(pname: TypeName): Tree = - val t = toplevelTyp() + val t = toplevelTyp(inContextBound = InContextBound.Yes) val ownName = if isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`) then in.nextToken() @@ -4207,7 +4211,7 @@ object Parsers { else 
constrApp() match case parent: Apply => parent :: moreConstrApps() case parent if in.isIdent && newSyntaxAllowed => - infixTypeRest(parent, _ => annotType1()) :: Nil + infixTypeRest()(parent, _ => annotType1()) :: Nil case parent => parent :: moreConstrApps() // The term parameters and parent references */ diff --git a/tests/pos/i21769.scala b/tests/pos/i21769.scala new file mode 100644 index 000000000000..afb1c66c97fe --- /dev/null +++ b/tests/pos/i21769.scala @@ -0,0 +1,19 @@ + +infix trait as[From, To] + +val conv: (String as Int) = ??? +given instance: (String as Int) = ??? +def test(ev: (String as Int)) = ??? + +class F + +class K extends (F as K) + +class TC1[X] + +def doSth[X: TC1 as tc] = ??? + +class TC2[X]: + type Self = X + +def doSth2[X: {TC1 as tc1, TC2 as tc2}](x: tc2.Self) = ??? From cb9fcd88ac63eed06a16fe9f85deac2fc29b17eb Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Mon, 28 Oct 2024 11:31:55 -0400 Subject: [PATCH 710/827] Add comment --- compiler/src/dotty/tools/dotc/transform/patmat/Space.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index c7b304e6caf6..c7f7a236fcf3 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -941,6 +941,7 @@ object SpaceEngine { val msg = if nullOnly then MatchCaseOnlyNullWarning() else MatchCaseUnreachable() report.warning(msg, pat.srcPos) + // in redundancy check, take guard as false in order to soundly approximate val newPrev = if guard.isEmpty then covered :: prevs else prevs recur(rest, newPrev, Nil) From 03ee583d39b9fd0ae85f6d2477dc21b232cd2d06 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 28 Oct 2024 22:05:56 +0100 Subject: [PATCH 711/827] Make sure definition tree has the defined symbol It turns out it could have the wrong symbol referring to a same-named definition in the superclass under some recursive definition of a self type. This caused a crash in pickler in #21755 because we now have two different definitions in two different classes that have the same symbol. Fixes #21755 --- compiler/src/dotty/tools/dotc/ast/tpd.scala | 16 ++++++++++++++++ .../tools/dotc/core/tasty/TreeUnpickler.scala | 1 + compiler/src/dotty/tools/dotc/typer/Typer.scala | 9 ++++++--- tests/pos/i21755.scala | 11 +++++++++++ 4 files changed, 34 insertions(+), 3 deletions(-) create mode 100644 tests/pos/i21755.scala diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index f97baa7f7889..3777969b1076 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -11,6 +11,7 @@ import Symbols.*, StdNames.*, Annotations.*, Trees.*, Symbols.* import Decorators.*, DenotTransformers.* import collection.{immutable, mutable} import util.{Property, SourceFile} +import config.Printers.typr import NameKinds.{TempResultName, OuterSelectName} import typer.ConstFold @@ -1165,6 +1166,21 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { tree } + /** Make sure tree has given symbol. This is called when typing or unpickling + * a ValDef or DefDef. It turns out that under very rare circumstances the symbol + * computed for a tree is not correct. The only known test case is i21755.scala. 
+ * Here we have a self type that mentions a supertype as well as a type parameter + * upper-bounded by the current class and it turns out that we compute the symbol + * for a member method (named `root` in this case) in a subclass to be the + * corresponding symbol in the superclass. It is not known what are the precise + * conditions where this happens, but my guess would be that it's connected to the + * recursion in the self type. + */ + def ensureHasSym(sym: Symbol)(using Context): Unit = + if sym.exists && sym != tree.symbol then + typr.println(i"correcting definition symbol from ${tree.symbol.showLocated} to ${sym.showLocated}") + tree.overwriteType(NamedType(sym.owner.thisType, sym.asTerm.name, sym.denot)) + def etaExpandCFT(using Context): Tree = def expand(target: Tree, tp: Type)(using Context): Tree = tp match case defn.ContextFunctionType(argTypes, resType) => diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index e62db9af520a..b401de823c6c 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -999,6 +999,7 @@ class TreeUnpickler(reader: TastyReader, } } + tree.ensureHasSym(sym) tree.setDefTree } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 3810bc66841e..e2a07d3832ab 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2949,19 +2949,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val ddef2 = assignType(cpy.DefDef(ddef)(name, paramss1, tpt1, rhs1), sym) postProcessInfo(ddef2, sym) - ddef2.setDefTree - //todo: make sure dependent method types do not depend on implicits or by-name params + //todo: make sure dependent method types do not depend on implicits or by-name params } /** (1) Check that the signature of the class member does not return a repeated parameter type * (2) If info is an erased class, set erased flag of member * (3) Check that erased classes are not parameters of polymorphic functions. + * (4) Make sure the definition's symbol is `sym`. + * (5) Set the `defTree` of `sym` to be `mdef`. 
*/ - private def postProcessInfo(mdef: MemberDef, sym: Symbol)(using Context): Unit = + private def postProcessInfo(mdef: MemberDef, sym: Symbol)(using Context): MemberDef = if (!sym.isOneOf(Synthetic | InlineProxy | Param) && sym.info.finalResultType.isRepeatedParam) report.error(em"Cannot return repeated parameter type ${sym.info.finalResultType}", sym.srcPos) if !sym.is(Module) && !sym.isConstructor && sym.info.finalResultType.isErasedClass then sym.setFlag(Erased) + mdef.ensureHasSym(sym) + mdef.setDefTree def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val TypeDef(name, rhs) = tdef diff --git a/tests/pos/i21755.scala b/tests/pos/i21755.scala new file mode 100644 index 000000000000..170daed04f05 --- /dev/null +++ b/tests/pos/i21755.scala @@ -0,0 +1,11 @@ +trait GraphTraversal { + type NodeT + + protected trait Properties { + def root: NodeT + } + + abstract protected class TraverserMethods[A, +CC <: TraverserMethods[A, CC]] { this: CC with Properties => + def root: NodeT + } +} \ No newline at end of file From 39d45dc82df20306bdd32ee4ea72f494c6ab6ff1 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 29 Oct 2024 10:31:49 +0100 Subject: [PATCH 712/827] Add missing error messages to asserts in QuotesImpl --- .../scala/quoted/runtime/impl/QuotesImpl.scala | 4 ++-- tests/neg/i20946.check | 18 ++++++++++++++++++ tests/neg/i20946/Macro_1.scala | 10 ++++++++++ tests/neg/i20946/Test_2.scala | 6 ++++++ tests/neg/i20946a.check | 18 ++++++++++++++++++ tests/neg/i20946a/Macro_1.scala | 10 ++++++++++ tests/neg/i20946a/Test_2.scala | 6 ++++++ 7 files changed, 70 insertions(+), 2 deletions(-) create mode 100644 tests/neg/i20946.check create mode 100644 tests/neg/i20946/Macro_1.scala create mode 100644 tests/neg/i20946/Test_2.scala create mode 100644 tests/neg/i20946a.check create mode 100644 tests/neg/i20946a/Macro_1.scala create mode 100644 tests/neg/i20946a/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index e8524a193e5a..abda4aa191a9 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -472,7 +472,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def term(tp: TermRef): Ref = withDefaultPos(tpd.ref(tp).asInstanceOf[tpd.RefTree]) def apply(sym: Symbol): Ref = - assert(sym.isTerm) + assert(sym.isTerm, s"expected a term symbol but received $sym") val refTree = tpd.ref(sym) match case t @ tpd.This(ident) => // not a RefTree, so we need to work around this - issue #19732 // ident in `This` can be a TypeIdent of sym, so we manually prepare the ref here, @@ -1162,7 +1162,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object TypeIdent extends TypeIdentModule: def apply(sym: Symbol): TypeTree = - assert(sym.isType) + assert(sym.isType, s"Expected a type symbol, but got $sym") withDefaultPos(tpd.ref(sym).asInstanceOf[tpd.TypeTree]) def copy(original: Tree)(name: String): TypeIdent = tpd.cpy.Ident(original)(name.toTypeName) diff --git a/tests/neg/i20946.check b/tests/neg/i20946.check new file mode 100644 index 000000000000..acce8bf4852d --- /dev/null +++ b/tests/neg/i20946.check @@ -0,0 +1,18 @@ + +-- Error: tests/neg/i20946/Test_2.scala:5:29 --------------------------------------------------------------------------- +5 | macroWithAssertFailing[Int](123) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | 
Exception occurred while executing macro expansion. + | java.lang.AssertionError: assertion failed: expected a term symbol but received class Int + | at scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8) + | at scala.quoted.runtime.impl.QuotesImpl$reflect$Ref$.apply(QuotesImpl.scala:475) + | at scala.quoted.runtime.impl.QuotesImpl$reflect$Ref$.apply(QuotesImpl.scala:474) + | at Macro_1$package$.macroWithAssertFailingImpl(Macro_1.scala:6) + | + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Test_2.scala:1 +1 |inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl[T]('t) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i20946/Macro_1.scala b/tests/neg/i20946/Macro_1.scala new file mode 100644 index 000000000000..0f2bb8416a0c --- /dev/null +++ b/tests/neg/i20946/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +def macroWithAssertFailingImpl[T: Type](t: Expr[T])(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + Ref(TypeRepr.of[T].typeSymbol) + + '{()} +} + diff --git a/tests/neg/i20946/Test_2.scala b/tests/neg/i20946/Test_2.scala new file mode 100644 index 000000000000..79a02ff1a5db --- /dev/null +++ b/tests/neg/i20946/Test_2.scala @@ -0,0 +1,6 @@ +inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl[T]('t) } + +@main +def run = + macroWithAssertFailing[Int](123) // error + diff --git a/tests/neg/i20946a.check b/tests/neg/i20946a.check new file mode 100644 index 000000000000..f279a60a4798 --- /dev/null +++ b/tests/neg/i20946a.check @@ -0,0 +1,18 @@ + +-- Error: tests/neg/i20946a/Test_2.scala:5:29 -------------------------------------------------------------------------- +5 | macroWithAssertFailing[Int](123) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Exception occurred while executing macro expansion. 
+ | java.lang.AssertionError: assertion failed: Expected a type symbol, but got val + | at scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8) + | at scala.quoted.runtime.impl.QuotesImpl$reflect$TypeIdent$.apply(QuotesImpl.scala:1165) + | at scala.quoted.runtime.impl.QuotesImpl$reflect$TypeIdent$.apply(QuotesImpl.scala:1164) + | at Macro_1$package$.macroWithAssertFailingImpl(Macro_1.scala:6) + | + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Test_2.scala:1 +1 |inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl[T]('t) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i20946a/Macro_1.scala b/tests/neg/i20946a/Macro_1.scala new file mode 100644 index 000000000000..c0e9e6eec116 --- /dev/null +++ b/tests/neg/i20946a/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +def macroWithAssertFailingImpl[T: Type](t: Expr[T])(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + TypeIdent(t.asTerm.symbol) + + '{()} +} + diff --git a/tests/neg/i20946a/Test_2.scala b/tests/neg/i20946a/Test_2.scala new file mode 100644 index 000000000000..79a02ff1a5db --- /dev/null +++ b/tests/neg/i20946a/Test_2.scala @@ -0,0 +1,6 @@ +inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl[T]('t) } + +@main +def run = + macroWithAssertFailing[Int](123) // error + From facaa5662eb89f0ab6abeac84dbe8486efc7d232 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Tue, 29 Oct 2024 15:05:13 +0100 Subject: [PATCH 713/827] Adapt the workflow to the transfer of the actions to the sdkman org --- .github/workflows/publish-sdkman.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 92123546dc4a..e47c95d01f19 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -46,7 +46,7 @@ jobs: - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: - - uses: hamzaremmal/sdkman-release-action@1f2d4209b4f5a38721d4ae20014ea8e1689d869e + - uses: sdkman/sdkman-release-action@1f2d4209b4f5a38721d4ae20014ea8e1689d869e with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} @@ -59,7 +59,7 @@ jobs: runs-on: ubuntu-latest needs: publish steps: - - uses: hamzaremmal/sdkman-default-action@b3f991bd109e40155af1b13a4c6fc8e8ccada65e + - uses: sdkman/sdkman-default-action@b3f991bd109e40155af1b13a4c6fc8e8ccada65e with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} From f16d5652e10dca2832c6a4c246910a7e733bded9 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 29 Oct 2024 17:35:53 +0100 Subject: [PATCH 714/827] Add note and a test to show the prefix rule for path subsuming --- .../src/dotty/tools/dotc/cc/CaptureRef.scala | 6 +++ .../captures/path-prefix.scala | 44 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 tests/neg-custom-args/captures/path-prefix.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala index 61f18008cbad..8676390eda04 
100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -123,6 +123,12 @@ trait CaptureRef extends TypeProxy, ValueType: this.subsumes(ypre) || this.match case x @ TermRef(xpre: CaptureRef, _) if x.symbol == y.symbol => + // To show `{x.f} <:< {y.f}`, it is important to prove `x` and `y` + // are equvalent, which means `x =:= y` in terms for subtyping, + // not just `{x} =:= {y}`. + // It is posible to construct two singleton types `x` and `y`, + // which subumse each other, but are not equal references. + // See `tests/neg-custom-args/captures/path-prefix.scala` for example. withMode(Mode.IgnoreCaptures) {TypeComparer.isSameRef(xpre, ypre)} case _ => false diff --git a/tests/neg-custom-args/captures/path-prefix.scala b/tests/neg-custom-args/captures/path-prefix.scala new file mode 100644 index 000000000000..af5817636d0b --- /dev/null +++ b/tests/neg-custom-args/captures/path-prefix.scala @@ -0,0 +1,44 @@ +import language.experimental.modularity +import language.experimental.captureChecking +import caps.Capability + +class F: + val f: AnyRef^ = ??? + +case class B(tracked val a: A) extends F, Capability + +class A extends F, Capability: + val b: B { val a: A.this.type } = B(this) + +def test(a: A) = + val x: a.b.type = a.b + val y: x.a.type = x.a + // x and y are two distinct singleton types with following properties: + // x =:= a.b + // y =:= x.a =:= a.b.a =:= a + + val cx: AnyRef^{x} = ??? + val cy: AnyRef^{y} = ??? + val caf: AnyRef^{a.f} = ??? + val cabf: AnyRef^{a.b.f} = ??? + val cxf: AnyRef^{x.f} = ??? + val cyf: AnyRef^{y.f} = ??? + + // x and y subsume to each other: + // * {x} <:< {y}: the underlying singleton of y is x.a, + // and the underlying singleton of x.a is a, + // which is a prefix for the underlying type of x (a.b), + // hence {x} <:< {y}; + // * {y} <:< {x}: by underlying singleton of y is x.a, whose prefix is x. + // Hence, {x} =:= {y}. + val x2y: AnyRef^{y} = cx + val y2x: AnyRef^{x} = cy + + val yf2af: AnyRef^{a.f} = cyf + val af2yf: AnyRef^{y.f} = caf + val xf2abf: AnyRef^{a.b.f} = cxf + val abf2xf: AnyRef^{x.f} = cabf + + // Since `x !=:= y`, {x.f} !=:= {y.f} + val yf2xf: AnyRef^{x.f} = cyf // error + val xf2yf: AnyRef^{y.f} = cxf // error From ad58323fb6e9a9c0bafa51db869168a084cee691 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 29 Oct 2024 17:40:05 +0100 Subject: [PATCH 715/827] Fix typo in the comment --- compiler/src/dotty/tools/dotc/cc/CaptureRef.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala index 8676390eda04..590beda42903 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -124,10 +124,10 @@ trait CaptureRef extends TypeProxy, ValueType: || this.match case x @ TermRef(xpre: CaptureRef, _) if x.symbol == y.symbol => // To show `{x.f} <:< {y.f}`, it is important to prove `x` and `y` - // are equvalent, which means `x =:= y` in terms for subtyping, - // not just `{x} =:= {y}`. - // It is posible to construct two singleton types `x` and `y`, - // which subumse each other, but are not equal references. + // are equvalent, which means `x =:= y` in terms of subtyping, + // not just `{x} =:= {y}` in terms of subcapturing. + // It is possible to construct two singleton types `x` and `y`, + // which subsume each other, but are not equal references. 
// See `tests/neg-custom-args/captures/path-prefix.scala` for example. withMode(Mode.IgnoreCaptures) {TypeComparer.isSameRef(xpre, ypre)} case _ => From 7d8f0d412844cd8468319ddbd542bb1452660f34 Mon Sep 17 00:00:00 2001 From: Natsu Kagami Date: Wed, 30 Oct 2024 11:17:33 +0100 Subject: [PATCH 716/827] Extract semanticDB for lifted definitions (#21856) --- .../dotc/semanticdb/ExtractSemanticDB.scala | 14 ++++++ .../dotty/tools/dotc/typer/EtaExpansion.scala | 13 ++---- .../semanticdb/expect/Synthetic.expect.scala | 9 ++++ tests/semanticdb/expect/Synthetic.scala | 9 ++++ tests/semanticdb/metac.expect | 46 +++++++++++++++++-- 5 files changed, 76 insertions(+), 15 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 8c1f22005af3..05dff8ffadbc 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -286,6 +286,14 @@ object ExtractSemanticDB: || sym.owner == defn.OpsPackageClass || qualifier.exists(excludeQual) + /** This block is created by lifting i.e. EtaExpansion */ + private def isProbablyLifted(block: Block)(using Context) = + def isSyntheticDef(t: Tree) = + t match + case t: (ValDef | DefDef) => t.symbol.isSyntheticWithIdent + case _ => false + block.stats.forall(isSyntheticDef) + private def traverseAnnotsOfDefinition(sym: Symbol)(using Context): Unit = for annot <- sym.annotations do if annot.tree.span.exists @@ -438,6 +446,12 @@ object ExtractSemanticDB: registerUseGuarded(None, sym, tree.span, tree.source) case _ => () + // If tree is lifted, ignore Synthetic status on all the definitions and traverse all childrens + case tree: Block if isProbablyLifted(tree) => + tree.stats.foreach: + case t: (ValDef | DefDef) if !excludeChildren(t.symbol) => traverseChildren(t) + case _ => () + traverse(tree.expr) case _ => traverseChildren(tree) diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index b09580d51943..26d03db4b7dc 100644 --- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -39,9 +39,6 @@ abstract class Lifter { /** The tree of a lifted definition */ protected def liftedDef(sym: TermSymbol, rhs: Tree)(using Context): MemberDef = ValDef(sym, rhs) - /** Is lifting performed on erased terms? */ - protected def isErased = false - private def lift(defs: mutable.ListBuffer[Tree], expr: Tree, prefix: TermName = EmptyTermName)(using Context): Tree = if (noLift(expr)) expr else { @@ -117,8 +114,7 @@ abstract class Lifter { case Apply(fn, args) => val fn1 = liftApp(defs, fn) val args1 = liftArgs(defs, fn.tpe, args) - if isErased then untpd.cpy.Apply(tree)(fn1, args1).withType(tree.tpe) // application may be partial - else cpy.Apply(tree)(fn1, args1) + cpy.Apply(tree)(fn1, args1) case TypeApply(fn, targs) => cpy.TypeApply(tree)(liftApp(defs, fn), targs) case Select(pre, name) if isPureRef(tree) => @@ -141,7 +137,7 @@ abstract class Lifter { * * unless `pre` is idempotent. 
*/ - def liftNonIdempotentPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = + private def liftNonIdempotentPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = if (isIdempotentExpr(tree)) tree else lift(defs, tree) /** Lift prefix `pre` of an application `pre.f(...)` to @@ -154,7 +150,7 @@ abstract class Lifter { * Note that default arguments will refer to the prefix, we do not want * to re-evaluate a complex expression each time we access a getter. */ - def liftPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = + private def liftPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = tree match case tree: Literal => tree case tree: This => tree @@ -218,9 +214,6 @@ object LiftCoverage extends LiftImpure { } } -object LiftErased extends LiftComplex: - override def isErased = true - /** Lift all impure or complex arguments to `def`s */ object LiftToDefs extends LiftComplex { override def liftedFlags: FlagSet = Method diff --git a/tests/semanticdb/expect/Synthetic.expect.scala b/tests/semanticdb/expect/Synthetic.expect.scala index 4d797ce2b856..c8ccb2281cbb 100644 --- a/tests/semanticdb/expect/Synthetic.expect.scala +++ b/tests/semanticdb/expect/Synthetic.expect.scala @@ -58,4 +58,13 @@ class Synthetic/*<-example::Synthetic#*/ { given Int/*->scala::Int#*/ = 1 foo/*->example::Synthetic#Contexts.foo().*/(0) } + + // Argument lifting + val _ = + def f/*<-local14*/(s/*<-local12*/: String/*->scala::Predef.String#*/)(i/*<-local13*/: Int/*->scala::Int#*/ = s/*->local12*/.length/*->java::lang::String#length().*/()) = i/*->local13*/ +/*->scala::Int#`+`(+4).*/ 1 + def g/*<-local18*/(s/*<-local16*/: String/*->scala::Predef.String#*/, t/*<-local17*/: String/*->scala::Predef.String#*/) = s/*->local16*/ +/*->java::lang::String#`+`().*/ t/*->local17*/ + + def impure/*<-local20*/(s/*<-local19*/: String/*->scala::Predef.String#*/) = { ???/*->scala::Predef.`???`().*/; s/*->local19*/ } + val _ = f/*->local14*/(impure/*->local20*/(""))() + val _ = g/*->local18*/(t/*->local17*/ = impure/*->local20*/(""), s/*->local16*/ = "a") } diff --git a/tests/semanticdb/expect/Synthetic.scala b/tests/semanticdb/expect/Synthetic.scala index 71fb5fdf70a3..0953d6cc4f98 100644 --- a/tests/semanticdb/expect/Synthetic.scala +++ b/tests/semanticdb/expect/Synthetic.scala @@ -58,4 +58,13 @@ class Synthetic { given Int = 1 foo(0) } + + // Argument lifting + val _ = + def f(s: String)(i: Int = s.length()) = i + 1 + def g(s: String, t: String) = s + t + + def impure(s: String) = { ???; s } + val _ = f(impure(""))() + val _ = g(t = impure(""), s = "a") } diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index dffed5c0d477..26221899035b 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -2841,7 +2841,7 @@ Schema => SemanticDB v4 Uri => NamedApplyBlock.scala Text => empty Language => Scala -Symbols => 43 entries +Symbols => 41 entries Occurrences => 41 entries Symbols: @@ -2886,8 +2886,6 @@ example/NamedApplyBlockMethods.foo().(b) => param b: Int example/NamedApplyBlockMethods.foo().(c) => param c: Int example/NamedApplyBlockMethods.local. => val method local Int example/NamedApplyBlockMethods.recursive(). 
=> method recursive => Int -local0 => val local c$1: Int -local1 => val local b$1: Int @uncheckedVariance Occurrences: [0:8..0:15): example <- example/ @@ -3533,8 +3531,8 @@ Schema => SemanticDB v4 Uri => Synthetic.scala Text => empty Language => Scala -Symbols => 52 entries -Occurrences => 137 entries +Symbols => 62 entries +Occurrences => 165 entries Synthetics => 39 entries Symbols: @@ -3590,6 +3588,16 @@ local8 => param a: Int local9 => param b: Int local10 => final implicit lazy val given local x: Int local11 => final implicit lazy val given local given_Int: Int +local12 => param s: String +local13 => param i: Int +local14 => local f: (param s: String)(param i: Int): Int +local15 => local f$default$2: (param s: String): Int @uncheckedVariance +local15(s) => param s: String +local16 => param s: String +local17 => param t: String +local18 => local g: (param s: String, param t: String): String +local19 => param s: String +local20 => local impure: (param s: String): String Occurrences: [0:8..0:15): example <- example/ @@ -3729,6 +3737,34 @@ Occurrences: [56:8..56:10): m4 <- example/Synthetic#Contexts.m4(). [57:12..57:15): Int -> scala/Int# [58:6..58:9): foo -> example/Synthetic#Contexts.foo(). +[63:8..63:9): f <- local14 +[63:10..63:11): s <- local12 +[63:13..63:19): String -> scala/Predef.String# +[63:21..63:22): i <- local13 +[63:24..63:27): Int -> scala/Int# +[63:30..63:31): s -> local12 +[63:32..63:38): length -> java/lang/String#length(). +[63:44..63:45): i -> local13 +[63:46..63:47): + -> scala/Int#`+`(+4). +[64:8..64:9): g <- local18 +[64:10..64:11): s <- local16 +[64:13..64:19): String -> scala/Predef.String# +[64:21..64:22): t <- local17 +[64:24..64:30): String -> scala/Predef.String# +[64:34..64:35): s -> local16 +[64:36..64:37): + -> java/lang/String#`+`(). +[64:38..64:39): t -> local17 +[66:8..66:14): impure <- local20 +[66:15..66:16): s <- local19 +[66:18..66:24): String -> scala/Predef.String# +[66:30..66:33): ??? -> scala/Predef.`???`(). 
+[66:35..66:36): s -> local19 +[67:12..67:13): f -> local14 +[67:14..67:20): impure -> local20 +[68:12..68:13): g -> local18 +[68:14..68:15): t -> local17 +[68:18..68:24): impure -> local20 +[68:30..68:31): s -> local16 Synthetics: [5:2..5:13):List(1).map => *[Int] From 7b721f0fdc032e871737d11ed1841c9fdc4ef98d Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 30 Oct 2024 14:04:44 +0000 Subject: [PATCH 717/827] Simplify transformIsInstanceOf check --- .../src/dotty/tools/dotc/transform/TypeTestsCasts.scala | 9 +++------ tests/pos/i21544.scala | 2 ++ 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 45596e1d47f6..c1dd6bc6509e 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -256,7 +256,8 @@ object TypeTestsCasts { else foundClasses.exists(check) end checkSensical - if (expr.tpe <:< testType) && inMatch then + val tp = if expr.tpe.isPrimitiveValueType then defn.boxedType(expr.tpe) else expr.tpe + if tp <:< testType && inMatch then if expr.tpe.isNotNull then constant(expr, Literal(Constant(true))) else expr.testNotNull else { @@ -358,11 +359,7 @@ object TypeTestsCasts { report.error(em"$untestable cannot be used in runtime type tests", tree.srcPos) constant(expr, Literal(Constant(false))) case _ => - val erasedTestType = - if testType.isAny && expr.tpe.isPrimitiveValueType then - defn.AnyValType - else - erasure(testType) + val erasedTestType = erasure(testType) transformIsInstanceOf(expr, erasedTestType, erasedTestType, flagUnrelated) } diff --git a/tests/pos/i21544.scala b/tests/pos/i21544.scala index 45da101e7490..08a3911e4412 100644 --- a/tests/pos/i21544.scala +++ b/tests/pos/i21544.scala @@ -1,2 +1,4 @@ class Test(): def m1(xs: List[Boolean]) = for (x: Any) <- xs yield x + def m2(xs: List[Boolean]) = for (x: AnyVal) <- xs yield x + def m3(xs: List[Boolean]) = for (x: Matchable) <- xs yield x From ae9cffa3a3c736103ca3bec2742b7fd7c2b930ac Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Wed, 30 Oct 2024 15:58:33 +0100 Subject: [PATCH 718/827] Change inContextBound to a Boolean flag --- .../src/dotty/tools/dotc/parsing/Parsers.scala | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 1a5b270f2a48..01cbbe34b3b4 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -80,9 +80,6 @@ object Parsers { enum IntoOK: case Yes, No, Nested - enum InContextBound: - case Yes, No - type StageKind = Int object StageKind { val None = 0 @@ -1553,7 +1550,7 @@ object Parsers { /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and * returns a tree for type `Any` instead. 
*/ - def toplevelTyp(intoOK: IntoOK = IntoOK.No, inContextBound: InContextBound = InContextBound.No): Tree = + def toplevelTyp(intoOK: IntoOK = IntoOK.No, inContextBound: Boolean = false): Tree = rejectWildcardType(typ(intoOK, inContextBound)) private def getFunction(tree: Tree): Option[Function] = tree match { @@ -1609,7 +1606,7 @@ object Parsers { * IntoTargetType ::= Type * | FunTypeArgs (‘=>’ | ‘?=>’) IntoType */ - def typ(intoOK: IntoOK = IntoOK.No, inContextBound: InContextBound = InContextBound.No): Tree = + def typ(intoOK: IntoOK = IntoOK.No, inContextBound: Boolean = false): Tree = val start = in.offset var imods = Modifiers() val erasedArgs: ListBuffer[Boolean] = ListBuffer() @@ -1836,13 +1833,13 @@ object Parsers { /** InfixType ::= RefinedType {id [nl] RefinedType} * | RefinedType `^` // under capture checking */ - def infixType(inContextBound: InContextBound = InContextBound.No): Tree = infixTypeRest(inContextBound)(refinedType()) + def infixType(inContextBound: Boolean = false): Tree = infixTypeRest(inContextBound)(refinedType()) - def infixTypeRest(inContextBound: InContextBound = InContextBound.No)(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = + def infixTypeRest(inContextBound: Boolean = false)(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, isOperator = !followingIsVararg() && !isPureArrow - && !(isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`) && inContextBound == InContextBound.Yes) + && !(isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`) && inContextBound) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -2233,7 +2230,7 @@ object Parsers { /** ContextBound ::= Type [`as` id] */ def contextBound(pname: TypeName): Tree = - val t = toplevelTyp(inContextBound = InContextBound.Yes) + val t = toplevelTyp(inContextBound = true) val ownName = if isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`) then in.nextToken() From 17dadf791a5b42eb406227901b1c16940e1537cb Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 29 Oct 2024 17:52:13 +0000 Subject: [PATCH 719/827] Fix extending protected nested java classes PR 21362 added an accessibility fix to Erasure, but that revealed a mistake in determining the accessibility of inner java classes, which I'm now fixing. 
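
For reference, a minimal sketch of the pattern this fixes, mirroring the i21631 tests added below (the Java side, as in AbstractChannel.java, declares a `protected` nested class):

```scala
// Sketch taken from the added tests/pos/i21631_joint test: `Channel` extends a
// Java class whose nested class `AbstractUnsafe` is declared `protected`, and
// instantiates that nested class. Before this fix, the nested class's
// accessibility was derived from the wrong flags, so this failed to compile.
class Channel extends AbstractChannel() {
  override def newUnsafe(): AbstractChannel#AbstractUnsafe = new AbstractUnsafe {
    override def connect(): Unit = ???
  }
}
```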
--- .../dotty/tools/dotc/core/classfile/ClassfileParser.scala | 3 ++- compiler/src/dotty/tools/dotc/printing/Formatting.scala | 7 +++++++ tests/pos/i21631_joint/AbstractChannel.java | 7 +++++++ tests/pos/i21631_joint/i21631.scala | 5 +++++ tests/pos/i21631_separ/AbstractChannel_1.java | 7 +++++++ tests/pos/i21631_separ/i21631_2.scala | 5 +++++ 6 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21631_joint/AbstractChannel.java create mode 100644 tests/pos/i21631_joint/i21631.scala create mode 100644 tests/pos/i21631_separ/AbstractChannel_1.java create mode 100644 tests/pos/i21631_separ/i21631_2.scala diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 9e7b59a0cfac..f6ad61148208 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -401,7 +401,8 @@ class ClassfileParser( classRoot.setFlag(sflags) moduleRoot.setFlag(Flags.JavaDefined | Flags.ModuleClassCreationFlags) - val privateWithin = getPrivateWithin(jflags) + val jflags1 = innerClasses.get(currentClassName.toString).fold(jflags: Int)(_.jflags) + val privateWithin = getPrivateWithin(jflags1) classRoot.setPrivateWithin(privateWithin) moduleRoot.setPrivateWithin(privateWithin) diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index a36e6f48533a..14b822b11333 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -109,6 +109,13 @@ object Formatting { case Atoms.Range(lo, hi) => CtxShow(s"Range(${toStr(lo.toList)}, ${toStr(hi.toList)})") end given + given Show[ast.untpd.Modifiers] with + def show(x: ast.untpd.Modifiers) = + CtxShow(s"Modifiers(${toStr(x.flags)}, ${toStr(x.privateWithin)}, ${toStr(x.annotations)}, ${toStr(x.mods)})") + + given Show[ast.untpd.Mod] with + def show(x: ast.untpd.Mod) = CtxShow(s"Mod(${toStr(x.flags)})") + given Show[Showable] = ShowAny given Show[Shown] = ShowAny given Show[Int] = ShowAny diff --git a/tests/pos/i21631_joint/AbstractChannel.java b/tests/pos/i21631_joint/AbstractChannel.java new file mode 100644 index 000000000000..fbcd04549def --- /dev/null +++ b/tests/pos/i21631_joint/AbstractChannel.java @@ -0,0 +1,7 @@ +public abstract class AbstractChannel { + protected AbstractChannel() {} + protected abstract AbstractUnsafe newUnsafe(); + protected abstract class AbstractUnsafe { + public abstract void connect(); + } +} diff --git a/tests/pos/i21631_joint/i21631.scala b/tests/pos/i21631_joint/i21631.scala new file mode 100644 index 000000000000..c567d75b1375 --- /dev/null +++ b/tests/pos/i21631_joint/i21631.scala @@ -0,0 +1,5 @@ +class Channel extends AbstractChannel() { + override def newUnsafe(): AbstractChannel#AbstractUnsafe = new AbstractUnsafe { + override def connect(): Unit = ??? 
+ } +} diff --git a/tests/pos/i21631_separ/AbstractChannel_1.java b/tests/pos/i21631_separ/AbstractChannel_1.java new file mode 100644 index 000000000000..e89767d6d73e --- /dev/null +++ b/tests/pos/i21631_separ/AbstractChannel_1.java @@ -0,0 +1,7 @@ +public abstract class AbstractChannel_1 { + protected AbstractChannel_1() {} + protected abstract AbstractUnsafe newUnsafe(); + protected abstract class AbstractUnsafe { + public abstract void connect(); + } +} diff --git a/tests/pos/i21631_separ/i21631_2.scala b/tests/pos/i21631_separ/i21631_2.scala new file mode 100644 index 000000000000..89524e333920 --- /dev/null +++ b/tests/pos/i21631_separ/i21631_2.scala @@ -0,0 +1,5 @@ +class Channel extends AbstractChannel_1() { + override def newUnsafe(): AbstractChannel_1#AbstractUnsafe = new AbstractUnsafe { + override def connect(): Unit = ??? + } +} From 7aebb9dab2e557aff2b3276ecd6ad5e31bf357e3 Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Thu, 31 Oct 2024 13:11:38 +0100 Subject: [PATCH 720/827] improvement: Disable projects not compiling with Bloop --- docs/_docs/contributing/setting-up-your-ide.md | 6 +++--- project/Build.scala | 12 +++++++++++- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/docs/_docs/contributing/setting-up-your-ide.md b/docs/_docs/contributing/setting-up-your-ide.md index a15bf651ef74..3779ce1c3403 100644 --- a/docs/_docs/contributing/setting-up-your-ide.md +++ b/docs/_docs/contributing/setting-up-your-ide.md @@ -31,15 +31,15 @@ Normally this is fine, but if you're working on certain modules like `scaladoc` you'll actually want these modules exported. In order to achieve this you'll want to make sure you do two things: -1. You'll want to find and change the following under +1. You'll want to find and change the following above `commonBootstrappedSettings` which is found in the [`Build.scala`](https://github.com/scala/scala3/blob/main/project/Build.scala) file. ```diff -- bspEnabled := false, -+ bspEnabled := true, +- val enableBspAllProjects = false, ++ val enableBspAllProjects = true, ``` 2. Run `sbt publishLocal` to get the needed presentation compiler jars. 
diff --git a/project/Build.scala b/project/Build.scala index 6ec933a599b3..d955f7067626 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -468,10 +468,20 @@ object Build { ) ++ extMap } + /* This projects are irrelevant from IDE point of view and do not compile with Bloop*/ + val fullyDisabledProjects = Set( + "scala2-library-cc", + "scala2-library-bootstrapped", + "scala2-library-cc-tasty", + "scala2-library-tasty" + ) + + val enableBspAllProjects = false + // Settings used when compiling dotty with a non-bootstrapped dotty lazy val commonBootstrappedSettings = commonDottySettings ++ Seq( // To enable support of scaladoc and language-server projects you need to change this to true - bspEnabled := false, + bspEnabled := { if(fullyDisabledProjects(name.value)) false else enableBspAllProjects }, (Compile / unmanagedSourceDirectories) += baseDirectory.value / "src-bootstrapped", version := dottyVersion, From 09e96e2f08bbfa743fb3184be6ff82421bd62303 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 31 Oct 2024 12:43:30 +0000 Subject: [PATCH 721/827] Fix Java parsing of annotations on qualified types According to [#1][#1] this is valid syntax: java.lang.@NonNull String [#1]: https://checkerframework.org/jsr308/specification/java-annotation-design.html#qualified-type-syntax --- .../dotty/tools/dotc/parsing/JavaParsers.scala | 1 + tests/pos/i21319/Foo.java | 8 ++++++++ tests/pos/i21319/Test.scala | 3 +++ tests/pos/i21319/Valid.java | 17 +++++++++++++++++ 4 files changed, 29 insertions(+) create mode 100644 tests/pos/i21319/Foo.java create mode 100644 tests/pos/i21319/Test.scala create mode 100644 tests/pos/i21319/Valid.java diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 8a9eca2c1e67..fe797c66d104 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -298,6 +298,7 @@ object JavaParsers { } while (in.token == DOT) { in.nextToken() + annotations() t = typeArgs(atSpan(t.span.start, in.offset)(typeSelect(t, ident()))) } convertToTypeId(t) diff --git a/tests/pos/i21319/Foo.java b/tests/pos/i21319/Foo.java new file mode 100644 index 000000000000..1240d014b7e7 --- /dev/null +++ b/tests/pos/i21319/Foo.java @@ -0,0 +1,8 @@ +package app; + +import java.util.Optional; +import lib.*; + +public class Foo { + private java.util.@lib.Valid Optional userId; +} diff --git a/tests/pos/i21319/Test.scala b/tests/pos/i21319/Test.scala new file mode 100644 index 000000000000..a85c8f461aab --- /dev/null +++ b/tests/pos/i21319/Test.scala @@ -0,0 +1,3 @@ +package app + +class Test diff --git a/tests/pos/i21319/Valid.java b/tests/pos/i21319/Valid.java new file mode 100644 index 000000000000..17e0e1173726 --- /dev/null +++ b/tests/pos/i21319/Valid.java @@ -0,0 +1,17 @@ +package lib; + +import static java.lang.annotation.ElementType.CONSTRUCTOR; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +@Target({ METHOD, FIELD, CONSTRUCTOR, PARAMETER, TYPE_USE }) +@Retention(RUNTIME) +@Documented +public @interface Valid {} From c450073b308364217a203cf8bd2c7c09212da475 Mon Sep 17 00:00:00 2001 From: 
Tomasz Godzik Date: Thu, 31 Oct 2024 15:26:51 +0100 Subject: [PATCH 722/827] Fix typo MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sébastien Doeraene --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index d955f7067626..5b357dab6ef6 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -468,7 +468,7 @@ object Build { ) ++ extMap } - /* This projects are irrelevant from IDE point of view and do not compile with Bloop*/ + /* These projects are irrelevant from IDE point of view and do not compile with Bloop*/ val fullyDisabledProjects = Set( "scala2-library-cc", "scala2-library-bootstrapped", From 350215dcaad554accccef37502033439a6d09caf Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 31 Oct 2024 18:44:04 +0000 Subject: [PATCH 723/827] Set privateWithin when creating java inner classes --- .../dotty/tools/dotc/core/SymbolLoaders.scala | 3 ++- .../dotc/core/classfile/ClassfileParser.scala | 19 ++++++++++++------- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 5690720a1b3f..585db8488a78 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -103,13 +103,14 @@ object SymbolLoaders { */ def enterClassAndModule( owner: Symbol, name: PreName, completer: SymbolLoader, - flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Unit = { + flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): (Symbol, Symbol) = { val clazz = enterClass(owner, name, completer, flags, scope) val module = enterModule( owner, name, completer, modFlags = flags.toTermFlags & RetainedModuleValFlags, clsFlags = flags.toTypeFlags & RetainedModuleClassFlags, scope = scope) + (clazz, module) } /** Enter all toplevel classes and objects in file `src` into package `owner`, provided diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index f6ad61148208..4b0dec2c88b7 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -401,12 +401,12 @@ class ClassfileParser( classRoot.setFlag(sflags) moduleRoot.setFlag(Flags.JavaDefined | Flags.ModuleClassCreationFlags) - val jflags1 = innerClasses.get(currentClassName.toString).fold(jflags: Int)(_.jflags) - val privateWithin = getPrivateWithin(jflags1) + val privateWithin = getPrivateWithin(jflags) - classRoot.setPrivateWithin(privateWithin) - moduleRoot.setPrivateWithin(privateWithin) - moduleRoot.sourceModule.setPrivateWithin(privateWithin) + if privateWithin.exists then + classRoot.setPrivateWithin(privateWithin) + moduleRoot.setPrivateWithin(privateWithin) + moduleRoot.sourceModule.setPrivateWithin(privateWithin) for (i <- 0 until in.nextChar) parseMember(method = false) for (i <- 0 until in.nextChar) parseMember(method = true) @@ -1059,13 +1059,18 @@ class ClassfileParser( */ private def enterOwnInnerClasses()(using Context, DataReader): Unit = { def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) = - SymbolLoaders.enterClassAndModule( + val (cls, mod) = SymbolLoaders.enterClassAndModule( getOwner(jflags), - entry.originalName, + entry.originalName, new 
ClassfileLoader(file), classTranslation.flags(jflags), getScope(jflags)) + val privateWithin = getPrivateWithin(jflags) + cls.setPrivateWithin(privateWithin) + mod.setPrivateWithin(privateWithin) + mod.sourceModule.setPrivateWithin(privateWithin) + for entry <- innerClasses.valuesIterator do // create a new class member for immediate inner classes if entry.outer.name == currentClassName then From 014d0dbb4df6ee6b93aad7692a35996c962cded6 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 1 Nov 2024 10:28:32 +0100 Subject: [PATCH 724/827] Change tests for i20946 to only check the assert message in code --- .../scala/quoted/runtime/impl/QuotesImpl.scala | 6 +++--- tests/neg/i20946.check | 18 ------------------ tests/neg/i20946/Macro_1.scala | 8 ++++++-- tests/neg/i20946/Test_2.scala | 1 - tests/neg/i20946a.check | 18 ------------------ tests/neg/i20946a/Macro_1.scala | 8 ++++++-- tests/neg/i20946a/Test_2.scala | 1 - 7 files changed, 15 insertions(+), 45 deletions(-) delete mode 100644 tests/neg/i20946.check delete mode 100644 tests/neg/i20946a.check diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index abda4aa191a9..ef2eacf42225 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -301,7 +301,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object DefDef extends DefDefModule: def apply(symbol: Symbol, rhsFn: List[List[Tree]] => Option[Term]): DefDef = - xCheckMacroAssert(symbol.isTerm, s"expected a term symbol but received $symbol") + xCheckMacroAssert(symbol.isTerm, s"expected a term symbol, but received $symbol") xCheckMacroAssert(symbol.flags.is(Flags.Method), "expected a symbol with `Method` flag set") withDefaultPos(tpd.DefDef(symbol.asTerm, prefss => xCheckedMacroOwners(xCheckMacroValidExpr(rhsFn(prefss)), symbol).getOrElse(tpd.EmptyTree) @@ -472,7 +472,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def term(tp: TermRef): Ref = withDefaultPos(tpd.ref(tp).asInstanceOf[tpd.RefTree]) def apply(sym: Symbol): Ref = - assert(sym.isTerm, s"expected a term symbol but received $sym") + assert(sym.isTerm, s"expected a term symbol, but received $sym") val refTree = tpd.ref(sym) match case t @ tpd.This(ident) => // not a RefTree, so we need to work around this - issue #19732 // ident in `This` can be a TypeIdent of sym, so we manually prepare the ref here, @@ -1128,7 +1128,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def of[T <: AnyKind](using tp: scala.quoted.Type[T]): TypeTree = tp.asInstanceOf[TypeImpl].typeTree def ref(sym: Symbol): TypeTree = - assert(sym.isType, "Expected a type symbol, but got " + sym) + assert(sym.isType, s"Expected a type symbol, but got $sym") tpd.ref(sym) end TypeTree diff --git a/tests/neg/i20946.check b/tests/neg/i20946.check deleted file mode 100644 index acce8bf4852d..000000000000 --- a/tests/neg/i20946.check +++ /dev/null @@ -1,18 +0,0 @@ - --- Error: tests/neg/i20946/Test_2.scala:5:29 --------------------------------------------------------------------------- -5 | macroWithAssertFailing[Int](123) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Exception occurred while executing macro expansion. 
- | java.lang.AssertionError: assertion failed: expected a term symbol but received class Int - | at scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8) - | at scala.quoted.runtime.impl.QuotesImpl$reflect$Ref$.apply(QuotesImpl.scala:475) - | at scala.quoted.runtime.impl.QuotesImpl$reflect$Ref$.apply(QuotesImpl.scala:474) - | at Macro_1$package$.macroWithAssertFailingImpl(Macro_1.scala:6) - | - |--------------------------------------------------------------------------------------------------------------------- - |Inline stack trace - |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from Test_2.scala:1 -1 |inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl[T]('t) } - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i20946/Macro_1.scala b/tests/neg/i20946/Macro_1.scala index 0f2bb8416a0c..f598f5d278ce 100644 --- a/tests/neg/i20946/Macro_1.scala +++ b/tests/neg/i20946/Macro_1.scala @@ -3,8 +3,12 @@ import scala.quoted.* def macroWithAssertFailingImpl[T: Type](t: Expr[T])(using Quotes): Expr[Unit] = { import quotes.reflect.* - Ref(TypeRepr.of[T].typeSymbol) + try + Ref(TypeRepr.of[T].typeSymbol) + catch + case ex: Throwable => + if ex.getMessage().contains("expected a term symbol, but received ") then + throw ex '{()} } - diff --git a/tests/neg/i20946/Test_2.scala b/tests/neg/i20946/Test_2.scala index 79a02ff1a5db..80ae0a95fa4b 100644 --- a/tests/neg/i20946/Test_2.scala +++ b/tests/neg/i20946/Test_2.scala @@ -3,4 +3,3 @@ inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl @main def run = macroWithAssertFailing[Int](123) // error - diff --git a/tests/neg/i20946a.check b/tests/neg/i20946a.check deleted file mode 100644 index f279a60a4798..000000000000 --- a/tests/neg/i20946a.check +++ /dev/null @@ -1,18 +0,0 @@ - --- Error: tests/neg/i20946a/Test_2.scala:5:29 -------------------------------------------------------------------------- -5 | macroWithAssertFailing[Int](123) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Exception occurred while executing macro expansion. 
- | java.lang.AssertionError: assertion failed: Expected a type symbol, but got val - | at scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8) - | at scala.quoted.runtime.impl.QuotesImpl$reflect$TypeIdent$.apply(QuotesImpl.scala:1165) - | at scala.quoted.runtime.impl.QuotesImpl$reflect$TypeIdent$.apply(QuotesImpl.scala:1164) - | at Macro_1$package$.macroWithAssertFailingImpl(Macro_1.scala:6) - | - |--------------------------------------------------------------------------------------------------------------------- - |Inline stack trace - |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from Test_2.scala:1 -1 |inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl[T]('t) } - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i20946a/Macro_1.scala b/tests/neg/i20946a/Macro_1.scala index c0e9e6eec116..b3603fe91b10 100644 --- a/tests/neg/i20946a/Macro_1.scala +++ b/tests/neg/i20946a/Macro_1.scala @@ -3,8 +3,12 @@ import scala.quoted.* def macroWithAssertFailingImpl[T: Type](t: Expr[T])(using Quotes): Expr[Unit] = { import quotes.reflect.* - TypeIdent(t.asTerm.symbol) + try + TypeIdent(t.asTerm.symbol) + catch + case ex: Throwable => + if ex.getMessage().contains("Expected a type symbol, but got ") then + throw ex '{()} } - diff --git a/tests/neg/i20946a/Test_2.scala b/tests/neg/i20946a/Test_2.scala index 79a02ff1a5db..80ae0a95fa4b 100644 --- a/tests/neg/i20946a/Test_2.scala +++ b/tests/neg/i20946a/Test_2.scala @@ -3,4 +3,3 @@ inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl @main def run = macroWithAssertFailing[Int](123) // error - From 2f61ee2648fa35ed66d6a1f03b449032ea9ee316 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Fri, 1 Nov 2024 01:43:21 +0100 Subject: [PATCH 725/827] Ensure to escape characters before constructing JSON profile trace --- .../dotc/profile/JsonNameTransformer.scala | 47 +++++++ .../dotty/tools/dotc/profile/Profiler.scala | 15 +- compiler/test/dotty/tools/DottyTest.scala | 2 +- .../dotc/profile/TraceNameManglingTest.scala | 133 ++++++++++++++++++ 4 files changed, 191 insertions(+), 6 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala create mode 100644 compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala diff --git a/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala b/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala new file mode 100644 index 000000000000..a1bb5f9552c5 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala @@ -0,0 +1,47 @@ +package dotty.tools.dotc.profile + +import scala.annotation.internal.sharable + +// Based on NameTransformer but dedicated for JSON encoding rules +object JsonNameTransformer { + private val nops = 128 + private val ncodes = 26 * 26 + + @sharable private val op2code = new Array[String](nops) + private def enterOp(op: Char, code: String) = op2code(op.toInt) = code + + enterOp('\"', "\\\"") + enterOp('\\', "\\\\") + // enterOp('/', "\\/") // optional, no need for escaping outside of html context + enterOp('\b', "\\b") + enterOp('\f', "\\f") + enterOp('\n', "\\n") + enterOp('\r', "\\r") + enterOp('\t', "\\t") + + def encode(name: String): String = { + var buf: StringBuilder = null.asInstanceOf + val 
len = name.length + var i = 0 + while (i < len) { + val c = name(i) + if (c < nops && (op2code(c.toInt) ne null)) { + if (buf eq null) { + buf = new StringBuilder() + buf.append(name.subSequence(0, i)) + } + buf.append(op2code(c.toInt)) + } else if (c <= 0x1F || c > 0x7F) { + if (buf eq null) { + buf = new StringBuilder() + buf.append(name.subSequence(0, i)) + } + buf.append("\\u%04X".format(c.toInt)) + } else if (buf ne null) { + buf.append(c) + } + i += 1 + } + if (buf eq null) name else buf.toString + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index 69a806215ddd..ab3e73468385 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -273,7 +273,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) override def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = { assert(mainThread eq Thread.currentThread()) traceThreadSnapshotCounters() - val eventId = traceDurationStart(Category.Phase, phase.phaseName) + val eventId = traceDurationStart(Category.Phase, escapeSpecialChars(phase.phaseName)) if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) doGC() if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { @@ -287,7 +287,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) assert(mainThread eq Thread.currentThread()) if chromeTrace != null then traceThreadSnapshotCounters() - traceDurationStart(Category.File, unit.source.name) + traceDurationStart(Category.File, escapeSpecialChars(unit.source.name)) else TracedEventId.Empty } @@ -325,7 +325,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) then EmptyCompletionEvent else val completionName = this.completionName(root, associatedFile) - val event = TracedEventId(associatedFile.name) + val event = TracedEventId(escapeSpecialChars(associatedFile.name)) chromeTrace.traceDurationEventStart(Category.Completion.name, "↯", colour = "thread_state_sleeping") chromeTrace.traceDurationEventStart(Category.File.name, event) chromeTrace.traceDurationEventStart(Category.Completion.name, completionName) @@ -350,8 +350,13 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) if chromeTrace != null then chromeTrace.traceDurationEventEnd(category.name, event, colour) - private def symbolName(sym: Symbol): String = s"${sym.showKind} ${sym.showName}" - private def completionName(root: Symbol, associatedFile: AbstractFile): String = + private inline def escapeSpecialChars(value: String): String = + JsonNameTransformer.encode(value) + + private def symbolName(sym: Symbol): String = escapeSpecialChars: + s"${sym.showKind} ${sym.showName}" + + private def completionName(root: Symbol, associatedFile: AbstractFile): String = escapeSpecialChars: def isTopLevel = root.owner != NoSymbol && root.owner.is(Flags.Package) if root.is(Flags.Package) || isTopLevel then root.javaBinaryName diff --git a/compiler/test/dotty/tools/DottyTest.scala b/compiler/test/dotty/tools/DottyTest.scala index 2b94801b67d7..76d2fdcb6d26 100644 --- a/compiler/test/dotty/tools/DottyTest.scala +++ b/compiler/test/dotty/tools/DottyTest.scala @@ -46,7 +46,7 @@ trait DottyTest extends ContextEscapeDetection { protected def defaultCompiler: Compiler = new Compiler() - private def compilerWithChecker(phase: String)(assertion: (tpd.Tree, Context) => Unit) = 
new Compiler { + protected def compilerWithChecker(phase: String)(assertion: (tpd.Tree, Context) => Unit) = new Compiler { private val baseCompiler = defaultCompiler diff --git a/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala b/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala new file mode 100644 index 000000000000..977b67740f88 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala @@ -0,0 +1,133 @@ +package dotty.tools.dotc.profile + +import org.junit.Assert.* +import org.junit.* + +import scala.annotation.tailrec +import dotty.tools.DottyTest +import dotty.tools.dotc.util.SourceFile +import dotty.tools.dotc.core.Contexts.FreshContext +import java.nio.file.Files +import java.util.Locale + +class TraceNameManglingTest extends DottyTest { + + override protected def initializeCtx(fc: FreshContext): Unit = { + super.initializeCtx(fc) + val tmpDir = Files.createTempDirectory("trace_name_mangling_test").nn + fc.setSetting(fc.settings.YprofileEnabled, true) + fc.setSetting( + fc.settings.YprofileTrace, + tmpDir.resolve("trace.json").nn.toAbsolutePath().toString() + ) + fc.setSetting( + fc.settings.YprofileDestination, + tmpDir.resolve("profiler.out").nn.toAbsolutePath().toString() + ) + } + + @Test def escapeBackslashes(): Unit = { + val isWindows = sys.props("os.name").toLowerCase(Locale.ROOT) == "windows" + val filename = if isWindows then "/.scala" else "\\.scala" + checkTraceEvents( + """ + |class /\ : + | var /\ = ??? + |object /\{ + | def /\ = ??? + |}""".stripMargin, + filename = filename + )( + Set( + raw"class /\\", + raw"object /\\", + raw"method /\\", + raw"variable /\\", + raw"setter /\\_=" + ).map(TraceEvent("typecheck", _)) + ++ Set( + TraceEvent("file", if isWindows then "/.scala" else "\\\\.scala") + ) + ) + } + + @Test def escapeDoubleQuotes(): Unit = { + val filename = "\"quoted\".scala" + checkTraceEvents( + """ + |class `"QuotedClass"`: + | var `"quotedVar"` = ??? + |object `"QuotedObject"` { + | def `"quotedMethod"` = ??? + |}""".stripMargin, + filename = filename + ): + Set( + raw"class \"QuotedClass\"", + raw"object \"QuotedObject\"", + raw"method \"quotedMethod\"", + raw"variable \"quotedVar\"" + ).map(TraceEvent("typecheck", _)) + ++ Set(TraceEvent("file", "\\\"quoted\\\".scala")) + } + @Test def escapeNonAscii(): Unit = { + val filename = "unic😀de.scala" + checkTraceEvents( + """ + |class ΩUnicodeClass: + | var `中文Var` = ??? + |object ΩUnicodeObject { + | def 中文Method = ??? 
+ |}""".stripMargin, + filename = filename + ): + Set( + "class \\u03A9UnicodeClass", + "object \\u03A9UnicodeObject", + "method \\u4E2D\\u6587Method", + "variable \\u4E2D\\u6587Var" + ).map(TraceEvent("typecheck", _)) + ++ Set(TraceEvent("file", "unic\\uD83D\\uDE00de.scala")) + } + + case class TraceEvent(category: String, name: String) + private def compileWithTracer( + code: String, + filename: String, + afterPhase: String = "typer" + )(checkEvents: Seq[TraceEvent] => Unit) = { + val runCtx = locally: + val source = SourceFile.virtual(filename, code) + val c = compilerWithChecker(afterPhase) { (_, _) => () } + val run = c.newRun + run.compileSources(List(source)) + run.runContext + assert(!runCtx.reporter.hasErrors, "compilation failed") + val outfile = ctx.settings.YprofileTrace.value + checkEvents: + scala.io.Source + .fromFile(outfile) + .getLines() + .collect: + case s"""${_}"cat":"${category}","name":${name},"ph":${_}""" => + TraceEvent(category, name.stripPrefix("\"").stripSuffix("\"")) + .distinct.toSeq + } + + private def checkTraceEvents(code: String, filename: String = "test")(expected: Set[TraceEvent]): Unit = { + compileWithTracer(code, filename = filename, afterPhase = "typer"){ events => + val missing = expected.diff(events.toSet) + def showFound = events + .groupBy(_.category) + .collect: + case (category, events) + if expected.exists(_.category == category) => + s"- $category: [${events.map(_.name).mkString(", ")}]" + .mkString("\n") + assert( + missing.isEmpty, + s"""Missing ${missing.size} names [${missing.mkString(", ")}] in events, got:\n${showFound}""" + ) + } + } +} From 19c945b9d938ed62073d8373c18e7acfef8ab2d7 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 1 Nov 2024 13:35:11 +0000 Subject: [PATCH 726/827] Fix provablyDisjoint handling enum constants with mixins --- .../dotty/tools/dotc/core/TypeComparer.scala | 7 ++++--- tests/warn/i21860.scala | 16 ++++++++++++++++ tests/warn/i21860.unenum.scala | 17 +++++++++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 tests/warn/i21860.scala create mode 100644 tests/warn/i21860.unenum.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 145a038dd856..16637c3286c1 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3196,9 +3196,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling cls.is(Sealed) && !cls.hasAnonymousChild def decompose(cls: Symbol): List[Symbol] = - cls.children.map { child => - if child.isTerm then child.info.classSymbol - else child + cls.children.flatMap { child => + if child.isTerm then + child.info.classSymbols // allow enum vals to be decomposed to their enum class (then filtered out) and any mixins + else child :: Nil }.filter(child => child.exists && child != cls) def eitherDerivesFromOther(cls1: Symbol, cls2: Symbol): Boolean = diff --git a/tests/warn/i21860.scala b/tests/warn/i21860.scala new file mode 100644 index 000000000000..377d4761e80f --- /dev/null +++ b/tests/warn/i21860.scala @@ -0,0 +1,16 @@ +trait Figure +sealed trait Corners { self: Figure => } + +enum Shape extends Figure: + case Triangle extends Shape with Corners + case Square extends Shape with Corners + case Circle extends Shape + case Ellipsis extends Shape + +def hasCorners(s: Shape): Boolean = s match + case hasCorners: Corners => true // <--- reported as `Unreachable case` + case _ => false + +class 
Test: + def test(): Unit = + println(hasCorners(Shape.Circle)) diff --git a/tests/warn/i21860.unenum.scala b/tests/warn/i21860.unenum.scala new file mode 100644 index 000000000000..7335e1b6851d --- /dev/null +++ b/tests/warn/i21860.unenum.scala @@ -0,0 +1,17 @@ +trait Figure +sealed trait Corners { self: Figure => } + +sealed abstract class Shape extends Figure +object Shape: + case object Triange extends Shape with Corners + case object Square extends Shape with Corners + case object Circle extends Shape + case object Ellipsis extends Shape + +def hasCorners(s: Shape): Boolean = s match + case hasCorners: Corners => true // <--- reported as `Unreachable case` + case _ => false + +class Test: + def test(): Unit = + println(hasCorners(Shape.Circle)) From 49d876a87851b78a63f3027811b41791a2fe583c Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Sat, 2 Nov 2024 18:09:27 +0000 Subject: [PATCH 727/827] Add extra isInstanceOf test cases --- tests/pos/i21544.scala | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/pos/i21544.scala b/tests/pos/i21544.scala index 08a3911e4412..b2d978ea7d02 100644 --- a/tests/pos/i21544.scala +++ b/tests/pos/i21544.scala @@ -2,3 +2,12 @@ class Test(): def m1(xs: List[Boolean]) = for (x: Any) <- xs yield x def m2(xs: List[Boolean]) = for (x: AnyVal) <- xs yield x def m3(xs: List[Boolean]) = for (x: Matchable) <- xs yield x + + def v1(xs: List[AnyVal]) = for (x: Any) <- xs yield x + def v2(xs: List[AnyVal]) = for (x: AnyVal) <- xs yield x + def v3(xs: List[AnyVal]) = for (x: Matchable) <- xs yield x + + def t1(xs: List[Matchable]) = for (x: Any) <- xs yield x + def t2(xs: List[Matchable]) = for (x: Matchable) <- xs yield x + + def a1(xs: List[Any]) = for (x: Any) <- xs yield x From c5cfa0abc348fe2772a36e98c03254e2c8e4fb83 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Mon, 28 Oct 2024 16:09:29 -0400 Subject: [PATCH 728/827] Fix warning message for matching on redundant nulls --- .../tools/dotc/transform/patmat/Space.scala | 14 ++++- tests/warn/redundant-null.check | 52 +++++++++++++++++++ tests/warn/redundant-null.scala | 41 +++++++++++++++ 3 files changed, 105 insertions(+), 2 deletions(-) create mode 100644 tests/warn/redundant-null.check create mode 100644 tests/warn/redundant-null.scala diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index cb4dfba957a2..cf4b96a52c50 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -923,6 +923,12 @@ object SpaceEngine { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) + def mayCoverNull(tp: Space)(using Context): Boolean = tp match + case Empty => false + case Prod(_, _, _) => false + case Typ(tp, decomposed) => tp == ConstantType(Constant(null)) + case Or(ss) => ss.exists(mayCoverNull) + def checkReachability(m: Match)(using Context): Unit = trace(i"checkReachability($m)"): val selTyp = toUnderlying(m.selector.tpe).dealias val isNullable = selTyp.isInstanceOf[FlexibleType] || selTyp.classSymbol.isNullableClass @@ -948,12 +954,16 @@ object SpaceEngine { && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase && isSubspace(covered, prev) then - val nullOnly = isNullable && rest.isEmpty && isWildcardArg(pat) + val nullOnly = isNullable && isWildcardArg(pat) && !mayCoverNull(prev) val msg = if nullOnly then MatchCaseOnlyNullWarning() else MatchCaseUnreachable() 
report.warning(msg, pat.srcPos) // in redundancy check, take guard as false in order to soundly approximate - val newPrev = if guard.isEmpty then covered :: prevs else prevs + val newPrev = if (guard.isEmpty) + then if (isWildcardArg(pat)) + then Typ(ConstantType(Constant(null))) :: covered :: prevs + else covered :: prevs + else prevs recur(rest, newPrev, Nil) recur(m.cases, Nil, Nil) diff --git a/tests/warn/redundant-null.check b/tests/warn/redundant-null.check new file mode 100644 index 000000000000..7fdfbc1718fc --- /dev/null +++ b/tests/warn/redundant-null.check @@ -0,0 +1,52 @@ +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:10:7 ----------------------------------------- +10 | case _: n.type => // warn + | ^^^^^^^^^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:12:7 ----------------------------------------- +12 | case _ => // warn + | ^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:13:7 ----------------------------------------- +13 | case _ => // warn + | ^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:18:7 ----------------------------------------- +18 | case _ => 3 // warn + | ^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:23:7 ----------------------------------------- +23 | case _: B => // warn + | ^^^^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:24:7 ----------------------------------------- +24 | case _ => // warn + | ^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:25:7 ----------------------------------------- +25 | case null => // warn + | ^^^^ + | Unreachable case +-- [E121] Pattern Match Warning: tests/warn/redundant-null.scala:30:7 -------------------------------------------------- +30 | case _ => // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). 
+-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:31:7 ----------------------------------------- +31 | case null => // warn + | ^^^^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:32:7 ----------------------------------------- +32 | case _ => // warn + | ^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:33:7 ----------------------------------------- +33 | case _ => // warn + | ^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:37:7 ----------------------------------------- +37 | case _ => println("unreachable") // warn + | ^ + | Unreachable case +-- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:41:7 ----------------------------------------- +41 | case _ => // warn + | ^ + | Unreachable case diff --git a/tests/warn/redundant-null.scala b/tests/warn/redundant-null.scala new file mode 100644 index 000000000000..17da3e88350e --- /dev/null +++ b/tests/warn/redundant-null.scala @@ -0,0 +1,41 @@ +class A +class B +class C + +val n = null + +def f(s: A) = s match + case _: n.type => + case _: A => + case _: n.type => // warn + case null => + case _ => // warn + case _ => // warn + +def f2(s: A | B | C) = s match + case _: A => 0 + case _: C | null | _: B => 1 + case _ => 3 // warn + +def f3(s: A | B) = s match + case _: A => + case _ => + case _: B => // warn + case _ => // warn + case null => // warn + +def f4(s: String | Int) = s match + case _: Int => + case _: String => + case _ => // warn + case null => // warn + case _ => // warn + case _ => // warn + +def f5(x: String) = x match + case x => println("catch all") + case _ => println("unreachable") // warn + +def test(s: String | Null) = s match + case ss => + case _ => // warn \ No newline at end of file From 59173cdee428bff8db8e1c6b115fd3fc11bdab37 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Mon, 28 Oct 2024 18:05:05 -0400 Subject: [PATCH 729/827] Fix checkfile --- tests/patmat/null.check | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/patmat/null.check b/tests/patmat/null.check index d9c265adf377..f539a921e814 100644 --- a/tests/patmat/null.check +++ b/tests/patmat/null.check @@ -1,3 +1,3 @@ -6: Pattern Match +6: Match case Unreachable 13: Pattern Match 20: Pattern Match From 158c1df94e98f9f3ee61106103b367d5c00d259b Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Fri, 1 Nov 2024 15:06:59 -0400 Subject: [PATCH 730/827] Change projection approach --- .../tools/dotc/transform/patmat/Space.scala | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index cf4b96a52c50..463bf767a442 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -937,36 +937,36 @@ object SpaceEngine { then project(OrType(selTyp, ConstantType(Constant(null)), soft = false)) else project(selTyp) - @tailrec def recur(cases: List[CaseDef], prevs: List[Space], deferred: List[Tree]): Unit = + @tailrec def recur(cases: List[CaseDef], prevs: List[Space], deferred: List[Tree], nullCovered: Boolean): Unit = cases match case Nil => - case CaseDef(pat, guard, _) :: rest => - val curr = trace(i"project($pat)")(project(pat)) + case (c @ CaseDef(pat, guard, _)) :: rest => + val patNullable = Nullables.matchesNull(c) + val curr = 
trace(i"project($pat)")( + if patNullable + then Or(List(project(pat), Typ(ConstantType(Constant(null))))) + else project(pat)) val covered = trace("covered")(simplify(intersect(curr, targetSpace))) val prev = trace("prev")(simplify(Or(prevs))) if prev == Empty && covered == Empty then // defer until a case is reachable - recur(rest, prevs, pat :: deferred) + recur(rest, prevs, pat :: deferred, nullCovered) else for pat <- deferred.reverseIterator do report.warning(MatchCaseUnreachable(), pat.srcPos) if pat != EmptyTree // rethrow case of catch uses EmptyTree && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase - && isSubspace(covered, prev) + && isSubspace(covered, Or(List(prev, Typ(ConstantType(Constant(null)))))) then - val nullOnly = isNullable && isWildcardArg(pat) && !mayCoverNull(prev) - val msg = if nullOnly then MatchCaseOnlyNullWarning() else MatchCaseUnreachable() - report.warning(msg, pat.srcPos) + val nullOnly = isNullable && isWildcardArg(pat) && !nullCovered && !isSubspace(covered, prev) && (!ctx.explicitNulls || selTyp.isInstanceOf[FlexibleType]) + if nullOnly then report.warning(MatchCaseOnlyNullWarning() , pat.srcPos) + else if (isSubspace(covered, prev)) then report.warning(MatchCaseUnreachable(), pat.srcPos) // in redundancy check, take guard as false in order to soundly approximate - val newPrev = if (guard.isEmpty) - then if (isWildcardArg(pat)) - then Typ(ConstantType(Constant(null))) :: covered :: prevs - else covered :: prevs - else prevs - recur(rest, newPrev, Nil) - - recur(m.cases, Nil, Nil) + val newPrev = if (guard.isEmpty) then covered :: prevs else prevs + recur(rest, newPrev, Nil, nullCovered || (guard.isEmpty && patNullable)) + + recur(m.cases, Nil, Nil, false) end checkReachability def checkMatch(m: Match)(using Context): Unit = From 17cd9aa644beaf8fb99163c5a06e9772bc1abf62 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Mon, 4 Nov 2024 11:59:14 -0500 Subject: [PATCH 731/827] Add suggested changes --- .../tools/dotc/transform/patmat/Space.scala | 52 +++++++++++-------- tests/explicit-nulls/warn/i21577.check | 14 +++-- tests/explicit-nulls/warn/i21577.scala | 12 ++--- tests/patmat/null.check | 2 +- tests/warn/i20121.scala | 4 +- tests/warn/i20122.scala | 2 +- tests/warn/i20123.scala | 2 +- tests/warn/redundant-null.check | 30 ++++++----- tests/warn/redundant-null.scala | 32 +++++++----- 9 files changed, 84 insertions(+), 66 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 463bf767a442..7410d617c4a0 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -116,6 +116,7 @@ object SpaceEngine { def isSubspace(a: Space, b: Space)(using Context): Boolean = a.isSubspace(b) def canDecompose(typ: Typ)(using Context): Boolean = typ.canDecompose def decompose(typ: Typ)(using Context): List[Typ] = typ.decompose + def nullSpace(using Context): Space = Typ(ConstantType(Constant(null)), decomposed = false) /** Simplify space such that a space equal to `Empty` becomes `Empty` */ def computeSimplify(space: Space)(using Context): Space = trace(i"simplify($space)")(space match { @@ -336,6 +337,13 @@ object SpaceEngine { case pat: Ident if isBackquoted(pat) => Typ(pat.tpe, decomposed = false) + case Ident(nme.WILDCARD) => + val tp = pat.tpe.stripAnnots.widenSkolem + val isNullable = tp.isInstanceOf[FlexibleType] || tp.classSymbol.isNullableClass + 
val tpSpace = Typ(erase(tp, isValue = true), decomposed = false) + if isNullable then Or(tpSpace :: nullSpace :: Nil) + else tpSpace + case Ident(_) | Select(_, _) => Typ(erase(pat.tpe.stripAnnots.widenSkolem, isValue = true), decomposed = false) @@ -667,7 +675,7 @@ object SpaceEngine { case tp => (tp, Nil) val (tp, typeArgs) = getAppliedClass(tpOriginal) // This function is needed to get the arguments of the types that will be applied to the class. - // This is necessary because if the arguments of the types contain Nothing, + // This is necessary because if the arguments of the types contain Nothing, // then this can affect whether the class will be taken into account during the exhaustiveness check def getTypeArgs(parent: Symbol, child: Symbol, typeArgs: List[Type]): List[Type] = val superType = child.typeRef.superType @@ -923,12 +931,6 @@ object SpaceEngine { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) - def mayCoverNull(tp: Space)(using Context): Boolean = tp match - case Empty => false - case Prod(_, _, _) => false - case Typ(tp, decomposed) => tp == ConstantType(Constant(null)) - case Or(ss) => ss.exists(mayCoverNull) - def checkReachability(m: Match)(using Context): Unit = trace(i"checkReachability($m)"): val selTyp = toUnderlying(m.selector.tpe).dealias val isNullable = selTyp.isInstanceOf[FlexibleType] || selTyp.classSymbol.isNullableClass @@ -936,37 +938,41 @@ object SpaceEngine { if isNullable && !ctx.mode.is(Mode.SafeNulls) then project(OrType(selTyp, ConstantType(Constant(null)), soft = false)) else project(selTyp) - - @tailrec def recur(cases: List[CaseDef], prevs: List[Space], deferred: List[Tree], nullCovered: Boolean): Unit = + var hadNullOnly = false + @tailrec def recur(cases: List[CaseDef], prevs: List[Space], deferred: List[Tree]): Unit = cases match case Nil => - case (c @ CaseDef(pat, guard, _)) :: rest => - val patNullable = Nullables.matchesNull(c) - val curr = trace(i"project($pat)")( - if patNullable - then Or(List(project(pat), Typ(ConstantType(Constant(null))))) - else project(pat)) + case CaseDef(pat, guard, _) :: rest => + val curr = trace(i"project($pat)")(project(pat)) val covered = trace("covered")(simplify(intersect(curr, targetSpace))) val prev = trace("prev")(simplify(Or(prevs))) if prev == Empty && covered == Empty then // defer until a case is reachable - recur(rest, prevs, pat :: deferred, nullCovered) + recur(rest, prevs, pat :: deferred) else for pat <- deferred.reverseIterator do report.warning(MatchCaseUnreachable(), pat.srcPos) if pat != EmptyTree // rethrow case of catch uses EmptyTree && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase - && isSubspace(covered, Or(List(prev, Typ(ConstantType(Constant(null)))))) then - val nullOnly = isNullable && isWildcardArg(pat) && !nullCovered && !isSubspace(covered, prev) && (!ctx.explicitNulls || selTyp.isInstanceOf[FlexibleType]) - if nullOnly then report.warning(MatchCaseOnlyNullWarning() , pat.srcPos) - else if (isSubspace(covered, prev)) then report.warning(MatchCaseUnreachable(), pat.srcPos) + if isSubspace(covered, prev) then + report.warning(MatchCaseUnreachable(), pat.srcPos) + else if isNullable && !hadNullOnly && isWildcardArg(pat) + && isSubspace(covered, Or(prev :: nullSpace :: Nil)) then + // Issue OnlyNull warning only if: + // 1. The target space is nullable; + // 2. OnlyNull warning has not been issued before; + // 3. The pattern is a wildcard pattern; + // 4. 
The pattern is not covered by the previous cases, + // but covered by the previous cases with null. + hadNullOnly = true + report.warning(MatchCaseOnlyNullWarning(), pat.srcPos) // in redundancy check, take guard as false in order to soundly approximate - val newPrev = if (guard.isEmpty) then covered :: prevs else prevs - recur(rest, newPrev, Nil, nullCovered || (guard.isEmpty && patNullable)) + val newPrev = if guard.isEmpty then covered :: prevs else prevs + recur(rest, newPrev, Nil) - recur(m.cases, Nil, Nil, false) + recur(m.cases, Nil, Nil) end checkReachability def checkMatch(m: Match)(using Context): Unit = diff --git a/tests/explicit-nulls/warn/i21577.check b/tests/explicit-nulls/warn/i21577.check index acedd7a9c713..b548a5bedc30 100644 --- a/tests/explicit-nulls/warn/i21577.check +++ b/tests/explicit-nulls/warn/i21577.check @@ -1,17 +1,21 @@ -- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:5:9 -------------------------------------------- -5 | case _ => // warn +5 | case _ => // warn: null only | ^ | Unreachable case except for null (if this is intentional, consider writing case null => instead). -- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:12:9 ------------------------------------------- -12 | case _ => // warn +12 | case _ => // warn: null only | ^ | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:16:7 ------------------------------------------- +16 | case _ => // warn: null only + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). -- [E030] Match case Unreachable Warning: tests/explicit-nulls/warn/i21577.scala:20:7 ---------------------------------- -20 | case _ => // warn +20 | case _ => // warn: unreachable | ^ | Unreachable case -- [E029] Pattern Match Exhaustivity Warning: tests/explicit-nulls/warn/i21577.scala:29:27 ----------------------------- -29 |def f7(s: String | Null) = s match // warn +29 |def f7(s: String | Null) = s match // warn: not exhuastive | ^ | match may not be exhaustive. | @@ -19,7 +23,7 @@ | | longer explanation available when compiling with `-explain` -- [E029] Pattern Match Exhaustivity Warning: tests/explicit-nulls/warn/i21577.scala:36:33 ----------------------------- -36 |def f9(s: String | Int | Null) = s match // warn +36 |def f9(s: String | Int | Null) = s match // warn: not exhuastive | ^ | match may not be exhaustive. 
| diff --git a/tests/explicit-nulls/warn/i21577.scala b/tests/explicit-nulls/warn/i21577.scala index 67da6068f22c..1bba8f4da01f 100644 --- a/tests/explicit-nulls/warn/i21577.scala +++ b/tests/explicit-nulls/warn/i21577.scala @@ -2,22 +2,22 @@ def f(s: String) = val s2 = s.trim() s2 match case s3: String => - case _ => // warn + case _ => // warn: null only def f2(s: String | Null) = val s2 = s.nn.trim() s2 match case s3: String => - case _ => // warn + case _ => // warn: null only def f3(s: String | Null) = s match case s2: String => - case _ => + case _ => // warn: null only def f5(s: String) = s match case _: String => - case _ => // warn + case _ => // warn: unreachable def f6(s: String) = s.trim() match case _: String => @@ -26,13 +26,13 @@ def f6(s: String) = s.trim() match def f61(s: String) = s.trim() match case _: String => -def f7(s: String | Null) = s match // warn +def f7(s: String | Null) = s match // warn: not exhuastive case _: String => def f8(s: String | Null) = s match case _: String => case null => -def f9(s: String | Int | Null) = s match // warn +def f9(s: String | Int | Null) = s match // warn: not exhuastive case _: String => case null => \ No newline at end of file diff --git a/tests/patmat/null.check b/tests/patmat/null.check index f539a921e814..d9c265adf377 100644 --- a/tests/patmat/null.check +++ b/tests/patmat/null.check @@ -1,3 +1,3 @@ -6: Match case Unreachable +6: Pattern Match 13: Pattern Match 20: Pattern Match diff --git a/tests/warn/i20121.scala b/tests/warn/i20121.scala index ce8e3e4d74f6..b8402fa808ac 100644 --- a/tests/warn/i20121.scala +++ b/tests/warn/i20121.scala @@ -5,8 +5,8 @@ case class CC_B[A](a: A) extends T_A[A, X] val v_a: T_A[X, X] = CC_B(null) val v_b = v_a match - case CC_B(_) => 0 // warn: unreachable - case _ => 1 + case CC_B(_) => 0 + case _ => 1 // warn: null only // for CC_B[A] to match T_A[X, X] // A := X // so require X, aka T_A[Byte, Byte] diff --git a/tests/warn/i20122.scala b/tests/warn/i20122.scala index 50da42a5926c..d035a18d3b09 100644 --- a/tests/warn/i20122.scala +++ b/tests/warn/i20122.scala @@ -7,7 +7,7 @@ case class CC_E(a: CC_C[Char, Byte]) val v_a: T_B[Int, CC_A] = CC_B(CC_E(CC_C(null))) val v_b = v_a match - case CC_B(CC_E(CC_C(_))) => 0 // warn: unreachable + case CC_B(CC_E(CC_C(_))) => 0 case _ => 1 // for CC_B[A, C] to match T_B[C, CC_A] // C <: Int, ok diff --git a/tests/warn/i20123.scala b/tests/warn/i20123.scala index 32de903210b2..0af7aba5a3a5 100644 --- a/tests/warn/i20123.scala +++ b/tests/warn/i20123.scala @@ -8,7 +8,7 @@ case class CC_G[A, C](c: C) extends T_A[A, C] val v_a: T_A[Boolean, T_B[Boolean]] = CC_G(null) val v_b = v_a match { case CC_D() => 0 - case CC_G(_) => 1 // warn: unreachable + case CC_G(_) => 1 // for CC_G[A, C] to match T_A[Boolean, T_B[Boolean]] // A := Boolean, which is ok // C := T_B[Boolean], diff --git a/tests/warn/redundant-null.check b/tests/warn/redundant-null.check index 7fdfbc1718fc..9d710e18961d 100644 --- a/tests/warn/redundant-null.check +++ b/tests/warn/redundant-null.check @@ -1,52 +1,56 @@ -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:10:7 ----------------------------------------- -10 | case _: n.type => // warn +10 | case _: n.type => // warn: unreachable | ^^^^^^^^^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:12:7 ----------------------------------------- -12 | case _ => // warn +12 | case _ => // warn: unreachable | ^ | Unreachable case -- [E030] Match case Unreachable Warning: 
tests/warn/redundant-null.scala:13:7 ----------------------------------------- -13 | case _ => // warn +13 | case _ => // warn: unreachable | ^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:18:7 ----------------------------------------- -18 | case _ => 3 // warn +18 | case _ => 3 // warn: unreachable | ^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:23:7 ----------------------------------------- -23 | case _: B => // warn +23 | case _: B => // warn: unreachable | ^^^^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:24:7 ----------------------------------------- -24 | case _ => // warn +24 | case _ => // warn: unreachable | ^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:25:7 ----------------------------------------- -25 | case null => // warn +25 | case null => // warn: unreachable | ^^^^ | Unreachable case -- [E121] Pattern Match Warning: tests/warn/redundant-null.scala:30:7 -------------------------------------------------- -30 | case _ => // warn +30 | case _ => // warn: null only | ^ | Unreachable case except for null (if this is intentional, consider writing case null => instead). -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:31:7 ----------------------------------------- -31 | case null => // warn +31 | case null => // warn: unreachable | ^^^^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:32:7 ----------------------------------------- -32 | case _ => // warn +32 | case _ => // warn: unreachable | ^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:33:7 ----------------------------------------- -33 | case _ => // warn +33 | case _ => // warn: unreachable | ^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:37:7 ----------------------------------------- -37 | case _ => println("unreachable") // warn +37 | case _ => // warn: unreachable | ^ | Unreachable case -- [E030] Match case Unreachable Warning: tests/warn/redundant-null.scala:41:7 ----------------------------------------- -41 | case _ => // warn +41 | case _ => // warn: unreachable | ^ | Unreachable case +-- [E121] Pattern Match Warning: tests/warn/redundant-null.scala:45:7 -------------------------------------------------- +45 | case _ => // warn: null only + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). 
diff --git a/tests/warn/redundant-null.scala b/tests/warn/redundant-null.scala index 17da3e88350e..ec0096d78daf 100644 --- a/tests/warn/redundant-null.scala +++ b/tests/warn/redundant-null.scala @@ -7,35 +7,39 @@ val n = null def f(s: A) = s match case _: n.type => case _: A => - case _: n.type => // warn + case _: n.type => // warn: unreachable case null => - case _ => // warn - case _ => // warn + case _ => // warn: unreachable + case _ => // warn: unreachable def f2(s: A | B | C) = s match case _: A => 0 case _: C | null | _: B => 1 - case _ => 3 // warn + case _ => 3 // warn: unreachable def f3(s: A | B) = s match case _: A => case _ => - case _: B => // warn - case _ => // warn - case null => // warn + case _: B => // warn: unreachable + case _ => // warn: unreachable + case null => // warn: unreachable def f4(s: String | Int) = s match case _: Int => case _: String => - case _ => // warn - case null => // warn - case _ => // warn - case _ => // warn + case _ => // warn: null only + case null => // warn: unreachable + case _ => // warn: unreachable + case _ => // warn: unreachable def f5(x: String) = x match - case x => println("catch all") - case _ => println("unreachable") // warn + case x => + case _ => // warn: unreachable def test(s: String | Null) = s match case ss => - case _ => // warn \ No newline at end of file + case _ => // warn: unreachable + +def test2(s: String | Null) = s match + case ss: String => + case _ => // warn: null only \ No newline at end of file From 0800e0feaa0ee7e877d8c19ad0abfca1fd507cf6 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Mon, 4 Nov 2024 12:32:04 -0500 Subject: [PATCH 732/827] Fix null wildcard in SymbolLoaders --- compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 5690720a1b3f..becfcdb5861c 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -337,7 +337,7 @@ abstract class SymbolLoader extends LazyType { self => val sym = root.symbol def associatedFile = root.symbol.associatedFile match case file: AbstractFile => file - case _ => NoAbstractFile + case null => NoAbstractFile ctx.profiler.onCompletion(sym, associatedFile)(body) } From 1b48966b0097473930f2b02a41f66c10448a5839 Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Tue, 5 Nov 2024 00:42:17 -0500 Subject: [PATCH 733/827] Fix patmat test output --- tests/patmat/i12530.check | 1 - tests/patmat/null.check | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/patmat/i12530.check b/tests/patmat/i12530.check index 636347516cbc..b0605bcd95e5 100644 --- a/tests/patmat/i12530.check +++ b/tests/patmat/i12530.check @@ -1,2 +1 @@ 6: Match case Unreachable -14: Match case Unreachable diff --git a/tests/patmat/null.check b/tests/patmat/null.check index d9c265adf377..da081e6b56c0 100644 --- a/tests/patmat/null.check +++ b/tests/patmat/null.check @@ -1,3 +1,4 @@ -6: Pattern Match +6: Match case Unreachable 13: Pattern Match +18: Match case Unreachable 20: Pattern Match From 14e46e845d0775538253560c599c1b43c3bfb880 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 5 Nov 2024 10:15:24 +0000 Subject: [PATCH 734/827] Push java inner class privateWithin setting --- .../dotty/tools/dotc/core/SymbolLoaders.scala | 20 ++++++++++++------- .../dotc/core/classfile/ClassfileParser.scala | 17 +++++++--------- 2 files changed, 20 
insertions(+), 17 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 585db8488a78..0f01a687f905 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -51,8 +51,9 @@ object SymbolLoaders { */ def enterClass( owner: Symbol, name: PreName, completer: SymbolLoader, - flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = { - val cls = newClassSymbol(owner, name.toTypeName.unmangleClassName.decode, flags, completer, compUnitInfo = completer.compilationUnitInfo) + flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope, privateWithin: Symbol = NoSymbol, + )(using Context): Symbol = { + val cls = newClassSymbol(owner, name.toTypeName.unmangleClassName.decode, flags, completer, privateWithin, compUnitInfo = completer.compilationUnitInfo) enterNew(owner, cls, completer, scope) } @@ -60,10 +61,13 @@ object SymbolLoaders { */ def enterModule( owner: Symbol, name: PreName, completer: SymbolLoader, - modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = { + modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, + scope: Scope = EmptyScope, privateWithin: Symbol = NoSymbol, + )(using Context): Symbol = { val module = newModuleSymbol( owner, name.toTermName.decode, modFlags, clsFlags, (module, _) => completer.proxy.withDecls(newScope).withSourceModule(module), + privateWithin, compUnitInfo = completer.compilationUnitInfo) enterNew(owner, module, completer, scope) enterNew(owner, module.moduleClass, completer, scope) @@ -103,14 +107,16 @@ object SymbolLoaders { */ def enterClassAndModule( owner: Symbol, name: PreName, completer: SymbolLoader, - flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): (Symbol, Symbol) = { - val clazz = enterClass(owner, name, completer, flags, scope) + flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope, privateWithin: Symbol = NoSymbol, + )(using Context): Unit = { + val clazz = enterClass(owner, name, completer, flags, scope, privateWithin) val module = enterModule( owner, name, completer, modFlags = flags.toTermFlags & RetainedModuleValFlags, clsFlags = flags.toTypeFlags & RetainedModuleClassFlags, - scope = scope) - (clazz, module) + scope = scope, + privateWithin = privateWithin, + ) } /** Enter all toplevel classes and objects in file `src` into package `owner`, provided diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 4b0dec2c88b7..3af0fc6603d5 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -1059,17 +1059,14 @@ class ClassfileParser( */ private def enterOwnInnerClasses()(using Context, DataReader): Unit = { def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) = - val (cls, mod) = SymbolLoaders.enterClassAndModule( - getOwner(jflags), + SymbolLoaders.enterClassAndModule( + getOwner(jflags), entry.originalName, - new ClassfileLoader(file), - classTranslation.flags(jflags), - getScope(jflags)) - - val privateWithin = getPrivateWithin(jflags) - cls.setPrivateWithin(privateWithin) - mod.setPrivateWithin(privateWithin) - mod.sourceModule.setPrivateWithin(privateWithin) + new ClassfileLoader(file), + classTranslation.flags(jflags), + getScope(jflags), + 
getPrivateWithin(jflags), + ) for entry <- innerClasses.valuesIterator do // create a new class member for immediate inner classes From 7f47867a824f3fd38aed63eaedfa4f4d339cbd99 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 5 Nov 2024 22:18:15 +0100 Subject: [PATCH 735/827] Don't point to the compiler backlog when a compiler plugin phase crashes (#21887) closes #21783 --- compiler/src/dotty/tools/dotc/report.scala | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index e24e6be38b2b..2ccf918e12fa 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -165,13 +165,23 @@ object report: "compiler version" -> dotty.tools.dotc.config.Properties.versionString, "settings" -> settings.map(showSetting).mkString(" "), )) + val fileAReportMsg = + if ctx.phase.isInstanceOf[plugins.PluginPhase] + then + s"""| An unhandled exception was thrown in the compiler plugin named "${ctx.phase.megaPhase}". + | Please report the issue to the plugin's maintainers. + | For non-enriched exceptions, compile with -Xno-enrich-error-messages. + |""".stripMargin + else + s"""| An unhandled exception was thrown in the compiler. + | Please file a crash report here: + | https://github.com/scala/scala3/issues/new/choose + | For non-enriched exceptions, compile with -Xno-enrich-error-messages. + |""".stripMargin s""" | $errorMessage | - | An unhandled exception was thrown in the compiler. - | Please file a crash report here: - | https://github.com/scala/scala3/issues/new/choose - | For non-enriched exceptions, compile with -Xno-enrich-error-messages. + |$fileAReportMsg | |$info1 |""".stripMargin From 11c957c861dbb37f7e81d062d173175c67eca449 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 6 Nov 2024 11:16:02 +0100 Subject: [PATCH 736/827] [build] Improve, document and group versioning code in `Build.scala` (#21837) * Introduce `developedVersion` describing the target for the current release cycle * Adapt `baseVersion` to the effectively revert #21011 changes * Adapt .msi packager to use `developedVersion` as a workaround to MSI ProductInfo limitations (version without RC suffix required) * Group and document versioning related code --- .github/workflows/build-msi.yml | 4 +-- project/Build.scala | 62 ++++++++++++++++++++------------- 2 files changed, 40 insertions(+), 26 deletions(-) diff --git a/.github/workflows/build-msi.yml b/.github/workflows/build-msi.yml index 8e7cf8a9ef47..14838c589d6a 100644 --- a/.github/workflows/build-msi.yml +++ b/.github/workflows/build-msi.yml @@ -14,8 +14,8 @@ on: workflow_call: env: - # NECESSARY FLAG TO CORRECTLY CONFIGURE THE VERSION FOR SCALA - RELEASEBUILD: yes + # Release only happends when triggering CI by pushing tag + RELEASEBUILD: ${{ startsWith(github.event.ref, 'refs/tags/') && 'yes' || 'no' }} jobs: build: diff --git a/project/Build.scala b/project/Build.scala index 55f07f8bcd36..03850a5c0f0d 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -99,10 +99,39 @@ object Build { */ val referenceVersion = "3.6.0" - val baseVersion = "3.6.2" - // Will be required by some automation later - // TODO: Introduce automation and handling for RC versions before 3.6.2-RC1 - // val prereleaseVersion = s"$baseVersion-RC1" + /** Version of the Scala compiler targeted in the current release cycle + * Contains a version without RC/SNAPSHOT/NIGHTLY specific suffixes + * Should be updated ONLY after 
release or cutoff for previous release cycle. + * + * Should only be referred from `dottyVersion` or settings/tasks requiring simplified version string, + * eg. `compatMode` or Windows native distribution version. + */ + val developedVersion = "3.6.2" + + /** The version of the compiler including the RC prefix. + * Defined as common base before calculating environment specific suffixes in `dottyVersion` + * + * By default, during development cycle defined as `${developedVersion}-RC1`; + * During release candidate cycle incremented by the release officer before publishing a subsequent RC version; + * During final, stable release is set exactly to `developedVersion`. + */ + val baseVersion = s"$developedVersion-RC1" + + /** Final version of Scala compiler, controlled by environment variables. */ + val dottyVersion = { + if (isRelease) baseVersion + else if (isNightly) s"${baseVersion}-bin-${VersionUtil.commitDate}-${VersionUtil.gitHash}-NIGHTLY" + else s"${baseVersion}-bin-SNAPSHOT" + } + def isRelease = sys.env.get("RELEASEBUILD").contains("yes") + def isNightly = sys.env.get("NIGHTLYBUILD").contains("yes") + + /** Version calculate for `nonbootstrapped` projects */ + val dottyNonBootstrappedVersion = { + // Make sure sbt always computes the scalaBinaryVersion correctly + val bin = if (!dottyVersion.contains("-bin")) "-bin" else "" + dottyVersion + bin + "-nonbootstrapped" + } // LTS or Next val versionLine = "Next" @@ -117,7 +146,7 @@ object Build { /** Minor version against which we check binary compatibility. * * This must be the earliest published release in the same versioning line. - * For a baseVersion `3.M.P` the mimaPreviousDottyVersion should be set to: + * For a developedVersion `3.M.P` the mimaPreviousDottyVersion should be set to: * - `3.M.0` if `P > 0` * - `3.(M-1).0` if `P = 0` * 3.6.1 is an exception from this rule - 3.6.0 was a broken release @@ -144,7 +173,7 @@ object Build { val compatMode = { val VersionRE = """^\d+\.(\d+)\.(\d+)""".r - baseVersion match { + developedVersion match { case VersionRE(_, "0") => CompatMode.BinaryCompatible case _ => CompatMode.SourceAndBinaryCompatible } @@ -174,24 +203,6 @@ object Build { val dottyGithubUrl = "https://github.com/scala/scala3" val dottyGithubRawUserContentUrl = "https://raw.githubusercontent.com/scala/scala3" - - val isRelease = sys.env.get("RELEASEBUILD") == Some("yes") - - val dottyVersion = { - def isNightly = sys.env.get("NIGHTLYBUILD") == Some("yes") - if (isRelease) - baseVersion - else if (isNightly) - baseVersion + "-RC1-bin-" + VersionUtil.commitDate + "-" + VersionUtil.gitHash + "-NIGHTLY" - else - baseVersion + "-RC1-bin-SNAPSHOT" - } - val dottyNonBootstrappedVersion = { - // Make sure sbt always computes the scalaBinaryVersion correctly - val bin = if (!dottyVersion.contains("-bin")) "-bin" else "" - dottyVersion + bin + "-nonbootstrapped" - } - val sbtCommunityBuildVersion = "0.1.0-SNAPSHOT" val agentOptions = List( @@ -2274,6 +2285,9 @@ object Build { ) .settings( Windows / name := "scala", + // Windows/version is used to create ProductInfo - it requires a version without any -RC suffixes + // If not explicitly overriden it would try to use `dottyVersion` assigned to `dist-win-x86_64/version` + Windows / version := developedVersion, Windows / mappings := (Universal / mappings).value, Windows / packageBin := (Windows / packageBin).dependsOn(republish).value, Windows / wixFiles := (Windows / wixFiles).dependsOn(republish).value, From c63cfde48f71af8217b7ab4e6a4ac9463b71a5f7 Mon Sep 17 00:00:00 2001 From: 
Wojciech Mazur Date: Wed, 6 Nov 2024 11:26:03 +0100 Subject: [PATCH 737/827] Disable automatic release in `publish_release` (#21873) Releases of stable versions now require manual approval on https://oss.sonatype.org/ by release officer. Difference between `sonatypeBundleRelease` and `sonatypeBundleUpload` is described [here](https://github.com/xerial/sbt-sonatype?tab=readme-ov-file#commands) [skip ci] --------- Co-authored-by: Hamza Remmal --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ffa7e515b926..5562423720d8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1093,7 +1093,7 @@ jobs: asset_content_type: text/plain - name: Publish Release - run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleRelease" + run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleUpload" open_issue_on_failure: @@ -1125,7 +1125,7 @@ jobs: needs: [build-msi-package] with: # Ensure that version starts with prefix 3. - # In the future it can be adapted to compare with with git tag or version set in the build.s + # In the future it can be adapted to compare with with git tag or version set in the project/Build.scala version: "3." java-version: 8 From 0dceb7fd16e5db4df1063a77018c604f6fe741b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Tue, 5 Nov 2024 17:00:48 +0100 Subject: [PATCH 738/827] Fix #21295: Restrict `provablyDisjoint` with `Nothing`s in invariant type params. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit If `Foo[T]` is invariant in `T`, we previously concluded that `Foo[A] ⋔ Foo[B]` from `A ⋔ B`. That is however wrong if both `A` and `B` can be (instantiated to) `Nothing`. We now rule out these occurrences in two ways: * either we show that `T` corresponds to a field, like we do in the covariant case, or * we show that `A` or `B` cannot possibly be `Nothing`. The second condition is shaky at best. I would have preferred not to include it. However, introducing the former without the fallback on the latter breaks too many existing test cases. --- .../dotty/tools/dotc/core/TypeComparer.scala | 21 +++++++++++++++++-- tests/pos/i21295.scala | 8 +++++++ 2 files changed, 27 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i21295.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 16637c3286c1..17d427513e58 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -3232,6 +3232,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling end provablyDisjointClasses private def provablyDisjointTypeArgs(cls: ClassSymbol, args1: List[Type], args2: List[Type], pending: util.HashSet[(Type, Type)])(using Context): Boolean = + // sjrd: I will not be surprised when this causes further issues in the future. + // This is a compromise to be able to fix #21295 without breaking the world. + def cannotBeNothing(tp: Type): Boolean = tp match + case tp: TypeParamRef => cannotBeNothing(tp.paramInfo) + case _ => !(tp.loBound.stripTypeVar <:< defn.NothingType) + // It is possible to conclude that two types applied are disjoint by // looking at covariant type parameters if the said type parameters // are disjoint and correspond to fields. 
@@ -3240,9 +3246,20 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def covariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = provablyDisjoint(tp1, tp2, pending) && typeparamCorrespondsToField(cls.appliedRef, tparam) - // In the invariant case, direct type parameter disjointness is enough. + // In the invariant case, we have more ways to prove disjointness: + // - either the type param corresponds to a field, like in the covariant case, or + // - one of the two actual args can never be `Nothing`. + // The latter condition, as tested by `cannotBeNothing`, is ad hoc and was + // not carefully evaluated to be sound. We have it because we had to + // reintroduce the former condition to fix #21295, and alone, that broke a + // lot of existing test cases. + // Having either one of the two conditions be true is better than not requiring + // any, which was the status quo before #21295. def invariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = - provablyDisjoint(tp1, tp2, pending) + provablyDisjoint(tp1, tp2, pending) && { + typeparamCorrespondsToField(cls.appliedRef, tparam) + || (cannotBeNothing(tp1) || cannotBeNothing(tp2)) + } args1.lazyZip(args2).lazyZip(cls.typeParams).exists { (arg1, arg2, tparam) => diff --git a/tests/pos/i21295.scala b/tests/pos/i21295.scala new file mode 100644 index 000000000000..fc2db7c452d9 --- /dev/null +++ b/tests/pos/i21295.scala @@ -0,0 +1,8 @@ +sealed trait Foo[A] +final class Bar extends Foo[Nothing] + +object Test: + type Extract[T] = T match + case Foo[_] => Int + + val x: Extract[Bar] = 1 From 9d2aeacae28f970139651240dda8f78bdd86574a Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 9 Oct 2024 14:59:42 +0100 Subject: [PATCH 739/827] Allow autotupling if fn's param is a type param --- .../src/dotty/tools/dotc/typer/Applications.scala | 14 +++++++++++++- tests/pos/i21682.1.scala | 15 +++++++++++++++ tests/pos/i21682.2.scala | 7 +++++++ tests/pos/i21682.3.scala | 4 ++++ 4 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21682.1.scala create mode 100644 tests/pos/i21682.2.scala create mode 100644 tests/pos/i21682.3.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 17be2acc7378..26199aa8da37 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -2237,7 +2237,19 @@ trait Applications extends Compatibility { def isCorrectUnaryFunction(alt: TermRef): Boolean = val formals = params(alt) - formals.length == 1 && ptIsCorrectProduct(formals.head, args) + formals.length == 1 && { + formals.head match + case formal: TypeParamRef => + // While `formal` isn't a tuple type of the correct arity, + // it's a type parameter (a method type parameter presumably) + // so check its bounds allow for a tuple type of the correct arity. + // See i21682 for an example. + val tup = defn.tupleType(args.map(v => if v.tpt.isEmpty then WildcardType else typedAheadType(v.tpt).tpe)) + val TypeBounds(lo, hi) = formal.paramInfo + lo <:< tup && tup <:< hi + case formal => + ptIsCorrectProduct(formal, args) + } val numArgs = args.length if numArgs > 1 diff --git a/tests/pos/i21682.1.scala b/tests/pos/i21682.1.scala new file mode 100644 index 000000000000..7340edcaeb4d --- /dev/null +++ b/tests/pos/i21682.1.scala @@ -0,0 +1,15 @@ +sealed abstract class Gen[+T1] +given [T2]: Conversion[T2, Gen[T2]] = ??? 
+ +trait Show[T3] +given Show[Boolean] = ??? +given [A1: Show, B1: Show, C1: Show]: Show[(A1, B1, C1)] = ??? + +object ForAll: + def apply[A2: Show, B2](f: A2 => B2): Unit = ??? + def apply[A3: Show, B3: Show, C3](f: (A3, B3) => C3): Unit = ??? + def apply[A4: Show, B4](gen: Gen[A4])(f: A4 => B4): Unit = ??? + +@main def Test = + ForAll: (b1: Boolean, b2: Boolean, b3: Boolean) => + ??? diff --git a/tests/pos/i21682.2.scala b/tests/pos/i21682.2.scala new file mode 100644 index 000000000000..6717d36c78a6 --- /dev/null +++ b/tests/pos/i21682.2.scala @@ -0,0 +1,7 @@ +object ForAll: + def apply[A1, B](f: A1 => B): Unit = ??? + def apply[A1, A2, B](f: (A1, A2) => B): Unit = ??? + +@main def Test = + ForAll: (b1: Boolean, b2: Boolean, b3: Boolean) => + ??? diff --git a/tests/pos/i21682.3.scala b/tests/pos/i21682.3.scala new file mode 100644 index 000000000000..b44b9a7c91fc --- /dev/null +++ b/tests/pos/i21682.3.scala @@ -0,0 +1,4 @@ +class Test: + def foo[A1 >: (Nothing, Boolean, Nothing) <: (Any, Boolean, Any), B](f: A1 => B): Unit = ??? + def test(): Unit = + val res4 = this.foo((b1: Boolean, b2: Boolean, b3: Boolean) => ???) From 83c75dd62a90c6e60f9135b020cd508756c90d6e Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 6 Nov 2024 13:23:03 +0100 Subject: [PATCH 740/827] Add .msi artifacts to release assets (#21834) We now include .msi files produced by `build-msi-package` in release assets including both .msi file and it's sha256 Resolves #21820 --- .github/workflows/ci.yaml | 80 ++++++++++++++++++++++++++++++--------- 1 file changed, 62 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5562423720d8..19405db61066 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -737,7 +737,7 @@ jobs: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8, build-sdk-package] + needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8, build-sdk-package, build-msi-package] if: "github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/')" @@ -816,6 +816,18 @@ jobs: prepareSDK "-x86_64-apple-darwin" "dist-mac-x86_64" "./dist/mac-x86_64/" prepareSDK "-x86_64-pc-win32" "dist-win-x86_64" "./dist/win-x86_64/" + - name: Download MSI package + uses: actions/download-artifact@v4 + with: + name: scala.msi + path: . 
+ - name: Prepare MSI package + shell: bash + run: | + msiInstaller="scala3-${{ env.RELEASE_TAG }}.msi" + mv scala.msi "${msiInstaller}" + sha256sum "${msiInstaller}" > "${msiInstaller}.sha256" + # Create the GitHub release - name: Create GitHub Release id: create_gh_release @@ -829,10 +841,10 @@ jobs: draft: true prerelease: ${{ contains(env.RELEASE_TAG, '-') }} - # The following steps are generated using template: - # def template(distribution: String, suffix: String) = - # def upload(kind: String, path: String, contentType: String) = - # s"""- name: Upload $kind to GitHub Release ($distribution) + # The following upload steps are generated using template: + # val baseFileName = "scala3-${{ env.RELEASE_TAG }}" + # def upload(kind: String, path: String, contentType: String, distribution: String) = + # s"""- name: Upload $kind to GitHub Release ($distribution) # uses: actions/upload-release-asset@v1 # env: # GITHUB_TOKEN: $${{ secrets.GITHUB_TOKEN }} @@ -841,24 +853,33 @@ jobs: # asset_path: ./${path} # asset_name: ${path} # asset_content_type: ${contentType}""" - # val filename = s"scala3-$${{ env.RELEASE_TAG }}${suffix}" + # def uploadSDK(distribution: String, suffix: String) = + # val filename = s"${baseFileName}${suffix}" # s""" # # $distribution - # ${upload("zip archive", s"$filename.zip", "application/zip")} - # ${upload("zip archive SHA", s"$filename.zip.sha256", "text/plain")} - # ${upload("tar.gz archive", s"$filename.tar.gz", "application/gzip")} - # ${upload("tar.gz archive SHA", s"$filename.tar.gz.sha256", "text/plain")} + # ${upload("zip archive", s"$filename.zip", "application/zip", distribution)} + # ${upload("zip archive SHA", s"$filename.zip.sha256", "text/plain", distribution)} + # ${upload("tar.gz archive", s"$filename.tar.gz", "application/gzip", distribution)} + # ${upload("tar.gz archive SHA", s"$filename.tar.gz.sha256", "text/plain", distribution)} + # """ + # def uploadMSI() = + # val distribution = "Windows x86_64 MSI" + # s""" + # # $distribution + # ${upload(".msi file", s"${baseFileName}.msi", "application/x-msi", distribution)} + # ${upload(".msi file SHA", s"${baseFileName}.msi.sha256", "text/plain", distribution)} # """ - # @main def gen = # Seq( - # template("Universal", ""), - # template("Linux x86-64", "-x86_64-pc-linux"), - # template("Linux aarch64", "-aarch64-pc-linux"), - # template("Mac x86-64", "-x86_64-apple-darwin"), - # template("Mac aarch64", "-aarch64-apple-darwin"), - # template("Windows x86_64", "-x86_64-pc-win32") + # uploadSDK("Universal", ""), + # uploadSDK("Linux x86-64", "-x86_64-pc-linux"), + # uploadSDK("Linux aarch64", "-aarch64-pc-linux"), + # uploadSDK("Mac x86-64", "-x86_64-apple-darwin"), + # uploadSDK("Mac aarch64", "-aarch64-apple-darwin"), + # uploadSDK("Windows x86_64", "-x86_64-pc-win32"), + # uploadMSI() # ).foreach(println) + # Universal - name: Upload zip archive to GitHub Release (Universal) uses: actions/upload-release-asset@v1 @@ -1092,6 +1113,27 @@ jobs: asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz.sha256 asset_content_type: text/plain + + # Windows x86_64 MSI + - name: Upload .msi file to GitHub Release (Windows x86_64 MSI) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}.msi + asset_name: scala3-${{ env.RELEASE_TAG }}.msi + asset_content_type: application/x-msi + - name: Upload .msi file SHA to GitHub Release (Windows x86_64 MSI) + uses: 
actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}.msi.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}.msi.sha256 + asset_content_type: text/plain + - name: Publish Release run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleUpload" @@ -1118,7 +1160,9 @@ jobs: build-msi-package: uses: ./.github/workflows/build-msi.yml - if : github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]') + if : + (github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]')) || + (github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/')) test-msi-package: uses: ./.github/workflows/test-msi.yml From 74920d38d0e9943a8c7f1550d17d2eec97e03d6d Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Wed, 6 Nov 2024 13:33:23 +0100 Subject: [PATCH 741/827] Don't typeAhead erroneous parsing trees related to export statements --- .../src/dotty/tools/dotc/typer/Namer.scala | 2 +- tests/neg/i20511-1.check | 32 +++++++++++++++++++ tests/neg/i20511-1.scala | 2 +- tests/neg/i20511.check | 12 +++++++ 4 files changed, 46 insertions(+), 2 deletions(-) create mode 100644 tests/neg/i20511-1.check create mode 100644 tests/neg/i20511.check diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 83964417a6f1..5b8cac9b1684 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1131,7 +1131,7 @@ class Namer { typer: Typer => private def exportForwarders(exp: Export, pathMethod: Symbol)(using Context): List[tpd.MemberDef] = val buf = new mutable.ListBuffer[tpd.MemberDef] val Export(expr, selectors) = exp - if expr.isEmpty then + if expr.isEmpty || selectors.exists(_.imported.name == nme.ERROR) then report.error(em"Export selector must have prefix and `.`", exp.srcPos) return Nil diff --git a/tests/neg/i20511-1.check b/tests/neg/i20511-1.check new file mode 100644 index 000000000000..3f64940bb4fe --- /dev/null +++ b/tests/neg/i20511-1.check @@ -0,0 +1,32 @@ +-- [E083] Type Error: tests/neg/i20511-1.scala:7:7 --------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^^^^^^^^ + | Int => Double is not a valid export prefix, since it is not an immutable path + | + | longer explanation available when compiling with `-explain` +-- [E083] Type Error: tests/neg/i20511-1.scala:7:27 -------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^^^^^^ + | Any is not a valid export prefix, since it is not an immutable path + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i20511-1.scala:7:38 -------------------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^ + | no eligible member apply at { + | def $anonfun(crustType: Double): Double = pakiet.crustPrice(crustType) + | closure(pakiet.$anonfun:Any) + | } +-- [E083] Type Error: tests/neg/i20511-1.scala:7:45 -------------------------------------------------------------------- +7 
|export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^^^^^^ + | Any is not a valid export prefix, since it is not an immutable path + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i20511-1.scala:7:56 -------------------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^^ + | no eligible member unlift at { + | def $anonfun(crustType: Double): Double = pakiet.crustPrice(crustType) + | closure(pakiet.$anonfun:Any) + | } diff --git a/tests/neg/i20511-1.scala b/tests/neg/i20511-1.scala index 03bd475ffafd..882520b55c07 100644 --- a/tests/neg/i20511-1.scala +++ b/tests/neg/i20511-1.scala @@ -4,4 +4,4 @@ def toppingPrice(size: Int): Double = ??? def crustPrice(crustType: Double): Double = ??? -export toppingPrice.apply, crustPrice.unlift // error // error // error +export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error diff --git a/tests/neg/i20511.check b/tests/neg/i20511.check new file mode 100644 index 000000000000..27c8126d43ab --- /dev/null +++ b/tests/neg/i20511.check @@ -0,0 +1,12 @@ +-- [E040] Syntax Error: tests/neg/i20511.scala:7:19 -------------------------------------------------------------------- +7 |export toppingPrice, crustPrice // error // error + | ^ + | '.' expected, but ',' found +-- [E040] Syntax Error: tests/neg/i20511.scala:8:0 --------------------------------------------------------------------- +8 |val i = 1 // error + |^^^ + |'.' expected, but 'end of statement' found +-- Error: tests/neg/i20511.scala:7:21 ---------------------------------------------------------------------------------- +7 |export toppingPrice, crustPrice // error // error + | ^^^^^^^^^^ + | Export selector must have prefix and `.` From 0a04b68eeda258e7fb3cec8551c19d49c4292679 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 27 Aug 2024 22:35:19 +0100 Subject: [PATCH 742/827] Remove tvars introduced while testing normalizedCompatible --- .../dotty/tools/dotc/core/TyperState.scala | 5 ++-- .../src/dotty/tools/dotc/core/Types.scala | 3 ++- .../tools/dotc/printing/Formatting.scala | 2 +- .../tools/dotc/printing/PlainPrinter.scala | 2 +- .../dotty/tools/dotc/typer/ProtoTypes.scala | 8 +++++++ .../dotty/tools/dotc/typer/TypeAssigner.scala | 17 ++++++------- .../src/dotty/tools/dotc/typer/Typer.scala | 24 ++++++++++++++++++- tests/pos/interleaving-overload.cleanup.scala | 9 +++++++ tests/pos/zipped.min.scala | 15 ++++++++++++ 9 files changed, 71 insertions(+), 14 deletions(-) create mode 100644 tests/pos/interleaving-overload.cleanup.scala create mode 100644 tests/pos/zipped.min.scala diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index 160d7749de61..d4345916ba77 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -139,14 +139,15 @@ class TyperState() { def uncommittedAncestor: TyperState = if (isCommitted && previous != null) previous.uncheckedNN.uncommittedAncestor else this - /** Commit typer state so that its information is copied into current typer state + /** Commit `this` typer state by copying information into the current typer state, + * where "current" means contextual, so meaning `ctx.typerState`. 
* In addition (1) the owning state of undetermined or temporarily instantiated * type variables changes from this typer state to the current one. (2) Variables * that were temporarily instantiated in the current typer state are permanently * instantiated instead. * * A note on merging: An interesting test case is isApplicableSafe.scala. It turns out that this - * requires a context merge using the new `&' operator. Sequence of actions: + * requires a context merge using the new `&` operator. Sequence of actions: * 1) Typecheck argument in typerstate 1. * 2) Cache argument. * 3) Evolve same typer state (to typecheck other arguments, say) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index d8a4453325f2..ca9d73df03aa 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -4124,6 +4124,7 @@ object Types extends TypeUtils { protected def prefixString: String = companion.prefixString } + // Actually.. not cached. MethodOrPoly are `UncachedGroundType`s. final class CachedMethodType(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type, val companion: MethodTypeCompanion) extends MethodType(paramNames)(paramInfosExp, resultTypeExp) @@ -4884,7 +4885,7 @@ object Types extends TypeUtils { def origin: TypeParamRef = currentOrigin /** Set origin to new parameter. Called if we merge two conflicting constraints. - * See OrderingConstraint#merge, OrderingConstraint#rename + * See OrderingConstraint#merge */ def setOrigin(p: TypeParamRef) = currentOrigin = p diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 5870731dadfa..ccd7b4e4e282 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -184,7 +184,7 @@ object Formatting { * The idea is to do this for known cases that are useful and then fall back * on regular syntax highlighting for the cases which are unhandled. * - * Please not that if used in combination with `disambiguateTypes` the + * Please note that if used in combination with `disambiguateTypes` the * correct `Context` for printing should also be passed when calling the * method. 
* diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index b5ed3bdb4fa7..cac82eb0c4bd 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -253,7 +253,7 @@ class PlainPrinter(_ctx: Context) extends Printer { toTextCapturing(parent, refsText, "") ~ Str("R").provided(printDebug) else toText(parent) case tp: PreviousErrorType if ctx.settings.XprintTypes.value => - "" // do not print previously reported error message because they may try to print this error type again recuresevely + "" // do not print previously reported error message because they may try to print this error type again recursively case tp: ErrorType => s"" case tp: WildcardType => diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 53e0b456ed9a..9a94c50deb7f 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -71,6 +71,14 @@ object ProtoTypes { |constraint was: ${ctx.typerState.constraint} |constraint now: ${newctx.typerState.constraint}""") if result && (ctx.typerState.constraint ne newctx.typerState.constraint) then + // Remove all type lambdas and tvars introduced by testCompat + for tvar <- newctx.typerState.ownedVars do + val tl = tvar.origin.binder + newctx.typerState.ownedVars -= tvar + if newctx.typerState.constraint.contains(tl) then + newctx.typerState.constraint = newctx.typerState.constraint.remove(tl)(using newctx) + + // commit any remaining changes in typer state newctx.typerState.commit() result case _ => testCompat diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index fd16f0de5f3a..f74ab65b6e4a 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -360,20 +360,21 @@ trait TypeAssigner { resultType1) } } + else if !args.hasSameLengthAs(paramNames) then + wrongNumberOfTypeArgs(fn.tpe, pt.typeParams, args, tree.srcPos) else { // Make sure arguments don't contain the type `pt` itself. - // make a copy of the argument if that's the case. + // Make a copy of `pt` if that's the case. // This is done to compensate for the fact that normally every // reference to a polytype would have to be a fresh copy of that type, // but we want to avoid that because it would increase compilation cost. // See pos/i6682a.scala for a test case where the defensive copying matters. 
- val ensureFresh = new TypeMap with CaptureSet.IdempotentCaptRefMap: - def apply(tp: Type) = mapOver( - if tp eq pt then pt.newLikeThis(pt.paramNames, pt.paramInfos, pt.resType) - else tp) - val argTypes = args.tpes.mapConserve(ensureFresh) - if (argTypes.hasSameLengthAs(paramNames)) pt.instantiate(argTypes) - else wrongNumberOfTypeArgs(fn.tpe, pt.typeParams, args, tree.srcPos) + val needsFresh = new ExistsAccumulator(_ eq pt, StopAt.None, forceLazy = false) + val argTypes = args.tpes + val pt1 = if argTypes.exists(needsFresh(false, _)) then + pt.newLikeThis(pt.paramNames, pt.paramInfos, pt.resType) + else pt + pt1.instantiate(argTypes) } } case err: ErrorType => diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 817e7baf1c8c..e8b5813c83bd 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4754,7 +4754,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var typeArgs = tree match case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo.map(TypeTree(_)) case _ => Nil - if typeArgs.isEmpty then typeArgs = constrained(poly, tree)._2.map(_.wrapInTypeTree(tree)) + if typeArgs.isEmpty then + val poly1 = tree match + case Select(qual, nme.apply) => qual.tpe.widen match + case defn.PolyFunctionOf(_) => + // Given a poly function, like the one in i6682a: + // val v = [T] => (y:T) => (x:y.type) => 3 + // It's possible to apply `v(v)` which extends to: + // v.apply[?T](v) + // Requiring the circular constraint `v <: ?T`, + // (because type parameter T occurs in v's type). + // So we create a fresh copy of the outer + // poly method type, so we now extend to: + // v.apply[?T'](v) + // Where `?T'` is a type var for a T' type parameter, + // leading to the non-circular `v <: ?T'` constraint. + // + // This also happens in `assignType(tree: untpd.TypeApply, ..)` + // to avoid any type arguments, containing the type lambda, + // being applied to the very same type lambda. 
+ poly.newLikeThis(poly.paramNames, poly.paramInfos, poly.resType) + case _ => poly + case _ => poly + typeArgs = constrained(poly1, tree)._2.map(_.wrapInTypeTree(tree)) convertNewGenericArray(readapt(tree.appliedToTypeTrees(typeArgs))) case wtp => val isStructuralCall = wtp.isValueType && isStructuralTermSelectOrApply(tree) diff --git a/tests/pos/interleaving-overload.cleanup.scala b/tests/pos/interleaving-overload.cleanup.scala new file mode 100644 index 000000000000..6eb2f16de15d --- /dev/null +++ b/tests/pos/interleaving-overload.cleanup.scala @@ -0,0 +1,9 @@ +// A minimisation of interleaving-overload +// Used while developing the tvar/tl clearnup in normalizedCompatible +class B[U] +class Test(): + def fn[T]: [U] => Int => B[U] = [U] => (x: Int) => new B[U]() + def test(): Unit = + fn(1) + fn(2) + () diff --git a/tests/pos/zipped.min.scala b/tests/pos/zipped.min.scala new file mode 100644 index 000000000000..5d15b3fae240 --- /dev/null +++ b/tests/pos/zipped.min.scala @@ -0,0 +1,15 @@ +// Justifies the need for TypeApply in tryInsertImplicitOnQualifier +// after failing ys.map[?B, C] using Zipped2's map +// we want to try ys.map[?B] using Coll's map, after toColl +final class Coll[+A]: + def map[B](f: A => B): Coll[B] = new Coll[B] + def lazyZip[B](that: Coll[B]): Zipped2[A, B] = new Zipped2[A, B](this, that) +final class Zipped2[+X, +Y](xs: Coll[X], ys: Coll[Y]): + def map[B, C](f: (X, Y) => B): Coll[C] = new Coll[C] +object Zipped2: + import scala.language.implicitConversions + implicit def toColl[X, Y](zipped2: Zipped2[X, Y]): Coll[(X, Y)] = new Coll[(X, Y)] +class Test: + def test(xs: Coll[Int]): Unit = + val ys = xs.lazyZip(xs) + ys.map((x: (Int, Int)) => x._1 + x._2) From f9145d73a000cc8391cf2fe3e2b9fe1c0b4683a7 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 6 Nov 2024 15:32:10 +0000 Subject: [PATCH 743/827] Move makePackageObjPrefixExplicit to TypeUtils --- .../dotty/tools/dotc/core/TypeErasure.scala | 1 - .../src/dotty/tools/dotc/core/TypeOps.scala | 30 ----------------- .../src/dotty/tools/dotc/core/TypeUtils.scala | 32 ++++++++++++++++++- .../tools/dotc/core/tasty/TreeUnpickler.scala | 2 +- .../dotty/tools/dotc/typer/TypeAssigner.scala | 2 +- 5 files changed, 33 insertions(+), 34 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 9491bdab9de8..33a1b6ae789e 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -5,7 +5,6 @@ package core import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, StdNames.*, Phases.* import Flags.JavaDefined import Uniques.unique -import TypeOps.makePackageObjPrefixExplicit import backend.sjs.JSDefinitions import transform.ExplicitOuter.* import transform.ValueClasses.* diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 79eac7bde38d..a6981c7c6e95 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -560,36 +560,6 @@ object TypeOps: widenMap(tp) } - /** If `tpe` is of the form `p.x` where `p` refers to a package - * but `x` is not owned by a package, expand it to - * - * p.package.x - */ - def makePackageObjPrefixExplicit(tpe: NamedType)(using Context): Type = { - def tryInsert(pkgClass: SymDenotation): Type = pkgClass match { - case pkg: PackageClassDenotation => - var sym = tpe.symbol - if !sym.exists && tpe.denot.isOverloaded then - // we know 
that all alternatives must come from the same package object, since - // otherwise we would get "is already defined" errors. So we can take the first - // symbol we see. - sym = tpe.denot.alternatives.head.symbol - val pobj = pkg.packageObjFor(sym) - if (pobj.exists) tpe.derivedSelect(pobj.termRef) - else tpe - case _ => - tpe - } - if (tpe.symbol.isRoot) - tpe - else - tpe.prefix match { - case pre: ThisType if pre.cls.is(Package) => tryInsert(pre.cls) - case pre: TermRef if pre.symbol.is(Package) => tryInsert(pre.symbol.moduleClass) - case _ => tpe - } - } - /** An argument bounds violation is a triple consisting of * - the argument tree * - a string "upper" or "lower" indicating which bound is violated diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index 485272fe71c5..f343d7227bf8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -3,7 +3,7 @@ package dotc package core import TypeErasure.ErasedValueType -import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* +import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.*, SymDenotations.* import Names.{Name, TermName} import Constants.Constant @@ -186,6 +186,36 @@ class TypeUtils: case self: Types.ThisType => self.cls == cls case _ => false + /** If `self` is of the form `p.x` where `p` refers to a package + * but `x` is not owned by a package, expand it to + * + * p.package.x + */ + def makePackageObjPrefixExplicit(using Context): Type = + def tryInsert(tpe: NamedType, pkgClass: SymDenotation): Type = pkgClass match + case pkg: PackageClassDenotation => + var sym = tpe.symbol + if !sym.exists && tpe.denot.isOverloaded then + // we know that all alternatives must come from the same package object, since + // otherwise we would get "is already defined" errors. So we can take the first + // symbol we see. 
+ sym = tpe.denot.alternatives.head.symbol + val pobj = pkg.packageObjFor(sym) + if pobj.exists then tpe.derivedSelect(pobj.termRef) + else tpe + case _ => + tpe + self match + case tpe: NamedType => + if tpe.symbol.isRoot then + tpe + else + tpe.prefix match + case pre: ThisType if pre.cls.is(Package) => tryInsert(tpe, pre.cls) + case pre: TermRef if pre.symbol.is(Package) => tryInsert(tpe, pre.symbol.moduleClass) + case _ => tpe + case tpe => tpe + /** Strip all outer refinements off this type */ def stripRefinement: Type = self match case self: RefinedOrRecType => self.parent.stripRefinement diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index e62db9af520a..44b305db43c8 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1272,7 +1272,7 @@ class TreeUnpickler(reader: TastyReader, val tpe0 = name match case name: TypeName => TypeRef(qualType, name, denot) case name: TermName => TermRef(qualType, name, denot) - val tpe = TypeOps.makePackageObjPrefixExplicit(tpe0) + val tpe = tpe0.makePackageObjPrefixExplicit ConstFold.Select(untpd.Select(qual, name).withType(tpe)) def completeSelect(name: Name, sig: Signature, target: Name): Select = diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index fd16f0de5f3a..bf5a75ff508a 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -85,7 +85,7 @@ trait TypeAssigner { defn.FromJavaObjectType else tpe match case tpe: NamedType => - val tpe1 = TypeOps.makePackageObjPrefixExplicit(tpe) + val tpe1 = tpe.makePackageObjPrefixExplicit if tpe1 ne tpe then accessibleType(tpe1, superAccess) else From 63849a01711f7ae2c837b2acc5093d522dbe4b10 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 6 Nov 2024 18:09:07 +0100 Subject: [PATCH 744/827] Set developedVersion to 3.6.3. (#21892) It was decided that 3.6.2 would be the first (official) stable release of the 3.6 series. 3.6.2 would be based on 3.6.0/3.6.1 release but including backported fixes 3.6.3 is treated as 1st real patch release, it's still being developed in current development cycle. --- project/Build.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index 03850a5c0f0d..49d990b639c4 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -102,15 +102,15 @@ object Build { /** Version of the Scala compiler targeted in the current release cycle * Contains a version without RC/SNAPSHOT/NIGHTLY specific suffixes * Should be updated ONLY after release or cutoff for previous release cycle. - * - * Should only be referred from `dottyVersion` or settings/tasks requiring simplified version string, + * + * Should only be referred from `dottyVersion` or settings/tasks requiring simplified version string, * eg. `compatMode` or Windows native distribution version. */ - val developedVersion = "3.6.2" + val developedVersion = "3.6.3" - /** The version of the compiler including the RC prefix. + /** The version of the compiler including the RC prefix. 
* Defined as common base before calculating environment specific suffixes in `dottyVersion` - * + * * By default, during development cycle defined as `${developedVersion}-RC1`; * During release candidate cycle incremented by the release officer before publishing a subsequent RC version; * During final, stable release is set exactly to `developedVersion`. From cecd05356beecd232053d4c593af54bcc12cad0e Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 6 Nov 2024 18:16:33 +0100 Subject: [PATCH 745/827] Revert "Drop inaccessible subclasses from refineUsingParent" --- .../src/dotty/tools/dotc/core/Decorators.scala | 2 +- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 6 +----- tests/pos/i21790.scala | 14 -------------- 3 files changed, 2 insertions(+), 20 deletions(-) delete mode 100644 tests/pos/i21790.scala diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 96a2d45db80d..29d4b3fa4052 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -292,7 +292,7 @@ object Decorators { case _ => String.valueOf(x).nn /** Returns the simple class name of `x`. */ - def className: String = if x == null then "" else x.getClass.getSimpleName.nn + def className: String = x.getClass.getSimpleName.nn extension [T](x: T) def assertingErrorsReported(using Context): T = { diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 79eac7bde38d..2403a6e22bc6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -930,11 +930,7 @@ object TypeOps: for tp <- mixins.reverseIterator do protoTp1 <:< tp maximizeType(protoTp1, NoSpan) - val inst = wildApprox(protoTp1) - if !inst.classSymbol.exists then - // E.g. i21790, can't instantiate S#CA as a subtype of O.A, because O.CA isn't accessible - NoType - else inst + wildApprox(protoTp1) } if (protoTp1 <:< tp2) instantiate() diff --git a/tests/pos/i21790.scala b/tests/pos/i21790.scala deleted file mode 100644 index 0cc7db935ac7..000000000000 --- a/tests/pos/i21790.scala +++ /dev/null @@ -1,14 +0,0 @@ -package p - -trait S: - sealed trait A - private class CA() extends A - -object O extends S - -trait T - -class Test: - def f(e: T) = e match - case _: O.A => - case _ => From 74349e85a7450e6914b1bbc2d803edf341d23395 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 11:32:05 +0200 Subject: [PATCH 746/827] Add an infix shorthand for `Tuple.Append` Addressing #19175 --- library/src/scala/Tuple.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 8074fe3664e5..343194d06488 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -22,8 +22,8 @@ sealed trait Tuple extends Product { runtime.Tuples.toIArray(this) /** Return a copy of `this` tuple with an element appended */ - inline def :* [This >: this.type <: Tuple, L] (x: L): Append[This, L] = - runtime.Tuples.append(x, this).asInstanceOf[Append[This, L]] + inline def :* [This >: this.type <: Tuple, L] (x: L): This :* L = + runtime.Tuples.append(x, this).asInstanceOf[This :* L] /** Return a new tuple by prepending the element to `this` tuple. 
* This operation is O(this.size) @@ -94,6 +94,9 @@ object Tuple { case x *: xs => x *: Append[xs, Y] } + /** An infix shorthand for `Append[X, Y]` */ + infix type :*[X <: Tuple, Y] = Append[X, Y] + /** Type of the head of a tuple */ type Head[X <: Tuple] = X match { case x *: _ => x From 4e47edfa789ca8f4c2175ebfb95dc43f09e29aa3 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Jul 2024 10:51:38 +0200 Subject: [PATCH 747/827] Add an infix shorthand for `Tuple.Concat` Addressing #19175 --- library/src/scala/Tuple.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 343194d06488..bf05b617a51d 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -34,8 +34,8 @@ sealed trait Tuple extends Product { /** Return a new tuple by concatenating `this` tuple with `that` tuple. * This operation is O(this.size + that.size) */ - inline def ++ [This >: this.type <: Tuple](that: Tuple): Concat[This, that.type] = - runtime.Tuples.concat(this, that).asInstanceOf[Concat[This, that.type]] + inline def ++ [This >: this.type <: Tuple](that: Tuple): This ++ that.type = + runtime.Tuples.concat(this, that).asInstanceOf[This ++ that.type] /** Return the size (or arity) of the tuple */ inline def size[This >: this.type <: Tuple]: Size[This] = @@ -126,6 +126,9 @@ object Tuple { case x1 *: xs1 => x1 *: Concat[xs1, Y] } + /** An infix shorthand for `Concat[X, Y]` */ + infix type ++[X <: Tuple, +Y <: Tuple] = Concat[X, Y] + /** Type of the element at position N in the tuple X */ type Elem[X <: Tuple, N <: Int] = X match { case x *: xs => From 995d63b140f1078a7c853d92d8fce2138c474c1e Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 30 Jul 2024 14:27:50 +0200 Subject: [PATCH 748/827] Combine cases of `Tuple.Zip` disjoint from `(h1 *: t1, h2 *: t2)` If we reach the second case of `Zip[T1 <: Tuple, T2 <: Tuple]`, then we know `(T1, T2)` is disjoint from `(NonEmptyTuple, NonEmptyTuple)`, from which we can conclude at least one of the two is an `EmptyTuple`. Addressing #19175 --- library/src/scala/Tuple.scala | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 8074fe3664e5..8291e17a5cfc 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -175,15 +175,12 @@ object Tuple { } /** Given two tuples, `A1 *: ... *: An * At` and `B1 *: ... *: Bn *: Bt` - * where at least one of `At` or `Bt` is `EmptyTuple` or `Tuple`, - * returns the tuple type `(A1, B1) *: ... *: (An, Bn) *: Ct` - * where `Ct` is `EmptyTuple` if `At` or `Bt` is `EmptyTuple`, otherwise `Ct` is `Tuple`. + * where at least one of `At` or `Bt` is `EmptyTuple`, + * returns the tuple type `(A1, B1) *: ... *: (An, Bn) *: EmptyTuple`. */ type Zip[T1 <: Tuple, T2 <: Tuple] <: Tuple = (T1, T2) match { case (h1 *: t1, h2 *: t2) => (h1, h2) *: Zip[t1, t2] - case (EmptyTuple, _) => EmptyTuple - case (_, EmptyTuple) => EmptyTuple - case _ => Tuple + case _ => EmptyTuple } /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... 
Tn)` */ From caac72a3c8167a047f5c4180890856108b59bfd4 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Mon, 30 Sep 2024 17:20:43 +0200 Subject: [PATCH 749/827] Do not warn about expected missing positions in quotes reflect.Symbols.pos --- .../src/scala/quoted/runtime/impl/QuotesImpl.scala | 3 ++- tests/pos-macros/i21672/Macro_1.scala | 10 ++++++++++ tests/pos-macros/i21672/Test_2.scala | 3 +++ 3 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 tests/pos-macros/i21672/Macro_1.scala create mode 100644 tests/pos-macros/i21672/Test_2.scala diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index ef2eacf42225..22be293c3562 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -2698,9 +2698,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler if self.exists then val symPos = self.sourcePos if symPos.exists then Some(symPos) - else + else if self.source.exists then if xCheckMacro then report.warning(s"Missing symbol position (defaulting to position 0): $self\nThis is a compiler bug. Please report it.") Some(self.source.atSpan(dotc.util.Spans.Span(0))) + else None else None def docstring: Option[String] = diff --git a/tests/pos-macros/i21672/Macro_1.scala b/tests/pos-macros/i21672/Macro_1.scala new file mode 100644 index 000000000000..2e17631d6cf4 --- /dev/null +++ b/tests/pos-macros/i21672/Macro_1.scala @@ -0,0 +1,10 @@ +object Repro { + inline def apply(): Unit = ${ applyImpl } + + import scala.quoted.* + def applyImpl(using q: Quotes): Expr[Unit] = { + import q.reflect.* + report.info(TypeRepr.of[Some[String]].typeSymbol.pos.toString) + '{ () } + } +} diff --git a/tests/pos-macros/i21672/Test_2.scala b/tests/pos-macros/i21672/Test_2.scala new file mode 100644 index 000000000000..b164962100af --- /dev/null +++ b/tests/pos-macros/i21672/Test_2.scala @@ -0,0 +1,3 @@ +//> using options -Xfatal-warnings +object Test: + Repro() From d8ba366eecc3563b5df8fbb0c8ee5131d3f3479b Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Thu, 7 Nov 2024 19:54:17 +0100 Subject: [PATCH 750/827] Ensure ChromeTraceTest event timestamps are emitted in correct invervals --- .../tools/dotc/profile/ChromeTraceTest.scala | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala index 07dc53da1f83..a5abf86b84c5 100644 --- a/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala +++ b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala @@ -54,11 +54,11 @@ class ChromeTraceTest: val testStart = System.nanoTime() testTraceOutputs{ tracer => tracer.traceDurationEventStart(cat = "test1", name = "event1") - LockSupport.parkNanos(2.millis.toNanos) + sleep(2.millis) tracer.traceDurationEventStart(cat = "test2", name = "event2", colour = "RED", pidSuffix = "pid-suffix") - LockSupport.parkNanos(4.millis.toNanos) + sleep(4.millis) tracer.traceDurationEventEnd(cat = "test2", name = "event2") - LockSupport.parkNanos(8.millis.toNanos) + sleep(8.millis) tracer.traceDurationEventEnd(cat = "test1", name = "event1", colour = "RED", pidSuffix = "pid-suffix") }{ case """{"traceEvents":[""" :: @@ -89,5 +89,18 @@ class ChromeTraceTest: assertTrue(ts4 >= ts3 + 8.millis.toMicros) case _ => fail("unreachable") } + } + } + + private def sleep(duration: FiniteDuration): Unit = { + // A 
bit of additional precautions to ensure we don't continue execution to early + // Both LockSuppport and Thread.sleep can return earlier then expected (depending on OS) + var remainingNanos = duration.toNanos + val deadline = System.nanoTime() + remainingNanos + while + remainingNanos = deadline - System.nanoTime() + remainingNanos > 0 + do + val millis = NANOSECONDS.toMillis(remainingNanos) + Thread.sleep(millis, (remainingNanos % 1.millis.toNanos).toInt) } -} From 27390b58da657536594867681c8f21fb4d4a63fa Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Fri, 8 Nov 2024 16:21:30 +0100 Subject: [PATCH 751/827] Remove unused variable [skip ci] --- compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala b/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala index a1bb5f9552c5..b8b40ab841c2 100644 --- a/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala +++ b/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala @@ -5,7 +5,6 @@ import scala.annotation.internal.sharable // Based on NameTransformer but dedicated for JSON encoding rules object JsonNameTransformer { private val nops = 128 - private val ncodes = 26 * 26 @sharable private val op2code = new Array[String](nops) private def enterOp(op: Char, code: String) = op2code(op.toInt) = code From 407dacbe800b9768d971f89c7e6d57acae32cfa1 Mon Sep 17 00:00:00 2001 From: Lunfu Zhong Date: Fri, 1 Nov 2024 15:29:49 +0800 Subject: [PATCH 752/827] Fix typos. --- docs/_docs/contributing/architecture/types.md | 2 +- docs/_docs/internals/gadts.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/_docs/contributing/architecture/types.md b/docs/_docs/contributing/architecture/types.md index ed8995c08643..bf96d33b6a3c 100644 --- a/docs/_docs/contributing/architecture/types.md +++ b/docs/_docs/contributing/architecture/types.md @@ -108,7 +108,7 @@ Ground Type has no meaningful underlying type, typically it is the type of metho definitions, but also union types and intersection types, along with utility types of the compiler. -Here's a diagram, serving as the mental model of the most important and distinct types available after the `typer` phase, derived from [dotty/tools/dotc/core/Types.scala][1]: +Here's a diagram, serving as the mental model of the most important and distinct types available after the `typer` phase, derived from [Types.scala]: ``` Type -+- proxy_type --+- NamedType --------+- TypeRef diff --git a/docs/_docs/internals/gadts.md b/docs/_docs/internals/gadts.md index 58f511c946c3..9a96043fc299 100644 --- a/docs/_docs/internals/gadts.md +++ b/docs/_docs/internals/gadts.md @@ -70,7 +70,7 @@ Right now, we record GADT constraints for: - function/method type parameters - class type parameters -There is a branch on the way which will also record them for type members (so path-dependent types) and singleton types. It has a paper associated: "Implementing path-depepdent GADTs for Scala 3". +There is a branch on the way which will also record them for type members (so path-dependent types) and singleton types. It has a paper associated: "Implementing path-dependent GADTs for Scala 3". ### What are necessary relationships? Any examples? 
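
For reference, the surrounding notes state that GADT constraints are currently recorded for function/method type parameters and class type parameters. A minimal Scala sketch of the kind of code this refers to (purely illustrative, not taken from any patch above): matching on a constructor that fixes a class type parameter lets the compiler assume that parameter's identity inside the branch.

    sealed trait Expr[A]
    final case class IntLit(value: Int) extends Expr[Int]

    def eval[A](e: Expr[A]): A = e match
      case IntLit(v) => v  // the GADT constraint A =:= Int is recorded here, so `v: Int` is accepted as `A`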
From a62a636fd92bf7d2161eae2de1705296d5526c2f Mon Sep 17 00:00:00 2001 From: Alden Torres Date: Sat, 9 Nov 2024 11:03:15 -0500 Subject: [PATCH 753/827] doc fix, removed repeated use of the word with --- .github/workflows/ci.yaml | 2 +- compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala | 2 +- compiler/src/dotty/tools/dotc/printing/Printer.scala | 2 +- compiler/src/dotty/tools/dotc/typer/Applications.scala | 2 +- docs/_docs/reference/experimental/better-fors.md | 2 +- tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala | 2 +- tests/pos-with-compiler-cc/dotc/printing/Printer.scala | 2 +- tests/pos-with-compiler-cc/dotc/typer/Applications.scala | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 19405db61066..303922719b5b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1169,7 +1169,7 @@ jobs: needs: [build-msi-package] with: # Ensure that version starts with prefix 3. - # In the future it can be adapted to compare with with git tag or version set in the project/Build.scala + # In the future it can be adapted to compare with git tag or version set in the project/Build.scala version: "3." java-version: 8 diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index 565ad72c0d9d..35b24ab57b00 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -1143,7 +1143,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * - Every time when generating an ATHROW, a new basic block is started. * - During classfile writing, such basic blocks are found to be dead: no branches go there * - Eliminating dead code would probably require complex shifts in the output byte buffer - * - But there's an easy solution: replace all code in the dead block with with + * - But there's an easy solution: replace all code in the dead block with * `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same * - The corresponding stack frame can be easily generated: on entering a dead the block, * the frame requires a single Throwable on the stack. diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 297dc31ea94a..9f485ee84cda 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -71,7 +71,7 @@ abstract class Printer { def changePrec(prec: Precedence)(op: => Text): Text = if (prec < this.prec) atPrec(prec) ("(" ~ op ~ ")") else atPrec(prec)(op) - /** The name, possibly with with namespace suffix if debugNames is set: + /** The name, possibly with namespace suffix if debugNames is set: * /L for local names, /V for other term names, /T for type names */ def nameString(name: Name): String diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 17be2acc7378..6bb95e20fcaf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1348,7 +1348,7 @@ trait Applications extends Compatibility { tree } - /** Is `tp` a unary function type or an overloaded type with with only unary function + /** Is `tp` a unary function type or an overloaded type with only unary function * types as alternatives? 
*/ def isUnary(tp: Type)(using Context): Boolean = tp match { diff --git a/docs/_docs/reference/experimental/better-fors.md b/docs/_docs/reference/experimental/better-fors.md index 7add425caf51..a4c42c9fb380 100644 --- a/docs/_docs/reference/experimental/better-fors.md +++ b/docs/_docs/reference/experimental/better-fors.md @@ -8,7 +8,7 @@ The `betterFors` language extension improves the usability of `for`-comprehensio The extension is enabled by the language import `import scala.language.experimental.betterFors` or by setting the command line option `-language:experimental.betterFors`. -The biggest user facing change is the new ability to start `for`-comprehensions with with aliases. This means that the following previously invalid code is now valid: +The biggest user facing change is the new ability to start `for`-comprehensions with aliases. This means that the following previously invalid code is now valid: ```scala for diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala index 6f067a0e5ef0..da6d213351b7 100644 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala @@ -1103,7 +1103,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * - Every time when generating an ATHROW, a new basic block is started. * - During classfile writing, such basic blocks are found to be dead: no branches go there * - Eliminating dead code would probably require complex shifts in the output byte buffer - * - But there's an easy solution: replace all code in the dead block with with + * - But there's an easy solution: replace all code in the dead block with * `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same * - The corresponding stack frame can be easily generated: on entering a dead the block, * the frame requires a single Throwable on the stack. diff --git a/tests/pos-with-compiler-cc/dotc/printing/Printer.scala b/tests/pos-with-compiler-cc/dotc/printing/Printer.scala index b9da874cf9ae..eff283b345ce 100644 --- a/tests/pos-with-compiler-cc/dotc/printing/Printer.scala +++ b/tests/pos-with-compiler-cc/dotc/printing/Printer.scala @@ -70,7 +70,7 @@ abstract class Printer extends Pure { def changePrec(prec: Precedence)(op: => Text): Text = if (prec < this.prec) atPrec(prec) ("(" ~ op ~ ")") else atPrec(prec)(op) - /** The name, possibly with with namespace suffix if debugNames is set: + /** The name, possibly with namespace suffix if debugNames is set: * /L for local names, /V for other term names, /T for type names */ def nameString(name: Name): String diff --git a/tests/pos-with-compiler-cc/dotc/typer/Applications.scala b/tests/pos-with-compiler-cc/dotc/typer/Applications.scala index aed6c55f8ad8..6ffbf0465120 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Applications.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Applications.scala @@ -1182,7 +1182,7 @@ trait Applications extends Compatibility { tree } - /** Is `tp` a unary function type or an overloaded type with with only unary function + /** Is `tp` a unary function type or an overloaded type with only unary function * types as alternatives? */ def isUnary(tp: Type)(using Context): Boolean = tp match { From a1d7f5e02535e236e2d2378ade402a8a19c24a39 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Apr 2024 12:07:54 +0200 Subject: [PATCH 754/827] Refine the bounds of the `Tuple.Filter` type lambda predicate .. 
to only require it be defined on the elements of the tuple. This is one of the ongoing proposed tuple improvements, addressing #19175. As carefully pointed out by @sjrd, this _is_ a potential breaking change. See tests/neg/tuple-filter-compat.scala for an example. This is not an unprecedented change however, the analogous improvements were made to `Tuple.{Map, FlatMap}` in 28a695ef. --- library/src/scala/Tuple.scala | 2 +- tests/neg/tuple-filter-compat.scala | 12 ++++++++++++ tests/pos/tuple-filter.scala | 6 ++++++ 3 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 tests/neg/tuple-filter-compat.scala diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 8074fe3664e5..060f2b5492b8 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -166,7 +166,7 @@ object Tuple { * ``` * @syntax markdown */ - type Filter[Tup <: Tuple, P[_] <: Boolean] <: Tuple = Tup match { + type Filter[Tup <: Tuple, P[_ <: Union[Tup]] <: Boolean] <: Tuple = Tup match { case EmptyTuple => EmptyTuple case h *: t => P[h] match { case true => h *: Filter[t, P] diff --git a/tests/neg/tuple-filter-compat.scala b/tests/neg/tuple-filter-compat.scala new file mode 100644 index 000000000000..f50837fc1d4b --- /dev/null +++ b/tests/neg/tuple-filter-compat.scala @@ -0,0 +1,12 @@ + +type OldFilter[Tup <: Tuple, P[_] <: Boolean] = Nothing +type NewFilter[Tup <: Tuple, P[_ <: Tuple.Union[Tup]] <: Boolean] = Nothing + +trait A: + type X >: OldFilter <: OldFilter + +trait B1 extends A: + type X = OldFilter // ok + +trait B2 extends A: + type X = NewFilter // error: breaking change diff --git a/tests/pos/tuple-filter.scala b/tests/pos/tuple-filter.scala index 2c9638b2e47b..f67518f171f0 100644 --- a/tests/pos/tuple-filter.scala +++ b/tests/pos/tuple-filter.scala @@ -1,10 +1,16 @@ +import scala.compiletime.ops.int.< + type P[x] <: Boolean = x match { case 3 => false case _ => true } + type RejectAll[x] = false +type Pos[X <: Int] = 0 < X + def Test = summon[Tuple.Filter[(1, 2, 3, 4), P] =:= (1, 2, 4)] summon[Tuple.Filter[(1, 2, 3, 4), RejectAll] =:= EmptyTuple] summon[Tuple.Filter[EmptyTuple, P] =:= EmptyTuple] + summon[Tuple.Filter[(1, -2, 3, -4), Pos] =:= (1, 3)] From 16becd70ddc4308fa72e34b940e61c1bef015fd2 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Mon, 29 Jul 2024 16:19:01 +0200 Subject: [PATCH 755/827] Move `NonEmptyTuple` members into `Tuple` Addressing #19175 The motivation for this has already been established, among other things: - the corresponding type level operations already use `Tuple` as upper bound; - the corresponding `NamedTuple` operations also do not make a distinction; - these operations are no more unsafe than other operations already available on `Tuple`, such as `drop` Note this should _not_ be a problem for binary compatibility, as both `Tuple` and `NonEmptyTuple` are erased to `Product`s (see `defn.specialErasure`). --- library/src/scala/Tuple.scala | 52 +++++++++++++------------- library/src/scala/runtime/Tuples.scala | 8 ++-- 2 files changed, 29 insertions(+), 31 deletions(-) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 060f2b5492b8..40469df063d1 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -31,6 +31,30 @@ sealed trait Tuple extends Product { inline def *: [H, This >: this.type <: Tuple] (x: H): H *: This = runtime.Tuples.cons(x, this).asInstanceOf[H *: This] + /** Get the i-th element of this tuple. 
+ * Equivalent to productElement but with a precise return type. + */ + inline def apply[This >: this.type <: Tuple](n: Int): Elem[This, n.type] = + runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] + + /** Get the head of this tuple */ + inline def head[This >: this.type <: Tuple]: Head[This] = + runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] + + /** Get the initial part of the tuple without its last element */ + inline def init[This >: this.type <: Tuple]: Init[This] = + runtime.Tuples.init(this).asInstanceOf[Init[This]] + + /** Get the last of this tuple */ + inline def last[This >: this.type <: Tuple]: Last[This] = + runtime.Tuples.last(this).asInstanceOf[Last[This]] + + /** Get the tail of this tuple. + * This operation is O(this.size) + */ + inline def tail[This >: this.type <: Tuple]: Tail[This] = + runtime.Tuples.tail(this).asInstanceOf[Tail[This]] + /** Return a new tuple by concatenating `this` tuple with `that` tuple. * This operation is O(this.size + that.size) */ @@ -304,33 +328,7 @@ case object EmptyTuple extends Tuple { } /** Tuple of arbitrary non-zero arity */ -sealed trait NonEmptyTuple extends Tuple { - import Tuple.* - - /** Get the i-th element of this tuple. - * Equivalent to productElement but with a precise return type. - */ - inline def apply[This >: this.type <: NonEmptyTuple](n: Int): Elem[This, n.type] = - runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] - - /** Get the head of this tuple */ - inline def head[This >: this.type <: NonEmptyTuple]: Head[This] = - runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] - - /** Get the initial part of the tuple without its last element */ - inline def init[This >: this.type <: NonEmptyTuple]: Init[This] = - runtime.Tuples.init(this).asInstanceOf[Init[This]] - - /** Get the last of this tuple */ - inline def last[This >: this.type <: NonEmptyTuple]: Last[This] = - runtime.Tuples.last(this).asInstanceOf[Last[This]] - - /** Get the tail of this tuple. - * This operation is O(this.size) - */ - inline def tail[This >: this.type <: NonEmptyTuple]: Tail[This] = - runtime.Tuples.tail(this).asInstanceOf[Tail[This]] -} +sealed trait NonEmptyTuple extends Tuple @showAsInfix sealed abstract class *:[+H, +T <: Tuple] extends NonEmptyTuple diff --git a/library/src/scala/runtime/Tuples.scala b/library/src/scala/runtime/Tuples.scala index efb54c54d50b..66dc486d2a1d 100644 --- a/library/src/scala/runtime/Tuples.scala +++ b/library/src/scala/runtime/Tuples.scala @@ -350,7 +350,7 @@ object Tuples { } } - def tail(self: NonEmptyTuple): Tuple = (self: Any) match { + def tail(self: Tuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlTail(xxl) case _ => specialCaseTail(self) } @@ -558,16 +558,16 @@ object Tuples { } } - def init(self: NonEmptyTuple): Tuple = (self: Any) match { + def init(self: Tuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlInit(xxl) case _ => specialCaseInit(self) } - def last(self: NonEmptyTuple): Any = (self: Any) match { + def last(self: Tuple): Any = (self: Any) match { case self: Product => self.productElement(self.productArity - 1) } - def apply(self: NonEmptyTuple, n: Int): Any = + def apply(self: Tuple, n: Int): Any = self.productElement(n) // Benchmarks showed that this is faster than doing (it1 zip it2).copyToArray(...) 
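
Taken together, the tuple changes above (the `:*` and `++` infix type aliases and moving `head`, `tail` and `apply` from `NonEmptyTuple` up to `Tuple`) read as follows in use. This is a purely illustrative sketch, assuming a library build that already contains those patches; the object and value names are invented for the example.

    import Tuple.{:*, ++}

    object TupleApiSketch:
      val pair: Int *: String *: EmptyTuple = (1, "a")

      // head and tail are now members of `Tuple` rather than only `NonEmptyTuple`
      val first: Int = pair.head
      val rest: String *: EmptyTuple = pair.tail

      // the new infix aliases let appended and concatenated tuple types be written infix
      val appended: (Int, String) :* Boolean = pair :* true
      val joined: (Int, String) ++ (Boolean *: EmptyTuple) = pair ++ (true *: EmptyTuple)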
From d9b8da5413cc564d7363024165957325d4b6893a Mon Sep 17 00:00:00 2001 From: HarrisL2 Date: Mon, 11 Nov 2024 11:17:36 -0500 Subject: [PATCH 756/827] Check for open flag on objects Fixes #21760 --- compiler/src/dotty/tools/dotc/typer/Checking.scala | 2 ++ tests/neg/i21760.scala | 1 + 2 files changed, 3 insertions(+) create mode 100644 tests/neg/i21760.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 700bd483ff38..761e7cdab37c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -611,6 +611,8 @@ object Checking { val mods = mdef.mods def flagSourcePos(flag: FlagSet) = mods.mods.find(_.flags == flag).getOrElse(mdef).srcPos + if mods.is(Open) then + report.error(ModifierNotAllowedForDefinition(Open), flagSourcePos(Open)) if mods.is(Abstract) then report.error(ModifierNotAllowedForDefinition(Abstract), flagSourcePos(Abstract)) if mods.is(Sealed) then diff --git a/tests/neg/i21760.scala b/tests/neg/i21760.scala new file mode 100644 index 000000000000..625e03520dfb --- /dev/null +++ b/tests/neg/i21760.scala @@ -0,0 +1 @@ +open object O // error \ No newline at end of file From 04934b77546bb2d1c64a702752161d2808db0610 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Mon, 11 Nov 2024 17:48:35 +0100 Subject: [PATCH 757/827] fix: don't consider `into` as a soft-modifier Co-authored-by: kasiaMarek Co-authored-by: KacperFKorban --- compiler/src/dotty/tools/dotc/parsing/Tokens.scala | 2 +- tests/neg/i21786.check | 6 ++++++ tests/neg/i21786.scala | 1 + tests/pos/i21635.scala | 6 ++++++ 4 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 tests/neg/i21786.check create mode 100644 tests/neg/i21786.scala create mode 100644 tests/pos/i21635.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index b0a533b2f1df..c78a336ecdf5 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -297,7 +297,7 @@ object Tokens extends TokensCommon { final val closingParens = BitSet(RPAREN, RBRACKET, RBRACE) - final val softModifierNames = Set(nme.inline, nme.into, nme.opaque, nme.open, nme.transparent, nme.infix) + final val softModifierNames = Set(nme.inline, nme.opaque, nme.open, nme.transparent, nme.infix) def showTokenDetailed(token: Int): String = debugString(token) diff --git a/tests/neg/i21786.check b/tests/neg/i21786.check new file mode 100644 index 000000000000..47f7e2456c3d --- /dev/null +++ b/tests/neg/i21786.check @@ -0,0 +1,6 @@ +-- [E103] Syntax Error: tests/neg/i21786.scala:1:0 --------------------------------------------------------------------- +1 |into class X // error + |^^^^ + |Illegal start of toplevel definition + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21786.scala b/tests/neg/i21786.scala new file mode 100644 index 000000000000..c5bb9c595d32 --- /dev/null +++ b/tests/neg/i21786.scala @@ -0,0 +1 @@ +into class X // error diff --git a/tests/pos/i21635.scala b/tests/pos/i21635.scala new file mode 100644 index 000000000000..dc0a8314c7d8 --- /dev/null +++ b/tests/pos/i21635.scala @@ -0,0 +1,6 @@ +class A(val into: Boolean) { + def m1(): Any = + into + + def m2(): Int = 1 +} From 14bf406e02488e7c333a94989cbc1f7fff07b625 Mon Sep 17 00:00:00 2001 From: Eugene Flesselle Date: Tue, 12 Nov 2024 10:12:29 +0100 Subject: [PATCH 758/827] Test reduction of Tuple.Zip --- 
 tests/pos/tuple-zip.scala | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)
 create mode 100644 tests/pos/tuple-zip.scala

diff --git a/tests/pos/tuple-zip.scala b/tests/pos/tuple-zip.scala
new file mode 100644
index 000000000000..980dff9433a6
--- /dev/null
+++ b/tests/pos/tuple-zip.scala
@@ -0,0 +1,20 @@
+
+import scala.Tuple.Zip
+
+type A
+type B
+type C
+
+def Test =
+
+  summon[Zip[A *: B *: C *: EmptyTuple, C *: B *: A *: EmptyTuple] =:= (A, C) *: (B, B) *: (C, A) *: EmptyTuple]
+
+  summon[Zip[A *: B *: EmptyTuple, C *: B *: A *: EmptyTuple] =:= (A, C) *: (B, B) *: EmptyTuple]
+  summon[Zip[A *: B *: C *: EmptyTuple, C *: B *: EmptyTuple] =:= (A, C) *: (B, B) *: EmptyTuple]
+
+  summon[Zip[A *: B *: C *: Tuple, C *: B *: A *: Tuple] =:= (A, C) *: (B, B) *: (C, A) *: Zip[Tuple, Tuple]]
+  summon[Zip[A *: B *: C *: Tuple, C *: B *: A *: Tuple] <:< (A, C) *: (B, B) *: (C, A) *: Tuple]
+
+  summon[Zip[A *: B *: Tuple, C *: B *: A *: Tuple] =:= (A, C) *: (B, B) *: Zip[Tuple, A *: Tuple]]
+  summon[Zip[A *: B *: NonEmptyTuple, C *: B *: A *: Tuple] =:= (A, C) *: (B, B) *: Zip[NonEmptyTuple, A *: Tuple]]
+  summon[Zip[A *: B *: EmptyTuple, C *: B *: A *: Tuple] =:= (A, C) *: (B, B) *: EmptyTuple]

From 7db83c547b6e950fe2ad28fdd3007f8f8cb0bd35 Mon Sep 17 00:00:00 2001
From: Dale Wijnand
Date: Sun, 1 Sep 2024 23:57:29 +0100
Subject: [PATCH 759/827] Fix pkg obj prefix of opaque tp ext meth

There is use of `makePackageObjPrefixExplicit` within `accessibleType`,
which is called on the result of findRef in typedIdent. But in
`tryExtension` there is no such use.

We could fix it in the usage of the results in `tryExtension`, but I
thought perhaps we could fix it for all call sites, by handling it
within findRef.
---
 .../src/dotty/tools/dotc/typer/Typer.scala | 4 ++--
 tests/pos/i18097.1.scala | 22 +++++++++++++++++++
 tests/pos/i18097.2.scala | 13 +++++++++++
 tests/pos/i18097.2.works.scala | 13 +++++++++++
 tests/pos/i18097.3/Opaque.scala | 9 ++++++++
 tests/pos/i18097.3/Test.scala | 13 +++++++++++
 tests/pos/i18097.orig.scala | 20 +++++++++++++++++
 7 files changed, 92 insertions(+), 2 deletions(-)
 create mode 100644 tests/pos/i18097.1.scala
 create mode 100644 tests/pos/i18097.2.scala
 create mode 100644 tests/pos/i18097.2.works.scala
 create mode 100644 tests/pos/i18097.3/Opaque.scala
 create mode 100644 tests/pos/i18097.3/Test.scala
 create mode 100644 tests/pos/i18097.orig.scala

diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala
index 817e7baf1c8c..8a633efc1021 100644
--- a/compiler/src/dotty/tools/dotc/typer/Typer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala
@@ -344,7 +344,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
             // so we ignore that import.
if reallyExists(denot) && !isScalaJsPseudoUnion then if unimported.isEmpty || !unimported.contains(pre.termSymbol) then - return pre.select(name, denot) + return pre.select(name, denot).makePackageObjPrefixExplicit case _ => if imp.importSym.isCompleting then report.warning(i"cyclic ${imp.importSym}, ignored", pos) @@ -504,7 +504,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer defDenot.symbol.owner else curOwner - effectiveOwner.thisType.select(name, defDenot) + effectiveOwner.thisType.select(name, defDenot).makePackageObjPrefixExplicit } if !curOwner.is(Package) || isDefinedInCurrentUnit(defDenot) then result = checkNewOrShadowed(found, Definition) // no need to go further out, we found highest prec entry diff --git a/tests/pos/i18097.1.scala b/tests/pos/i18097.1.scala new file mode 100644 index 000000000000..b7b57467e3b0 --- /dev/null +++ b/tests/pos/i18097.1.scala @@ -0,0 +1,22 @@ +opaque type Pos = Double + +object Pos: + extension (x: Pos) + def mult1(y: Pos): Pos = x * y + +extension (x: Pos) + def mult2(y: Pos): Pos = x * y + +class Test: + def test(key: String, a: Pos, b: Pos): Unit = + val tup1 = new Tuple1(Pos.mult1(a)(b)) + val res1: Pos = tup1._1 + + val tup2 = new Tuple1(a.mult1(b)) + val res2: Pos = tup2._1 + + val tup3 = new Tuple1(mult2(a)(b)) + val res3: Pos = tup3._1 + + val tup4 = new Tuple1(a.mult2(b)) + val res4: Pos = tup4._1 // was error: Found: (tup4._4 : Double) Required: Pos diff --git a/tests/pos/i18097.2.scala b/tests/pos/i18097.2.scala new file mode 100644 index 000000000000..c676479aab42 --- /dev/null +++ b/tests/pos/i18097.2.scala @@ -0,0 +1,13 @@ +opaque type Namespace = List[String] + +object Namespace: + def apply(head: String): Namespace = List(head) + +extension (ns: Namespace) + def appended(segment: String): Namespace = ns.appended(segment) + +object Main: + def main(args: Array[String]): Unit = + val a: Namespace = Namespace("a") + .appended("B") + .appended("c") // was error: Found: List[String] Required: Namespace diff --git a/tests/pos/i18097.2.works.scala b/tests/pos/i18097.2.works.scala new file mode 100644 index 000000000000..3ba8e056a4a5 --- /dev/null +++ b/tests/pos/i18097.2.works.scala @@ -0,0 +1,13 @@ +object Main: + opaque type Namespace = List[String] + + object Namespace: + def apply(head: String): Namespace = List(head) + + extension (ns: Namespace) + def appended(segment: String): Namespace = ns.appended(segment) + + def main(args: Array[String]): Unit = + val a: Namespace = Namespace("a") + .appended("B") + .appended("c") diff --git a/tests/pos/i18097.3/Opaque.scala b/tests/pos/i18097.3/Opaque.scala new file mode 100644 index 000000000000..cb9c9eaedfb3 --- /dev/null +++ b/tests/pos/i18097.3/Opaque.scala @@ -0,0 +1,9 @@ +package test + +type Foo = Unit +val bar: Foo = () + +opaque type Opaque = Unit + +extension (foo: Foo) + def go: Option[Opaque] = ??? diff --git a/tests/pos/i18097.3/Test.scala b/tests/pos/i18097.3/Test.scala new file mode 100644 index 000000000000..38f2199944c2 --- /dev/null +++ b/tests/pos/i18097.3/Test.scala @@ -0,0 +1,13 @@ +package test + +final case class Test(value: Opaque) + +def test: Test = + bar.go match + case Some(value) => Test(value) // was error: Found: (value : Unit) Required: test.Opaque + case _ => ??? + +def test2: Test = + go(bar) match + case Some(value) => Test(value) + case _ => ??? 
diff --git a/tests/pos/i18097.orig.scala b/tests/pos/i18097.orig.scala new file mode 100644 index 000000000000..092a904f6de4 --- /dev/null +++ b/tests/pos/i18097.orig.scala @@ -0,0 +1,20 @@ +opaque type PositiveNumber = Double + +object PositiveNumber: + extension (x: PositiveNumber) + def mult1(other: PositiveNumber): PositiveNumber = + x * other + +extension (x: PositiveNumber) + def mult2(other: PositiveNumber): PositiveNumber = + x * other + +object Test: + def multMap1[A](x: Map[A, PositiveNumber], num: PositiveNumber): Map[A, PositiveNumber] = x.map((key, value) => key -> value.mult1(num)).toMap + + def multMap2[A](x: Map[A, PositiveNumber], num: PositiveNumber): Map[A, PositiveNumber] = x.map((key, value) => key -> value.mult2(num)).toMap // was error +// ^ +// Cannot prove that (A, Double) <:< (A, V2). +// +// where: V2 is a type variable with constraint <: PositiveNumber + def multMap2_2[A](x: Map[A, PositiveNumber], num: PositiveNumber): Map[A, PositiveNumber] = x.map((key, value) => key -> mult2(value)(num)).toMap From 2c0d6dfd5dbb1f962713fddd06d080df4c8ab8ce Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 25 Oct 2024 12:32:45 +0100 Subject: [PATCH 760/827] Remove tvars in normalizedCompatible via instantiate Use a higher-level method. --- compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 9a94c50deb7f..85f44ead5f28 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -73,10 +73,9 @@ object ProtoTypes { if result && (ctx.typerState.constraint ne newctx.typerState.constraint) then // Remove all type lambdas and tvars introduced by testCompat for tvar <- newctx.typerState.ownedVars do - val tl = tvar.origin.binder - newctx.typerState.ownedVars -= tvar - if newctx.typerState.constraint.contains(tl) then - newctx.typerState.constraint = newctx.typerState.constraint.remove(tl)(using newctx) + inContext(newctx): + if !tvar.isInstantiated then + tvar.instantiate(fromBelow = false) // any direction // commit any remaining changes in typer state newctx.typerState.commit() From fb8389390a7db2f06f41fe2f3d7a9c2fcdec6bdd Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 12 Nov 2024 11:24:29 +0000 Subject: [PATCH 761/827] Fix Any#className decorator on null --- compiler/src/dotty/tools/dotc/core/Decorators.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 29d4b3fa4052..96a2d45db80d 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -292,7 +292,7 @@ object Decorators { case _ => String.valueOf(x).nn /** Returns the simple class name of `x`. */ - def className: String = x.getClass.getSimpleName.nn + def className: String = if x == null then "" else x.getClass.getSimpleName.nn extension [T](x: T) def assertingErrorsReported(using Context): T = { From 3d79d407aff7cbcf70d8736459c231e9b73d55dd Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 12 Nov 2024 11:58:38 +0000 Subject: [PATCH 762/827] (Re-)Drop inaccessible subclasses from refineUsingParent This reverts commit cecd05356beecd232053d4c593af54bcc12cad0e. 
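The scenario, condensed (this is essentially the i21790 test added below, annotated): the only implementation of a sealed trait is a private class, and the match sees the trait through a prefix where that class is inaccessible, so `refineUsingParent` must drop the child instead of trying to instantiate it.

    trait S:
      sealed trait A                 // exhaustivity wants to decompose A into its children
      private class CA() extends A  // the only child; not visible outside S

    object O extends S               // the match reaches A as O.A, where CA cannot be named

    trait T

    class Test:
      def f(e: T) = e match          // selector type unrelated to O.A
        case _: O.A =>
        case _ =>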
--- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 6 +++++- tests/pos/i21790.scala | 14 ++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i21790.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 2403a6e22bc6..79eac7bde38d 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -930,7 +930,11 @@ object TypeOps: for tp <- mixins.reverseIterator do protoTp1 <:< tp maximizeType(protoTp1, NoSpan) - wildApprox(protoTp1) + val inst = wildApprox(protoTp1) + if !inst.classSymbol.exists then + // E.g. i21790, can't instantiate S#CA as a subtype of O.A, because O.CA isn't accessible + NoType + else inst } if (protoTp1 <:< tp2) instantiate() diff --git a/tests/pos/i21790.scala b/tests/pos/i21790.scala new file mode 100644 index 000000000000..0cc7db935ac7 --- /dev/null +++ b/tests/pos/i21790.scala @@ -0,0 +1,14 @@ +package p + +trait S: + sealed trait A + private class CA() extends A + +object O extends S + +trait T + +class Test: + def f(e: T) = e match + case _: O.A => + case _ => From 21e5f3c40a156043622f2d400e2cb51cc138bab9 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 12 Nov 2024 12:01:35 +0000 Subject: [PATCH 763/827] Tweak refineUsingParent drop conditions In tests/pos/i21790.scala, O.A has no class symbol, because it's an inaccessible private class of S. But in tests/warn/i21860.scala, Shape.Triangle doesn't have a "classSymbol" because it has multiple - Shape and Corners. So use .classSymbols.isEmpty instead of !.classSymbol.exists --- compiler/src/dotty/tools/dotc/core/TypeOps.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 79eac7bde38d..697e50c6a2a8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -931,7 +931,7 @@ object TypeOps: protoTp1 <:< tp maximizeType(protoTp1, NoSpan) val inst = wildApprox(protoTp1) - if !inst.classSymbol.exists then + if inst.classSymbols.isEmpty then // E.g. 
i21790, can't instantiate S#CA as a subtype of O.A, because O.CA isn't accessible NoType else inst From 55d2bd716767a810fe059204c3e026c6f9ba420f Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Tue, 12 Nov 2024 15:34:57 +0100 Subject: [PATCH 764/827] Apply suggestions from code review Co-authored-by: Matt Bovel --- compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala b/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala index b8b40ab841c2..8777a95c33cf 100644 --- a/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala +++ b/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala @@ -30,7 +30,7 @@ object JsonNameTransformer { buf.append(name.subSequence(0, i)) } buf.append(op2code(c.toInt)) - } else if (c <= 0x1F || c > 0x7F) { + } else if (c <= 0x1F || c >= 0x7F) { if (buf eq null) { buf = new StringBuilder() buf.append(name.subSequence(0, i)) From d05e60023c890dc8a7ba41133fb4351f7057eeeb Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Tue, 12 Nov 2024 17:04:09 +0100 Subject: [PATCH 765/827] Don't peoject nested wildcard pattern to nullable --- .../dotty/tools/dotc/transform/patmat/Space.scala | 14 +++++--------- tests/warn/i20121.scala | 4 ++-- tests/warn/i20122.scala | 2 +- tests/warn/i20123.scala | 2 +- 4 files changed, 9 insertions(+), 13 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 7410d617c4a0..1ee402deded0 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -337,13 +337,6 @@ object SpaceEngine { case pat: Ident if isBackquoted(pat) => Typ(pat.tpe, decomposed = false) - case Ident(nme.WILDCARD) => - val tp = pat.tpe.stripAnnots.widenSkolem - val isNullable = tp.isInstanceOf[FlexibleType] || tp.classSymbol.isNullableClass - val tpSpace = Typ(erase(tp, isValue = true), decomposed = false) - if isNullable then Or(tpSpace :: nullSpace :: Nil) - else tpSpace - case Ident(_) | Select(_, _) => Typ(erase(pat.tpe.stripAnnots.widenSkolem, isValue = true), decomposed = false) @@ -716,7 +709,6 @@ object SpaceEngine { else NoType }.filter(_.exists) parts - case tp: FlexibleType => List(tp.underlying, ConstantType(Constant(null))) case _ => ListOfNoType end rec @@ -939,11 +931,15 @@ object SpaceEngine { then project(OrType(selTyp, ConstantType(Constant(null)), soft = false)) else project(selTyp) var hadNullOnly = false + def projectPat(pat: Tree): Space = + // Project toplevel wildcard pattern to nullable + if isNullable && isWildcardArg(pat) then Or(project(pat) :: nullSpace :: Nil) + else project(pat) @tailrec def recur(cases: List[CaseDef], prevs: List[Space], deferred: List[Tree]): Unit = cases match case Nil => case CaseDef(pat, guard, _) :: rest => - val curr = trace(i"project($pat)")(project(pat)) + val curr = trace(i"project($pat)")(projectPat(pat)) val covered = trace("covered")(simplify(intersect(curr, targetSpace))) val prev = trace("prev")(simplify(Or(prevs))) if prev == Empty && covered == Empty then // defer until a case is reachable diff --git a/tests/warn/i20121.scala b/tests/warn/i20121.scala index b8402fa808ac..ce8e3e4d74f6 100644 --- a/tests/warn/i20121.scala +++ b/tests/warn/i20121.scala @@ -5,8 +5,8 @@ case class CC_B[A](a: A) extends T_A[A, X] val v_a: T_A[X, X] = CC_B(null) val v_b = v_a match - case CC_B(_) => 0 - 
case _ => 1 // warn: null only + case CC_B(_) => 0 // warn: unreachable + case _ => 1 // for CC_B[A] to match T_A[X, X] // A := X // so require X, aka T_A[Byte, Byte] diff --git a/tests/warn/i20122.scala b/tests/warn/i20122.scala index d035a18d3b09..50da42a5926c 100644 --- a/tests/warn/i20122.scala +++ b/tests/warn/i20122.scala @@ -7,7 +7,7 @@ case class CC_E(a: CC_C[Char, Byte]) val v_a: T_B[Int, CC_A] = CC_B(CC_E(CC_C(null))) val v_b = v_a match - case CC_B(CC_E(CC_C(_))) => 0 + case CC_B(CC_E(CC_C(_))) => 0 // warn: unreachable case _ => 1 // for CC_B[A, C] to match T_B[C, CC_A] // C <: Int, ok diff --git a/tests/warn/i20123.scala b/tests/warn/i20123.scala index 0af7aba5a3a5..32de903210b2 100644 --- a/tests/warn/i20123.scala +++ b/tests/warn/i20123.scala @@ -8,7 +8,7 @@ case class CC_G[A, C](c: C) extends T_A[A, C] val v_a: T_A[Boolean, T_B[Boolean]] = CC_G(null) val v_b = v_a match { case CC_D() => 0 - case CC_G(_) => 1 + case CC_G(_) => 1 // warn: unreachable // for CC_G[A, C] to match T_A[Boolean, T_B[Boolean]] // A := Boolean, which is ok // C := T_B[Boolean], From 0d5e014c7532d63e44046ae5bed6094810316584 Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 13 Nov 2024 11:18:25 +0100 Subject: [PATCH 766/827] Update test --- tests/patmat/i12530.check | 1 + tests/patmat/null.check | 2 +- tests/patmat/null.scala | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/patmat/i12530.check b/tests/patmat/i12530.check index b0605bcd95e5..636347516cbc 100644 --- a/tests/patmat/i12530.check +++ b/tests/patmat/i12530.check @@ -1 +1,2 @@ 6: Match case Unreachable +14: Match case Unreachable diff --git a/tests/patmat/null.check b/tests/patmat/null.check index da081e6b56c0..3b860ddfd850 100644 --- a/tests/patmat/null.check +++ b/tests/patmat/null.check @@ -1,4 +1,4 @@ 6: Match case Unreachable 13: Pattern Match -18: Match case Unreachable 20: Pattern Match +21: Match case Unreachable diff --git a/tests/patmat/null.scala b/tests/patmat/null.scala index b918109c0cc5..9cff29a5c4e8 100644 --- a/tests/patmat/null.scala +++ b/tests/patmat/null.scala @@ -18,5 +18,6 @@ class Test { case Some(null) => case None => case y => + case _ => } } \ No newline at end of file From bb63e31a8384e6e3127ee679ea0033e8437ef9ce Mon Sep 17 00:00:00 2001 From: Matt Bovel Date: Wed, 13 Nov 2024 15:24:57 +0100 Subject: [PATCH 767/827] Rename `InlineCopier` to `ConservativeTreeCopier`, use it in `TypeMap`s --- compiler/src/dotty/tools/dotc/ast/tpd.scala | 8 +++++ .../src/dotty/tools/dotc/core/Types.scala | 9 +++++- .../dotty/tools/dotc/inlines/Inliner.scala | 17 +++++------ tests/pos/annot-17242.scala | 5 ++++ tests/pos/dependent-annot2.scala | 30 +++++++++++++++++++ 5 files changed, 58 insertions(+), 11 deletions(-) create mode 100644 tests/pos/annot-17242.scala create mode 100644 tests/pos/dependent-annot2.scala diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 3777969b1076..55021bf50ace 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -827,6 +827,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Closure(tree: Tree)(env, meth, tpt) } + // This is a more fault-tolerant copier that does not cause errors when + // function types in applications are undefined. + // This was called `Inliner.InlineCopier` before 3.6.3. 
+ class ConservativeTreeCopier() extends TypedTreeCopier: + override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = + if fun.tpe.widen.exists then super.Apply(tree)(fun, args) + else untpd.cpy.Apply(tree)(fun, args).withTypeUnchecked(tree.tpe) + override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal { diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ca9d73df03aa..31e11487ae38 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -6291,7 +6291,14 @@ object Types extends TypeUtils { } } - private def treeTypeMap = new TreeTypeMap(typeMap = this) + private def treeTypeMap = new TreeTypeMap( + typeMap = this, + // Using `ConservativeTreeCopier` is needed when copying dependent annoted + // types, where we can refer to a previous parameter represented as + // `TermParamRef` that has no underlying type yet. + // See tests/pos/annot-17242.scala. + cpy = ConservativeTreeCopier() + ) def mapOver(syms: List[Symbol]): List[Symbol] = mapSymbols(syms, treeTypeMap) diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 103f3aac7630..db041b7e8591 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -96,15 +96,6 @@ object Inliner: } end isElideableExpr - // InlineCopier is a more fault-tolerant copier that does not cause errors when - // function types in applications are undefined. This is necessary since we copy at - // the same time as establishing the proper context in which the copied tree should - // be evaluated. This matters for opaque types, see neg/i14653.scala. - private class InlineCopier() extends TypedTreeCopier: - override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = - if fun.tpe.widen.exists then super.Apply(tree)(fun, args) - else untpd.cpy.Apply(tree)(fun, args).withTypeUnchecked(tree.tpe) - // InlinerMap is a TreeTypeMap with special treatment for inlined arguments: // They are generally left alone (not mapped further, and if they wrap a type // the type Inlined wrapper gets dropped @@ -116,7 +107,13 @@ object Inliner: substFrom: List[Symbol], substTo: List[Symbol])(using Context) extends TreeTypeMap( - typeMap, treeMap, oldOwners, newOwners, substFrom, substTo, InlineCopier()): + typeMap, treeMap, oldOwners, newOwners, substFrom, substTo, + // It is necessary to use the `ConservativeTreeCopier` since we copy at + // the same time as establishing the proper context in which the copied + // tree should be evaluated. This matters for opaque types, see + // neg/i14653.scala. 
+ ConservativeTreeCopier() + ): override def copy( typeMap: Type => Type, diff --git a/tests/pos/annot-17242.scala b/tests/pos/annot-17242.scala new file mode 100644 index 000000000000..a8fcc9dbe15f --- /dev/null +++ b/tests/pos/annot-17242.scala @@ -0,0 +1,5 @@ +// See also tests/pos/annot-21595.scala + +class local(predicate: Int) extends annotation.StaticAnnotation + +def failing1(x: Int, z: Int @local(x + x)) = () diff --git a/tests/pos/dependent-annot2.scala b/tests/pos/dependent-annot2.scala new file mode 100644 index 000000000000..9bfa8b594c2b --- /dev/null +++ b/tests/pos/dependent-annot2.scala @@ -0,0 +1,30 @@ +class dummy(b: Any) extends annotation.StaticAnnotation + +class X: + def foo() = 1 + def bar() = 2 + def eq(x: X) = true + def id(): this.type = this + +class Y extends X: + override def bar() = 2 + override def eq(x: X) = true + +def f(x: Int) = x +def g(x: String) = x +def g(x: Int) = x + +object AnnotationTests: + def foo1(elem: Int, bla: Int @dummy(Array(elem))) = bla + def foo2(elem: X, bla: Int @dummy(elem.foo())) = bla + def foo3(elem: Y, bla: Int @dummy(elem.foo())) = bla + def foo4(elem: X, bla: Int @dummy(elem.bar())) = bla + def foo5(elem: Y, bla: Int @dummy(elem.bar())) = bla + def foo6(elem: X, bla: Int @dummy(elem.eq(X()))) = bla + def foo7(elem: Y, bla: Int @dummy(elem.eq(Y()))) = bla + def foo8(elem: X, bla: Int @dummy(elem.id().foo())) = bla + def foo9(elem: Y, bla: Int @dummy(elem.id().foo())) = bla + def foo10(elem: Int, bla: Int @dummy(f(elem))) = bla + def foo11(elem: Int, bla: Int @dummy(g(elem))) = bla + def foo12(elem: Int, bla: Int @dummy(0 == elem)) = bla + def foo13(elem: Int, bla: Int @dummy(elem == 0)) = bla From d2773e80cc07ba63300c91c60b7ffc755f789eaa Mon Sep 17 00:00:00 2001 From: noti0na1 Date: Wed, 13 Nov 2024 16:03:46 +0100 Subject: [PATCH 768/827] Fix names and description for cc and setup phases --- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 7 ++++++- compiler/src/dotty/tools/dotc/cc/Setup.scala | 7 +++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 4d905a5df4ab..77d893ad49b9 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -28,6 +28,9 @@ import reporting.{trace, Message, OverrideError} object CheckCaptures: import ast.tpd.* + val name: String = "cc" + val description: String = "capture checking" + enum EnvKind: case Regular // normal case case NestedInOwner // environment is a temporary one nested in the owner's environment, @@ -192,7 +195,9 @@ class CheckCaptures extends Recheck, SymTransformer: import ast.tpd.* import CheckCaptures.* - def phaseName: String = "cc" + override def phaseName: String = CheckCaptures.name + + override def description: String = CheckCaptures.description override def isRunnable(using Context) = super.isRunnable && Feature.ccEnabledSomewhere diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 76ae41649517..3147a0f7bd47 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -28,6 +28,9 @@ trait SetupAPI: object Setup: + val name: String = "ccSetup" + val description: String = "prepare compilation unit for capture checking" + /** Recognizer for `res $throws exc`, returning `(res, exc)` in case of success */ object throwsAlias: def unapply(tp: Type)(using Context): 
Option[(Type, Type)] = tp match @@ -53,6 +56,10 @@ import Setup.* class Setup extends PreRecheck, SymTransformer, SetupAPI: thisPhase => + override def phaseName: String = Setup.name + + override def description: String = Setup.description + override def isRunnable(using Context) = super.isRunnable && Feature.ccEnabledSomewhere From 8e758e0c2c94b812b1732d410f3cb4ed149044bf Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Wed, 13 Nov 2024 17:43:50 +0100 Subject: [PATCH 769/827] Fix issue with owners of top-level symbols in cached quoted code being incorrect --- .../tools/dotc/quoted/PickledQuotes.scala | 4 +- tests/pos-macros/i20471/Macro_1.scala | 63 +++++++++++++++++++ tests/pos-macros/i20471/Main_2.scala | 7 +++ 3 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 tests/pos-macros/i20471/Macro_1.scala create mode 100644 tests/pos-macros/i20471/Main_2.scala diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 67a354919d5b..3ee52624710e 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -241,7 +241,9 @@ object PickledQuotes { treeOwner(tree) match case Some(owner) => // Copy the cached tree to make sure the all definitions are unique. - TreeTypeMap(oldOwners = List(owner), newOwners = List(owner)).apply(tree) + val treeCpy = TreeTypeMap(oldOwners = List(owner), newOwners = List(owner)).apply(tree) + // Then replace the symbol owner with the one pointed by the quote context. + treeCpy.changeNonLocalOwners(ctx.owner) case _ => tree diff --git a/tests/pos-macros/i20471/Macro_1.scala b/tests/pos-macros/i20471/Macro_1.scala new file mode 100644 index 000000000000..2fd940dbc4e2 --- /dev/null +++ b/tests/pos-macros/i20471/Macro_1.scala @@ -0,0 +1,63 @@ +import scala.annotation.experimental +import scala.quoted.* +import scala.annotation.tailrec + +object FlatMap { + @experimental inline def derived[F[_]]: FlatMap[F] = MacroFlatMap.derive +} +trait FlatMap[F[_]]{ + def tailRecM[A, B](a: A)(f: A => F[Either[A, B]]): F[B] +} + +@experimental +object MacroFlatMap: + + inline def derive[F[_]]: FlatMap[F] = ${ flatMap } + + def flatMap[F[_]: Type](using Quotes): Expr[FlatMap[F]] = '{ + new FlatMap[F]: + def tailRecM[A, B](a: A)(f: A => F[Either[A, B]]): F[B] = + ${ deriveTailRecM('{ a }, '{ f }) } + } + + def deriveTailRecM[F[_]: Type, A: Type, B: Type]( + a: Expr[A], + f: Expr[A => F[Either[A, B]]] + )(using q: Quotes): Expr[F[B]] = + import quotes.reflect.* + + val body: PartialFunction[(Symbol, TypeRepr), Term] = { + case (method, tpe) => { + given q2: Quotes = method.asQuotes + '{ + def step(x: A): B = ??? + ??? 
+ }.asTerm + } + } + + val term = '{ $f($a) }.asTerm + val name = Symbol.freshName("$anon") + val parents = List(TypeTree.of[Object], TypeTree.of[F[B]]) + + extension (sym: Symbol) def overridableMembers: List[Symbol] = + val member1 = sym.methodMember("abstractEffect")(0) + val member2 = sym.methodMember("concreteEffect")(0) + def meth(member: Symbol) = Symbol.newMethod(sym, member.name, This(sym).tpe.memberType(member), Flags.Override, Symbol.noSymbol) + List(meth(member1), meth(member2)) + + val cls = Symbol.newClass(Symbol.spliceOwner, name, parents.map(_.tpe), _.overridableMembers, None) + + def transformDef(method: DefDef)(argss: List[List[Tree]]): Option[Term] = + val sym = method.symbol + Some(body.apply((sym, method.returnTpt.tpe))) + + val members = cls.declarations + .filterNot(_.isClassConstructor) + .map: sym => + sym.tree match + case method: DefDef => DefDef(sym, transformDef(method)) + case _ => report.errorAndAbort(s"Not supported: $sym in ${sym.owner}") + + val newCls = New(TypeIdent(cls)).select(cls.primaryConstructor).appliedToNone + Block(ClassDef(cls, parents, members) :: Nil, newCls).asExprOf[F[B]] diff --git a/tests/pos-macros/i20471/Main_2.scala b/tests/pos-macros/i20471/Main_2.scala new file mode 100644 index 000000000000..bdd1cd32ea26 --- /dev/null +++ b/tests/pos-macros/i20471/Main_2.scala @@ -0,0 +1,7 @@ +import scala.annotation.experimental + +@experimental +object autoFlatMapTests: + trait TestAlgebra[T] derives FlatMap: + def abstractEffect(a: String): T + def concreteEffect(a: String): T = abstractEffect(a + " concreteEffect") From dbd49886b9c885089e54dd0e98ebeb9a32734387 Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Wed, 13 Nov 2024 23:41:05 +0100 Subject: [PATCH 770/827] Fix Windows OS detection in TraceNameManglingTest (#21937) Fixes #21936 [test_windows_full] --- .../dotty/tools/dotc/profile/TraceNameManglingTest.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala b/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala index 977b67740f88..f1f570cc85d4 100644 --- a/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala +++ b/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala @@ -27,8 +27,9 @@ class TraceNameManglingTest extends DottyTest { } @Test def escapeBackslashes(): Unit = { - val isWindows = sys.props("os.name").toLowerCase(Locale.ROOT) == "windows" - val filename = if isWindows then "/.scala" else "\\.scala" + val isWindows = sys.props("os.name").toLowerCase(Locale.ROOT).nn.contains("windows") + // It is not possible to create a file with backslash in name on Windows + val filename = if isWindows then "test.scala" else "\\.scala" checkTraceEvents( """ |class /\ : @@ -46,7 +47,8 @@ class TraceNameManglingTest extends DottyTest { raw"setter /\\_=" ).map(TraceEvent("typecheck", _)) ++ Set( - TraceEvent("file", if isWindows then "/.scala" else "\\\\.scala") + // See comment aboce for Windows limitations + TraceEvent("file", if isWindows then filename else "\\\\.scala") ) ) } From a76470f9f34c878e759a20a4e2c76027a1ec6c22 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 24 Sep 2024 10:18:35 +0200 Subject: [PATCH 771/827] Implement basic version of desugaring context bounds for poly functions --- .../src/dotty/tools/dotc/ast/Desugar.scala | 27 +++++++++++++++++++ .../dotty/tools/dotc/parsing/Parsers.scala | 4 +-- .../src/dotty/tools/dotc/typer/Typer.scala | 5 ++-- .../contextbounds-for-poly-functions.scala 
| 15 +++++++++++ 4 files changed, 47 insertions(+), 4 deletions(-) create mode 100644 tests/pos/contextbounds-for-poly-functions.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index e66c71731b4f..901fbd1bb601 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1221,6 +1221,33 @@ object desugar { case _ => body cpy.PolyFunction(tree)(tree.targs, stripped(tree.body)).asInstanceOf[PolyFunction] + /** Desugar [T_1 : B_1, ..., T_N : B_N] => (P_1, ..., P_M) => R + * Into [T_1, ..., T_N] => (P_1, ..., P_M) => (B_1, ..., B_N) ?=> R + */ + def expandPolyFunctionContextBounds(tree: PolyFunction)(using Context): PolyFunction = + val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ Function(vparamTypes, res)) = tree: @unchecked + val newTParams = tparams.map { + case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) => + TypeDef(name, ContextBounds(bounds, List.empty)) + } + var idx = -1 + val collecedContextBounds = tparams.collect { + case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) if ctxBounds.nonEmpty => + // TOOD(kπ) Should we handle non empty normal bounds here? + name -> ctxBounds + }.flatMap { case (name, ctxBounds) => + ctxBounds.map { ctxBound => + idx = idx + 1 + makeSyntheticParameter(idx, ctxBound).withAddedFlags(Given) + } + } + val contextFunctionResult = + if collecedContextBounds.isEmpty then + fun + else + Function(vparamTypes, Function(collecedContextBounds, res)).withSpan(fun.span) + PolyFunction(newTParams, contextFunctionResult).withSpan(tree.span) + /** Desugar [T_1, ..., T_M] => (P_1, ..., P_N) => R * Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } */ diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 47391a4114cf..7a5facf38b67 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -68,7 +68,7 @@ object Parsers { def acceptsVariance = this == Class || this == CaseClass || this == Hk def acceptsCtxBounds = - !(this == Type || this == Hk) + !(this == Hk) def acceptsWildcard = this == Type || this == Hk @@ -3429,7 +3429,7 @@ object Parsers { * * TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ * TypTypeParam ::= {Annotation} - * (id | ‘_’) [HkTypeParamClause] TypeBounds + * (id | ‘_’) [HkTypeParamClause] TypeAndCtxBounds * * HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 817e7baf1c8c..a669d555617d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1919,8 +1919,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPolyFunction(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val tree1 = desugar.normalizePolyFunction(tree) - if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree1), pt) - else typedPolyFunctionValue(tree1, pt) + val tree2 = desugar.expandPolyFunctionContextBounds(tree1) + if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree2), pt) + else typedPolyFunctionValue(tree2, pt) def typedPolyFunctionValue(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val untpd.PolyFunction(tparams: List[untpd.TypeDef] 
@unchecked, fun) = tree: @unchecked diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala new file mode 100644 index 000000000000..66c177cf6c89 --- /dev/null +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -0,0 +1,15 @@ +import scala.language.experimental.modularity +import scala.language.future + + +trait Ord[X]: + def compare(x: X, y: X): Int + +val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + +// type Comparer = [X: Ord] => (x: X, y: X) => Boolean +// val less2: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 + +// type Cmp[X] = (x: X, y: X) => Boolean +// type Comparer2 = [X: Ord] => Cmp[X] +// val less3: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 From 8dc68c3303c70b8fbb8fbea2a127bd11f5667507 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 24 Sep 2024 10:27:47 +0200 Subject: [PATCH 772/827] Handle named context bounds in poly function context bound desugaring --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 8 ++++++-- tests/pos/contextbounds-for-poly-functions.scala | 6 ++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 901fbd1bb601..91adf3c97733 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1230,7 +1230,7 @@ object desugar { case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) => TypeDef(name, ContextBounds(bounds, List.empty)) } - var idx = -1 + var idx = 0 val collecedContextBounds = tparams.collect { case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) if ctxBounds.nonEmpty => // TOOD(kπ) Should we handle non empty normal bounds here? 
@@ -1238,7 +1238,11 @@ object desugar { }.flatMap { case (name, ctxBounds) => ctxBounds.map { ctxBound => idx = idx + 1 - makeSyntheticParameter(idx, ctxBound).withAddedFlags(Given) + ctxBound match + case ContextBoundTypeTree(_, _, ownName) => + ValDef(ownName, ctxBound, EmptyTree).withFlags(TermParam | Given) + case _ => + makeSyntheticParameter(idx, ctxBound).withAddedFlags(Given) } } val contextFunctionResult = diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 66c177cf6c89..00feedd66d71 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -7,9 +7,11 @@ trait Ord[X]: val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +val less2 = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 + // type Comparer = [X: Ord] => (x: X, y: X) => Boolean -// val less2: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 +// val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 // type Cmp[X] = (x: X, y: X) => Boolean // type Comparer2 = [X: Ord] => Cmp[X] -// val less3: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +// val less4: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 From 3ac5cec843d2f276cb851b67ea7d46871853bdde Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 24 Sep 2024 16:12:28 +0200 Subject: [PATCH 773/827] Correctly-ish desugar poly function context bounds in function types --- .../src/dotty/tools/dotc/ast/Desugar.scala | 23 +++++++++++-------- .../src/dotty/tools/dotc/typer/Typer.scala | 19 ++++++++------- .../contextbounds-for-poly-functions.scala | 9 ++++---- 3 files changed, 30 insertions(+), 21 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 91adf3c97733..5db72d2f5a09 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1226,31 +1226,36 @@ object desugar { */ def expandPolyFunctionContextBounds(tree: PolyFunction)(using Context): PolyFunction = val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ Function(vparamTypes, res)) = tree: @unchecked - val newTParams = tparams.map { + val newTParams = tparams.mapConserve { case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) => TypeDef(name, ContextBounds(bounds, List.empty)) + case t => t } var idx = 0 - val collecedContextBounds = tparams.collect { + val collectedContextBounds = tparams.collect { case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) if ctxBounds.nonEmpty => - // TOOD(kπ) Should we handle non empty normal bounds here? 
name -> ctxBounds }.flatMap { case (name, ctxBounds) => ctxBounds.map { ctxBound => idx = idx + 1 ctxBound match - case ContextBoundTypeTree(_, _, ownName) => - ValDef(ownName, ctxBound, EmptyTree).withFlags(TermParam | Given) + case ctxBound @ ContextBoundTypeTree(tycon, paramName, ownName) => + if tree.isTerm then + ValDef(ownName, ctxBound, EmptyTree).withFlags(TermParam | Given) + else + ContextBoundTypeTree(tycon, paramName, EmptyTermName) // this has to be handled in Typer#typedFunctionType case _ => makeSyntheticParameter(idx, ctxBound).withAddedFlags(Given) } } val contextFunctionResult = - if collecedContextBounds.isEmpty then - fun + if collectedContextBounds.isEmpty then fun else - Function(vparamTypes, Function(collecedContextBounds, res)).withSpan(fun.span) - PolyFunction(newTParams, contextFunctionResult).withSpan(tree.span) + val mods = EmptyModifiers.withFlags(Given) + val erasedParams = collectedContextBounds.map(_ => false) + Function(vparamTypes, FunctionWithMods(collectedContextBounds, res, mods, erasedParams)).withSpan(fun.span) + if collectedContextBounds.isEmpty then tree + else PolyFunction(newTParams, contextFunctionResult).withSpan(tree.span) /** Desugar [T_1, ..., T_M] => (P_1, ..., P_N) => R * Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index a669d555617d..4dfd2c05c808 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,7 +40,7 @@ import annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature, Feature.{migrateTo3, modularity, sourceVersion, warnOnMigration} +import config.Feature, Feature.{migrateTo3, sourceVersion, warnOnMigration} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -1145,7 +1145,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if templ1.parents.isEmpty && isFullyDefined(pt, ForceDegree.flipBottom) && isSkolemFree(pt) - && isEligible(pt.underlyingClassRef(refinementOK = Feature.enabled(modularity))) + && isEligible(pt.underlyingClassRef(refinementOK = Feature.enabled(Feature.modularity))) then templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) for case parent: RefTree <- templ1.parents do @@ -1720,7 +1720,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) else val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) - val result = typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt) + val args1 = args.mapConserve { + case cb: untpd.ContextBoundTypeTree => typed(cb) + case t => t + } + val result = typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args1 :+ body), pt) // if there are any erased classes, we need to re-do the typecheck. 
result match case r: AppliedTypeTree if r.args.exists(_.tpe.isErasedClass) => @@ -2451,12 +2455,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if tycon.tpe.typeParams.nonEmpty then val tycon0 = tycon.withType(tycon.tpe.etaCollapse) typed(untpd.AppliedTypeTree(spliced(tycon0), tparam :: Nil)) - else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then + else if Feature.enabled(Feature.modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) typed(untpd.RefinedTypeTree(spliced(tycon), List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else def selfNote = - if Feature.enabled(modularity) then + if Feature.enabled(Feature.modularity) then " and\ndoes not have an abstract type member named `Self` either" else "" errorTree(tree, @@ -2475,7 +2479,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val TypeDef(_, impl: Template) = typed(refineClsDef): @unchecked val refinements1 = impl.body val seen = mutable.Set[Symbol]() - for (refinement <- refinements1) { // TODO: get clarity whether we want to enforce these conditions + for refinement <- refinements1 do // TODO: get clarity whether we want to enforce these conditions typr.println(s"adding refinement $refinement") checkRefinementNonCyclic(refinement, refineCls, seen) val rsym = refinement.symbol @@ -2489,7 +2493,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val member = refineCls.info.member(rsym.name) if (member.isOverloaded) report.error(OverloadInRefinement(rsym), refinement.srcPos) - } assignType(cpy.RefinedTypeTree(tree)(tpt1, refinements1), tpt1, refinements1, refineCls) } @@ -4706,7 +4709,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = Feature.enabled(modularity)) + val tycon = ctorResultType.underlyingClassRef(refinementOK = Feature.enabled(Feature.modularity)) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 00feedd66d71..90bd01ce6b6d 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -5,12 +5,13 @@ import scala.language.future trait Ord[X]: def compare(x: X, y: X): Int -val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +// val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 -val less2 = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 +// val less2 = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 -// type Comparer = [X: Ord] => (x: X, y: X) => Boolean -// val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 +type ComparerRef = [X] => (x: X, y: X) => Ord[X] ?=> Boolean +type Comparer = [X: Ord] => (x: X, y: X) => Boolean +val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 // type Cmp[X] = (x: X, y: X) => Boolean // type Comparer2 = [X: Ord] => Cmp[X] From 408aa74c158290a69f3411f12c719daaa45e8021 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 24 Sep 2024 16:17:26 +0200 Subject: [PATCH 774/827] Fix pickling issue --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 2 +- 
tests/pos/contextbounds-for-poly-functions.scala | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 5db72d2f5a09..12c237701d62 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1228,7 +1228,7 @@ object desugar { val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ Function(vparamTypes, res)) = tree: @unchecked val newTParams = tparams.mapConserve { case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) => - TypeDef(name, ContextBounds(bounds, List.empty)) + cpy.TypeDef(td)(name, bounds) case t => t } var idx = 0 diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 90bd01ce6b6d..6a3ec9935a65 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -5,10 +5,11 @@ import scala.language.future trait Ord[X]: def compare(x: X, y: X): Int -// val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 -// val less2 = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 +val less2 = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 +type CtxFunctionRef = Ord[Int] ?=> Boolean type ComparerRef = [X] => (x: X, y: X) => Ord[X] ?=> Boolean type Comparer = [X: Ord] => (x: X, y: X) => Boolean val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 From 134c0150617a091d5ea482374c3a42e2572e78ad Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 24 Sep 2024 16:47:13 +0200 Subject: [PATCH 775/827] Hide context bounds expansion for poly functions under modularity feature --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 6 ++++-- compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 ++- tests/pos/contextbounds-for-poly-functions.scala | 1 - 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 7a5facf38b67..e54caff9f47d 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -68,7 +68,7 @@ object Parsers { def acceptsVariance = this == Class || this == CaseClass || this == Hk def acceptsCtxBounds = - !(this == Hk) + !(this == Type || this == Hk) def acceptsWildcard = this == Type || this == Hk @@ -3460,7 +3460,9 @@ object Parsers { else ident().toTypeName val hkparams = typeParamClauseOpt(ParamOwner.Hk) val bounds = - if paramOwner.acceptsCtxBounds then typeAndCtxBounds(name) else typeBounds() + if paramOwner.acceptsCtxBounds then typeAndCtxBounds(name) + else if in.featureEnabled(Feature.modularity) && paramOwner == ParamOwner.Type then typeAndCtxBounds(name) + else typeBounds() TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 4dfd2c05c808..4a656c15a9ea 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1923,7 +1923,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPolyFunction(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val tree1 = desugar.normalizePolyFunction(tree) - val tree2 = 
desugar.expandPolyFunctionContextBounds(tree1) + val tree2 = if Feature.enabled(Feature.modularity) then desugar.expandPolyFunctionContextBounds(tree1) + else tree1 if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree2), pt) else typedPolyFunctionValue(tree2, pt) diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 6a3ec9935a65..c293fd0d9780 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -1,7 +1,6 @@ import scala.language.experimental.modularity import scala.language.future - trait Ord[X]: def compare(x: X, y: X): Int From 309034e5579dd5dcf2667d5cd096c1067681b724 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Wed, 25 Sep 2024 09:43:30 +0200 Subject: [PATCH 776/827] Small cleanup --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 14 +++++--------- tests/pos/contextbounds-for-poly-functions.scala | 2 ++ 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 12c237701d62..d37af4aaedae 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1237,15 +1237,11 @@ object desugar { name -> ctxBounds }.flatMap { case (name, ctxBounds) => ctxBounds.map { ctxBound => - idx = idx + 1 - ctxBound match - case ctxBound @ ContextBoundTypeTree(tycon, paramName, ownName) => - if tree.isTerm then - ValDef(ownName, ctxBound, EmptyTree).withFlags(TermParam | Given) - else - ContextBoundTypeTree(tycon, paramName, EmptyTermName) // this has to be handled in Typer#typedFunctionType - case _ => - makeSyntheticParameter(idx, ctxBound).withAddedFlags(Given) + val ContextBoundTypeTree(tycon, paramName, ownName) = ctxBound: @unchecked + if tree.isTerm then + ValDef(ownName, ctxBound, EmptyTree).withFlags(TermParam | Given) + else + ContextBoundTypeTree(tycon, paramName, EmptyTermName) // this has to be handled in Typer#typedFunctionType } } val contextFunctionResult = diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index c293fd0d9780..7da7405c9225 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -16,3 +16,5 @@ val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 // type Cmp[X] = (x: X, y: X) => Boolean // type Comparer2 = [X: Ord] => Cmp[X] // val less4: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + +val less5 = [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 \ No newline at end of file From 64bd03eff0d4a901de85b563e681e1ffc5d8eb24 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Wed, 25 Sep 2024 09:49:23 +0200 Subject: [PATCH 777/827] Add more test cases --- tests/pos/contextbounds-for-poly-functions.scala | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 7da7405c9225..7db41628e57d 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -4,6 +4,9 @@ import scala.language.future trait Ord[X]: def compare(x: X, y: X): Int +trait Show[X]: + def show(x: X): String + val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 val less2 = [X: Ord as ord] => (x: X, y: X) => 
ord.compare(x, y) < 0 @@ -17,4 +20,12 @@ val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 // type Comparer2 = [X: Ord] => Cmp[X] // val less4: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 -val less5 = [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 \ No newline at end of file +val less5 = [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + +val less6 = [X: {Ord, Show}] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + +val less7 = [X: {Ord as ord, Show}] => (x: X, y: X) => ord.compare(x, y) < 0 + +val less8 = [X: {Ord, Show as show}] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + +val less9 = [X: {Ord as ord, Show as show}] => (x: X, y: X) => ord.compare(x, y) < 0 From 5196efde72f40ebca4f516cf4febace2ce9cda15 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Wed, 2 Oct 2024 17:23:15 +0200 Subject: [PATCH 778/827] Change the implementation of context bound expansion for poly functions to reuse some of the existing context bound expansion --- .../src/dotty/tools/dotc/ast/Desugar.scala | 58 ++++++++----------- .../src/dotty/tools/dotc/typer/Typer.scala | 34 ++++++----- .../contextbounds-for-poly-functions.scala | 27 +++++++++ 3 files changed, 68 insertions(+), 51 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index d37af4aaedae..488755f81848 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -52,6 +52,11 @@ object desugar { */ val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() + /** An attachment key to indicate that a DefDef is a poly function apply + * method definition. + */ + val PolyFunctionApply: Property.Key[Unit] = Property.StickyKey() + /** What static check should be applied to a Match? 
*/ enum MatchCheck { case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom @@ -337,7 +342,8 @@ object desugar { cpy.DefDef(meth)( name = normalizeName(meth, tpt).asTermName, paramss = paramssNoContextBounds), - evidenceParamBuf.toList) + evidenceParamBuf.toList + ) end elimContextBounds def addDefaultGetters(meth: DefDef)(using Context): Tree = @@ -508,7 +514,19 @@ object desugar { case Nil => params :: Nil - cpy.DefDef(meth)(paramss = recur(meth.paramss)) + if meth.hasAttachment(PolyFunctionApply) then + meth.removeAttachment(PolyFunctionApply) + val paramTpts = params.map(_.tpt) + val paramNames = params.map(_.name) + val paramsErased = params.map(_.mods.flags.is(Erased)) + if ctx.mode.is(Mode.Type) then + val ctxFunction = makeContextualFunction(paramTpts, paramNames, meth.tpt, paramsErased) + cpy.DefDef(meth)(tpt = ctxFunction) + else + val ctxFunction = makeContextualFunction(paramTpts, paramNames, meth.rhs, paramsErased) + cpy.DefDef(meth)(rhs = ctxFunction) + else + cpy.DefDef(meth)(paramss = recur(meth.paramss)) end addEvidenceParams /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ @@ -1221,38 +1239,6 @@ object desugar { case _ => body cpy.PolyFunction(tree)(tree.targs, stripped(tree.body)).asInstanceOf[PolyFunction] - /** Desugar [T_1 : B_1, ..., T_N : B_N] => (P_1, ..., P_M) => R - * Into [T_1, ..., T_N] => (P_1, ..., P_M) => (B_1, ..., B_N) ?=> R - */ - def expandPolyFunctionContextBounds(tree: PolyFunction)(using Context): PolyFunction = - val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ Function(vparamTypes, res)) = tree: @unchecked - val newTParams = tparams.mapConserve { - case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) => - cpy.TypeDef(td)(name, bounds) - case t => t - } - var idx = 0 - val collectedContextBounds = tparams.collect { - case td @ TypeDef(name, cb @ ContextBounds(bounds, ctxBounds)) if ctxBounds.nonEmpty => - name -> ctxBounds - }.flatMap { case (name, ctxBounds) => - ctxBounds.map { ctxBound => - val ContextBoundTypeTree(tycon, paramName, ownName) = ctxBound: @unchecked - if tree.isTerm then - ValDef(ownName, ctxBound, EmptyTree).withFlags(TermParam | Given) - else - ContextBoundTypeTree(tycon, paramName, EmptyTermName) // this has to be handled in Typer#typedFunctionType - } - } - val contextFunctionResult = - if collectedContextBounds.isEmpty then fun - else - val mods = EmptyModifiers.withFlags(Given) - val erasedParams = collectedContextBounds.map(_ => false) - Function(vparamTypes, FunctionWithMods(collectedContextBounds, res, mods, erasedParams)).withSpan(fun.span) - if collectedContextBounds.isEmpty then tree - else PolyFunction(newTParams, contextFunctionResult).withSpan(tree.span) - /** Desugar [T_1, ..., T_M] => (P_1, ..., P_N) => R * Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } */ @@ -1275,7 +1261,9 @@ object desugar { }.toList RefinedTypeTree(ref(defn.PolyFunctionType), List( - DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree).withFlags(Synthetic) + DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree) + .withFlags(Synthetic) + .withAttachment(PolyFunctionApply, ()) )).withSpan(tree.span) end makePolyFunctionType diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 4a656c15a9ea..00f22c874f7c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,7 +40,7 @@ import 
annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature, Feature.{migrateTo3, sourceVersion, warnOnMigration} +import config.Feature, Feature.{migrateTo3, modularity, sourceVersion, warnOnMigration} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -53,6 +53,7 @@ import config.MigrationVersion import transform.CheckUnused.OriginalName import scala.annotation.constructorOnly +import dotty.tools.dotc.ast.desugar.PolyFunctionApply object Typer { @@ -1145,7 +1146,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if templ1.parents.isEmpty && isFullyDefined(pt, ForceDegree.flipBottom) && isSkolemFree(pt) - && isEligible(pt.underlyingClassRef(refinementOK = Feature.enabled(Feature.modularity))) + && isEligible(pt.underlyingClassRef(refinementOK = Feature.enabled(modularity))) then templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) for case parent: RefTree <- templ1.parents do @@ -1720,11 +1721,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) else val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) - val args1 = args.mapConserve { - case cb: untpd.ContextBoundTypeTree => typed(cb) - case t => t - } - val result = typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args1 :+ body), pt) + // val args1 = args.mapConserve { + // case cb: untpd.ContextBoundTypeTree => typed(cb) + // case t => t + // } + val result = typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt) // if there are any erased classes, we need to re-do the typecheck. result match case r: AppliedTypeTree if r.args.exists(_.tpe.isErasedClass) => @@ -1923,10 +1924,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPolyFunction(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val tree1 = desugar.normalizePolyFunction(tree) - val tree2 = if Feature.enabled(Feature.modularity) then desugar.expandPolyFunctionContextBounds(tree1) - else tree1 - if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree2), pt) - else typedPolyFunctionValue(tree2, pt) + if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree1), pt) + else typedPolyFunctionValue(tree1, pt) def typedPolyFunctionValue(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val untpd.PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun) = tree: @unchecked @@ -1951,7 +1950,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val resultTpt = untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) - val desugared = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) + val desugared @ Block(List(defdef), _) = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) + defdef.putAttachment(PolyFunctionApply, ()) typed(desugared, pt) else val msg = @@ -1959,7 +1959,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer |Expected type should be a polymorphic function with the same number of type and value parameters.""" errorTree(EmptyTree, msg, tree.srcPos) case _ => - val desugared = desugar.makeClosure(tparams, vparams, body, untpd.TypeTree(), tree.span) + val desugared @ Block(List(defdef), _) = desugar.makeClosure(tparams, vparams, body, untpd.TypeTree(), 
tree.span) + defdef.putAttachment(PolyFunctionApply, ()) typed(desugared, pt) end typedPolyFunctionValue @@ -2456,12 +2457,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if tycon.tpe.typeParams.nonEmpty then val tycon0 = tycon.withType(tycon.tpe.etaCollapse) typed(untpd.AppliedTypeTree(spliced(tycon0), tparam :: Nil)) - else if Feature.enabled(Feature.modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then + else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) typed(untpd.RefinedTypeTree(spliced(tycon), List(untpd.TypeDef(tpnme.Self, tparamSplice)))) else def selfNote = - if Feature.enabled(Feature.modularity) then + if Feature.enabled(modularity) then " and\ndoes not have an abstract type member named `Self` either" else "" errorTree(tree, @@ -3610,6 +3611,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = { val defn.FunctionOf(formals, _, true) = pt.dropDependentRefinement: @unchecked + println(i"make contextual function $tree / $pt") val paramNamesOrNil = pt match case RefinedType(_, _, rinfo: MethodType) => rinfo.paramNames case _ => Nil @@ -4710,7 +4712,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = Feature.enabled(Feature.modularity)) + val tycon = ctorResultType.underlyingClassRef(refinementOK = Feature.enabled(modularity)) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 7db41628e57d..a5a035754b08 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -7,10 +7,18 @@ trait Ord[X]: trait Show[X]: def show(x: X): String +val less0: [X: Ord] => (X, X) => Boolean = ??? 
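// A minimal sketch of the encoding exercised by the tests below (an illustration,
// assuming the context-bound expansion described in this patch series): a context
// bound on a polymorphic function becomes a trailing contextual evidence parameter, so
//   [X: Ord] => (X, X) => Boolean
// corresponds to
//   [X] => (X, X) => Ord[X] ?=> Boolean
// and a value of that type can be written with the evidence made explicit:
//   [X] => (x: X, y: X) => (ord: Ord[X]) ?=> ord.compare(x, y) < 0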
+ val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +val less1_type_test: [X: Ord] => (X, X) => Boolean = + [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + val less2 = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 +val less2_type_test: [X: Ord as ord] => (X, X) => Boolean = + [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 + type CtxFunctionRef = Ord[Int] ?=> Boolean type ComparerRef = [X] => (x: X, y: X) => Ord[X] ?=> Boolean type Comparer = [X: Ord] => (x: X, y: X) => Boolean @@ -20,12 +28,31 @@ val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 // type Comparer2 = [X: Ord] => Cmp[X] // val less4: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +// type CmpWeak[X] = (x: X, y: X) => Boolean +// type Comparer2Weak = [X: Ord] => (x: X) => CmpWeak[X] +// val less4: Comparer2Weak = [X: Ord] => (x: X) => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + val less5 = [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +val less5_type_test: [X: [X] =>> Ord[X]] => (X, X) => Boolean = + [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + val less6 = [X: {Ord, Show}] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +val less6_type_test: [X: {Ord, Show}] => (X, X) => Boolean = + [X: {Ord, Show}] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + val less7 = [X: {Ord as ord, Show}] => (x: X, y: X) => ord.compare(x, y) < 0 +val less7_type_test: [X: {Ord as ord, Show}] => (X, X) => Boolean = + [X: {Ord as ord, Show}] => (x: X, y: X) => ord.compare(x, y) < 0 + val less8 = [X: {Ord, Show as show}] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +val less8_type_test: [X: {Ord, Show as show}] => (X, X) => Boolean = + [X: {Ord, Show as show}] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 + val less9 = [X: {Ord as ord, Show as show}] => (x: X, y: X) => ord.compare(x, y) < 0 + +val less9_type_test: [X: {Ord as ord, Show as show}] => (X, X) => Boolean = + [X: {Ord as ord, Show as show}] => (x: X, y: X) => ord.compare(x, y) < 0 From 5f0d4a7205ce80d3576c92df1c7243e67031f430 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Thu, 3 Oct 2024 15:49:53 +0200 Subject: [PATCH 779/827] Add support for some type aliases, when expanding context bounds for poly functions --- .../src/dotty/tools/dotc/ast/Desugar.scala | 26 ++++++--- .../src/dotty/tools/dotc/typer/Typer.scala | 56 ++++++++++++++----- .../contextbounds-for-poly-functions.scala | 9 ++- 3 files changed, 64 insertions(+), 27 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 488755f81848..56e519737cb1 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -55,7 +55,7 @@ object desugar { /** An attachment key to indicate that a DefDef is a poly function apply * method definition. */ - val PolyFunctionApply: Property.Key[Unit] = Property.StickyKey() + val PolyFunctionApply: Property.Key[List[ValDef]] = Property.StickyKey() /** What static check should be applied to a Match? */ enum MatchCheck { @@ -514,17 +514,25 @@ object desugar { case Nil => params :: Nil + // TODO(kπ) is this enough? SHould this be a TreeTraverse-thing? 
+ def pushDownEvidenceParams(tree: Tree): Tree = tree match + case Function(params, body) => + cpy.Function(tree)(params, pushDownEvidenceParams(body)) + case Block(stats, expr) => + cpy.Block(tree)(stats, pushDownEvidenceParams(expr)) + case tree => + val paramTpts = params.map(_.tpt) + val paramNames = params.map(_.name) + val paramsErased = params.map(_.mods.flags.is(Erased)) + makeContextualFunction(paramTpts, paramNames, tree, paramsErased).withSpan(tree.span) + if meth.hasAttachment(PolyFunctionApply) then meth.removeAttachment(PolyFunctionApply) - val paramTpts = params.map(_.tpt) - val paramNames = params.map(_.name) - val paramsErased = params.map(_.mods.flags.is(Erased)) + // (kπ): deffer this until we can type the result? if ctx.mode.is(Mode.Type) then - val ctxFunction = makeContextualFunction(paramTpts, paramNames, meth.tpt, paramsErased) - cpy.DefDef(meth)(tpt = ctxFunction) + cpy.DefDef(meth)(tpt = meth.tpt.withAttachment(PolyFunctionApply, params)) else - val ctxFunction = makeContextualFunction(paramTpts, paramNames, meth.rhs, paramsErased) - cpy.DefDef(meth)(rhs = ctxFunction) + cpy.DefDef(meth)(rhs = pushDownEvidenceParams(meth.rhs)) else cpy.DefDef(meth)(paramss = recur(meth.paramss)) end addEvidenceParams @@ -1263,7 +1271,7 @@ object desugar { RefinedTypeTree(ref(defn.PolyFunctionType), List( DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree) .withFlags(Synthetic) - .withAttachment(PolyFunctionApply, ()) + .withAttachment(PolyFunctionApply, List.empty) )).withSpan(tree.span) end makePolyFunctionType diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 00f22c874f7c..56e62bccf83b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -53,7 +53,6 @@ import config.MigrationVersion import transform.CheckUnused.OriginalName import scala.annotation.constructorOnly -import dotty.tools.dotc.ast.desugar.PolyFunctionApply object Typer { @@ -1951,7 +1950,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) val desugared @ Block(List(defdef), _) = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) - defdef.putAttachment(PolyFunctionApply, ()) + defdef.putAttachment(desugar.PolyFunctionApply, List.empty) typed(desugared, pt) else val msg = @@ -1960,7 +1959,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer errorTree(EmptyTree, msg, tree.srcPos) case _ => val desugared @ Block(List(defdef), _) = desugar.makeClosure(tparams, vparams, body, untpd.TypeTree(), tree.span) - defdef.putAttachment(PolyFunctionApply, ()) + defdef.putAttachment(desugar.PolyFunctionApply, List.empty) typed(desugared, pt) end typedPolyFunctionValue @@ -3588,30 +3587,57 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case xtree => typedUnnamed(xtree) val unsimplifiedType = result.tpe - simplify(result, pt, locked) - result.tpe.stripTypeVar match + val result1 = simplify(result, pt, locked) + result1.tpe.stripTypeVar match case e: ErrorType if !unsimplifiedType.isErroneous => errorTree(xtree, e.msg, xtree.srcPos) - case _ => result + case _ => result1 catch case ex: TypeError => handleTypeError(ex) } } + private def pushDownDeferredEvidenceParams(tpe: Type, params: List[untpd.ValDef], span: Span)(using Context): Type = tpe.dealias match { + case tpe: 
MethodType => + MethodType(tpe.paramNames)(paramNames => tpe.paramInfos, _ => pushDownDeferredEvidenceParams(tpe.resultType, params, span)) + case tpe: PolyType => + PolyType(tpe.paramNames)(paramNames => tpe.paramInfos, _ => pushDownDeferredEvidenceParams(tpe.resultType, params, span)) + case tpe: RefinedType => + // TODO(kπ): Doesn't seem right, but the PolyFunction ends up being a refinement + RefinedType(pushDownDeferredEvidenceParams(tpe.parent, params, span), tpe.refinedName, pushDownDeferredEvidenceParams(tpe.refinedInfo, params, span)) + case tpe @ AppliedType(tycon, args) if defn.isFunctionType(tpe) && args.size > 1 => + AppliedType(tpe.tycon, args.init :+ pushDownDeferredEvidenceParams(args.last, params, span)) + case tpe => + val paramNames = params.map(_.name) + val paramTpts = params.map(_.tpt) + val paramsErased = params.map(_.mods.flags.is(Erased)) + val ctxFunction = desugar.makeContextualFunction(paramTpts, paramNames, untpd.TypedSplice(TypeTree(tpe.dealias)), paramsErased).withSpan(span) + typed(ctxFunction).tpe + } + + private def addDownDeferredEvidenceParams(tree: Tree, pt: Type)(using Context): (Tree, Type) = { + tree.getAttachment(desugar.PolyFunctionApply) match + case Some(params) if params.nonEmpty => + tree.removeAttachment(desugar.PolyFunctionApply) + val tpe = pushDownDeferredEvidenceParams(tree.tpe, params, tree.span) + TypeTree(tpe).withSpan(tree.span) -> tpe + case _ => tree -> pt + } + /** Interpolate and simplify the type of the given tree. */ - protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = - if !tree.denot.isOverloaded then // for overloaded trees: resolve overloading before simplifying - if !tree.tpe.widen.isInstanceOf[MethodOrPoly] // wait with simplifying until method is fully applied - || tree.isDef // ... unless tree is a definition + protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = + val (tree1, pt1) = addDownDeferredEvidenceParams(tree, pt) + if !tree1.denot.isOverloaded then // for overloaded trees: resolve overloading before simplifying + if !tree1.tpe.widen.isInstanceOf[MethodOrPoly] // wait with simplifying until method is fully applied + || tree1.isDef // ... unless tree is a definition then - interpolateTypeVars(tree, pt, locked) - val simplified = tree.tpe.simplified - if !MatchType.thatReducesUsingGadt(tree.tpe) then // needs a GADT cast. i15743 + interpolateTypeVars(tree1, pt1, locked) + val simplified = tree1.tpe.simplified + if !MatchType.thatReducesUsingGadt(tree1.tpe) then // needs a GADT cast. 
i15743 tree.overwriteType(simplified) - tree + tree1 protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = { val defn.FunctionOf(formals, _, true) = pt.dropDependentRefinement: @unchecked - println(i"make contextual function $tree / $pt") val paramNamesOrNil = pt match case RefinedType(_, _, rinfo: MethodType) => rinfo.paramNames case _ => Nil diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index a5a035754b08..adaf6c035406 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -28,9 +28,12 @@ val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 // type Comparer2 = [X: Ord] => Cmp[X] // val less4: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 -// type CmpWeak[X] = (x: X, y: X) => Boolean -// type Comparer2Weak = [X: Ord] => (x: X) => CmpWeak[X] -// val less4: Comparer2Weak = [X: Ord] => (x: X) => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +type CmpWeak[X] = X => Boolean +type Comparer2Weak = [X: Ord] => X => CmpWeak[X] +val less4_0: [X: Ord] => X => X => Boolean = + [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 +val less4: Comparer2Weak = + [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 val less5 = [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 From a736592dcb95a35ddeaef12a84324733b0e4a7b5 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 4 Oct 2024 13:21:40 +0200 Subject: [PATCH 780/827] Make the expandion of context bounds for poly types slightly more elegant --- .../src/dotty/tools/dotc/ast/Desugar.scala | 55 ++++++++++--------- .../src/dotty/tools/dotc/typer/Typer.scala | 51 ++++++++++++++--- .../contextbounds-for-poly-functions.scala | 2 +- 3 files changed, 75 insertions(+), 33 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 56e519737cb1..1d2fd32fe103 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -527,8 +527,7 @@ object desugar { makeContextualFunction(paramTpts, paramNames, tree, paramsErased).withSpan(tree.span) if meth.hasAttachment(PolyFunctionApply) then - meth.removeAttachment(PolyFunctionApply) - // (kπ): deffer this until we can type the result? + // meth.removeAttachment(PolyFunctionApply) if ctx.mode.is(Mode.Type) then cpy.DefDef(meth)(tpt = meth.tpt.withAttachment(PolyFunctionApply, params)) else @@ -1250,29 +1249,35 @@ object desugar { /** Desugar [T_1, ..., T_M] => (P_1, ..., P_N) => R * Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } */ - def makePolyFunctionType(tree: PolyFunction)(using Context): RefinedTypeTree = - val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ untpd.Function(vparamTypes, res)) = tree: @unchecked - val paramFlags = fun match - case fun: FunctionWithMods => - // TODO: make use of this in the desugaring when pureFuns is enabled. - // val isImpure = funFlags.is(Impure) - - // Function flags to be propagated to each parameter in the desugared method type. 
- val givenFlag = fun.mods.flags.toTermFlags & Given - fun.erasedParams.map(isErased => if isErased then givenFlag | Erased else givenFlag) - case _ => - vparamTypes.map(_ => EmptyFlags) - - val vparams = vparamTypes.lazyZip(paramFlags).zipWithIndex.map { - case ((p: ValDef, paramFlags), n) => p.withAddedFlags(paramFlags) - case ((p, paramFlags), n) => makeSyntheticParameter(n + 1, p).withAddedFlags(paramFlags) - }.toList - - RefinedTypeTree(ref(defn.PolyFunctionType), List( - DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree) - .withFlags(Synthetic) - .withAttachment(PolyFunctionApply, List.empty) - )).withSpan(tree.span) + def makePolyFunctionType(tree: PolyFunction)(using Context): RefinedTypeTree = tree match + case PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ untpd.Function(vparamTypes, res)) => + val paramFlags = fun match + case fun: FunctionWithMods => + // TODO: make use of this in the desugaring when pureFuns is enabled. + // val isImpure = funFlags.is(Impure) + + // Function flags to be propagated to each parameter in the desugared method type. + val givenFlag = fun.mods.flags.toTermFlags & Given + fun.erasedParams.map(isErased => if isErased then givenFlag | Erased else givenFlag) + case _ => + vparamTypes.map(_ => EmptyFlags) + + val vparams = vparamTypes.lazyZip(paramFlags).zipWithIndex.map { + case ((p: ValDef, paramFlags), n) => p.withAddedFlags(paramFlags) + case ((p, paramFlags), n) => makeSyntheticParameter(n + 1, p).withAddedFlags(paramFlags) + }.toList + + RefinedTypeTree(ref(defn.PolyFunctionType), List( + DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree) + .withFlags(Synthetic) + .withAttachment(PolyFunctionApply, List.empty) + )).withSpan(tree.span) + case PolyFunction(tparams: List[untpd.TypeDef] @unchecked, res) => + RefinedTypeTree(ref(defn.PolyFunctionType), List( + DefDef(nme.apply, tparams :: Nil, res, EmptyTree) + .withFlags(Synthetic) + .withAttachment(PolyFunctionApply, List.empty) + )).withSpan(tree.span) end makePolyFunctionType /** Invent a name for an anonympus given of type or template `impl`. 
*/ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 56e62bccf83b..2951452e44f9 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3598,14 +3598,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer private def pushDownDeferredEvidenceParams(tpe: Type, params: List[untpd.ValDef], span: Span)(using Context): Type = tpe.dealias match { case tpe: MethodType => - MethodType(tpe.paramNames)(paramNames => tpe.paramInfos, _ => pushDownDeferredEvidenceParams(tpe.resultType, params, span)) + tpe.derivedLambdaType(tpe.paramNames, tpe.paramInfos, pushDownDeferredEvidenceParams(tpe.resultType, params, span)) case tpe: PolyType => - PolyType(tpe.paramNames)(paramNames => tpe.paramInfos, _ => pushDownDeferredEvidenceParams(tpe.resultType, params, span)) + tpe.derivedLambdaType(tpe.paramNames, tpe.paramInfos, pushDownDeferredEvidenceParams(tpe.resultType, params, span)) case tpe: RefinedType => - // TODO(kπ): Doesn't seem right, but the PolyFunction ends up being a refinement - RefinedType(pushDownDeferredEvidenceParams(tpe.parent, params, span), tpe.refinedName, pushDownDeferredEvidenceParams(tpe.refinedInfo, params, span)) + tpe.derivedRefinedType( + pushDownDeferredEvidenceParams(tpe.parent, params, span), + tpe.refinedName, + pushDownDeferredEvidenceParams(tpe.refinedInfo, params, span) + ) case tpe @ AppliedType(tycon, args) if defn.isFunctionType(tpe) && args.size > 1 => - AppliedType(tpe.tycon, args.init :+ pushDownDeferredEvidenceParams(args.last, params, span)) + tpe.derivedAppliedType(tycon, args.init :+ pushDownDeferredEvidenceParams(args.last, params, span)) case tpe => val paramNames = params.map(_.name) val paramTpts = params.map(_.tpt) @@ -3614,18 +3617,52 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typed(ctxFunction).tpe } - private def addDownDeferredEvidenceParams(tree: Tree, pt: Type)(using Context): (Tree, Type) = { + private def extractTopMethodTermParams(tpe: Type)(using Context): (List[TermName], List[Type]) = tpe match { + case tpe: MethodType => + tpe.paramNames -> tpe.paramInfos + case tpe: RefinedType if defn.isFunctionType(tpe.parent) => + extractTopMethodTermParams(tpe.refinedInfo) + case _ => + Nil -> Nil + } + + private def removeTopMethodTermParams(tpe: Type)(using Context): Type = tpe match { + case tpe: MethodType => + tpe.resultType + case tpe: RefinedType if defn.isFunctionType(tpe.parent) => + tpe.derivedRefinedType(tpe.parent, tpe.refinedName, removeTopMethodTermParams(tpe.refinedInfo)) + case tpe: AppliedType if defn.isFunctionType(tpe) => + tpe.args.last + case _ => + tpe + } + + private def healToPolyFunctionType(tree: Tree)(using Context): Tree = tree match { + case defdef: DefDef if defdef.name == nme.apply && defdef.paramss.forall(_.forall(_.symbol.flags.is(TypeParam))) && defdef.paramss.size == 1 => + val (names, types) = extractTopMethodTermParams(defdef.tpt.tpe) + val newTpe = removeTopMethodTermParams(defdef.tpt.tpe) + val newParams = names.lazyZip(types).map((name, tpe) => SyntheticValDef(name, TypeTree(tpe), flags = SyntheticTermParam)) + val newDefDef = cpy.DefDef(defdef)(paramss = defdef.paramss ++ List(newParams), tpt = untpd.TypeTree(newTpe)) + val nestedCtx = ctx.fresh.setNewTyperState() + typed(newDefDef)(using nestedCtx) + case _ => tree + } + + private def addDeferredEvidenceParams(tree: Tree, pt: Type)(using Context): (Tree, Type) = { 
tree.getAttachment(desugar.PolyFunctionApply) match case Some(params) if params.nonEmpty => tree.removeAttachment(desugar.PolyFunctionApply) val tpe = pushDownDeferredEvidenceParams(tree.tpe, params, tree.span) TypeTree(tpe).withSpan(tree.span) -> tpe + // case Some(params) if params.isEmpty => + // println(s"tree: $tree") + // healToPolyFunctionType(tree) -> pt case _ => tree -> pt } /** Interpolate and simplify the type of the given tree. */ protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = - val (tree1, pt1) = addDownDeferredEvidenceParams(tree, pt) + val (tree1, pt1) = addDeferredEvidenceParams(tree, pt) if !tree1.denot.isOverloaded then // for overloaded trees: resolve overloading before simplifying if !tree1.tpe.widen.isInstanceOf[MethodOrPoly] // wait with simplifying until method is fully applied || tree1.isDef // ... unless tree is a definition diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index adaf6c035406..8c7bead36633 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -32,7 +32,7 @@ type CmpWeak[X] = X => Boolean type Comparer2Weak = [X: Ord] => X => CmpWeak[X] val less4_0: [X: Ord] => X => X => Boolean = [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 -val less4: Comparer2Weak = +val less4_1: Comparer2Weak = [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 val less5 = [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 From 0af839783a356e2d42133585f6f63759d6df156c Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Tue, 8 Oct 2024 10:51:53 +0200 Subject: [PATCH 781/827] Add more aliases tests for context bounds with poly functions --- tests/pos/contextbounds-for-poly-functions.scala | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 8c7bead36633..a3b79043c01a 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -24,6 +24,12 @@ type ComparerRef = [X] => (x: X, y: X) => Ord[X] ?=> Boolean type Comparer = [X: Ord] => (x: X, y: X) => Boolean val less3: Comparer = [X: Ord as ord] => (x: X, y: X) => ord.compare(x, y) < 0 +type CmpRest[X] = X => Boolean +type CmpMid[X] = X => CmpRest[X] +type Cmp3 = [X: Ord] => X => CmpMid[X] +val lessCmp3: Cmp3 = [X: Ord] => (x: X) => (y: X) => (z: X) => summon[Ord[X]].compare(x, y) < 0 +val lessCmp3_1: Cmp3 = [X: Ord as ord] => (x: X) => (y: X) => (z: X) => ord.compare(x, y) < 0 + // type Cmp[X] = (x: X, y: X) => Boolean // type Comparer2 = [X: Ord] => Cmp[X] // val less4: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 From 9c66069d330423f46d340c99f630ef8c375dae1d Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Mon, 14 Oct 2024 12:18:25 +0200 Subject: [PATCH 782/827] Bring back the restriction for requiring value parameters in poly function type definitions --- .../src/dotty/tools/dotc/ast/Desugar.scala | 2 - .../src/dotty/tools/dotc/typer/Typer.scala | 47 +++++-------------- 2 files changed, 13 insertions(+), 36 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 1d2fd32fe103..e0a906e06dcc 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -514,7 +514,6 @@ object desugar { case Nil 
=> params :: Nil - // TODO(kπ) is this enough? SHould this be a TreeTraverse-thing? def pushDownEvidenceParams(tree: Tree): Tree = tree match case Function(params, body) => cpy.Function(tree)(params, pushDownEvidenceParams(body)) @@ -527,7 +526,6 @@ object desugar { makeContextualFunction(paramTpts, paramNames, tree, paramsErased).withSpan(tree.span) if meth.hasAttachment(PolyFunctionApply) then - // meth.removeAttachment(PolyFunctionApply) if ctx.mode.is(Mode.Type) then cpy.DefDef(meth)(tpt = meth.tpt.withAttachment(PolyFunctionApply, params)) else diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2951452e44f9..076fc2e4369f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3596,6 +3596,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } + /** Push down the deferred evidence parameters up until the result type is not + * a method type, poly type or a function type + */ private def pushDownDeferredEvidenceParams(tpe: Type, params: List[untpd.ValDef], span: Span)(using Context): Type = tpe.dealias match { case tpe: MethodType => tpe.derivedLambdaType(tpe.paramNames, tpe.paramInfos, pushDownDeferredEvidenceParams(tpe.resultType, params, span)) @@ -3617,46 +3620,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typed(ctxFunction).tpe } - private def extractTopMethodTermParams(tpe: Type)(using Context): (List[TermName], List[Type]) = tpe match { - case tpe: MethodType => - tpe.paramNames -> tpe.paramInfos - case tpe: RefinedType if defn.isFunctionType(tpe.parent) => - extractTopMethodTermParams(tpe.refinedInfo) - case _ => - Nil -> Nil - } - - private def removeTopMethodTermParams(tpe: Type)(using Context): Type = tpe match { - case tpe: MethodType => - tpe.resultType - case tpe: RefinedType if defn.isFunctionType(tpe.parent) => - tpe.derivedRefinedType(tpe.parent, tpe.refinedName, removeTopMethodTermParams(tpe.refinedInfo)) - case tpe: AppliedType if defn.isFunctionType(tpe) => - tpe.args.last - case _ => - tpe - } - - private def healToPolyFunctionType(tree: Tree)(using Context): Tree = tree match { - case defdef: DefDef if defdef.name == nme.apply && defdef.paramss.forall(_.forall(_.symbol.flags.is(TypeParam))) && defdef.paramss.size == 1 => - val (names, types) = extractTopMethodTermParams(defdef.tpt.tpe) - val newTpe = removeTopMethodTermParams(defdef.tpt.tpe) - val newParams = names.lazyZip(types).map((name, tpe) => SyntheticValDef(name, TypeTree(tpe), flags = SyntheticTermParam)) - val newDefDef = cpy.DefDef(defdef)(paramss = defdef.paramss ++ List(newParams), tpt = untpd.TypeTree(newTpe)) - val nestedCtx = ctx.fresh.setNewTyperState() - typed(newDefDef)(using nestedCtx) - case _ => tree - } - + /** If the tree has a `PolyFunctionApply` attachment, add the deferred + * evidence parameters as the last argument list before the result type. 
This + * follows aliases, so the following two types will be expanded to (up to the + * context bound encoding): + * type CmpWeak[X] = X => Boolean + * type Comparer2Weak = [X: Ord] => X => CmpWeak[X] + * ===> + * type CmpWeak[X] = X => Boolean type Comparer2Weak = [X] => X => X ?=> + * Ord[X] => Boolean + */ private def addDeferredEvidenceParams(tree: Tree, pt: Type)(using Context): (Tree, Type) = { tree.getAttachment(desugar.PolyFunctionApply) match case Some(params) if params.nonEmpty => tree.removeAttachment(desugar.PolyFunctionApply) val tpe = pushDownDeferredEvidenceParams(tree.tpe, params, tree.span) TypeTree(tpe).withSpan(tree.span) -> tpe - // case Some(params) if params.isEmpty => - // println(s"tree: $tree") - // healToPolyFunctionType(tree) -> pt case _ => tree -> pt } From ec6d7effc62129508c68f0337d9c854f94390645 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 18 Oct 2024 10:10:06 +0200 Subject: [PATCH 783/827] Cleanup dead code --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 4 ---- 1 file changed, 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 076fc2e4369f..4ae706fd7b0c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1720,10 +1720,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) else val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) - // val args1 = args.mapConserve { - // case cb: untpd.ContextBoundTypeTree => typed(cb) - // case t => t - // } val result = typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt) // if there are any erased classes, we need to re-do the typecheck. result match From dfa92409a1f6ed9ea0226c145ef1f383e67c43bd Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Thu, 14 Nov 2024 11:25:08 +0100 Subject: [PATCH 784/827] Reuse addEvidenceParams logic, but no aliases --- .../src/dotty/tools/dotc/ast/Desugar.scala | 89 +++++++++++---- .../src/dotty/tools/dotc/typer/Typer.scala | 101 +++++++++--------- .../contextbounds-for-poly-functions.scala | 33 ++++-- 3 files changed, 147 insertions(+), 76 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index e0a906e06dcc..d82150d8f9da 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -22,6 +22,7 @@ import parsing.Parsers import scala.annotation.internal.sharable import scala.annotation.threadUnsafe +import dotty.tools.dotc.quoted.QuoteUtils.treeOwner object desugar { import untpd.* @@ -52,8 +53,12 @@ object desugar { */ val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() - /** An attachment key to indicate that a DefDef is a poly function apply - * method definition. + /** When first desugaring a PolyFunction, this attachment is added to the + * PolyFunction `apply` method with an empty list value. + * + * Afterwards, the attachment is added to poly function type trees, with the + * list of their context bounds. 
+ * //TODO(kπ) see if it has to be updated */ val PolyFunctionApply: Property.Key[List[ValDef]] = Property.StickyKey() @@ -497,9 +502,9 @@ object desugar { case Ident(name: TermName) => boundNames.contains(name) case _ => false - def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match + def recur(mparamss: List[ParamClause]): (List[ParamClause], List[ParamClause]) = mparamss match case ValDefs(mparams) :: _ if mparams.exists(referencesBoundName) => - params :: mparamss + (params :: Nil) -> mparamss case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => val normParams = if params.head.mods.flags.is(Given) != mparam.mods.flags.is(Given) then @@ -508,30 +513,71 @@ object desugar { param.withMods(param.mods.withFlags(normFlags)) .showing(i"adapted param $result ${result.mods.flags} for ${meth.name}", Printers.desugar) else params - (normParams ++ mparams) :: Nil + ((normParams ++ mparams) :: Nil) -> Nil case mparams :: mparamss1 => - mparams :: recur(mparamss1) + val (fst, snd) = recur(mparamss1) + (mparams :: fst) -> snd case Nil => - params :: Nil - - def pushDownEvidenceParams(tree: Tree): Tree = tree match - case Function(params, body) => - cpy.Function(tree)(params, pushDownEvidenceParams(body)) - case Block(stats, expr) => - cpy.Block(tree)(stats, pushDownEvidenceParams(expr)) - case tree => + Nil -> (params :: Nil) + + // def pushDownEvidenceParams(tree: Tree): Tree = tree match + // case Function(mparams, body) if mparams.collect { case v: ValDef => v }.exists(referencesBoundName) => + // ctxFunctionWithParams(tree) + // case Function(mparams, body) => + // cpy.Function(tree)(mparams, pushDownEvidenceParams(body)) + // case Block(stats, expr) => + // cpy.Block(tree)(stats, pushDownEvidenceParams(expr)) + // case tree => + // ctxFunctionWithParams(tree) + + // def ctxFunctionWithParams(tree: Tree): Tree = + // val paramTpts = params.map(_.tpt) + // val paramNames = params.map(_.name) + // val paramsErased = params.map(_.mods.flags.is(Erased)) + // Function(params, tree).withSpan(tree.span).withAttachmentsFrom(tree) + + def functionsOf(paramss: List[ParamClause], rhs: Tree): Tree = paramss match + case Nil => rhs + case ValDefs(head @ (fst :: _)) :: rest if fst.mods.isOneOf(GivenOrImplicit) => val paramTpts = params.map(_.tpt) val paramNames = params.map(_.name) val paramsErased = params.map(_.mods.flags.is(Erased)) - makeContextualFunction(paramTpts, paramNames, tree, paramsErased).withSpan(tree.span) + makeContextualFunction(paramTpts, paramNames, functionsOf(rest, rhs), paramsErased).withSpan(rhs.span) + case head :: rest => + Function(head, functionsOf(rest, rhs)) if meth.hasAttachment(PolyFunctionApply) then - if ctx.mode.is(Mode.Type) then - cpy.DefDef(meth)(tpt = meth.tpt.withAttachment(PolyFunctionApply, params)) - else - cpy.DefDef(meth)(rhs = pushDownEvidenceParams(meth.rhs)) + println(i"${recur(meth.paramss)}") + recur(meth.paramss) match + case (paramsFst, Nil) => + cpy.DefDef(meth)(paramss = paramsFst) + case (paramsFst, paramsSnd) => + if ctx.mode.is(Mode.Type) then + cpy.DefDef(meth)(paramss = paramsFst, tpt = functionsOf(paramsSnd, meth.tpt)) + else + cpy.DefDef(meth)(paramss = paramsFst, rhs = functionsOf(paramsSnd, meth.rhs)) + + // if ctx.mode.is(Mode.Type) then + // meth.removeAttachment(PolyFunctionApply) + // // should be kept on meth to see the current param types? 
+ // meth.tpt.putAttachment(PolyFunctionApply, params) + // val newParamss = recur(meth.paramss) + // println(i"added PolyFunctionApply to ${meth.name}.tpt: ${meth.tpt} with $params") + // println(i"new paramss: $newParamss") + // meth + // else + // val newParamss = recur(meth.paramss) + // println(i"added PolyFunctionApply to ${meth.name} with $params") + // println(i"new paramss: $newParamss") + // val DefDef(_, mparamss, _ , _) = meth: @unchecked + // val tparams :: ValDefs(vparams) :: Nil = mparamss: @unchecked + // if vparams.exists(referencesBoundName) then + // cpy.DefDef(meth)(paramss = tparams :: params :: Nil, rhs = Function(vparams, meth.rhs)) + // else + // cpy.DefDef(meth)(rhs = pushDownEvidenceParams(meth.rhs)) else - cpy.DefDef(meth)(paramss = recur(meth.paramss)) + val (paramsFst, paramsSnd) = recur(meth.paramss) + cpy.DefDef(meth)(paramss = paramsFst ++ paramsSnd) end addEvidenceParams /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ @@ -1265,17 +1311,20 @@ object desugar { case ((p, paramFlags), n) => makeSyntheticParameter(n + 1, p).withAddedFlags(paramFlags) }.toList + vparams.foreach(p => println(i" $p, ${p.mods.flags.flagsString}")) RefinedTypeTree(ref(defn.PolyFunctionType), List( DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree) .withFlags(Synthetic) .withAttachment(PolyFunctionApply, List.empty) )).withSpan(tree.span) + .withAttachment(PolyFunctionApply, tree.attachmentOrElse(PolyFunctionApply, List.empty)) case PolyFunction(tparams: List[untpd.TypeDef] @unchecked, res) => RefinedTypeTree(ref(defn.PolyFunctionType), List( DefDef(nme.apply, tparams :: Nil, res, EmptyTree) .withFlags(Synthetic) .withAttachment(PolyFunctionApply, List.empty) )).withSpan(tree.span) + .withAttachment(PolyFunctionApply, tree.attachmentOrElse(PolyFunctionApply, List.empty)) end makePolyFunctionType /** Invent a name for an anonympus given of type or template `impl`. 
*/ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 4ae706fd7b0c..cfa921f500a2 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3592,61 +3592,62 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } - /** Push down the deferred evidence parameters up until the result type is not - * a method type, poly type or a function type - */ - private def pushDownDeferredEvidenceParams(tpe: Type, params: List[untpd.ValDef], span: Span)(using Context): Type = tpe.dealias match { - case tpe: MethodType => - tpe.derivedLambdaType(tpe.paramNames, tpe.paramInfos, pushDownDeferredEvidenceParams(tpe.resultType, params, span)) - case tpe: PolyType => - tpe.derivedLambdaType(tpe.paramNames, tpe.paramInfos, pushDownDeferredEvidenceParams(tpe.resultType, params, span)) - case tpe: RefinedType => - tpe.derivedRefinedType( - pushDownDeferredEvidenceParams(tpe.parent, params, span), - tpe.refinedName, - pushDownDeferredEvidenceParams(tpe.refinedInfo, params, span) - ) - case tpe @ AppliedType(tycon, args) if defn.isFunctionType(tpe) && args.size > 1 => - tpe.derivedAppliedType(tycon, args.init :+ pushDownDeferredEvidenceParams(args.last, params, span)) - case tpe => - val paramNames = params.map(_.name) - val paramTpts = params.map(_.tpt) - val paramsErased = params.map(_.mods.flags.is(Erased)) - val ctxFunction = desugar.makeContextualFunction(paramTpts, paramNames, untpd.TypedSplice(TypeTree(tpe.dealias)), paramsErased).withSpan(span) - typed(ctxFunction).tpe - } - - /** If the tree has a `PolyFunctionApply` attachment, add the deferred - * evidence parameters as the last argument list before the result type. This - * follows aliases, so the following two types will be expanded to (up to the - * context bound encoding): - * type CmpWeak[X] = X => Boolean - * type Comparer2Weak = [X: Ord] => X => CmpWeak[X] - * ===> - * type CmpWeak[X] = X => Boolean type Comparer2Weak = [X] => X => X ?=> - * Ord[X] => Boolean - */ - private def addDeferredEvidenceParams(tree: Tree, pt: Type)(using Context): (Tree, Type) = { - tree.getAttachment(desugar.PolyFunctionApply) match - case Some(params) if params.nonEmpty => - tree.removeAttachment(desugar.PolyFunctionApply) - val tpe = pushDownDeferredEvidenceParams(tree.tpe, params, tree.span) - TypeTree(tpe).withSpan(tree.span) -> tpe - case _ => tree -> pt - } + // /** Push down the deferred evidence parameters up until the result type is not + // * a method type, poly type or a function type + // */ + // private def pushDownDeferredEvidenceParams(tpe: Type, params: List[untpd.ValDef], span: Span)(using Context): Type = + // tpe.dealias match { + // case tpe if tpe.baseClasses.contains(defn.PolyFunctionClass) => + // attachEvidenceParams(tpe, params, span) + // case tpe: MethodType => + // tpe.derivedLambdaType(tpe.paramNames, tpe.paramInfos, pushDownDeferredEvidenceParams(tpe.resultType, params, span)) + // case tpe @ AppliedType(tycon, args) if defn.isFunctionType(tpe) && args.size > 1 => + // tpe.derivedAppliedType(tycon, args.init :+ pushDownDeferredEvidenceParams(args.last, params, span)) + // case tpe => + // attachEvidenceParams(tpe, params, span) + // } + + // /** (params) ?=> tpe */ + // private def attachEvidenceParams(tpe: Type, params: List[untpd.ValDef], span: Span)(using Context): Type = + // val paramNames = params.map(_.name) + // val paramTpts = params.map(_.tpt) + // val paramsErased = 
params.map(_.mods.flags.is(Erased)) + // val ctxFunction = desugar.makeContextualFunction(paramTpts, paramNames, untpd.TypedSplice(TypeTree(tpe.dealias)), paramsErased).withSpan(span) + // typed(ctxFunction).tpe + + // /** If the tree has a `PolyFunctionApply` attachment, add the deferred + // * evidence parameters as the last argument list before the result type or a next poly type. + // * This follows aliases, so the following two types will be expanded to (up to the + // * context bound encoding): + // * type CmpWeak[X] = X => Boolean + // * type Comparer2Weak = [X: Ord] => X => CmpWeak[X] + // * ===> + // * type CmpWeak[X] = X => Boolean type Comparer2Weak = [X] => X => X ?=> + // * Ord[X] => Boolean + // */ + // private def addDeferredEvidenceParams(tree: Tree, pt: Type)(using Context): (Tree, Type) = { + // tree.getAttachment(desugar.PolyFunctionApply) match + // case Some(params) if params.nonEmpty => + // tree.putAttachment(desugar.PolyFunctionApply, Nil) + // val tpe = pushDownDeferredEvidenceParams(tree.tpe, params, tree.span) + // TypeTree(tpe).withSpan(tree.span) -> tpe + // case Some(params) => + // tree -> pt + // case _ => tree -> pt + // } /** Interpolate and simplify the type of the given tree. */ protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = - val (tree1, pt1) = addDeferredEvidenceParams(tree, pt) - if !tree1.denot.isOverloaded then // for overloaded trees: resolve overloading before simplifying - if !tree1.tpe.widen.isInstanceOf[MethodOrPoly] // wait with simplifying until method is fully applied - || tree1.isDef // ... unless tree is a definition + // val (tree1, pt1) = addDeferredEvidenceParams(tree, pt) + if !tree.denot.isOverloaded then // for overloaded trees: resolve overloading before simplifying + if !tree.tpe.widen.isInstanceOf[MethodOrPoly] // wait with simplifying until method is fully applied + || tree.isDef // ... unless tree is a definition then - interpolateTypeVars(tree1, pt1, locked) - val simplified = tree1.tpe.simplified - if !MatchType.thatReducesUsingGadt(tree1.tpe) then // needs a GADT cast. i15743 + interpolateTypeVars(tree, pt, locked) + val simplified = tree.tpe.simplified + if !MatchType.thatReducesUsingGadt(tree.tpe) then // needs a GADT cast. i15743 tree.overwriteType(simplified) - tree1 + tree protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = { val defn.FunctionOf(formals, _, true) = pt.dropDependentRefinement: @unchecked diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index a3b79043c01a..44eb978b6c52 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -3,6 +3,7 @@ import scala.language.future trait Ord[X]: def compare(x: X, y: X): Int + type T trait Show[X]: def show(x: X): String @@ -11,6 +12,8 @@ val less0: [X: Ord] => (X, X) => Boolean = ??? 
val less1 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 +type PolyTest1 = [X] => X => Ord[X] ?=> Boolean + val less1_type_test: [X: Ord] => (X, X) => Boolean = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 @@ -34,12 +37,12 @@ val lessCmp3_1: Cmp3 = [X: Ord as ord] => (x: X) => (y: X) => (z: X) => ord.comp // type Comparer2 = [X: Ord] => Cmp[X] // val less4: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 -type CmpWeak[X] = X => Boolean -type Comparer2Weak = [X: Ord] => X => CmpWeak[X] -val less4_0: [X: Ord] => X => X => Boolean = - [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 -val less4_1: Comparer2Weak = - [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 +// type CmpWeak[X] = X => Boolean +// type Comparer2Weak = [X: Ord] => X => CmpWeak[X] +// val less4_0: [X: Ord] => X => X => Boolean = +// [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 +// val less4_1: Comparer2Weak = +// [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 val less5 = [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 @@ -65,3 +68,21 @@ val less9 = [X: {Ord as ord, Show as show}] => (x: X, y: X) => ord.compare(x, y) val less9_type_test: [X: {Ord as ord, Show as show}] => (X, X) => Boolean = [X: {Ord as ord, Show as show}] => (x: X, y: X) => ord.compare(x, y) < 0 + +type CmpNested = [X: Ord] => X => [Y: Ord] => Y => Boolean +val less10: CmpNested = [X: Ord] => (x: X) => [Y: Ord] => (y: Y) => true +val less10Explicit: CmpNested = [X] => (x: X) => (ordx: Ord[X]) ?=> [Y] => (y: Y) => (ordy: Ord[Y]) ?=> true + +// type CmpAlias[X] = X => Boolean +// type CmpNestedAliased = [X: Ord] => X => [Y] => Y => CmpAlias[Y] + +// val less11: CmpNestedAliased = [X: Ord] => (x: X) => [Y] => (y: Y) => (y1: Y) => true +// val less11Explicit: CmpNestedAliased = [X] => (x: X) => (ordx: Ord[X]) ?=> [Y] => (y: Y) => (y1: Y) => true + +val notationalExample: [X: Ord] => X => [Y: Ord] => Y => Int = + [X] => (x: X) => (ordx: Ord[X]) ?=> [Y] => (y: Y) => (ordy: Ord[Y]) ?=> 1 + +val namedConstraintRef = [X: {Ord as ord}] => (x: ord.T) => x +type DependentCmp = [X: {Ord as ord}] => ord.T => Boolean +type DependentCmp1 = [X: {Ord as ord}] => (ord.T, Int) => ord.T => Boolean +val dependentCmp: DependentCmp = [X: {Ord as ord}] => (x: ord.T) => true From 7755e3bc166637f6753df2d2f74b127dc97b35f8 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Thu, 14 Nov 2024 11:46:12 +0100 Subject: [PATCH 785/827] Cleanup context bounds for poly functions implementation, make the implementation consistent with addEvidenceParams --- .../src/dotty/tools/dotc/ast/Desugar.scala | 79 ++++--------------- .../src/dotty/tools/dotc/typer/Typer.scala | 45 ----------- .../contextbounds-for-poly-functions.scala | 20 ++--- 3 files changed, 26 insertions(+), 118 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index d82150d8f9da..768d598987f0 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -53,14 +53,9 @@ object desugar { */ val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() - /** When first desugaring a PolyFunction, this attachment is added to the - * PolyFunction `apply` method with an empty list value. - * - * Afterwards, the attachment is added to poly function type trees, with the - * list of their context bounds. 
- * //TODO(kπ) see if it has to be updated + /** Marks a poly fcuntion apply method, so that we can handle adding evidence parameters to them in a special way */ - val PolyFunctionApply: Property.Key[List[ValDef]] = Property.StickyKey() + val PolyFunctionApply: Property.Key[Unit] = Property.StickyKey() /** What static check should be applied to a Match? */ enum MatchCheck { @@ -520,22 +515,6 @@ object desugar { case Nil => Nil -> (params :: Nil) - // def pushDownEvidenceParams(tree: Tree): Tree = tree match - // case Function(mparams, body) if mparams.collect { case v: ValDef => v }.exists(referencesBoundName) => - // ctxFunctionWithParams(tree) - // case Function(mparams, body) => - // cpy.Function(tree)(mparams, pushDownEvidenceParams(body)) - // case Block(stats, expr) => - // cpy.Block(tree)(stats, pushDownEvidenceParams(expr)) - // case tree => - // ctxFunctionWithParams(tree) - - // def ctxFunctionWithParams(tree: Tree): Tree = - // val paramTpts = params.map(_.tpt) - // val paramNames = params.map(_.name) - // val paramsErased = params.map(_.mods.flags.is(Erased)) - // Function(params, tree).withSpan(tree.span).withAttachmentsFrom(tree) - def functionsOf(paramss: List[ParamClause], rhs: Tree): Tree = paramss match case Nil => rhs case ValDefs(head @ (fst :: _)) :: rest if fst.mods.isOneOf(GivenOrImplicit) => @@ -543,38 +522,21 @@ object desugar { val paramNames = params.map(_.name) val paramsErased = params.map(_.mods.flags.is(Erased)) makeContextualFunction(paramTpts, paramNames, functionsOf(rest, rhs), paramsErased).withSpan(rhs.span) - case head :: rest => + case ValDefs(head) :: rest => Function(head, functionsOf(rest, rhs)) + case head :: _ => + assert(false, i"unexpected type parameters when adding evidence parameters to $meth") + EmptyTree if meth.hasAttachment(PolyFunctionApply) then - println(i"${recur(meth.paramss)}") - recur(meth.paramss) match - case (paramsFst, Nil) => - cpy.DefDef(meth)(paramss = paramsFst) - case (paramsFst, paramsSnd) => - if ctx.mode.is(Mode.Type) then - cpy.DefDef(meth)(paramss = paramsFst, tpt = functionsOf(paramsSnd, meth.tpt)) - else - cpy.DefDef(meth)(paramss = paramsFst, rhs = functionsOf(paramsSnd, meth.rhs)) - - // if ctx.mode.is(Mode.Type) then - // meth.removeAttachment(PolyFunctionApply) - // // should be kept on meth to see the current param types? 
- // meth.tpt.putAttachment(PolyFunctionApply, params) - // val newParamss = recur(meth.paramss) - // println(i"added PolyFunctionApply to ${meth.name}.tpt: ${meth.tpt} with $params") - // println(i"new paramss: $newParamss") - // meth - // else - // val newParamss = recur(meth.paramss) - // println(i"added PolyFunctionApply to ${meth.name} with $params") - // println(i"new paramss: $newParamss") - // val DefDef(_, mparamss, _ , _) = meth: @unchecked - // val tparams :: ValDefs(vparams) :: Nil = mparamss: @unchecked - // if vparams.exists(referencesBoundName) then - // cpy.DefDef(meth)(paramss = tparams :: params :: Nil, rhs = Function(vparams, meth.rhs)) - // else - // cpy.DefDef(meth)(rhs = pushDownEvidenceParams(meth.rhs)) + meth.removeAttachment(PolyFunctionApply) + // for PolyFunctions we are limited to a single term param list, so we reuse the recur logic to compute the new parameter lists + // and then we add the other parameter lists as function types to the return type + val (paramsFst, paramsSnd) = recur(meth.paramss) + if ctx.mode.is(Mode.Type) then + cpy.DefDef(meth)(paramss = paramsFst, tpt = functionsOf(paramsSnd, meth.tpt)) + else + cpy.DefDef(meth)(paramss = paramsFst, rhs = functionsOf(paramsSnd, meth.rhs)) else val (paramsFst, paramsSnd) = recur(meth.paramss) cpy.DefDef(meth)(paramss = paramsFst ++ paramsSnd) @@ -1293,7 +1255,7 @@ object desugar { /** Desugar [T_1, ..., T_M] => (P_1, ..., P_N) => R * Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } */ - def makePolyFunctionType(tree: PolyFunction)(using Context): RefinedTypeTree = tree match + def makePolyFunctionType(tree: PolyFunction)(using Context): RefinedTypeTree = (tree: @unchecked) match case PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ untpd.Function(vparamTypes, res)) => val paramFlags = fun match case fun: FunctionWithMods => @@ -1311,20 +1273,11 @@ object desugar { case ((p, paramFlags), n) => makeSyntheticParameter(n + 1, p).withAddedFlags(paramFlags) }.toList - vparams.foreach(p => println(i" $p, ${p.mods.flags.flagsString}")) RefinedTypeTree(ref(defn.PolyFunctionType), List( DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree) .withFlags(Synthetic) - .withAttachment(PolyFunctionApply, List.empty) - )).withSpan(tree.span) - .withAttachment(PolyFunctionApply, tree.attachmentOrElse(PolyFunctionApply, List.empty)) - case PolyFunction(tparams: List[untpd.TypeDef] @unchecked, res) => - RefinedTypeTree(ref(defn.PolyFunctionType), List( - DefDef(nme.apply, tparams :: Nil, res, EmptyTree) - .withFlags(Synthetic) - .withAttachment(PolyFunctionApply, List.empty) + .withAttachment(PolyFunctionApply, ()) )).withSpan(tree.span) - .withAttachment(PolyFunctionApply, tree.attachmentOrElse(PolyFunctionApply, List.empty)) end makePolyFunctionType /** Invent a name for an anonympus given of type or template `impl`. 
*/ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index cfa921f500a2..f7610520f61c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3592,53 +3592,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } - // /** Push down the deferred evidence parameters up until the result type is not - // * a method type, poly type or a function type - // */ - // private def pushDownDeferredEvidenceParams(tpe: Type, params: List[untpd.ValDef], span: Span)(using Context): Type = - // tpe.dealias match { - // case tpe if tpe.baseClasses.contains(defn.PolyFunctionClass) => - // attachEvidenceParams(tpe, params, span) - // case tpe: MethodType => - // tpe.derivedLambdaType(tpe.paramNames, tpe.paramInfos, pushDownDeferredEvidenceParams(tpe.resultType, params, span)) - // case tpe @ AppliedType(tycon, args) if defn.isFunctionType(tpe) && args.size > 1 => - // tpe.derivedAppliedType(tycon, args.init :+ pushDownDeferredEvidenceParams(args.last, params, span)) - // case tpe => - // attachEvidenceParams(tpe, params, span) - // } - - // /** (params) ?=> tpe */ - // private def attachEvidenceParams(tpe: Type, params: List[untpd.ValDef], span: Span)(using Context): Type = - // val paramNames = params.map(_.name) - // val paramTpts = params.map(_.tpt) - // val paramsErased = params.map(_.mods.flags.is(Erased)) - // val ctxFunction = desugar.makeContextualFunction(paramTpts, paramNames, untpd.TypedSplice(TypeTree(tpe.dealias)), paramsErased).withSpan(span) - // typed(ctxFunction).tpe - - // /** If the tree has a `PolyFunctionApply` attachment, add the deferred - // * evidence parameters as the last argument list before the result type or a next poly type. - // * This follows aliases, so the following two types will be expanded to (up to the - // * context bound encoding): - // * type CmpWeak[X] = X => Boolean - // * type Comparer2Weak = [X: Ord] => X => CmpWeak[X] - // * ===> - // * type CmpWeak[X] = X => Boolean type Comparer2Weak = [X] => X => X ?=> - // * Ord[X] => Boolean - // */ - // private def addDeferredEvidenceParams(tree: Tree, pt: Type)(using Context): (Tree, Type) = { - // tree.getAttachment(desugar.PolyFunctionApply) match - // case Some(params) if params.nonEmpty => - // tree.putAttachment(desugar.PolyFunctionApply, Nil) - // val tpe = pushDownDeferredEvidenceParams(tree.tpe, params, tree.span) - // TypeTree(tpe).withSpan(tree.span) -> tpe - // case Some(params) => - // tree -> pt - // case _ => tree -> pt - // } - /** Interpolate and simplify the type of the given tree. */ protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = - // val (tree1, pt1) = addDeferredEvidenceParams(tree, pt) if !tree.denot.isOverloaded then // for overloaded trees: resolve overloading before simplifying if !tree.tpe.widen.isInstanceOf[MethodOrPoly] // wait with simplifying until method is fully applied || tree.isDef // ... 
unless tree is a definition diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 44eb978b6c52..6fadcda2b43e 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -37,12 +37,12 @@ val lessCmp3_1: Cmp3 = [X: Ord as ord] => (x: X) => (y: X) => (z: X) => ord.comp // type Comparer2 = [X: Ord] => Cmp[X] // val less4: Comparer2 = [X: Ord] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 -// type CmpWeak[X] = X => Boolean -// type Comparer2Weak = [X: Ord] => X => CmpWeak[X] -// val less4_0: [X: Ord] => X => X => Boolean = -// [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 -// val less4_1: Comparer2Weak = -// [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 +type CmpWeak[X] = X => Boolean +type Comparer2Weak = [X: Ord] => X => CmpWeak[X] +val less4_0: [X: Ord] => X => X => Boolean = + [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 +val less4_1: Comparer2Weak = + [X: Ord] => (x: X) => (y: X) => summon[Ord[X]].compare(x, y) < 0 val less5 = [X: [X] =>> Ord[X]] => (x: X, y: X) => summon[Ord[X]].compare(x, y) < 0 @@ -73,11 +73,11 @@ type CmpNested = [X: Ord] => X => [Y: Ord] => Y => Boolean val less10: CmpNested = [X: Ord] => (x: X) => [Y: Ord] => (y: Y) => true val less10Explicit: CmpNested = [X] => (x: X) => (ordx: Ord[X]) ?=> [Y] => (y: Y) => (ordy: Ord[Y]) ?=> true -// type CmpAlias[X] = X => Boolean -// type CmpNestedAliased = [X: Ord] => X => [Y] => Y => CmpAlias[Y] +type CmpAlias[X] = X => Boolean +type CmpNestedAliased = [X: Ord] => X => [Y] => Y => CmpAlias[Y] -// val less11: CmpNestedAliased = [X: Ord] => (x: X) => [Y] => (y: Y) => (y1: Y) => true -// val less11Explicit: CmpNestedAliased = [X] => (x: X) => (ordx: Ord[X]) ?=> [Y] => (y: Y) => (y1: Y) => true +val less11: CmpNestedAliased = [X: Ord] => (x: X) => [Y] => (y: Y) => (y1: Y) => true +val less11Explicit: CmpNestedAliased = [X] => (x: X) => (ordx: Ord[X]) ?=> [Y] => (y: Y) => (y1: Y) => true val notationalExample: [X: Ord] => X => [Y: Ord] => Y => Int = [X] => (x: X) => (ordx: Ord[X]) ?=> [Y] => (y: Y) => (ordy: Ord[Y]) ?=> 1 From 24e3fa0fe7810b1bc5db0560e7ff7f047e504603 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Thu, 14 Nov 2024 14:14:03 +0100 Subject: [PATCH 786/827] More cleanup of poly context bound desugaring --- .../src/dotty/tools/dotc/ast/Desugar.scala | 133 ++++++++++-------- .../src/dotty/tools/dotc/typer/Typer.scala | 4 +- .../contextbounds-for-poly-functions.scala | 5 + 3 files changed, 83 insertions(+), 59 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 768d598987f0..0bf3ba71b84d 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -247,7 +247,7 @@ object desugar { * def f$default$2[T](x: Int) = x + "m" */ private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = - addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) + addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor).asInstanceOf[DefDef]) /** Drop context bounds in given TypeDef, replacing them with evidence ValDefs that * get added to a buffer. 
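// Sketch of the encoding the desugaring changes below target (mirroring the explicit
// forms already present in tests/pos/contextbounds-for-poly-functions.scala): a context
// bound on a polymorphic function is materialised as a trailing contextual evidence
// parameter after the term parameter list, so that, roughly,
//   type CmpNested = [X: Ord] => X => [Y: Ord] => Y => Boolean
// admits the fully explicit value
//   val less10Explicit: CmpNested =
//     [X] => (x: X) => (ordx: Ord[X]) ?=> [Y] => (y: Y) => (ordy: Ord[Y]) ?=> true
// This is an illustrative correspondence taken from that test, not additional patch content.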
@@ -309,10 +309,8 @@ object desugar { tdef1 end desugarContextBounds - private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = - val DefDef(_, paramss, tpt, rhs) = meth + def elimContextBounds(meth: Tree, isPrimaryConstructor: Boolean = false)(using Context): Tree = val evidenceParamBuf = mutable.ListBuffer[ValDef]() - var seenContextBounds: Int = 0 def freshName(unused: Tree) = seenContextBounds += 1 // Start at 1 like FreshNameCreator. @@ -322,7 +320,7 @@ object desugar { // parameters of the method since shadowing does not affect // implicit resolution in Scala 3. - val paramssNoContextBounds = + def paramssNoContextBounds(paramss: List[ParamClause]): List[ParamClause] = val iflag = paramss.lastOption.flatMap(_.headOption) match case Some(param) if param.mods.isOneOf(GivenOrImplicit) => param.mods.flags & GivenOrImplicit @@ -334,16 +332,29 @@ object desugar { tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) }(identity) - rhs match - case MacroTree(call) => - cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) - case _ => - addEvidenceParams( - cpy.DefDef(meth)( - name = normalizeName(meth, tpt).asTermName, - paramss = paramssNoContextBounds), - evidenceParamBuf.toList - ) + meth match + case meth @ DefDef(_, paramss, tpt, rhs) => + val newParamss = paramssNoContextBounds(paramss) + rhs match + case MacroTree(call) => + cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) + case _ => + addEvidenceParams( + cpy.DefDef(meth)( + name = normalizeName(meth, tpt).asTermName, + paramss = newParamss + ), + evidenceParamBuf.toList + ) + case meth @ PolyFunction(tparams, fun) => + val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun) = meth: @unchecked + val Function(vparams: List[untpd.ValDef] @unchecked, rhs) = fun: @unchecked + val newParamss = paramssNoContextBounds(tparams :: vparams :: Nil) + val params = evidenceParamBuf.toList + val boundNames = getBoundNames(params, newParamss) + val recur = fitEvidenceParams(params, nme.apply, boundNames) + val (paramsFst, paramsSnd) = recur(newParamss) + functionsOf((paramsFst ++ paramsSnd).filter(_.nonEmpty), rhs) end elimContextBounds def addDefaultGetters(meth: DefDef)(using Context): Tree = @@ -471,6 +482,55 @@ object desugar { case _ => (Nil, tree) + private def referencesName(vdef: ValDef, names: Set[TermName])(using Context): Boolean = + vdef.tpt.existsSubTree: + case Ident(name: TermName) => names.contains(name) + case _ => false + + /** Fit evidence `params` into the `mparamss` parameter lists */ + private def fitEvidenceParams(params: List[ValDef], methName: Name, boundNames: Set[TermName])(mparamss: List[ParamClause])(using Context): (List[ParamClause], List[ParamClause]) = mparamss match + case ValDefs(mparams) :: _ if mparams.exists(referencesName(_, boundNames)) => + (params :: Nil) -> mparamss + case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => + val normParams = + if params.head.mods.flags.is(Given) != mparam.mods.flags.is(Given) then + params.map: param => + val normFlags = param.mods.flags &~ GivenOrImplicit | (mparam.mods.flags & (GivenOrImplicit)) + param.withMods(param.mods.withFlags(normFlags)) + .showing(i"adapted param $result ${result.mods.flags} for ${methName}", Printers.desugar) + else params + ((normParams ++ mparams) :: Nil) -> Nil + case mparams :: mparamss1 => + val (fst, snd) = fitEvidenceParams(params, methName, boundNames)(mparamss1) + (mparams :: fst) -> snd + case 
Nil => + Nil -> (params :: Nil) + + /** Create a chain of possibly contextual functions from the parameter lists */ + private def functionsOf(paramss: List[ParamClause], rhs: Tree)(using Context): Tree = paramss match + case Nil => rhs + case ValDefs(head @ (fst :: _)) :: rest if fst.mods.isOneOf(GivenOrImplicit) => + val paramTpts = head.map(_.tpt) + val paramNames = head.map(_.name) + val paramsErased = head.map(_.mods.flags.is(Erased)) + makeContextualFunction(paramTpts, paramNames, functionsOf(rest, rhs), paramsErased).withSpan(rhs.span) + case ValDefs(head) :: rest => + Function(head, functionsOf(rest, rhs)) + case TypeDefs(head) :: rest => + PolyFunction(head, functionsOf(rest, rhs)) + case _ => + assert(false, i"unexpected paramss $paramss") + EmptyTree + + private def getBoundNames(params: List[ValDef], paramss: List[ParamClause])(using Context): Set[TermName] = + var boundNames = params.map(_.name).toSet // all evidence parameter + context bound proxy names + for mparams <- paramss; mparam <- mparams do + mparam match + case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => + boundNames += tparam.name.toTermName + case _ => + boundNames + /** Add all evidence parameters in `params` as implicit parameters to `meth`. * The position of the added parameters is determined as follows: * @@ -485,48 +545,9 @@ object desugar { private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = if params.isEmpty then return meth - var boundNames = params.map(_.name).toSet // all evidence parameter + context bound proxy names - for mparams <- meth.paramss; mparam <- mparams do - mparam match - case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => - boundNames += tparam.name.toTermName - case _ => + val boundNames = getBoundNames(params, meth.paramss) - def referencesBoundName(vdef: ValDef): Boolean = - vdef.tpt.existsSubTree: - case Ident(name: TermName) => boundNames.contains(name) - case _ => false - - def recur(mparamss: List[ParamClause]): (List[ParamClause], List[ParamClause]) = mparamss match - case ValDefs(mparams) :: _ if mparams.exists(referencesBoundName) => - (params :: Nil) -> mparamss - case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => - val normParams = - if params.head.mods.flags.is(Given) != mparam.mods.flags.is(Given) then - params.map: param => - val normFlags = param.mods.flags &~ GivenOrImplicit | (mparam.mods.flags & (GivenOrImplicit)) - param.withMods(param.mods.withFlags(normFlags)) - .showing(i"adapted param $result ${result.mods.flags} for ${meth.name}", Printers.desugar) - else params - ((normParams ++ mparams) :: Nil) -> Nil - case mparams :: mparamss1 => - val (fst, snd) = recur(mparamss1) - (mparams :: fst) -> snd - case Nil => - Nil -> (params :: Nil) - - def functionsOf(paramss: List[ParamClause], rhs: Tree): Tree = paramss match - case Nil => rhs - case ValDefs(head @ (fst :: _)) :: rest if fst.mods.isOneOf(GivenOrImplicit) => - val paramTpts = params.map(_.tpt) - val paramNames = params.map(_.name) - val paramsErased = params.map(_.mods.flags.is(Erased)) - makeContextualFunction(paramTpts, paramNames, functionsOf(rest, rhs), paramsErased).withSpan(rhs.span) - case ValDefs(head) :: rest => - Function(head, functionsOf(rest, rhs)) - case head :: _ => - assert(false, i"unexpected type parameters when adding evidence parameters to $meth") - EmptyTree + val recur = fitEvidenceParams(params, meth.name, boundNames) if 
meth.hasAttachment(PolyFunctionApply) then meth.removeAttachment(PolyFunctionApply) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index f7610520f61c..bc4981ef11a4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1920,7 +1920,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPolyFunction(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val tree1 = desugar.normalizePolyFunction(tree) if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree1), pt) - else typedPolyFunctionValue(tree1, pt) + else typedPolyFunctionValue(desugar.elimContextBounds(tree1).asInstanceOf[untpd.PolyFunction], pt) def typedPolyFunctionValue(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val untpd.PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun) = tree: @unchecked @@ -1946,7 +1946,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) val desugared @ Block(List(defdef), _) = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) - defdef.putAttachment(desugar.PolyFunctionApply, List.empty) typed(desugared, pt) else val msg = @@ -1955,7 +1954,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer errorTree(EmptyTree, msg, tree.srcPos) case _ => val desugared @ Block(List(defdef), _) = desugar.makeClosure(tparams, vparams, body, untpd.TypeTree(), tree.span) - defdef.putAttachment(desugar.PolyFunctionApply, List.empty) typed(desugared, pt) end typedPolyFunctionValue diff --git a/tests/pos/contextbounds-for-poly-functions.scala b/tests/pos/contextbounds-for-poly-functions.scala index 6fadcda2b43e..13411a3ad769 100644 --- a/tests/pos/contextbounds-for-poly-functions.scala +++ b/tests/pos/contextbounds-for-poly-functions.scala @@ -86,3 +86,8 @@ val namedConstraintRef = [X: {Ord as ord}] => (x: ord.T) => x type DependentCmp = [X: {Ord as ord}] => ord.T => Boolean type DependentCmp1 = [X: {Ord as ord}] => (ord.T, Int) => ord.T => Boolean val dependentCmp: DependentCmp = [X: {Ord as ord}] => (x: ord.T) => true +val dependentCmp_1: [X: {Ord as ord}] => ord.T => Boolean = [X: {Ord as ord}] => (x: ord.T) => true + +val dependentCmp1: DependentCmp1 = [X: {Ord as ord}] => (x: ord.T, y: Int) => (z: ord.T) => true +val dependentCmp1_1: [X: {Ord as ord}] => (ord.T, Int) => ord.T => Boolean = + [X: {Ord as ord}] => (x: ord.T, y: Int) => (z: ord.T) => true From f292ac54869e9b407a389ea75f891515ad31e07b Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Thu, 14 Nov 2024 16:15:08 +0100 Subject: [PATCH 787/827] Short circuit adding evidence params to poly functions, when there are no context bounds --- compiler/src/dotty/tools/dotc/ast/Desugar.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 0bf3ba71b84d..e8ebd77b0423 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -351,6 +351,7 @@ object desugar { val Function(vparams: List[untpd.ValDef] @unchecked, rhs) = fun: @unchecked val newParamss = paramssNoContextBounds(tparams :: vparams :: Nil) val params = evidenceParamBuf.toList + if params.isEmpty then return meth val boundNames = getBoundNames(params, newParamss) val 
recur = fitEvidenceParams(params, nme.apply, boundNames) val (paramsFst, paramsSnd) = recur(newParamss) From 11d4295e68fbb27a38bf56ad1bd13d8f9052c618 Mon Sep 17 00:00:00 2001 From: Jan Chyb <48855024+jchyb@users.noreply.github.com> Date: Thu, 14 Nov 2024 16:32:05 +0100 Subject: [PATCH 788/827] Replace symbol traversal with tree traversal when finding top level experimentals (#21827) Aims to fix stale symbol errors caused by the symbol traversal after suspending by a macro --- .../src/dotty/tools/dotc/typer/Checking.scala | 30 +++++++++---------- tests/pos-macros/i21802/Macro.scala | 15 ++++++++++ tests/pos-macros/i21802/Test.scala | 13 ++++++++ 3 files changed, 43 insertions(+), 15 deletions(-) create mode 100644 tests/pos-macros/i21802/Macro.scala create mode 100644 tests/pos-macros/i21802/Test.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 761e7cdab37c..1cd531046753 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -806,20 +806,20 @@ object Checking { * */ def checkAndAdaptExperimentalImports(trees: List[Tree])(using Context): Unit = - def nonExperimentalTopLevelDefs(pack: Symbol): Iterator[Symbol] = - def isNonExperimentalTopLevelDefinition(sym: Symbol) = - sym.isDefinedInCurrentRun - && sym.source == ctx.compilationUnit.source - && !sym.isConstructor // not constructor of package object - && !sym.is(Package) && !sym.name.isPackageObjectName - && !sym.isExperimental - - pack.info.decls.toList.iterator.flatMap: sym => - if sym.isClass && (sym.is(Package) || sym.isPackageObject) then - nonExperimentalTopLevelDefs(sym) - else if isNonExperimentalTopLevelDefinition(sym) then - sym :: Nil - else Nil + def nonExperimentalTopLevelDefs(): List[Symbol] = + new TreeAccumulator[List[Symbol]] { + override def apply(x: List[Symbol], tree: tpd.Tree)(using Context): List[Symbol] = + def addIfNotExperimental(sym: Symbol) = + if !sym.isExperimental then sym :: x + else x + tree match { + case tpd.PackageDef(_, contents) => apply(x, contents) + case typeDef @ tpd.TypeDef(_, temp: Template) if typeDef.symbol.isPackageObject => + apply(x, temp.body) + case mdef: tpd.MemberDef => addIfNotExperimental(mdef.symbol) + case _ => x + } + }.apply(Nil, ctx.compilationUnit.tpdTree) def unitExperimentalLanguageImports = def isAllowedImport(sel: untpd.ImportSelector) = @@ -837,7 +837,7 @@ object Checking { if ctx.owner.is(Package) || ctx.owner.name.startsWith(str.REPL_SESSION_LINE) then def markTopLevelDefsAsExperimental(why: String): Unit = - for sym <- nonExperimentalTopLevelDefs(ctx.owner) do + for sym <- nonExperimentalTopLevelDefs() do sym.addAnnotation(ExperimentalAnnotation(s"Added by $why", sym.span)) unitExperimentalLanguageImports match diff --git a/tests/pos-macros/i21802/Macro.scala b/tests/pos-macros/i21802/Macro.scala new file mode 100644 index 000000000000..e2eb1287c727 --- /dev/null +++ b/tests/pos-macros/i21802/Macro.scala @@ -0,0 +1,15 @@ +class MetricsGroup[A] +object MetricsGroup: + import scala.quoted.* + + transparent inline final def refine[A]: MetricsGroup[A] = + ${ refineImpl[A] } + + private def refineImpl[A](using qctx: Quotes, tpe: Type[A]): Expr[MetricsGroup[A]] = + import qctx.reflect.* + + val mt = MethodType(Nil)(_ => Nil, _ => TypeRepr.of[A]) + val tpe = Refinement(TypeRepr.of[MetricsGroup[A]], "apply", mt).asType + tpe match + case '[tpe] => + '{ MetricsGroup[A]().asInstanceOf[MetricsGroup[A] & tpe] } diff --git 
a/tests/pos-macros/i21802/Test.scala b/tests/pos-macros/i21802/Test.scala new file mode 100644 index 000000000000..70063653c43c --- /dev/null +++ b/tests/pos-macros/i21802/Test.scala @@ -0,0 +1,13 @@ +//> using options -experimental -Ydebug + +class ProbeFailedException(cause: Exception) extends Exception(cause) +trait Probing: + self: Metrics => + val probeFailureCounter: MetricsGroup[Counter] = + counters("ustats_probe_failures_count").labelled + + +trait Counter +class Metrics: + class counters(name: String): + transparent inline final def labelled: MetricsGroup[Counter] = MetricsGroup.refine[Counter] From 6a7d5d34719ec3904c3cbf5d749fee1881e8bcf6 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Tue, 12 Nov 2024 12:16:19 +0100 Subject: [PATCH 789/827] Bring back the fix for scaladoc TastyInspector regressions Brings back the previosuly reverted commit with an added fix for the test. --- .../sbt-dotty/scaladoc-regressions/build.sbt | 9 ++++ .../i18231/src/main/scala/main.scala | 4 ++ .../i20476/src/main/scala/main.scala | 5 +++ .../scaladoc-regressions/project/plugins.sbt | 1 + sbt-test/sbt-dotty/scaladoc-regressions/test | 2 + .../tools/scaladoc/tasty/TastyParser.scala | 4 +- .../tasty/inspector/TastyInspector.scala | 43 ++++++++++++++----- ...ernalLocationProviderIntegrationTest.scala | 5 ++- .../no-link-warnings/LinkWarningTest.scala | 3 +- 9 files changed, 60 insertions(+), 16 deletions(-) create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/build.sbt create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt create mode 100644 sbt-test/sbt-dotty/scaladoc-regressions/test diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt b/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt new file mode 100644 index 000000000000..bfdadb5ee038 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt @@ -0,0 +1,9 @@ +ThisBuild / scalaVersion := sys.props("plugin.scalaVersion") + +lazy val i20476 = project + .in(file("i20476")) + .enablePlugins(ScalaJSPlugin) + +lazy val i18231 = project + .in(file("i18231")) + .settings(scalacOptions += "-release:8") diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala b/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala new file mode 100644 index 000000000000..82788aa829f0 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala @@ -0,0 +1,4 @@ +object Foo { + @Deprecated + def foo(): Unit = ??? 
+} diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala b/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala new file mode 100644 index 000000000000..31eb78c816cd --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala @@ -0,0 +1,5 @@ +package demo + +import scala.scalajs.js + +def bar: js.Promise[Int] = js.Promise.resolve(()).`then`(_ => 1) diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt b/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt new file mode 100644 index 000000000000..b9ebfd07bf1f --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("org.scala-js" % "sbt-scalajs" % sys.props("plugin.scalaJSVersion")) diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/test b/sbt-test/sbt-dotty/scaladoc-regressions/test new file mode 100644 index 000000000000..816c0be96141 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/test @@ -0,0 +1,2 @@ +> i18231/doc +> i20476/doc diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala index f55451fdc636..1a8337e0c6b7 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala @@ -5,7 +5,7 @@ package tasty import java.util.regex.Pattern import scala.util.{Try, Success, Failure} -import scala.tasty.inspector.{TastyInspector, Inspector, Tasty} +import scala.tasty.inspector.{ScaladocInternalTastyInspector, Inspector, Tasty} import scala.quoted._ import dotty.tools.dotc @@ -160,7 +160,7 @@ object ScaladocTastyInspector: report.error("File extension is not `tasty` or `jar`: " + invalidPath) if tastyPaths.nonEmpty then - TastyInspector.inspectAllTastyFiles(tastyPaths, jarPaths, classpath)(inspector) + ScaladocInternalTastyInspector.inspectAllTastyFilesInContext(tastyPaths, jarPaths, classpath)(inspector)(using ctx.compilerContext) val all = inspector.topLevels.result() all.groupBy(_._1).map { case (pckName, members) => diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala index 906578c9d405..190be6a588a1 100644 --- a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -1,5 +1,7 @@ -// Copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +// Renamed copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala // FIXME remove this copy of the file +// Since copying, an inspectAllTastyFilesInContext method was added for scaladoc only +// to fix regressions introduced by the switch from old to a new TastyInspector package scala.tasty.inspector @@ -21,7 +23,7 @@ import dotty.tools.dotc.report import java.io.File.pathSeparator -object TastyInspector: +object ScaladocInternalTastyInspector: /** Load and process TASTy files using TASTy reflect * @@ -41,6 +43,32 @@ object TastyInspector: def inspectTastyFilesInJar(jar: String)(inspector: Inspector): Boolean = inspectAllTastyFiles(Nil, List(jar), Nil)(inspector) + private def checkFiles(tastyFiles: List[String], jars: List[String]): Unit = + def checkFile(fileName: String, ext: String): Unit = + val file = dotty.tools.io.Path(fileName) + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then + throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") + else if !file.exists then + throw new 
IllegalArgumentException(s"File not found: ${file.toAbsolute}") + tastyFiles.foreach(checkFile(_, "tasty")) + jars.foreach(checkFile(_, "jar")) + + /** + * Added for Scaladoc-only. + * Meant to fix regressions introduces by the switch from old to new TastyInspector: + * https://github.com/scala/scala3/issues/18231 + * https://github.com/scala/scala3/issues/20476 + * Stable TastyInspector API does not support passing compiler context. + */ + def inspectAllTastyFilesInContext(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector)(using Context): Boolean = + checkFiles(tastyFiles, jars) + val classes = tastyFiles ::: jars + classes match + case Nil => true + case _ => + val reporter = inspectorDriver(inspector).process(inspectorArgs(dependenciesClasspath, classes), summon[Context]) + !reporter.hasErrors + /** Load and process TASTy files using TASTy reflect * * @param tastyFiles List of paths of `.tasty` files @@ -50,14 +78,7 @@ object TastyInspector: * @return boolean value indicating whether the process succeeded */ def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = - def checkFile(fileName: String, ext: String): Unit = - val file = dotty.tools.io.Path(fileName) - if !file.ext.toLowerCase.equalsIgnoreCase(ext) then - throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") - else if !file.exists then - throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") - tastyFiles.foreach(checkFile(_, "tasty")) - jars.foreach(checkFile(_, "jar")) + checkFiles(tastyFiles, jars) val files = tastyFiles ::: jars inspectFiles(dependenciesClasspath, files)(inspector) @@ -124,4 +145,4 @@ object TastyInspector: end inspectFiles -end TastyInspector +end ScaladocInternalTastyInspector diff --git a/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala b/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala index a63f699c4c2f..ec39fb5ce16b 100644 --- a/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala @@ -57,8 +57,9 @@ class Scaladoc3ExternalLocationProviderIntegrationTest extends ExternalLocationP def getScalaLibraryPath: String = { val classpath: List[String] = System.getProperty("java.class.path").split(java.io.File.pathSeparatorChar).toList - val stdlib = classpath.find(_.contains("scala-library-2")).getOrElse("foobarbazz") // If we don't find the scala 2 library, the test will fail - new java.io.File(stdlib).getCanonicalPath() // canonicalize for case-insensitive file systems + // For an unclear reason, depending on if we pass the compiler context onto the tasty inspector + // the scala-2-library path needs to have its characters case fixed with new java.io.File(stdlib).getCanonicalPath() + classpath.find(_.contains("scala-library-2")).getOrElse("foobarbazz") // If we don't find the scala 2 library, the test will fail } class Scaladoc2LegacyExternalLocationProviderIntegrationTest extends LegacyExternalLocationProviderIntegrationTest( diff --git a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala index 1d140315cc10..bcaee696b65c 100644 --- a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala +++ 
b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala @@ -14,6 +14,7 @@ class LinkWarningsTest extends ScaladocTest("noLinkWarnings"): override def runTest = afterRendering { val diagnostics = summon[DocContext].compilerContext.reportedDiagnostics - assertEquals("There should be exactly one warning", 1, diagnostics.warningMsgs.size) + val filteredWarnings = diagnostics.warningMsgs.filter(_ != "1 warning found") + assertEquals("There should be exactly one warning", 1, filteredWarnings.size) assertNoErrors(diagnostics) } From f9db9fa0c963de738b39d8230cd30053db671f14 Mon Sep 17 00:00:00 2001 From: Kacper Korban Date: Fri, 15 Nov 2024 08:35:03 +0100 Subject: [PATCH 790/827] Add a run test for poly context bounds; cleanup typer changes --- .../src/dotty/tools/dotc/ast/Desugar.scala | 1 - .../src/dotty/tools/dotc/typer/Typer.scala | 12 ++++---- .../contextbounds-for-poly-functions.check | 6 ++++ .../contextbounds-for-poly-functions.scala | 30 +++++++++++++++++++ 4 files changed, 42 insertions(+), 7 deletions(-) create mode 100644 tests/run/contextbounds-for-poly-functions.check create mode 100644 tests/run/contextbounds-for-poly-functions.scala diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index e8ebd77b0423..6e54dee51c89 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -22,7 +22,6 @@ import parsing.Parsers import scala.annotation.internal.sharable import scala.annotation.threadUnsafe -import dotty.tools.dotc.quoted.QuoteUtils.treeOwner object desugar { import untpd.* diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index bc4981ef11a4..d9b29e8c5f17 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1945,7 +1945,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val resultTpt = untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) - val desugared @ Block(List(defdef), _) = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) + val desugared = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) typed(desugared, pt) else val msg = @@ -1953,7 +1953,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer |Expected type should be a polymorphic function with the same number of type and value parameters.""" errorTree(EmptyTree, msg, tree.srcPos) case _ => - val desugared @ Block(List(defdef), _) = desugar.makeClosure(tparams, vparams, body, untpd.TypeTree(), tree.span) + val desugared = desugar.makeClosure(tparams, vparams, body, untpd.TypeTree(), tree.span) typed(desugared, pt) end typedPolyFunctionValue @@ -3581,17 +3581,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case xtree => typedUnnamed(xtree) val unsimplifiedType = result.tpe - val result1 = simplify(result, pt, locked) - result1.tpe.stripTypeVar match + simplify(result, pt, locked) + result.tpe.stripTypeVar match case e: ErrorType if !unsimplifiedType.isErroneous => errorTree(xtree, e.msg, xtree.srcPos) - case _ => result1 + case _ => result catch case ex: TypeError => handleTypeError(ex) } } /** Interpolate and simplify the type of the given tree. 
*/ - protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = + protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = if !tree.denot.isOverloaded then // for overloaded trees: resolve overloading before simplifying if !tree.tpe.widen.isInstanceOf[MethodOrPoly] // wait with simplifying until method is fully applied || tree.isDef // ... unless tree is a definition diff --git a/tests/run/contextbounds-for-poly-functions.check b/tests/run/contextbounds-for-poly-functions.check new file mode 100644 index 000000000000..2e7f62a3914f --- /dev/null +++ b/tests/run/contextbounds-for-poly-functions.check @@ -0,0 +1,6 @@ +42 +a string +Kate is 27 years old +42 and a string +a string and Kate is 27 years old +Kate is 27 years old and 42 diff --git a/tests/run/contextbounds-for-poly-functions.scala b/tests/run/contextbounds-for-poly-functions.scala new file mode 100644 index 000000000000..dcc974fce198 --- /dev/null +++ b/tests/run/contextbounds-for-poly-functions.scala @@ -0,0 +1,30 @@ +import scala.language.experimental.modularity +import scala.language.future + +trait Show[X]: + def show(x: X): String + +given Show[Int] with + def show(x: Int) = x.toString + +given Show[String] with + def show(x: String) = x + +case class Person(name: String, age: Int) + +given Show[Person] with + def show(x: Person) = s"${x.name} is ${x.age} years old" + +type Shower = [X: Show] => X => String +val shower: Shower = [X: {Show as show}] => (x: X) => show.show(x) + +type DoubleShower = [X: Show] => X => [Y: Show] => Y => String +val doubleShower: DoubleShower = [X: {Show as show1}] => (x: X) => [Y: {Show as show2}] => (y: Y) => s"${show1.show(x)} and ${show2.show(y)}" + +object Test extends App: + println(shower(42)) + println(shower("a string")) + println(shower(Person("Kate", 27))) + println(doubleShower(42)("a string")) + println(doubleShower("a string")(Person("Kate", 27))) + println(doubleShower(Person("Kate", 27))(42)) From 7b4597fea9c5d409e769d3ce8fe22a26028eb7c9 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 15 Nov 2024 09:59:27 +0000 Subject: [PATCH 791/827] Remove tests/pos-with-compiler-cc --- .../BootstrappedOnlyCompilationTests.scala | 7 - .../backend/ScalaPrimitivesOps.scala | 232 - .../backend/WorklistAlgorithm.scala | 57 - .../backend/jvm/AsmUtils.scala | 65 - .../backend/jvm/BCodeAsmCommon.scala | 158 - .../backend/jvm/BCodeBodyBuilder.scala | 1776 ----- .../backend/jvm/BCodeHelpers.scala | 960 --- .../backend/jvm/BCodeIdiomatic.scala | 727 -- .../backend/jvm/BCodeSkelBuilder.scala | 908 --- .../backend/jvm/BCodeSyncAndTry.scala | 426 -- .../backend/jvm/BTypes.scala | 864 --- .../backend/jvm/BTypesFromSymbols.scala | 348 - .../backend/jvm/BytecodeWriters.scala | 147 - .../backend/jvm/ClassNode1.java | 39 - .../backend/jvm/CollectSuperCalls.scala | 48 - .../backend/jvm/CoreBTypes.scala | 294 - .../backend/jvm/DottyBackendInterface.scala | 204 - .../backend/jvm/GenBCode.scala | 672 -- .../backend/jvm/GenBCodeOps.scala | 16 - .../backend/jvm/GenericSignatureVisitor.scala | 326 - .../backend/jvm/LabelNode1.java | 31 - .../backend/jvm/MethodNode1.java | 47 - .../backend/jvm/Primitives.scala | 191 - .../backend/jvm/scalaPrimitives.scala | 412 -- .../backend/sjs/GenSJSIR.scala | 23 - .../backend/sjs/JSCodeGen.scala | 4897 ------------- .../backend/sjs/JSDefinitions.scala | 340 - .../backend/sjs/JSEncoding.scala | 428 -- .../backend/sjs/JSExportsGen.scala | 1025 --- .../backend/sjs/JSPositions.scala | 102 - 
.../backend/sjs/JSPrimitives.scala | 150 - .../backend/sjs/ScopedVar.scala | 38 - tests/pos-with-compiler-cc/dotc/Bench.scala | 64 - .../dotc/CompilationUnit.scala | 167 - .../pos-with-compiler-cc/dotc/Compiler.scala | 171 - tests/pos-with-compiler-cc/dotc/Driver.scala | 207 - tests/pos-with-compiler-cc/dotc/Main.scala | 5 - .../dotc/MissingCoreLibraryException.scala | 9 - .../pos-with-compiler-cc/dotc/Resident.scala | 61 - tests/pos-with-compiler-cc/dotc/Run.scala | 404 -- .../dotc/ScalacCommand.scala | 9 - .../dotc/ast/CheckTrees.scala.disabled | 258 - .../dotc/ast/Desugar.scala | 1979 ----- .../dotc/ast/DesugarEnums.scala | 310 - .../dotc/ast/MainProxies.scala | 449 -- .../dotc/ast/NavigateAST.scala | 129 - .../dotc/ast/Positioned.scala | 246 - .../dotc/ast/TreeInfo.scala | 1070 --- .../dotc/ast/TreeMapWithImplicits.scala | 82 - .../dotc/ast/TreeTypeMap.scala | 232 - .../pos-with-compiler-cc/dotc/ast/Trees.scala | 1787 ----- tests/pos-with-compiler-cc/dotc/ast/tpd.scala | 1546 ---- .../pos-with-compiler-cc/dotc/ast/untpd.scala | 829 --- .../dotc/cc/BoxedTypeCache.scala | 19 - .../dotc/cc/CaptureAnnotation.scala | 77 - .../dotc/cc/CaptureOps.scala | 256 - .../dotc/cc/CaptureSet.scala | 902 --- .../dotc/cc/CapturingType.scala | 72 - .../dotc/cc/CheckCaptures.scala | 1039 --- .../pos-with-compiler-cc/dotc/cc/Setup.scala | 482 -- .../dotc/cc/Synthetics.scala | 189 - .../dotc/classpath/AggregateClassPath.scala | 162 - .../dotc/classpath/ClassPath.scala | 85 - .../dotc/classpath/ClassPathFactory.scala | 84 - .../dotc/classpath/DirectoryClassPath.scala | 313 - .../dotc/classpath/FileUtils.scala | 85 - .../dotc/classpath/PackageNameUtils.scala | 37 - .../classpath/VirtualDirectoryClassPath.scala | 55 - .../ZipAndJarFileLookupFactory.scala | 205 - .../dotc/classpath/ZipArchiveFileLookup.scala | 72 - .../dotc/config/CliCommand.scala | 198 - .../dotc/config/CommandLineParser.scala | 125 - .../dotc/config/CompilerCommand.scala | 26 - .../dotc/config/Config.scala | 256 - .../dotc/config/Feature.scala | 173 - .../dotc/config/JavaPlatform.scala | 69 - .../dotc/config/OutputDirs.scala | 117 - .../dotc/config/PathResolver.scala | 268 - .../dotc/config/Platform.scala | 46 - .../dotc/config/Printers.scala | 52 - .../dotc/config/Properties.scala | 142 - .../dotc/config/SJSPlatform.scala | 35 - .../dotc/config/ScalaRelease.scala | 21 - .../dotc/config/ScalaSettings.scala | 347 - .../dotc/config/ScalaVersion.scala | 188 - .../dotc/config/Settings.scala | 295 - .../dotc/config/SourceVersion.scala | 32 - .../dotc/config/WrappedProperties.scala | 42 - .../dotc/core/Annotations.scala | 274 - .../dotc/core/Atoms.scala | 36 - .../dotc/core/CheckRealizable.scala | 216 - .../dotc/core/Comments.scala | 462 -- .../dotc/core/Constants.scala | 261 - .../dotc/core/Constraint.scala | 214 - .../dotc/core/ConstraintHandling.scala | 891 --- .../dotc/core/ConstraintRunInfo.scala | 23 - .../dotc/core/ContextOps.scala | 115 - .../dotc/core/Contexts.scala | 1041 --- .../dotc/core/Decorators.scala | 322 - .../dotc/core/Definitions.scala | 2433 ------- .../dotc/core/DenotTransformers.scala | 82 - .../dotc/core/Denotations.scala | 1376 ---- .../dotc/core/Flags.scala | 612 -- .../dotc/core/GadtConstraint.scala | 267 - .../dotc/core/Hashable.scala | 126 - .../dotc/core/JavaNullInterop.scala | 149 - .../dotc/core/MacroClassLoader.scala | 29 - .../dotc/core/MatchTypeTrace.scala | 136 - .../pos-with-compiler-cc/dotc/core/Mode.scala | 144 - .../dotc/core/NameKinds.scala | 414 -- .../dotc/core/NameOps.scala | 400 - 
.../dotc/core/NameTags.scala | 68 - .../dotc/core/NamerOps.scala | 223 - .../dotc/core/Names.scala | 694 -- .../dotc/core/NullOpsDecorator.scala | 60 - .../dotc/core/OrderingConstraint.scala | 1007 --- .../dotc/core/ParamInfo.scala | 53 - .../dotc/core/PatternTypeConstrainer.scala | 298 - .../dotc/core/Periods.scala | 143 - .../dotc/core/Phases.scala | 475 -- .../dotc/core/Scopes.scala | 471 -- .../dotc/core/Signature.scala | 197 - .../dotc/core/StagingContext.scala | 58 - .../dotc/core/StdNames.scala | 951 --- .../dotc/core/Substituters.scala | 228 - .../dotc/core/SymDenotations.scala | 2989 -------- .../dotc/core/SymbolLoaders.scala | 450 -- .../dotc/core/Symbols.scala | 922 --- .../dotc/core/TypeApplications.scala | 538 -- .../dotc/core/TypeComparer.scala | 3265 --------- .../dotc/core/TypeErasure.scala | 877 --- .../dotc/core/TypeErrors.scala | 176 - .../dotc/core/TypeEval.scala | 247 - .../dotc/core/TypeOps.scala | 975 --- .../dotc/core/TyperState.scala | 306 - .../dotc/core/Types.scala | 6448 ----------------- .../dotc/core/Uniques.scala | 92 - .../dotc/core/Variances.scala | 82 - .../core/classfile/AbstractFileReader.scala | 100 - .../dotc/core/classfile/ByteCodecs.scala | 224 - .../core/classfile/ClassfileConstants.scala | 381 - .../dotc/core/classfile/ClassfileParser.scala | 1374 ---- .../dotc/core/classfile/DataReader.scala | 61 - .../core/classfile/ReusableDataReader.scala | 150 - .../dotc/core/tasty/CommentPickler.scala | 44 - .../dotc/core/tasty/CommentUnpickler.scala | 35 - .../dotc/core/tasty/DottyUnpickler.scala | 74 - .../dotc/core/tasty/NameBuffer.scala | 126 - .../dotc/core/tasty/PositionPickler.scala | 138 - .../dotc/core/tasty/PositionUnpickler.scala | 77 - .../dotc/core/tasty/TastyAnsiiPrinter.scala | 9 - .../dotc/core/tasty/TastyClassName.scala | 68 - .../dotc/core/tasty/TastyHTMLPrinter.scala | 9 - .../dotc/core/tasty/TastyPickler.scala | 80 - .../dotc/core/tasty/TastyPrinter.scala | 228 - .../dotc/core/tasty/TastyUnpickler.scala | 109 - .../dotc/core/tasty/TreeBuffer.scala | 189 - .../dotc/core/tasty/TreePickler.scala | 802 -- .../dotc/core/tasty/TreeUnpickler.scala | 1641 ----- .../core/unpickleScala2/PickleBuffer.scala | 292 - .../core/unpickleScala2/PickleFormat.scala | 227 - .../core/unpickleScala2/Scala2Erasure.scala | 257 - .../core/unpickleScala2/Scala2Flags.scala | 107 - .../core/unpickleScala2/Scala2Unpickler.scala | 1381 ---- .../dotc/coverage/Coverage.scala | 26 - .../dotc/coverage/Location.scala | 50 - .../dotc/coverage/Serializer.scala | 111 - .../decompiler/DecompilationPrinter.scala | 48 - .../dotc/decompiler/IDEDecompilerDriver.scala | 50 - .../dotc/decompiler/Main.scala | 27 - .../decompiler/PartialTASTYDecompiler.scala | 11 - .../dotc/decompiler/TASTYDecompiler.scala | 23 - .../AlreadyLoadedCompilationUnit.scala | 10 - .../dotc/fromtasty/Debug.scala | 74 - .../dotc/fromtasty/ReadTasty.scala | 84 - .../dotc/fromtasty/TASTYCompilationUnit.scala | 8 - .../dotc/fromtasty/TASTYCompiler.scala | 18 - .../dotc/fromtasty/TASTYRun.scala | 36 - .../dotc/fromtasty/TastyFileUtil.scala | 50 - .../dotc/inlines/InlineReducer.scala | 451 -- .../dotc/inlines/Inliner.scala | 1081 --- .../dotc/inlines/Inlines.scala | 471 -- .../dotc/inlines/PrepareInlineable.scala | 323 - .../dotc/interactive/Completion.scala | 565 -- .../dotc/interactive/Interactive.scala | 445 -- .../interactive/InteractiveCompiler.scala | 21 - .../dotc/interactive/InteractiveDriver.scala | 340 - .../dotc/interactive/SourceTree.scala | 80 - .../dotc/parsing/CharArrayReader.scala | 128 - 
.../dotc/parsing/JavaParsers.scala | 1031 --- .../dotc/parsing/JavaScanners.scala | 742 -- .../dotc/parsing/JavaTokens.scala | 92 - .../dotc/parsing/ParserPhase.scala | 60 - .../dotc/parsing/Parsers.scala | 4266 ----------- .../dotc/parsing/Scanners.scala | 1706 ----- .../dotc/parsing/ScriptParsers.scala | 147 - .../dotc/parsing/Tokens.scala | 300 - .../dotc/parsing/package.scala | 40 - .../dotc/parsing/xml/MarkupParserCommon.scala | 257 - .../dotc/parsing/xml/MarkupParsers.scala | 484 -- .../dotc/parsing/xml/SymbolicXMLBuilder.scala | 261 - .../dotc/parsing/xml/Utility.scala | 173 - .../dotc/plugins/Plugin.scala | 196 - .../dotc/plugins/Plugins.scala | 278 - .../dotc/printing/Formatting.scala | 167 - .../dotc/printing/Highlighting.scala | 72 - .../dotc/printing/MessageLimiter.scala | 61 - .../dotc/printing/PlainPrinter.scala | 727 -- .../dotc/printing/Printer.scala | 200 - .../dotc/printing/RefinedPrinter.scala | 1134 --- .../dotc/printing/ReplPrinter.scala | 61 - .../dotc/printing/Showable.scala | 36 - .../dotc/printing/SyntaxHighlighting.scala | 147 - .../dotc/printing/Texts.scala | 213 - .../dotc/printing/package.scala | 29 - .../dotc/profile/AsyncHelper.scala | 142 - .../dotc/profile/ExtendedThreadMxBean.java | 305 - .../dotc/profile/ExternalToolHook.java | 17 - .../dotc/profile/Profiler.scala | 264 - .../dotc/quoted/Interpreter.scala | 371 - .../dotc/quoted/MacroExpansion.scala | 18 - .../dotc/quoted/PickledQuotes.scala | 270 - .../dotc/quoted/QuoteUtils.scala | 30 - .../dotc/quoted/QuotesCache.scala | 26 - .../dotc/quoted/TastyString.scala | 31 - .../dotc/quoted/reflect/FromSymbol.scala | 62 - tests/pos-with-compiler-cc/dotc/report.scala | 131 - .../dotc/reporting/AbstractReporter.scala | 8 - .../dotc/reporting/ConsoleReporter.scala | 37 - .../dotc/reporting/Diagnostic.scala | 109 - .../dotc/reporting/ErrorMessageID.scala | 199 - .../dotc/reporting/ExploringReporter.scala | 23 - .../reporting/HideNonSensicalMessages.scala | 20 - .../dotc/reporting/Message.scala | 411 -- .../dotc/reporting/MessageKind.scala | 41 - .../dotc/reporting/MessageRendering.scala | 324 - .../dotc/reporting/Profile.scala | 157 - .../dotc/reporting/Reporter.scala | 271 - .../dotc/reporting/StoreReporter.scala | 51 - .../dotc/reporting/TestReporter.scala | 14 - .../dotc/reporting/ThrowingReporter.scala | 17 - .../reporting/UniqueMessagePositions.scala | 38 - .../dotc/reporting/WConf.scala | 127 - .../dotc/reporting/messages.scala | 2773 ------- .../dotc/reporting/trace.scala | 118 - .../dotc/rewrites/Rewrites.scala | 102 - .../dotc/sbt/APIUtils.scala | 55 - .../dotc/sbt/ExtractAPI.scala | 833 --- .../dotc/sbt/ExtractDependencies.scala | 504 -- .../dotc/sbt/ShowAPI.scala | 162 - .../dotc/sbt/ThunkHolder.scala | 33 - .../dotc/sbt/package.scala | 20 - .../dotc/semanticdb/ConstantOps.scala | 25 - .../dotc/semanticdb/Descriptor.scala | 122 - .../dotc/semanticdb/ExtractSemanticDB.scala | 517 -- .../dotc/semanticdb/LinkMode.scala | 4 - .../dotc/semanticdb/PPrint.scala | 397 - .../dotc/semanticdb/Scala3.scala | 537 -- .../semanticdb/SemanticSymbolBuilder.scala | 152 - .../dotc/semanticdb/SyntheticsExtractor.scala | 151 - .../dotc/semanticdb/Tools.scala | 130 - .../dotc/semanticdb/TypeOps.scala | 527 -- .../dotc/semanticdb/generated/Access.scala | 654 -- .../semanticdb/generated/Annotation.scala | 91 - .../dotc/semanticdb/generated/Constant.scala | 1181 --- .../semanticdb/generated/Diagnostic.scala | 196 - .../semanticdb/generated/Documentation.scala | 174 - .../dotc/semanticdb/generated/Language.scala | 56 - 
.../dotc/semanticdb/generated/Location.scala | 109 - .../dotc/semanticdb/generated/Range.scala | 156 - .../dotc/semanticdb/generated/Schema.scala | 56 - .../dotc/semanticdb/generated/Scope.scala | 108 - .../dotc/semanticdb/generated/Signature.scala | 706 -- .../generated/SymbolInformation.scala | 670 -- .../generated/SymbolOccurrence.scala | 178 - .../dotc/semanticdb/generated/Synthetic.scala | 113 - .../semanticdb/generated/TextDocument.scala | 271 - .../semanticdb/generated/TextDocuments.scala | 87 - .../dotc/semanticdb/generated/Tree.scala | 1088 --- .../dotc/semanticdb/generated/Type.scala | 2036 ------ .../InvalidProtocolBufferException.scala | 55 - .../dotc/semanticdb/internal/LiteParser.scala | 36 - .../dotc/semanticdb/internal/MD5.scala | 30 - .../semanticdb/internal/SemanticdbEnum.scala | 5 - .../SemanticdbGeneratedMessageCompanion.scala | 79 - .../internal/SemanticdbInputStream.scala | 802 -- .../internal/SemanticdbMessage.scala | 7 - .../internal/SemanticdbOutputStream.scala | 610 -- .../internal/SemanticdbTypeMapper.scala | 18 - .../dotc/semanticdb/internal/WireFormat.scala | 20 - .../dotc/transform/AccessProxies.scala | 174 - .../dotc/transform/ArrayApply.scala | 76 - .../dotc/transform/ArrayConstructors.scala | 56 - .../dotc/transform/BeanProperties.scala | 65 - .../dotc/transform/BetaReduce.scala | 106 - .../dotc/transform/Bridges.scala | 180 - .../dotc/transform/CapturedVars.scala | 173 - .../transform/CheckLoopingImplicits.scala | 111 - .../dotc/transform/CheckNoSuperThis.scala | 51 - .../dotc/transform/CheckReentrant.scala | 92 - .../dotc/transform/CheckStatic.scala | 68 - .../dotc/transform/CollectEntryPoints.scala | 53 - .../transform/CollectNullableFields.scala | 110 - .../dotc/transform/CompleteJavaEnums.scala | 190 - .../dotc/transform/Constructors.scala | 356 - .../transform/ContextFunctionResults.scala | 136 - .../dotc/transform/CookComments.scala | 34 - .../dotc/transform/CountOuterAccesses.scala | 59 - .../dotc/transform/CrossStageSafety.scala | 308 - .../dotc/transform/CtxLazy.scala | 24 - .../dotc/transform/Dependencies.scala | 289 - .../DropEmptyCompanions.scala.disabled | 98 - .../dotc/transform/DropOuterAccessors.scala | 87 - .../dotc/transform/ElimByName.scala | 161 - .../dotc/transform/ElimErasedValueType.scala | 143 - .../dotc/transform/ElimOpaque.scala | 75 - .../dotc/transform/ElimOuterSelect.scala | 38 - .../dotc/transform/ElimPackagePrefixes.scala | 37 - .../dotc/transform/ElimPolyFunction.scala | 71 - .../dotc/transform/ElimRepeated.scala | 299 - .../dotc/transform/ElimStaticThis.scala | 43 - .../dotc/transform/EmptyPhase.scala | 19 - .../dotc/transform/Erasure.scala | 1071 --- .../dotc/transform/EtaReduce.scala | 76 - .../dotc/transform/ExpandPrivate.scala | 118 - .../dotc/transform/ExpandSAMs.scala | 194 - .../dotc/transform/ExplicitOuter.scala | 479 -- .../dotc/transform/ExplicitSelf.scala | 64 - .../dotc/transform/ExtensionMethods.scala | 208 - .../dotc/transform/FirstTransform.scala | 219 - .../dotc/transform/Flatten.scala | 66 - .../dotc/transform/ForwardDepChecks.scala | 134 - .../dotc/transform/FullParameterization.scala | 271 - .../transform/FunctionXXLForwarders.scala | 66 - .../dotc/transform/GenericSignatures.scala | 481 -- .../dotc/transform/Getters.scala | 125 - .../dotc/transform/HoistSuperArgs.scala | 246 - .../dotc/transform/InlinePatterns.scala | 65 - .../dotc/transform/InlineVals.scala | 59 - .../dotc/transform/Inlining.scala | 83 - .../dotc/transform/InstrumentCoverage.scala | 528 -- .../dotc/transform/Instrumentation.scala | 110 
- .../dotc/transform/InterceptedMethods.scala | 82 - .../IsInstanceOfEvaluator.scala.disabled | 172 - .../dotc/transform/LambdaLift.scala | 343 - .../dotc/transform/LazyVals.scala | 700 -- .../dotc/transform/LetOverApply.scala | 37 - .../dotc/transform/LiftTry.scala | 88 - .../dotc/transform/Literalize.scala.disabled | 95 - .../dotc/transform/MacroTransform.scala | 60 - .../dotc/transform/MegaPhase.scala | 1066 --- .../dotc/transform/Memoize.scala | 222 - .../dotc/transform/Mixin.scala | 329 - .../dotc/transform/MixinOps.scala | 104 - .../dotc/transform/MoveStatics.scala | 93 - .../dotc/transform/NonLocalReturns.scala | 106 - .../dotc/transform/OverridingPairs.scala | 227 - .../dotc/transform/ParamForwarding.scala | 87 - .../dotc/transform/PatternMatcher.scala | 1055 --- .../dotc/transform/PickleQuotes.scala | 383 - .../dotc/transform/Pickler.scala | 154 - .../dotc/transform/PostInlining.scala | 35 - .../dotc/transform/PostTyper.scala | 500 -- .../dotc/transform/PreRecheck.scala | 19 - .../dotc/transform/ProtectedAccessors.scala | 97 - .../dotc/transform/PruneErasedDefs.scala | 72 - .../dotc/transform/PureStats.scala | 35 - .../dotc/transform/Recheck.scala | 576 -- .../dotc/transform/ReifiedReflect.scala | 109 - .../transform/RepeatableAnnotations.scala | 66 - .../dotc/transform/ResolveSuper.scala | 128 - .../dotc/transform/RestoreScopes.scala | 56 - .../dotc/transform/SelectStatic.scala | 101 - .../dotc/transform/SeqLiterals.scala | 44 - .../dotc/transform/SetRootTree.scala | 49 - .../transform/SpecializeApplyMethods.scala | 124 - .../dotc/transform/SpecializeFunctions.scala | 114 - .../dotc/transform/SpecializeTuples.scala | 53 - .../dotc/transform/Splicer.scala | 254 - .../dotc/transform/Splicing.scala | 392 - .../dotc/transform/Staging.scala | 83 - .../dotc/transform/SuperAccessors.scala | 224 - .../dotc/transform/SymUtils.scala | 432 -- .../dotc/transform/SyntheticMembers.scala | 651 -- .../dotc/transform/TailRec.scala | 461 -- .../dotc/transform/TransformWildcards.scala | 34 - .../dotc/transform/TreeChecker.scala | 726 -- .../dotc/transform/TreeExtractors.scala | 52 - .../dotc/transform/TreeMapWithStages.scala | 164 - .../dotc/transform/TryCatchPatterns.scala | 104 - .../dotc/transform/TupleOptimizations.scala | 221 - .../dotc/transform/TypeTestsCasts.scala | 394 - .../dotc/transform/TypeUtils.scala | 120 - .../dotc/transform/UncacheGivenAliases.scala | 65 - .../dotc/transform/UninitializedDefs.scala | 48 - .../dotc/transform/VCElideAllocations.scala | 55 - .../dotc/transform/VCInlineMethods.scala | 113 - .../dotc/transform/ValueClasses.scala | 57 - .../dotc/transform/YCheckPositions.scala | 72 - .../dotc/transform/init/Checker.scala | 72 - .../dotc/transform/init/Errors.scala | 145 - .../dotc/transform/init/Semantic.scala | 1806 ----- .../transform/localopt/FormatChecker.scala | 286 - .../FormatInterpolatorTransform.scala | 39 - .../localopt/StringInterpolatorOpt.scala | 170 - .../dotc/transform/patmat/Space.scala | 966 --- .../transform/sjs/AddLocalJSFakeNews.scala | 98 - .../transform/sjs/ExplicitJSClasses.scala | 730 -- .../dotc/transform/sjs/JSExportUtils.scala | 38 - .../dotc/transform/sjs/JSSymUtils.scala | 255 - .../transform/sjs/JUnitBootstrappers.scala | 332 - .../dotc/transform/sjs/PrepJSExports.scala | 467 -- .../dotc/transform/sjs/PrepJSInterop.scala | 1329 ---- .../dotc/typer/Applications.scala | 2409 ------ .../dotc/typer/Checking.scala | 1561 ---- .../dotc/typer/ConstFold.scala | 210 - .../dotc/typer/CrossVersionChecks.scala | 185 - .../dotc/typer/Deriving.scala | 312 
- .../dotc/typer/Docstrings.scala | 67 - .../dotc/typer/Dynamic.scala | 249 - .../dotc/typer/ErrorReporting.scala | 276 - .../dotc/typer/EtaExpansion.scala | 293 - .../dotc/typer/Implicits.scala | 1915 ----- .../dotc/typer/ImportInfo.scala | 231 - .../dotc/typer/ImportSuggestions.scala | 358 - .../dotc/typer/Inferencing.scala | 780 -- .../dotc/typer/JavaChecks.scala | 26 - .../dotc/typer/Namer.scala | 1916 ----- .../dotc/typer/Nullables.scala | 569 -- .../dotc/typer/ProtoTypes.scala | 958 --- .../dotc/typer/QuotesAndSplices.scala | 455 -- .../dotc/typer/ReTyper.scala | 146 - .../dotc/typer/RefChecks.scala | 1783 ----- .../dotc/typer/Synthesizer.scala | 763 -- .../dotc/typer/TypeAssigner.scala | 546 -- .../dotc/typer/Typer.scala | 4296 ----------- .../dotc/typer/TyperPhase.scala | 108 - .../dotc/typer/VarianceChecker.scala | 209 - .../dotc/util/Attachment.scala | 128 - .../dotc/util/CharBuffer.scala | 28 - .../dotc/util/Chars.scala | 99 - .../dotc/util/ClasspathFromClassloader.scala | 53 - .../dotc/util/CommentParsing.scala | 256 - .../dotc/util/DiffUtil.scala | 234 - .../dotc/util/DotClass.scala | 8 - .../dotc/util/EqHashMap.scala | 80 - .../dotc/util/FreshNameCreator.scala | 30 - .../dotc/util/GenericHashMap.scala | 192 - .../dotc/util/HashMap.scala | 85 - .../dotc/util/HashSet.scala | 190 - .../dotc/util/IntMap.scala | 57 - .../dotc/util/LRUCache.scala | 104 - .../dotc/util/LinearMap.scala | 43 - .../dotc/util/LinearSet.scala | 46 - .../dotc/util/MutableMap.scala | 18 - .../dotc/util/MutableSet.scala | 25 - .../dotc/util/NameTransformer.scala | 151 - .../dotc/util/ParsedComment.scala | 224 - .../dotc/util/PerfectHashing.scala | 136 - .../dotc/util/Property.scala | 18 - .../dotc/util/ReadOnlyMap.scala | 41 - .../dotc/util/ReadOnlySet.scala | 24 - .../dotc/util/ReusableInstance.scala | 34 - .../dotc/util/ShowPickled.scala | 287 - .../dotc/util/Signatures.scala | 549 -- .../dotc/util/SimpleIdentityMap.scala | 246 - .../dotc/util/SimpleIdentitySet.scala | 261 - .../dotc/util/SixteenNibbles.scala | 28 - .../dotc/util/SourceFile.scala | 288 - .../dotc/util/SourcePosition.scala | 104 - .../dotc/util/Spans.scala | 195 - .../dotc/util/StackTraceOps.scala | 78 - .../dotc/util/Stats.scala | 69 - .../dotc/util/Store.scala | 33 - .../pos-with-compiler-cc/dotc/util/Util.scala | 25 - .../dotc/util/WeakHashSet.scala | 349 - .../dotc/util/common.scala | 14 - .../pos-with-compiler-cc/dotc/util/kwords.sc | 18 - .../pos-with-compiler-cc/dotc/util/lrutest.sc | 40 - tests/pos-with-compiler-cc/package.scala | 54 - 480 files changed, 163760 deletions(-) delete mode 100644 tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala delete mode 100644 tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BTypes.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala delete mode 100644 
tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/Primitives.scala delete mode 100644 tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala delete mode 100644 tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala delete mode 100644 tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala delete mode 100644 tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala delete mode 100644 tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala delete mode 100644 tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala delete mode 100644 tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala delete mode 100644 tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala delete mode 100644 tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/Bench.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/CompilationUnit.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/Compiler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/Driver.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/Main.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/Resident.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/Run.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ScalacCommand.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/Desugar.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/Positioned.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/Trees.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/tpd.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/ast/untpd.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/cc/Setup.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala delete mode 100644 
tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/CliCommand.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/Config.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/Feature.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/PathResolver.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/Platform.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/Printers.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/Properties.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/Settings.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Annotations.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Atoms.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Comments.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Constants.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Constraint.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/ContextOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Contexts.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Decorators.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Definitions.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Denotations.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Flags.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/GadtConstraint.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Hashable.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/JavaNullInterop.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/MacroClassLoader.scala delete mode 100644 
tests/pos-with-compiler-cc/dotc/core/MatchTypeTrace.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Mode.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/NameKinds.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/NameOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/NameTags.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/NamerOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Names.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/NullOpsDecorator.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/OrderingConstraint.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/ParamInfo.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/PatternTypeConstrainer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Periods.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Phases.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Scopes.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Signature.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/StagingContext.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/StdNames.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Substituters.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/SymDenotations.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/SymbolLoaders.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Symbols.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/TypeApplications.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/TypeErasure.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/TypeErrors.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/TypeEval.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/TypeOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/TyperState.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Types.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Uniques.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/Variances.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/classfile/AbstractFileReader.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/classfile/ByteCodecs.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileConstants.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/classfile/ClassfileParser.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/classfile/DataReader.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/classfile/ReusableDataReader.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/CommentPickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/CommentUnpickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/DottyUnpickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/NameBuffer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/PositionPickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/PositionUnpickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/TastyAnsiiPrinter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/TastyClassName.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/TastyHTMLPrinter.scala delete mode 100644 
tests/pos-with-compiler-cc/dotc/core/tasty/TastyPickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/TastyPrinter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/TastyUnpickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/TreeBuffer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/TreePickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/unpickleScala2/PickleBuffer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/unpickleScala2/PickleFormat.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/unpickleScala2/Scala2Erasure.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/unpickleScala2/Scala2Flags.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/core/unpickleScala2/Scala2Unpickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/coverage/Coverage.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/coverage/Location.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/coverage/Serializer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/decompiler/DecompilationPrinter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/decompiler/IDEDecompilerDriver.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/decompiler/Main.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/decompiler/PartialTASTYDecompiler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/decompiler/TASTYDecompiler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/fromtasty/AlreadyLoadedCompilationUnit.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/fromtasty/Debug.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/fromtasty/ReadTasty.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/fromtasty/TASTYCompilationUnit.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/fromtasty/TASTYCompiler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/fromtasty/TASTYRun.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/fromtasty/TastyFileUtil.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/inlines/InlineReducer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/inlines/Inliner.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/inlines/Inlines.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/interactive/Completion.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/interactive/Interactive.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/interactive/InteractiveCompiler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/interactive/InteractiveDriver.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/interactive/SourceTree.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/CharArrayReader.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/JavaParsers.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/JavaScanners.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/JavaTokens.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/ParserPhase.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/Parsers.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/ScriptParsers.scala delete mode 100644 
tests/pos-with-compiler-cc/dotc/parsing/Tokens.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/package.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/xml/MarkupParserCommon.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/xml/MarkupParsers.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/xml/SymbolicXMLBuilder.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/parsing/xml/Utility.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/plugins/Plugin.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/plugins/Plugins.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/Formatting.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/Highlighting.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/MessageLimiter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/PlainPrinter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/Printer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/RefinedPrinter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/ReplPrinter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/Showable.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/SyntaxHighlighting.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/Texts.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/printing/package.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/profile/AsyncHelper.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/profile/ExtendedThreadMxBean.java delete mode 100644 tests/pos-with-compiler-cc/dotc/profile/ExternalToolHook.java delete mode 100644 tests/pos-with-compiler-cc/dotc/profile/Profiler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/quoted/Interpreter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/quoted/MacroExpansion.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/quoted/PickledQuotes.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/quoted/QuoteUtils.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/quoted/QuotesCache.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/quoted/TastyString.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/quoted/reflect/FromSymbol.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/report.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/AbstractReporter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/ConsoleReporter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/Diagnostic.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/ErrorMessageID.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/ExploringReporter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/HideNonSensicalMessages.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/Message.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/MessageKind.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/MessageRendering.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/Profile.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/Reporter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/StoreReporter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/TestReporter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/ThrowingReporter.scala delete mode 100644 
tests/pos-with-compiler-cc/dotc/reporting/UniqueMessagePositions.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/WConf.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/messages.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/reporting/trace.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/rewrites/Rewrites.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/sbt/APIUtils.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/sbt/ExtractAPI.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/sbt/ExtractDependencies.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/sbt/ShowAPI.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/sbt/ThunkHolder.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/sbt/package.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/ConstantOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/Descriptor.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/ExtractSemanticDB.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/LinkMode.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/PPrint.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/Scala3.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/SemanticSymbolBuilder.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/SyntheticsExtractor.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/Tools.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/TypeOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Access.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Annotation.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Constant.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Diagnostic.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Documentation.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Language.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Location.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Range.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Schema.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Scope.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Signature.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/SymbolInformation.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/SymbolOccurrence.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Synthetic.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/TextDocument.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/TextDocuments.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Tree.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/generated/Type.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/InvalidProtocolBufferException.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/LiteParser.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/MD5.scala delete mode 100644 
tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbEnum.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbGeneratedMessageCompanion.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbInputStream.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbMessage.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbOutputStream.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/SemanticdbTypeMapper.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/semanticdb/internal/WireFormat.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/AccessProxies.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ArrayApply.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ArrayConstructors.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/BeanProperties.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/BetaReduce.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Bridges.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CapturedVars.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CheckLoopingImplicits.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CheckNoSuperThis.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CheckReentrant.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CheckStatic.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CollectEntryPoints.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CollectNullableFields.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CompleteJavaEnums.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Constructors.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ContextFunctionResults.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CookComments.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CountOuterAccesses.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CrossStageSafety.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/CtxLazy.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Dependencies.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/DropEmptyCompanions.scala.disabled delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/DropOuterAccessors.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ElimByName.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ElimErasedValueType.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ElimOpaque.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ElimOuterSelect.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ElimPackagePrefixes.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ElimPolyFunction.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ElimRepeated.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ElimStaticThis.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/EmptyPhase.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Erasure.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/EtaReduce.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ExpandPrivate.scala delete 
mode 100644 tests/pos-with-compiler-cc/dotc/transform/ExpandSAMs.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ExplicitOuter.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ExplicitSelf.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ExtensionMethods.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/FirstTransform.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Flatten.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ForwardDepChecks.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/FullParameterization.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/FunctionXXLForwarders.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/GenericSignatures.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Getters.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/HoistSuperArgs.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/InlinePatterns.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/InlineVals.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Inlining.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/InstrumentCoverage.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Instrumentation.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/InterceptedMethods.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/IsInstanceOfEvaluator.scala.disabled delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/LambdaLift.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/LazyVals.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/LetOverApply.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/LiftTry.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Literalize.scala.disabled delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/MacroTransform.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/MegaPhase.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Memoize.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Mixin.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/MixinOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/MoveStatics.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/NonLocalReturns.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/OverridingPairs.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ParamForwarding.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/PatternMatcher.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/PickleQuotes.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Pickler.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/PostInlining.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/PostTyper.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/PreRecheck.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ProtectedAccessors.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/PruneErasedDefs.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/PureStats.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Recheck.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ReifiedReflect.scala delete 
mode 100644 tests/pos-with-compiler-cc/dotc/transform/RepeatableAnnotations.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ResolveSuper.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/RestoreScopes.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SelectStatic.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SeqLiterals.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SetRootTree.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SpecializeApplyMethods.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SpecializeFunctions.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SpecializeTuples.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Splicer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Splicing.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/Staging.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SuperAccessors.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SymUtils.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/SyntheticMembers.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TailRec.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TransformWildcards.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TreeExtractors.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TreeMapWithStages.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TryCatchPatterns.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TupleOptimizations.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TypeTestsCasts.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/TypeUtils.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/UncacheGivenAliases.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/UninitializedDefs.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/VCElideAllocations.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/VCInlineMethods.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/ValueClasses.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/YCheckPositions.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/init/Checker.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/init/Errors.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/localopt/FormatChecker.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/localopt/FormatInterpolatorTransform.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/localopt/StringInterpolatorOpt.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/patmat/Space.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/sjs/AddLocalJSFakeNews.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/sjs/ExplicitJSClasses.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/sjs/JSExportUtils.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/sjs/JSSymUtils.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/sjs/JUnitBootstrappers.scala delete mode 100644 
tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSExports.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/transform/sjs/PrepJSInterop.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Applications.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Checking.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/ConstFold.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/CrossVersionChecks.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Deriving.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Docstrings.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Dynamic.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/ErrorReporting.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/EtaExpansion.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Implicits.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/ImportInfo.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/ImportSuggestions.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Inferencing.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/JavaChecks.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Namer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Nullables.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/QuotesAndSplices.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/ReTyper.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/RefChecks.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Synthesizer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/TypeAssigner.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/Typer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/TyperPhase.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/typer/VarianceChecker.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/Attachment.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/CharBuffer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/Chars.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/ClasspathFromClassloader.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/CommentParsing.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/DiffUtil.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/DotClass.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/EqHashMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/FreshNameCreator.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/GenericHashMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/HashMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/HashSet.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/IntMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/LRUCache.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/LinearMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/LinearSet.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/MutableMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/MutableSet.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/NameTransformer.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/ParsedComment.scala delete mode 100644 
tests/pos-with-compiler-cc/dotc/util/PerfectHashing.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/Property.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/ReadOnlyMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/ReadOnlySet.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/ReusableInstance.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/ShowPickled.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/Signatures.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/SimpleIdentityMap.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/SimpleIdentitySet.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/SixteenNibbles.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/SourceFile.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/SourcePosition.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/Spans.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/StackTraceOps.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/Stats.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/Store.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/Util.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/WeakHashSet.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/common.scala delete mode 100644 tests/pos-with-compiler-cc/dotc/util/kwords.sc delete mode 100644 tests/pos-with-compiler-cc/dotc/util/lrutest.sc delete mode 100644 tests/pos-with-compiler-cc/package.scala diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index 5cd4f837b823..3b19f1d3d4bb 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -32,13 +32,6 @@ class BootstrappedOnlyCompilationTests { ).checkCompile() } - // @Test - def posWithCompilerCC: Unit = - implicit val testGroup: TestGroup = TestGroup("compilePosWithCompilerCC") - aggregateTests( - compileDir("tests/pos-with-compiler-cc/dotc", withCompilerOptions.and("-language:experimental.captureChecking")) - ).checkCompile() - @Test def posWithCompiler: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePosWithCompiler") aggregateTests( diff --git a/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala b/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala deleted file mode 100644 index 6b5bfbc3e00e..000000000000 --- a/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala +++ /dev/null @@ -1,232 +0,0 @@ -package dotty.tools -package backend - -object ScalaPrimitivesOps extends ScalaPrimitivesOps - -class ScalaPrimitivesOps { - // Arithmetic unary operations - inline val POS = 1 // +x - inline val NEG = 2 // -x - inline val NOT = 3 // ~x - - // Arithmetic binary operations - inline val ADD = 10 // x + y - inline val SUB = 11 // x - y - inline val MUL = 12 // x * y - inline val DIV = 13 // x / y - inline val MOD = 14 // x % y - - // Bitwise operations - inline val OR = 20 // x | y - inline val XOR = 21 // x ^ y - inline val AND = 22 // x & y - - // Shift operations - inline val LSL = 30 // x << y - inline val LSR = 31 // x >>> y - inline val ASR = 32 // x >> y - - // Comparison operations - inline val ID = 40 // x eq y - inline val NI = 41 // x ne y - inline val EQ = 42 // x == y - inline val NE = 43 // x != y - inline val LT = 44 // x < y - inline val LE 
= 45 // x <= y - inline val GT = 46 // x > y - inline val GE = 47 // x >= y - - // Boolean unary operations - inline val ZNOT = 50 // !x - - // Boolean binary operations - inline val ZOR = 60 // x || y - inline val ZAND = 61 // x && y - - // Array operations - inline val LENGTH = 70 // x.length - inline val APPLY = 71 // x(y) - inline val UPDATE = 72 // x(y) = z - - // Any operations - inline val IS = 80 // x.is[y] - inline val AS = 81 // x.as[y] - inline val HASH = 87 // x.## - - // AnyRef operations - inline val SYNCHRONIZED = 90 // x.synchronized(y) - - // String operations - inline val CONCAT = 100 // String.valueOf(x)+String.valueOf(y) - - // coercions - inline val COERCE = 101 - - // RunTime operations - inline val BOX = 110 // RunTime.box_(x) - inline val UNBOX = 111 // RunTime.unbox_(x) - inline val NEW_ZARRAY = 112 // RunTime.zarray(x) - inline val NEW_BARRAY = 113 // RunTime.barray(x) - inline val NEW_SARRAY = 114 // RunTime.sarray(x) - inline val NEW_CARRAY = 115 // RunTime.carray(x) - inline val NEW_IARRAY = 116 // RunTime.iarray(x) - inline val NEW_LARRAY = 117 // RunTime.larray(x) - inline val NEW_FARRAY = 118 // RunTime.farray(x) - inline val NEW_DARRAY = 119 // RunTime.darray(x) - inline val NEW_OARRAY = 120 // RunTime.oarray(x) - - inline val ZARRAY_LENGTH = 131 // RunTime.zarray_length(x) - inline val BARRAY_LENGTH = 132 // RunTime.barray_length(x) - inline val SARRAY_LENGTH = 133 // RunTime.sarray_length(x) - inline val CARRAY_LENGTH = 134 // RunTime.carray_length(x) - inline val IARRAY_LENGTH = 135 // RunTime.iarray_length(x) - inline val LARRAY_LENGTH = 136 // RunTime.larray_length(x) - inline val FARRAY_LENGTH = 137 // RunTime.farray_length(x) - inline val DARRAY_LENGTH = 138 // RunTime.darray_length(x) - inline val OARRAY_LENGTH = 139 // RunTime.oarray_length(x) - - inline val ZARRAY_GET = 140 // RunTime.zarray_get(x,y) - inline val BARRAY_GET = 141 // RunTime.barray_get(x,y) - inline val SARRAY_GET = 142 // RunTime.sarray_get(x,y) - inline val CARRAY_GET = 143 // RunTime.carray_get(x,y) - inline val IARRAY_GET = 144 // RunTime.iarray_get(x,y) - inline val LARRAY_GET = 145 // RunTime.larray_get(x,y) - inline val FARRAY_GET = 146 // RunTime.farray_get(x,y) - inline val DARRAY_GET = 147 // RunTime.darray_get(x,y) - inline val OARRAY_GET = 148 // RunTime.oarray_get(x,y) - - inline val ZARRAY_SET = 150 // RunTime.zarray(x,y,z) - inline val BARRAY_SET = 151 // RunTime.barray(x,y,z) - inline val SARRAY_SET = 152 // RunTime.sarray(x,y,z) - inline val CARRAY_SET = 153 // RunTime.carray(x,y,z) - inline val IARRAY_SET = 154 // RunTime.iarray(x,y,z) - inline val LARRAY_SET = 155 // RunTime.larray(x,y,z) - inline val FARRAY_SET = 156 // RunTime.farray(x,y,z) - inline val DARRAY_SET = 157 // RunTime.darray(x,y,z) - inline val OARRAY_SET = 158 // RunTime.oarray(x,y,z) - - inline val B2B = 200 // RunTime.b2b(x) - inline val B2S = 201 // RunTime.b2s(x) - inline val B2C = 202 // RunTime.b2c(x) - inline val B2I = 203 // RunTime.b2i(x) - inline val B2L = 204 // RunTime.b2l(x) - inline val B2F = 205 // RunTime.b2f(x) - inline val B2D = 206 // RunTime.b2d(x) - - inline val S2B = 210 // RunTime.s2b(x) - inline val S2S = 211 // RunTime.s2s(x) - inline val S2C = 212 // RunTime.s2c(x) - inline val S2I = 213 // RunTime.s2i(x) - inline val S2L = 214 // RunTime.s2l(x) - inline val S2F = 215 // RunTime.s2f(x) - inline val S2D = 216 // RunTime.s2d(x) - - inline val C2B = 220 // RunTime.c2b(x) - inline val C2S = 221 // RunTime.c2s(x) - inline val C2C = 222 // RunTime.c2c(x) - inline val C2I = 223 
// RunTime.c2i(x) - inline val C2L = 224 // RunTime.c2l(x) - inline val C2F = 225 // RunTime.c2f(x) - inline val C2D = 226 // RunTime.c2d(x) - - inline val I2B = 230 // RunTime.i2b(x) - inline val I2S = 231 // RunTime.i2s(x) - inline val I2C = 232 // RunTime.i2c(x) - inline val I2I = 233 // RunTime.i2i(x) - inline val I2L = 234 // RunTime.i2l(x) - inline val I2F = 235 // RunTime.i2f(x) - inline val I2D = 236 // RunTime.i2d(x) - - inline val L2B = 240 // RunTime.l2b(x) - inline val L2S = 241 // RunTime.l2s(x) - inline val L2C = 242 // RunTime.l2c(x) - inline val L2I = 243 // RunTime.l2i(x) - inline val L2L = 244 // RunTime.l2l(x) - inline val L2F = 245 // RunTime.l2f(x) - inline val L2D = 246 // RunTime.l2d(x) - - inline val F2B = 250 // RunTime.f2b(x) - inline val F2S = 251 // RunTime.f2s(x) - inline val F2C = 252 // RunTime.f2c(x) - inline val F2I = 253 // RunTime.f2i(x) - inline val F2L = 254 // RunTime.f2l(x) - inline val F2F = 255 // RunTime.f2f(x) - inline val F2D = 256 // RunTime.f2d(x) - - inline val D2B = 260 // RunTime.d2b(x) - inline val D2S = 261 // RunTime.d2s(x) - inline val D2C = 262 // RunTime.d2c(x) - inline val D2I = 263 // RunTime.d2i(x) - inline val D2L = 264 // RunTime.d2l(x) - inline val D2F = 265 // RunTime.d2f(x) - inline val D2D = 266 // RunTime.d2d(x) - - /** Check whether the given operation code is an array operation. */ - def isArrayOp(code: Int): Boolean = - isArrayNew(code) | isArrayLength(code) | isArrayGet(code) | isArraySet(code) - - def isArrayNew(code: Int): Boolean = code match { - case NEW_ZARRAY | NEW_BARRAY | NEW_SARRAY | NEW_CARRAY | - NEW_IARRAY | NEW_LARRAY | NEW_FARRAY | NEW_DARRAY | - NEW_OARRAY => true - case _ => false - } - - def isArrayLength(code: Int): Boolean = code match { - case ZARRAY_LENGTH | BARRAY_LENGTH | SARRAY_LENGTH | CARRAY_LENGTH | - IARRAY_LENGTH | LARRAY_LENGTH | FARRAY_LENGTH | DARRAY_LENGTH | - OARRAY_LENGTH | LENGTH => true - case _ => false - } - - def isArrayGet(code: Int): Boolean = code match { - case ZARRAY_GET | BARRAY_GET | SARRAY_GET | CARRAY_GET | - IARRAY_GET | LARRAY_GET | FARRAY_GET | DARRAY_GET | - OARRAY_GET | APPLY => true - case _ => false - } - - def isArraySet(code: Int): Boolean = code match { - case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET | - IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET | - OARRAY_SET | UPDATE => true - case _ => false - } - - /** Check whether the given code is a comparison operator */ - def isComparisonOp(code: Int): Boolean = code match { - case ID | NI | EQ | NE | - LT | LE | GT | GE => true - - case _ => false - } - def isUniversalEqualityOp(code: Int): Boolean = (code == EQ) || (code == NE) - def isReferenceEqualityOp(code: Int): Boolean = (code == ID) || (code == NI) - - def isArithmeticOp(code: Int): Boolean = code match { - case POS | NEG | NOT => true; // unary - case ADD | SUB | MUL | - DIV | MOD => true; // binary - case OR | XOR | AND | - LSL | LSR | ASR => true; // bitwise - case _ => false - } - - def isLogicalOp(code: Int): Boolean = code match { - case ZNOT | ZAND | ZOR => true - case _ => false - } - - def isShiftOp(code: Int): Boolean = code match { - case LSL | LSR | ASR => true - case _ => false - } - - def isBitwiseOp(code: Int): Boolean = code match { - case OR | XOR | AND => true - case _ => false - } - - def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D) - -} diff --git a/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala b/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala deleted file mode 100644 index 
b3d98d425b2a..000000000000 --- a/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala +++ /dev/null @@ -1,57 +0,0 @@ -package dotty.tools -package backend - -/** - * Simple implementation of a worklist algorithm. A processing - * function is applied repeatedly to the first element in the - * worklist, as long as the stack is not empty. - * - * The client class should mix-in this class and initialize the worklist - * field and define the `processElement` method. Then call the `run` method - * providing a function that initializes the worklist. - * - * @author Martin Odersky - * @version 1.0 - * @see [[scala.tools.nsc.backend.icode.Linearizers]] - */ -trait WorklistAlgorithm { - type Elem - class WList { - private var list: List[Elem] = Nil - def isEmpty = list.isEmpty - def nonEmpty = !isEmpty - def push(e: Elem): Unit = { list = e :: list } - def pop(): Elem = { - val head = list.head - list = list.tail - head - } - def pushAll(xs: Iterable[Elem]): Unit = xs.foreach(push) - def clear(): Unit = list = Nil - - } - - val worklist: WList - - /** - * Run the iterative algorithm until the worklist remains empty. - * The initializer is run once before the loop starts and should - * initialize the worklist. - */ - def run(initWorklist: => Unit) = { - initWorklist - - while (worklist.nonEmpty) - processElement(dequeue) - } - - /** - * Process the current element from the worklist. - */ - def processElement(e: Elem): Unit - - /** - * Remove and return the first element to be processed from the worklist. - */ - def dequeue: Elem -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala b/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala deleted file mode 100644 index e6393ce82054..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala +++ /dev/null @@ -1,65 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.tools.asm.tree.{AbstractInsnNode} -import java.io.PrintWriter -import scala.tools.asm.util.{TraceClassVisitor, TraceMethodVisitor, Textifier} -import scala.tools.asm.ClassReader - -object AsmUtils { - - /** - * Print the bytecode of methods generated by GenBCode to the standard output. Only methods - * whose name contains `traceMethodPattern` are traced. - */ - final val traceMethodEnabled = sys.env.contains("printBCODE") - final val traceMethodPattern = sys.env.getOrElse("printBCODE", "") - - /** - * Print the bytecode of classes generated by GenBCode to the standard output. - */ - inline val traceClassEnabled = false - inline val traceClassPattern = "" - - /** - * Print the bytedcode of classes as they are serialized by the ASM library. The serialization - * performed by `asm.ClassWriter` can change the code generated by GenBCode. For example, it - * introduces stack map frames, it computes the maximal stack sizes, and it replaces dead - * code by NOPs (see also https://github.com/scala/scala/pull/3726#issuecomment-42861780). 
- */ - inline val traceSerializedClassEnabled = false - inline val traceSerializedClassPattern = "" - - def traceMethod(mnode: MethodNode1): Unit = { - println(s"Bytecode for method ${mnode.name}") - val p = new Textifier - val tracer = new TraceMethodVisitor(p) - mnode.accept(tracer) - val w = new PrintWriter(System.out) - p.print(w) - w.flush() - } - - def traceClass(cnode: ClassNode1): Unit = { - println(s"Bytecode for class ${cnode.name}") - val w = new PrintWriter(System.out) - cnode.accept(new TraceClassVisitor(w)) - w.flush() - } - - def traceClass(bytes: Array[Byte]): Unit = traceClass(readClass(bytes)) - - def readClass(bytes: Array[Byte]): ClassNode1 = { - val node = new ClassNode1() - new ClassReader(bytes).accept(node, 0) - node - } - - def instructionString(instruction: AbstractInsnNode): String = instruction.getOpcode match { - case -1 => instruction.toString - case op => scala.tools.asm.util.Printer.OPCODES(op) - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala deleted file mode 100644 index d95638be2695..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala +++ /dev/null @@ -1,158 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.report - -/** - * This trait contains code shared between GenBCode and GenASM that depends on types defined in - * the compiler cake (Global). - */ -final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { - import interface.given - import DottyBackendInterface.symExtensions - - /** - * True if `classSym` is an anonymous class or a local class. I.e., false if `classSym` is a - * member class. This method is used to decide if we should emit an EnclosingMethod attribute. - * It is also used to decide whether the "owner" field in the InnerClass attribute should be - * null. - */ - def isAnonymousOrLocalClass(classSym: Symbol): Boolean = { - assert(classSym.isClass, s"not a class: $classSym") - // Here used to be an `assert(!classSym.isDelambdafyFunction)`: delambdafy lambda classes are - // always top-level. However, SI-8900 shows an example where the weak name-based implementation - // of isDelambdafyFunction failed (for a function declared in a package named "lambda"). - classSym.isAnonymousClass || { - val originalOwner = classSym.originalOwner - originalOwner != NoSymbol && !originalOwner.isClass - } - } - - /** - * Returns the enclosing method for non-member classes. In the following example - * - * class A { - * def f = { - * class B { - * class C - * } - * } - * } - * - * the method returns Some(f) for B, but None for C, because C is a member class. For non-member - * classes that are not enclosed by a method, it returns None: - * - * class A { - * { class B } - * } - * - * In this case, for B, we return None. - * - * The EnclosingMethod attribute needs to be added to non-member classes (see doc in BTypes). - * This is a source-level property, so we need to use the originalOwner chain to reconstruct it. 
- */ - private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = { - assert(classSym.isClass, classSym) - def enclosingMethod(sym: Symbol): Option[Symbol] = { - if (sym.isClass || sym == NoSymbol) None - else if (sym.is(Method)) Some(sym) - else enclosingMethod(sym.originalOwner) - } - enclosingMethod(classSym.originalOwner) - } - - /** - * The enclosing class for emitting the EnclosingMethod attribute. Since this is a source-level - * property, this method looks at the originalOwner chain. See doc in BTypes. - */ - private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = { - assert(classSym.isClass, classSym) - def enclosingClass(sym: Symbol): Symbol = { - if (sym.isClass) sym - else enclosingClass(sym.originalOwner.originalLexicallyEnclosingClass) - } - enclosingClass(classSym.originalOwner.originalLexicallyEnclosingClass) - } - - /*final*/ case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String) - - /** - * Data for emitting an EnclosingMethod attribute. None if `classSym` is a member class (not - * an anonymous or local class). See doc in BTypes. - * - * The class is parametrized by two functions to obtain a bytecode class descriptor for a class - * symbol, and to obtain a method signature descriptor fro a method symbol. These function depend - * on the implementation of GenASM / GenBCode, so they need to be passed in. - */ - def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = { - if (isAnonymousOrLocalClass(classSym)) { - val methodOpt = enclosingMethodForEnclosingMethodAttribute(classSym) - report.debuglog(s"enclosing method for $classSym is $methodOpt (in ${methodOpt.map(_.enclosingClass)})") - Some(EnclosingMethodEntry( - classDesc(enclosingClassForEnclosingMethodAttribute(classSym)), - methodOpt.map(_.javaSimpleName).orNull, - methodOpt.map(methodDesc).orNull)) - } else { - None - } - } -} - -object BCodeAsmCommon{ - def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = { - val ca = new Array[Char](bytes.length) - var idx = 0 - while(idx < bytes.length) { - val b: Byte = bytes(idx) - assert((b & ~0x7f) == 0) - ca(idx) = b.asInstanceOf[Char] - idx += 1 - } - - ca - } - - final def arrEncode(bSeven: Array[Byte]): Array[String] = { - var strs: List[String] = Nil - // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure) - var prevOffset = 0 - var offset = 0 - var encLength = 0 - while(offset < bSeven.length) { - val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1) - val newEncLength = encLength.toLong + deltaEncLength - if(newEncLength >= 65535) { - val ba = bSeven.slice(prevOffset, offset) - strs ::= new java.lang.String(ubytesToCharArray(ba)) - encLength = 0 - prevOffset = offset - } else { - encLength += deltaEncLength - offset += 1 - } - } - if(prevOffset < offset) { - assert(offset == bSeven.length) - val ba = bSeven.slice(prevOffset, offset) - strs ::= new java.lang.String(ubytesToCharArray(ba)) - } - assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict? 
- strs.reverse.toArray - } - - - def strEncode(bSeven: Array[Byte]): String = { - val ca = ubytesToCharArray(bSeven) - new java.lang.String(ca) - // debug val bvA = new asm.ByteVector; bvA.putUTF8(s) - // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes) - // debug assert(enc(idx) == bvA.getByte(idx + 2)) - // debug assert(bvA.getLength == enc.size + 2) - } - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala deleted file mode 100644 index da6d213351b7..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala +++ /dev/null @@ -1,1776 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.annotation.switch -import scala.collection.mutable.SortedMap - -import scala.tools.asm -import scala.tools.asm.{Handle, Opcodes} -import BCodeHelpers.InvokeStyle - -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.Constants._ -import dotty.tools.dotc.core.Flags.{Label => LabelFlag, _} -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.StdNames.{nme, str} -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.transform.Erasure -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.util.Spans._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Decorators.em -import dotty.tools.dotc.report - -/* - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 - * - */ -trait BCodeBodyBuilder extends BCodeSkelBuilder { - // import global._ - // import definitions._ - import tpd._ - import int.{_, given} - import DottyBackendInterface.symExtensions - import bTypes._ - import coreBTypes._ - - protected val primitives: DottyPrimitives - - /* - * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions. - */ - abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) { - - import Primitives.TestOp - - /* ---------------- helper utils for generating methods and code ---------------- */ - - def emit(opc: Int): Unit = { mnode.visitInsn(opc) } - - def emitZeroOf(tk: BType): Unit = { - tk match { - case BOOL => bc.boolconst(false) - case BYTE | - SHORT | - CHAR | - INT => bc.iconst(0) - case LONG => bc.lconst(0) - case FLOAT => bc.fconst(0) - case DOUBLE => bc.dconst(0) - case UNIT => () - case _ => emit(asm.Opcodes.ACONST_NULL) - } - } - - /* - * Emits code that adds nothing to the operand stack. - * Two main cases: `tree` is an assignment, - * otherwise an `adapt()` to UNIT is performed if needed. - */ - def genStat(tree: Tree): Unit = { - lineNumber(tree) - - tree match { - case Assign(lhs @ DesugaredSelect(qual, _), rhs) => - val isStatic = lhs.symbol.isStaticMember - if (!isStatic) { genLoadQualifier(lhs) } - genLoad(rhs, symInfoTK(lhs.symbol)) - lineNumber(tree) - // receiverClass is used in the bytecode to access the field. 
using sym.owner may lead to IllegalAccessError - val receiverClass = qual.tpe.typeSymbol - fieldStore(lhs.symbol, receiverClass) - - case Assign(lhs, rhs) => - val s = lhs.symbol - val Local(tk, _, idx, _) = locals.getOrMakeLocal(s) - - rhs match { - case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil) - if larg.symbol == s && tk.isIntSizedType && x.isShortRange => - lineNumber(tree) - bc.iinc(idx, x.intValue) - - case Apply(Select(larg: Ident, nme.SUB), Literal(x) :: Nil) - if larg.symbol == s && tk.isIntSizedType && Constant(-x.intValue).isShortRange => - lineNumber(tree) - bc.iinc(idx, -x.intValue) - - case _ => - genLoad(rhs, tk) - lineNumber(tree) - bc.store(idx, tk) - } - - case _ => - genLoad(tree, UNIT) - } - } - - /* Generate code for primitive arithmetic operations. */ - def genArithmeticOp(tree: Tree, code: Int): BType = tree match{ - case Apply(fun @ DesugaredSelect(larg, _), args) => - var resKind = tpeTK(larg) - - assert(resKind.isNumericType || (resKind == BOOL), - s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]") - - import ScalaPrimitivesOps._ - - args match { - // unary operation - case Nil => - genLoad(larg, resKind) - code match { - case POS => () // nothing - case NEG => bc.neg(resKind) - case NOT => bc.genPrimitiveArithmetic(Primitives.NOT, resKind) - case _ => abort(s"Unknown unary operation: ${fun.symbol.showFullName} code: $code") - } - - // binary operation - case rarg :: Nil => - val isShift = isShiftOp(code) - resKind = tpeTK(larg).maxType(if (isShift) INT else tpeTK(rarg)) - - if (isShift || isBitwiseOp(code)) { - assert(resKind.isIntegralType || (resKind == BOOL), - s"$resKind incompatible with arithmetic modulo operation.") - } - - genLoad(larg, resKind) - genLoad(rarg, if (isShift) INT else resKind) - - (code: @switch) match { - case ADD => bc add resKind - case SUB => bc sub resKind - case MUL => bc mul resKind - case DIV => bc div resKind - case MOD => bc rem resKind - - case OR | XOR | AND => bc.genPrimitiveLogical(code, resKind) - - case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind) - - case _ => abort(s"Unknown primitive: ${fun.symbol}[$code]") - } - - case _ => - abort(s"Too many arguments for primitive function: $tree") - } - lineNumber(tree) - resKind - } - - /* Generate primitive array operations. 
*/ - def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match{ - - case Apply(DesugaredSelect(arrayObj, _), args) => - import ScalaPrimitivesOps._ - val k = tpeTK(arrayObj) - genLoad(arrayObj, k) - val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code")) - - var generatedType = expectedType - - if (isArrayGet(code)) { - // load argument on stack - assert(args.length == 1, s"Too many arguments for array get operation: $tree"); - genLoad(args.head, INT) - generatedType = k.asArrayBType.componentType - bc.aload(elementType) - } - else if (isArraySet(code)) { - val List(a1, a2) = args - genLoad(a1, INT) - genLoad(a2) - generatedType = UNIT - bc.astore(elementType) - } else { - generatedType = INT - emit(asm.Opcodes.ARRAYLENGTH) - } - lineNumber(tree) - - generatedType - } - - def genLoadIfTo(tree: If, expectedType: BType, dest: LoadDestination): BType = tree match{ - case If(condp, thenp, elsep) => - - val success = new asm.Label - val failure = new asm.Label - - val hasElse = !elsep.isEmpty && (elsep match { - case Literal(value) if value.tag == UnitTag => false - case _ => true - }) - - genCond(condp, success, failure, targetIfNoJump = success) - markProgramPoint(success) - - if dest == LoadDestination.FallThrough then - if hasElse then - val thenKind = tpeTK(thenp) - val elseKind = tpeTK(elsep) - def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT) && expectedType == UNIT - val resKind = if (hasUnitBranch) UNIT else tpeTK(tree) - - val postIf = new asm.Label - genLoadTo(thenp, resKind, LoadDestination.Jump(postIf)) - markProgramPoint(failure) - genLoadTo(elsep, resKind, LoadDestination.FallThrough) - markProgramPoint(postIf) - resKind - else - genLoad(thenp, UNIT) - markProgramPoint(failure) - UNIT - end if - else - genLoadTo(thenp, expectedType, dest) - markProgramPoint(failure) - if hasElse then - genLoadTo(elsep, expectedType, dest) - else - genAdaptAndSendToDest(UNIT, expectedType, dest) - expectedType - end if - } - - def genPrimitiveOp(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { - case Apply(fun @ DesugaredSelect(receiver, _), _) => - val sym = tree.symbol - - val code = primitives.getPrimitive(tree, receiver.tpe) - - import ScalaPrimitivesOps._ - - if (isArithmeticOp(code)) genArithmeticOp(tree, code) - else if (code == CONCAT) genStringConcat(tree) - else if (code == HASH) genScalaHash(receiver) - else if (isArrayOp(code)) genArrayOp(tree, code, expectedType) - else if (isLogicalOp(code) || isComparisonOp(code)) { - val success, failure, after = new asm.Label - genCond(tree, success, failure, targetIfNoJump = success) - // success block - markProgramPoint(success) - bc boolconst true - bc goTo after - // failure block - markProgramPoint(failure) - bc boolconst false - // after - markProgramPoint(after) - - BOOL - } - else if (isCoercion(code)) { - genLoad(receiver) - lineNumber(tree) - genCoercion(code) - coercionTo(code) - } - else abort( - s"Primitive operation not handled yet: ${sym.showFullName}(${fun.symbol.name}) at: ${tree.span}" - ) - } - - def genLoad(tree: Tree): Unit = { - genLoad(tree, tpeTK(tree)) - } - - /* Generate code for trees that produce values on the stack */ - def genLoad(tree: Tree, expectedType: BType): Unit = - genLoadTo(tree, expectedType, LoadDestination.FallThrough) - - /* Generate code for trees that produce values, sent to a given `LoadDestination`. 
*/ - def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination): Unit = - var generatedType = expectedType - var generatedDest = LoadDestination.FallThrough - - lineNumber(tree) - - tree match { - case tree@ValDef(_, _, _) => - val sym = tree.symbol - /* most of the time, !locals.contains(sym), unless the current activation of genLoad() is being called - while duplicating a finalizer that contains this ValDef. */ - val loc = locals.getOrMakeLocal(sym) - val Local(tk, _, idx, isSynth) = loc - if (tree.rhs == tpd.EmptyTree) { emitZeroOf(tk) } - else { genLoad(tree.rhs, tk) } - bc.store(idx, tk) - val localVarStart = currProgramPoint() - if (!isSynth) { // there are case ValDef's emitted by patmat - varsInScope ::= (sym -> localVarStart) - } - generatedType = UNIT - - case t @ If(_, _, _) => - generatedType = genLoadIfTo(t, expectedType, dest) - generatedDest = dest - - case t @ Labeled(_, _) => - generatedType = genLabeledTo(t, expectedType, dest) - generatedDest = dest - - case r: Return => - genReturn(r) - generatedDest = LoadDestination.Return - - case t @ WhileDo(_, _) => - generatedDest = genWhileDo(t) - generatedType = UNIT - - case t @ Try(_, _, _) => - generatedType = genLoadTry(t) - - case t: Apply if t.fun.symbol eq defn.throwMethod => - val thrownExpr = t.args.head - val thrownKind = tpeTK(thrownExpr) - genLoadTo(thrownExpr, thrownKind, LoadDestination.Throw) - generatedDest = LoadDestination.Throw - - case New(tpt) => - abort(s"Unexpected New(${tpt.tpe.showSummary()}/$tpt) reached GenBCode.\n" + - " Call was genLoad" + ((tree, expectedType))) - - case t @ Closure(env, call, tpt) => - val functionalInterface: Symbol = - if !tpt.isEmpty then tpt.tpe.classSymbol - else t.tpe.classSymbol - val (fun, args) = call match { - case Apply(fun, args) => (fun, args) - case t @ DesugaredSelect(_, _) => (t, Nil) // TODO: use Select - case t @ Ident(_) => (t, Nil) - } - - if (!fun.symbol.isStaticMember) { - // load receiver of non-static implementation of lambda - - // darkdimius: I haven't found in spec `this` reference should go - // but I was able to derrive it by reading - // AbstractValidatingLambdaMetafactory.validateMetafactoryArgs - - val DesugaredSelect(prefix, _) = fun: @unchecked - genLoad(prefix) - } - - genLoadArguments(env, fun.symbol.info.firstParamTypes map toTypeKind) - generatedType = genInvokeDynamicLambda(NoSymbol, fun.symbol, env.size, functionalInterface) - - case app @ Apply(_, _) => - generatedType = genApply(app, expectedType) - - case This(qual) => - val symIsModuleClass = tree.symbol.is(ModuleClass) - assert(tree.symbol == claszSymbol || symIsModuleClass, - s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $claszSymbol compilation unit: $cunit") - if (symIsModuleClass && tree.symbol != claszSymbol) { - generatedType = genLoadModule(tree) - } - else { - mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) - // When compiling Array.scala, the constructor invokes `Array.this.super.`. The expectedType - // is `[Object` (computed by typeToBType, the type of This(Array) is `Array[T]`). If we would set - // the generatedType to `Array` below, the call to adapt at the end would fail. The situation is - // similar for primitives (`I` vs `Int`). 
- if (tree.symbol != defn.ArrayClass && !tree.symbol.isPrimitiveValueClass) { - generatedType = classBTypeFromSymbol(claszSymbol) - } - } - - case DesugaredSelect(Ident(nme.EMPTY_PACKAGE), module) => - assert(tree.symbol.is(Module), s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.span}") - genLoadModule(tree) - - case DesugaredSelect(qualifier, _) => - val sym = tree.symbol - generatedType = symInfoTK(sym) - val qualSafeToElide = tpd.isIdempotentExpr(qualifier) - - def genLoadQualUnlessElidable(): Unit = { if (!qualSafeToElide) { genLoadQualifier(tree) } } - - // receiverClass is used in the bytecode to access the field. using sym.owner may lead to IllegalAccessError - def receiverClass = qualifier.tpe.typeSymbol - if (sym.is(Module)) { - genLoadQualUnlessElidable() - genLoadModule(tree) - } else if (sym.isStaticMember) { - genLoadQualUnlessElidable() - fieldLoad(sym, receiverClass) - } else { - genLoadQualifier(tree) - fieldLoad(sym, receiverClass) - } - - case t @ Ident(name) => - val sym = tree.symbol - val tk = symInfoTK(sym) - generatedType = tk - - val desugared = cachedDesugarIdent(t) - desugared match { - case None => - if (!sym.is(Package)) { - if (sym.is(Module)) genLoadModule(sym) - else locals.load(sym) - } - case Some(t) => - genLoad(t, generatedType) - } - - case Literal(value) => - if (value.tag != UnitTag) (value.tag, expectedType) match { - case (IntTag, LONG ) => bc.lconst(value.longValue); generatedType = LONG - case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue); generatedType = DOUBLE - case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = srNullRef - case _ => genConstant(value); generatedType = tpeTK(tree) - } - - case blck @ Block(stats, expr) => - if(stats.isEmpty) - genLoadTo(expr, expectedType, dest) - else - genBlockTo(blck, expectedType, dest) - generatedDest = dest - - case Typed(Super(_, _), _) => - genLoadTo(tpd.This(claszSymbol.asClass), expectedType, dest) - generatedDest = dest - - case Typed(expr, _) => - genLoadTo(expr, expectedType, dest) - generatedDest = dest - - case Assign(_, _) => - generatedType = UNIT - genStat(tree) - - case av @ ArrayValue(_, _) => - generatedType = genArrayValue(av) - - case mtch @ Match(_, _) => - generatedType = genMatchTo(mtch, expectedType, dest) - generatedDest = dest - - case tpd.EmptyTree => if (expectedType != UNIT) { emitZeroOf(expectedType) } - - - case t: TypeApply => // dotty specific - generatedType = genTypeApply(t) - - case _ => abort(s"Unexpected tree in genLoad: $tree/${tree.getClass} at: ${tree.span}") - } - - // emit conversion and send to the right destination - if generatedDest == LoadDestination.FallThrough then - genAdaptAndSendToDest(generatedType, expectedType, dest) - end genLoadTo - - def genAdaptAndSendToDest(generatedType: BType, expectedType: BType, dest: LoadDestination): Unit = - if generatedType != expectedType then - adapt(generatedType, expectedType) - - dest match - case LoadDestination.FallThrough => - () - case LoadDestination.Jump(label) => - bc goTo label - case LoadDestination.Return => - bc emitRETURN returnType - case LoadDestination.Throw => - val thrownType = expectedType - // `throw null` is valid although scala.Null (as defined in src/libray-aux) isn't a subtype of Throwable. - // Similarly for scala.Nothing (again, as defined in src/libray-aux). 
- assert(thrownType.isNullType || thrownType.isNothingType || thrownType.asClassBType.isSubtypeOf(jlThrowableRef)) - emit(asm.Opcodes.ATHROW) - end genAdaptAndSendToDest - - // ---------------- field load and store ---------------- - - /* - * must-single-thread - */ - def fieldLoad( field: Symbol, hostClass: Symbol = null): Unit = fieldOp(field, isLoad = true, hostClass) - - /* - * must-single-thread - */ - def fieldStore(field: Symbol, hostClass: Symbol = null): Unit = fieldOp(field, isLoad = false, hostClass) - - /* - * must-single-thread - */ - private def fieldOp(field: Symbol, isLoad: Boolean, specificReceiver: Symbol): Unit = { - val useSpecificReceiver = specificReceiver != null && !field.isScalaStatic - - val owner = internalName(if (useSpecificReceiver) specificReceiver else field.owner) - val fieldJName = field.javaSimpleName - val fieldDescr = symInfoTK(field).descriptor - val isStatic = field.isStaticMember - val opc = - if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD } - else { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD } - mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr) - - } - - // ---------------- emitting constant values ---------------- - - /* - * For ClazzTag: - * must-single-thread - * Otherwise it's safe to call from multiple threads. - */ - def genConstant(const: Constant): Unit = { - (const.tag/*: @switch*/) match { - - case BooleanTag => bc.boolconst(const.booleanValue) - - case ByteTag => bc.iconst(const.byteValue) - case ShortTag => bc.iconst(const.shortValue) - case CharTag => bc.iconst(const.charValue) - case IntTag => bc.iconst(const.intValue) - - case LongTag => bc.lconst(const.longValue) - case FloatTag => bc.fconst(const.floatValue) - case DoubleTag => bc.dconst(const.doubleValue) - - case UnitTag => () - - case StringTag => - assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant` - mnode.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag - - case NullTag => emit(asm.Opcodes.ACONST_NULL) - - case ClazzTag => - val tp = toTypeKind(const.typeValue) - if tp.isPrimitive then - val boxedClass = boxedClassOfPrimitive(tp.asPrimitiveBType) - mnode.visitFieldInsn( - asm.Opcodes.GETSTATIC, - boxedClass.internalName, - "TYPE", // field name - jlClassRef.descriptor - ) - else - mnode.visitLdcInsn(tp.toASMType) - - case _ => abort(s"Unknown constant value: $const") - } - } - - private def genLabeledTo(tree: Labeled, expectedType: BType, dest: LoadDestination): BType = tree match { - case Labeled(bind, expr) => - - val labelSym = bind.symbol - - if dest == LoadDestination.FallThrough then - val resKind = tpeTK(tree) - val jumpTarget = new asm.Label - registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget)) - genLoad(expr, resKind) - markProgramPoint(jumpTarget) - resKind - else - registerJumpDest(labelSym, expectedType, dest) - genLoadTo(expr, expectedType, dest) - expectedType - end if - } - - private def genReturn(r: Return): Unit = { - val expr: Tree = r.expr - val fromSym: Symbol = if (r.from.symbol.is(LabelFlag)) r.from.symbol else NoSymbol - - if (NoSymbol == fromSym) { - // return from enclosing method - cleanups match { - case Nil => - // not an assertion: !shouldEmitCleanup (at least not yet, pendingCleanups() may still have to run, and reset `shouldEmitCleanup`. 
- genLoadTo(expr, returnType, LoadDestination.Return) - case nextCleanup :: rest => - genLoad(expr, returnType) - lineNumber(r) - val saveReturnValue = (returnType != UNIT) - if (saveReturnValue) { - // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. - if (earlyReturnVar == null) { - earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar", expr.tpe, expr.span) - } - locals.store(earlyReturnVar) - } - bc goTo nextCleanup - shouldEmitCleanup = true - } - } else { - // return from labeled - assert(fromSym.is(LabelFlag), fromSym) - assert(!fromSym.is(Method), fromSym) - - /* TODO At the moment, we disregard cleanups, because by construction we don't have return-from-labels - * that cross cleanup boundaries. However, in theory such crossings are valid, so we should take care - * of them. - */ - val (exprExpectedType, exprDest) = findJumpDest(fromSym) - genLoadTo(expr, exprExpectedType, exprDest) - } - } // end of genReturn() - - def genWhileDo(tree: WhileDo): LoadDestination = tree match{ - case WhileDo(cond, body) => - - val isInfinite = cond == tpd.EmptyTree - - val loop = new asm.Label - markProgramPoint(loop) - - if isInfinite then - val dest = LoadDestination.Jump(loop) - genLoadTo(body, UNIT, dest) - dest - else - body match - case Literal(value) if value.tag == UnitTag => - // this is the shape of do..while loops - val exitLoop = new asm.Label - genCond(cond, loop, exitLoop, targetIfNoJump = exitLoop) - markProgramPoint(exitLoop) - case _ => - val success = new asm.Label - val failure = new asm.Label - genCond(cond, success, failure, targetIfNoJump = success) - markProgramPoint(success) - genLoadTo(body, UNIT, LoadDestination.Jump(loop)) - markProgramPoint(failure) - end match - LoadDestination.FallThrough - } - - def genTypeApply(t: TypeApply): BType = (t: @unchecked) match { - case TypeApply(fun@DesugaredSelect(obj, _), targs) => - - val sym = fun.symbol - val cast = - if (sym == defn.Any_isInstanceOf) false - else if (sym == defn.Any_asInstanceOf) true - else abort(s"Unexpected type application $fun[sym: ${sym.showFullName}] in: $t") - val l = tpeTK(obj) - val r = tpeTK(targs.head) - genLoadQualifier(fun) - - // TODO @lry make pattern match - if (l.isPrimitive && r.isPrimitive) - genConversion(l, r, cast) - else if (l.isPrimitive) { - bc drop l - if (cast) { - mnode.visitTypeInsn(asm.Opcodes.NEW, jlClassCastExceptionRef.internalName) - bc dup ObjectRef - emit(asm.Opcodes.ATHROW) - } else { - bc boolconst false - } - } - else if (r.isPrimitive && cast) { - abort(s"Erasure should have added an unboxing operation to prevent this cast. Tree: $t") - } - else if (r.isPrimitive) { - bc isInstance boxedClassOfPrimitive(r.asPrimitiveBType) - } - else { - assert(r.isRef, r) // ensure that it's not a method - genCast(r.asRefBType, cast) - } - - if (cast) r else BOOL - } // end of genTypeApply() - - - private def mkArrayConstructorCall(arr: ArrayBType, app: Apply, args: List[Tree]) = { - val dims = arr.dimension - var elemKind = arr.elementType - val argsSize = args.length - if (argsSize > dims) { - report.error(em"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span)) - } - if (argsSize < dims) { - /* In one step: - * elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize) - * however the above does not enter a TypeName for each nested arrays in chrs. 
- */ - for (i <- args.length until dims) elemKind = ArrayBType(elemKind) - } - genLoadArguments(args, List.fill(args.size)(INT)) - (argsSize /*: @switch*/) match { - case 1 => bc newarray elemKind - case _ => - val descr = ("[" * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor - mnode.visitMultiANewArrayInsn(descr, argsSize) - } - } - - - private def genApply(app: Apply, expectedType: BType): BType = { - var generatedType = expectedType - lineNumber(app) - app match { - case Apply(_, args) if app.symbol eq defn.newArrayMethod => - val List(elemClaz, Literal(c: Constant), ArrayValue(_, dims)) = args: @unchecked - - generatedType = toTypeKind(c.typeValue) - mkArrayConstructorCall(generatedType.asArrayBType, app, dims) - case Apply(t :TypeApply, _) => - generatedType = - if (t.symbol ne defn.Object_synchronized) genTypeApply(t) - else genSynchronized(app, expectedType) - - case Apply(fun @ DesugaredSelect(Super(superQual, _), _), args) => - // 'super' call: Note: since constructors are supposed to - // return an instance of what they construct, we have to take - // special care. On JVM they are 'void', and Scala forbids (syntactically) - // to call super constructors explicitly and/or use their 'returned' value. - // therefore, we can ignore this fact, and generate code that leaves nothing - // on the stack (contrary to what the type in the AST says). - - // scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not just ALOAD_0) - genLoad(superQual) - genLoadArguments(args, paramTKs(app)) - generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.span) - - // 'new' constructor call: Note: since constructors are - // thought to return an instance of what they construct, - // we have to 'simulate' it by DUPlicating the freshly created - // instance (on JVM, methods return VOID). 
- case Apply(fun @ DesugaredSelect(New(tpt), nme.CONSTRUCTOR), args) => - val ctor = fun.symbol - assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}") - - generatedType = toTypeKind(tpt.tpe) - assert(generatedType.isRef, s"Non reference type cannot be instantiated: $generatedType") - - generatedType match { - case arr: ArrayBType => - mkArrayConstructorCall(arr, app, args) - - case rt: ClassBType => - assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.showFullName} is different from $rt") - mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName) - bc dup generatedType - genLoadArguments(args, paramTKs(app)) - genCallMethod(ctor, InvokeStyle.Special, app.span) - - case _ => - abort(s"Cannot instantiate $tpt of kind: $generatedType") - } - - case Apply(fun, List(expr)) if Erasure.Boxing.isBox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass => - val nativeKind = tpeTK(expr) - genLoad(expr, nativeKind) - val MethodNameAndType(mname, methodType) = asmBoxTo(nativeKind) - bc.invokestatic(srBoxesRuntimeRef.internalName, mname, methodType.descriptor, itf = false) - generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType) - - case Apply(fun, List(expr)) if Erasure.Boxing.isUnbox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass => - genLoad(expr) - val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe) - generatedType = boxType - val MethodNameAndType(mname, methodType) = asmUnboxTo(boxType) - bc.invokestatic(srBoxesRuntimeRef.internalName, mname, methodType.descriptor, itf = false) - - case app @ Apply(fun, args) => - val sym = fun.symbol - - if (isPrimitive(fun)) { // primitive method call - generatedType = genPrimitiveOp(app, expectedType) - } else { // normal method call - val invokeStyle = - if (sym.isStaticMember) InvokeStyle.Static - else if (sym.is(Private) || sym.isClassConstructor) InvokeStyle.Special - else if (app.hasAttachment(BCodeHelpers.UseInvokeSpecial)) InvokeStyle.Special - else InvokeStyle.Virtual - - if (invokeStyle.hasInstance) genLoadQualifier(fun) - genLoadArguments(args, paramTKs(app)) - - val DesugaredSelect(qual, name) = fun: @unchecked // fun is a Select, also checked in genLoadQualifier - val isArrayClone = name == nme.clone_ && qual.tpe.widen.isInstanceOf[JavaArrayType] - if (isArrayClone) { - // Special-case Array.clone, introduced in 36ef60e. The goal is to generate this call - // as "[I.clone" instead of "java/lang/Object.clone". This is consistent with javac. - // Arrays have a public method `clone` (jls 10.7). - // - // The JVMS is not explicit about this, but that receiver type can be an array type - // descriptor (instead of a class internal name): - // invokevirtual #2; //Method "[I".clone:()Ljava/lang/Object - // - // Note that using `Object.clone()` would work as well, but only because the JVM - // relaxes protected access specifically if the receiver is an array: - // http://hg.openjdk.java.net/jdk8/jdk8/hotspot/file/87ee5ee27509/src/share/vm/interpreter/linkResolver.cpp#l439 - // Example: `class C { override def clone(): Object = "hi" }` - // Emitting `def f(c: C) = c.clone()` as `Object.clone()` gives a VerifyError. 
- val target: String = tpeTK(qual).asRefBType.classOrArrayType - val methodBType = asmMethodType(sym) - bc.invokevirtual(target, sym.javaSimpleName, methodBType.descriptor) - generatedType = methodBType.returnType - } else { - val receiverClass = if (!invokeStyle.isVirtual) null else { - // receiverClass is used in the bytecode to as the method receiver. using sym.owner - // may lead to IllegalAccessErrors, see 9954eaf / aladdin bug 455. - val qualSym = qual.tpe.typeSymbol - if (qualSym == defn.ArrayClass) { - // For invocations like `Array(1).hashCode` or `.wait()`, use Object as receiver - // in the bytecode. Using the array descriptor (like we do for clone above) seems - // to work as well, but it seems safer not to change this. Javac also uses Object. - // Note that array apply/update/length are handled by isPrimitive (above). - assert(sym.owner == defn.ObjectClass, s"unexpected array call: $app") - defn.ObjectClass - } else qualSym - } - generatedType = genCallMethod(sym, invokeStyle, app.span, receiverClass) - } - } - } - - generatedType - } // end of genApply() - - private def genArrayValue(av: tpd.JavaSeqLiteral): BType = { - val ArrayValue(tpt, elems) = av: @unchecked - - lineNumber(av) - genArray(elems, tpt) - } - - private def genArray(elems: List[Tree], elemType: Type): BType = { - val elmKind = toTypeKind(elemType) - val generatedType = ArrayBType(elmKind) - - bc iconst elems.length - bc newarray elmKind - - var i = 0 - var rest = elems - while (!rest.isEmpty) { - bc dup generatedType - bc iconst i - genLoad(rest.head, elmKind) - bc astore elmKind - rest = rest.tail - i = i + 1 - } - - generatedType - } - - /* A Match node contains one or more case clauses, each case clause lists one or more - * Int/String values to use as keys, and a code block. The exception is the "default" case - * clause which doesn't list any key (there is exactly one of these per match). - */ - private def genMatchTo(tree: Match, expectedType: BType, dest: LoadDestination): BType = tree match { - case Match(selector, cases) => - lineNumber(tree) - - val (generatedType, postMatch, postMatchDest) = - if dest == LoadDestination.FallThrough then - val postMatch = new asm.Label - (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch)) - else - (expectedType, null, dest) - - // Only two possible selector types exist in `Match` trees at this point: Int and String - if (tpeTK(selector) == INT) { - - /* On a first pass over the case clauses, we flatten the keys and their - * targets (the latter represented with asm.Labels). That representation - * allows JCodeMethodV to emit a lookupswitch or a tableswitch. - * - * On a second pass, we emit the switch blocks, one for each different target. - */ - - var flatKeys: List[Int] = Nil - var targets: List[asm.Label] = Nil - var default: asm.Label = null - var switchBlocks: List[(asm.Label, Tree)] = Nil - - genLoad(selector, INT) - - // collect switch blocks and their keys, but don't emit yet any switch-block. 
- for (caze @ CaseDef(pat, guard, body) <- cases) { - assert(guard == tpd.EmptyTree, guard) - val switchBlockPoint = new asm.Label - switchBlocks ::= (switchBlockPoint, body) - pat match { - case Literal(value) => - flatKeys ::= value.intValue - targets ::= switchBlockPoint - case Ident(nme.WILDCARD) => - assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") - default = switchBlockPoint - case Alternative(alts) => - alts foreach { - case Literal(value) => - flatKeys ::= value.intValue - targets ::= switchBlockPoint - case _ => - abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") - } - case _ => - abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") - } - } - - bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) - - // emit switch-blocks. - for (sb <- switchBlocks.reverse) { - val (caseLabel, caseBody) = sb - markProgramPoint(caseLabel) - genLoadTo(caseBody, generatedType, postMatchDest) - } - } else { - - /* Since the JVM doesn't have a way to switch on a string, we switch - * on the `hashCode` of the string then do an `equals` check (with a - * possible second set of jumps if blocks can be reach from multiple - * string alternatives). - * - * This mirrors the way that Java compiles `switch` on Strings. - */ - - var default: asm.Label = null - var indirectBlocks: List[(asm.Label, Tree)] = Nil - - - // Cases grouped by their hashCode - val casesByHash = SortedMap.empty[Int, List[(String, Either[asm.Label, Tree])]] - var caseFallback: Tree = null - - for (caze @ CaseDef(pat, guard, body) <- cases) { - assert(guard == tpd.EmptyTree, guard) - pat match { - case Literal(value) => - val strValue = value.stringValue - casesByHash.updateWith(strValue.##) { existingCasesOpt => - val newCase = (strValue, Right(body)) - Some(newCase :: existingCasesOpt.getOrElse(Nil)) - } - case Ident(nme.WILDCARD) => - assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") - default = new asm.Label - indirectBlocks ::= (default, body) - case Alternative(alts) => - // We need an extra basic block since multiple strings can lead to this code - val indirectCaseGroupLabel = new asm.Label - indirectBlocks ::= (indirectCaseGroupLabel, body) - alts foreach { - case Literal(value) => - val strValue = value.stringValue - casesByHash.updateWith(strValue.##) { existingCasesOpt => - val newCase = (strValue, Left(indirectCaseGroupLabel)) - Some(newCase :: existingCasesOpt.getOrElse(Nil)) - } - case _ => - abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") - } - - case _ => - abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") - } - } - - // Organize the hashCode options into switch cases - var flatKeys: List[Int] = Nil - var targets: List[asm.Label] = Nil - var hashBlocks: List[(asm.Label, List[(String, Either[asm.Label, Tree])])] = Nil - for ((hashValue, hashCases) <- casesByHash) { - val switchBlockPoint = new asm.Label - hashBlocks ::= (switchBlockPoint, hashCases) - flatKeys ::= hashValue - targets ::= switchBlockPoint - } - - // Push the hashCode of the string (or `0` it is `null`) onto the stack and switch on it - genLoadIfTo( - If( - tree.selector.select(defn.Any_==).appliedTo(nullLiteral), - Literal(Constant(0)), - tree.selector.select(defn.Any_hashCode).appliedToNone - ), - INT, - LoadDestination.FallThrough - ) - bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) - - 
// emit blocks for each hash case - for ((hashLabel, caseAlternatives) <- hashBlocks.reverse) { - markProgramPoint(hashLabel) - for ((caseString, indirectLblOrBody) <- caseAlternatives) { - val comparison = if (caseString == null) defn.Any_== else defn.Any_equals - val condp = Literal(Constant(caseString)).select(defn.Any_==).appliedTo(tree.selector) - val keepGoing = new asm.Label - indirectLblOrBody match { - case Left(jump) => - genCond(condp, jump, keepGoing, targetIfNoJump = keepGoing) - - case Right(caseBody) => - val thisCaseMatches = new asm.Label - genCond(condp, thisCaseMatches, keepGoing, targetIfNoJump = thisCaseMatches) - markProgramPoint(thisCaseMatches) - genLoadTo(caseBody, generatedType, postMatchDest) - } - markProgramPoint(keepGoing) - } - bc goTo default - } - - // emit blocks for common patterns - for ((caseLabel, caseBody) <- indirectBlocks.reverse) { - markProgramPoint(caseLabel) - genLoadTo(caseBody, generatedType, postMatchDest) - } - } - - if postMatch != null then - markProgramPoint(postMatch) - generatedType - } - - def genBlockTo(tree: Block, expectedType: BType, dest: LoadDestination): Unit = tree match { - case Block(stats, expr) => - - val savedScope = varsInScope - varsInScope = Nil - stats foreach genStat - genLoadTo(expr, expectedType, dest) - emitLocalVarScopes() - varsInScope = savedScope - } - - /** Add entries to the `LocalVariableTable` JVM attribute for all the vars in - * `varsInScope`, ending at the current program point. - */ - def emitLocalVarScopes(): Unit = - if (emitVars) { - val end = currProgramPoint() - for ((sym, start) <- varsInScope.reverse) { - emitLocalVarScope(sym, start, end) - } - } - end emitLocalVarScopes - - def adapt(from: BType, to: BType): Unit = { - if (!from.conformsTo(to)) { - to match { - case UNIT => bc drop from - case _ => bc.emitT2T(from, to) - } - } else if (from.isNothingType) { - /* There are two possibilities for from.isNothingType: emitting a "throw e" expressions and - * loading a (phantom) value of type Nothing. - * - * The Nothing type in Scala's type system does not exist in the JVM. In bytecode, Nothing - * is mapped to scala.runtime.Nothing$. To the JVM, a call to Predef.??? looks like it would - * return an object of type Nothing$. We need to do something with that phantom object on - * the stack. "Phantom" because it never exists: such methods always throw, but the JVM does - * not know that. - * - * Note: The two verifiers (old: type inference, new: type checking) have different - * requirements. Very briefly: - * - * Old (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.2.1): at - * each program point, no matter what branches were taken to get there - * - Stack is same size and has same typed values - * - Local and stack values need to have consistent types - * - In practice, the old verifier seems to ignore unreachable code and accept any - * instructions after an ATHROW. For example, there can be another ATHROW (without - * loading another throwable first). - * - * New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1) - * - Requires consistent stack map frames. GenBCode generates stack frames if -target:jvm-1.6 - * or higher. - * - In practice: the ASM library computes stack map frames for us (ClassWriter). Emitting - * correct frames after an ATHROW is probably complex, so ASM uses the following strategy: - * - Every time when generating an ATHROW, a new basic block is started. 
- * - During classfile writing, such basic blocks are found to be dead: no branches go there - * - Eliminating dead code would probably require complex shifts in the output byte buffer - * - But there's an easy solution: replace all code in the dead block with - * `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same - * - The corresponding stack frame can be easily generated: on entering a dead the block, - * the frame requires a single Throwable on the stack. - * - Since there are no branches to the dead block, the frame requirements are never violated. - * - * To summarize the above: it does matter what we emit after an ATHROW. - * - * NOW: if we end up here because we emitted a load of a (phantom) value of type Nothing$, - * there was no ATHROW emitted. So, we have to make the verifier happy and do something - * with that value. Since Nothing$ extends Throwable, the easiest is to just emit an ATHROW. - * - * If we ended up here because we generated a "throw e" expression, we know the last - * emitted instruction was an ATHROW. As explained above, it is OK to emit a second ATHROW, - * the verifiers will be happy. - */ - if (lastInsn.getOpcode != asm.Opcodes.ATHROW) - emit(asm.Opcodes.ATHROW) - } else if (from.isNullType) { - /* After loading an expression of type `scala.runtime.Null$`, introduce POP; ACONST_NULL. - * This is required to pass the verifier: in Scala's type system, Null conforms to any - * reference type. In bytecode, the type Null is represented by scala.runtime.Null$, which - * is not a subtype of all reference types. Example: - * - * def nl: Null = null // in bytecode, nl has return type scala.runtime.Null$ - * val a: String = nl // OK for Scala but not for the JVM, scala.runtime.Null$ does not conform to String - * - * In order to fix the above problem, the value returned by nl is dropped and ACONST_NULL is - * inserted instead - after all, an expression of type scala.runtime.Null$ can only be null. - */ - if (lastInsn.getOpcode != asm.Opcodes.ACONST_NULL) { - bc drop from - emit(asm.Opcodes.ACONST_NULL) - } - } - else (from, to) match { - case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG) - case _ => () - } - } - - /* Emit code to Load the qualifier of `tree` on top of the stack. 
*/ - def genLoadQualifier(tree: Tree): Unit = { - lineNumber(tree) - tree match { - case DesugaredSelect(qualifier, _) => genLoad(qualifier) - case t: Ident => // dotty specific - cachedDesugarIdent(t) match { - case Some(sel) => genLoadQualifier(sel) - case None => - assert(t.symbol.owner == this.claszSymbol) - } - case _ => abort(s"Unknown qualifier $tree") - } - } - - def genLoadArguments(args: List[Tree], btpes: List[BType]): Unit = - args match - case arg :: args1 => - btpes match - case btpe :: btpes1 => - genLoad(arg, btpe) - genLoadArguments(args1, btpes1) - case _ => - case _ => - - def genLoadModule(tree: Tree): BType = { - val module = ( - if (!tree.symbol.is(PackageClass)) tree.symbol - else tree.symbol.info.member(nme.PACKAGE).symbol match { - case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree") - case s => abort(s"SI-5604: found package class where package object expected: $tree") - } - ) - lineNumber(tree) - genLoadModule(module) - symInfoTK(module) - } - - def genLoadModule(module: Symbol): Unit = { - def inStaticMethod = methSymbol != null && methSymbol.isStaticMember - if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) { - mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) - } else { - val mbt = symInfoTK(module).asClassBType - mnode.visitFieldInsn( - asm.Opcodes.GETSTATIC, - mbt.internalName /* + "$" */ , - str.MODULE_INSTANCE_FIELD, - mbt.descriptor // for nostalgics: toTypeKind(module.tpe).descriptor - ) - } - } - - def genConversion(from: BType, to: BType, cast: Boolean): Unit = { - if (cast) { bc.emitT2T(from, to) } - else { - bc drop from - bc boolconst (from == to) - } - } - - def genCast(to: RefBType, cast: Boolean): Unit = { - if (cast) { bc checkCast to } - else { bc isInstance to } - } - - /* Is the given symbol a primitive operation? */ - def isPrimitive(fun: Tree): Boolean = { - primitives.isPrimitive(fun) - } - - /* Generate coercion denoted by "code" */ - def genCoercion(code: Int): Unit = { - import ScalaPrimitivesOps._ - (code: @switch) match { - case B2B | S2S | C2C | I2I | L2L | F2F | D2D => () - case _ => - val from = coercionFrom(code) - val to = coercionTo(code) - bc.emitT2T(from, to) - } - } - - /* Generate string concatenation - * - * On JDK 8: create and append using `StringBuilder` - * On JDK 9+: use `invokedynamic` with `StringConcatFactory` - */ - def genStringConcat(tree: Tree): BType = { - lineNumber(tree) - liftStringConcat(tree) match { - // Optimization for expressions of the form "" + x - case List(Literal(Constant("")), arg) => - genLoad(arg, ObjectRef) - genCallMethod(defn.String_valueOf_Object, InvokeStyle.Static) - - case concatenations => - val concatArguments = concatenations.view - .filter { - case Literal(Constant("")) => false // empty strings are no-ops in concatenation - case _ => true - } - .map { - case Apply(boxOp, value :: Nil) if Erasure.Boxing.isBox(boxOp.symbol) && boxOp.symbol.denot.owner != defn.UnitModuleClass => - // Eliminate boxing of primitive values. Boxing is introduced by erasure because - // there's only a single synthetic `+` method "added" to the string class. 
- value - case other => other - } - .toList - - // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower - if (classfileVersion < asm.Opcodes.V9) { - - // Estimate capacity needed for the string builder - val approxBuilderSize = concatArguments.view.map { - case Literal(Constant(s: String)) => s.length - case Literal(c @ Constant(_)) if c.isNonUnitAnyVal => String.valueOf(c).length - case _ => 0 - }.sum - bc.genNewStringBuilder(approxBuilderSize) - - for (elem <- concatArguments) { - val elemType = tpeTK(elem) - genLoad(elem, elemType) - bc.genStringBuilderAppend(elemType) - } - bc.genStringBuilderEnd - } else { - - /* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. If - * the string concatenation is longer (unlikely), we spill into multiple calls - */ - val MaxIndySlots = 200 - val TagArg = '\u0001' // indicates a hole (in the recipe string) for an argument - val TagConst = '\u0002' // indicates a hole (in the recipe string) for a constant - - val recipe = new StringBuilder() - val argTypes = Seq.newBuilder[asm.Type] - val constVals = Seq.newBuilder[String] - var totalArgSlots = 0 - var countConcats = 1 // ie. 1 + how many times we spilled - - for (elem <- concatArguments) { - val tpe = tpeTK(elem) - val elemSlots = tpe.size - - // Unlikely spill case - if (totalArgSlots + elemSlots >= MaxIndySlots) { - bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) - countConcats += 1 - totalArgSlots = 0 - recipe.setLength(0) - argTypes.clear() - constVals.clear() - } - - elem match { - case Literal(Constant(s: String)) => - if (s.contains(TagArg) || s.contains(TagConst)) { - totalArgSlots += elemSlots - recipe.append(TagConst) - constVals += s - } else { - recipe.append(s) - } - - case other => - totalArgSlots += elemSlots - recipe.append(TagArg) - val tpe = tpeTK(elem) - argTypes += tpe.toASMType - genLoad(elem, tpe) - } - } - bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) - - // If we spilled, generate one final concat - if (countConcats > 1) { - bc.genIndyStringConcat( - TagArg.toString * countConcats, - Seq.fill(countConcats)(StringRef.toASMType), - Seq.empty - ) - } - } - } - StringRef - } - - /** - * Generate a method invocation. If `specificReceiver != null`, it is used as receiver in the - * invocation instruction, otherwise `method.owner`. A specific receiver class is needed to - * prevent an IllegalAccessError, (aladdin bug 455). - */ - def genCallMethod(method: Symbol, style: InvokeStyle, pos: Span = NoSpan, specificReceiver: Symbol = null): BType = { - val methodOwner = method.owner - - // the class used in the invocation's method descriptor in the classfile - val receiverClass = { - if (specificReceiver != null) - assert(style.isVirtual || specificReceiver == methodOwner, s"specificReceiver can only be specified for virtual calls. $method - $specificReceiver") - - val useSpecificReceiver = specificReceiver != null && !defn.isBottomClass(specificReceiver) && !method.isScalaStatic - val receiver = if (useSpecificReceiver) specificReceiver else methodOwner - - // workaround for a JVM bug: https://bugs.openjdk.java.net/browse/JDK-8154587 - // when an interface method overrides a member of Object (note that all interfaces implicitly - // have superclass Object), the receiver needs to be the interface declaring the override (and - // not a sub-interface that inherits it). 
example: - // trait T { override def clone(): Object = "" } - // trait U extends T - // class C extends U - // class D { def f(u: U) = u.clone() } - // The invocation `u.clone()` needs `T` as a receiver: - // - using Object is illegal, as Object.clone is protected - // - using U results in a `NoSuchMethodError: U.clone. This is the JVM bug. - // Note that a mixin forwarder is generated, so the correct method is executed in the end: - // class C { override def clone(): Object = super[T].clone() } - val isTraitMethodOverridingObjectMember = { - receiver != methodOwner && // fast path - the boolean is used to pick either of these two, if they are the same it does not matter - style.isVirtual && - isEmittedInterface(receiver) && - defn.ObjectType.decl(method.name).symbol.exists && { // fast path - compute overrideChain on the next line only if necessary - val syms = method.allOverriddenSymbols.toList - !syms.isEmpty && syms.last.owner == defn.ObjectClass - } - } - if (isTraitMethodOverridingObjectMember) methodOwner else receiver - } - - receiverClass.info // ensure types the type is up to date; erasure may add lateINTERFACE to traits - val receiverName = internalName(receiverClass) - - val jname = method.javaSimpleName - val bmType = asmMethodType(method) - val mdescr = bmType.descriptor - - val isInterface = isEmittedInterface(receiverClass) - import InvokeStyle._ - if (style == Super) { - if (isInterface && !method.is(JavaDefined)) { - val args = new Array[BType](bmType.argumentTypes.length + 1) - val ownerBType = toTypeKind(method.owner.info) - bmType.argumentTypes.copyToArray(args, 1) - val staticDesc = MethodBType(ownerBType :: bmType.argumentTypes, bmType.returnType).descriptor - val staticName = traitSuperAccessorName(method) - bc.invokestatic(receiverName, staticName, staticDesc, isInterface) - } else { - bc.invokespecial(receiverName, jname, mdescr, isInterface) - } - } else { - val opc = style match { - case Static => Opcodes.INVOKESTATIC - case Special => Opcodes.INVOKESPECIAL - case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL - } - bc.emitInvoke(opc, receiverName, jname, mdescr, isInterface) - } - - bmType.returnType - } // end of genCallMethod() - - /* Generate the scala ## method. */ - def genScalaHash(tree: Tree): BType = { - genLoad(tree, ObjectRef) - genCallMethod(NoSymbol, InvokeStyle.Static) // used to dispatch ## on primitives to ScalaRuntime.hash. Should be implemented by a miniphase - } - - /* - * Returns a list of trees that each should be concatenated, from left to right. - * It turns a chained call like "a".+("b").+("c") into a list of arguments. - */ - def liftStringConcat(tree: Tree): List[Tree] = tree match { - case tree @ Apply(fun @ DesugaredSelect(larg, method), rarg) => - if (isPrimitive(fun) && - primitives.getPrimitive(tree, larg.tpe) == ScalaPrimitivesOps.CONCAT) - liftStringConcat(larg) ::: rarg - else - tree :: Nil - case _ => - tree :: Nil - } - - /* Emit code to compare the two top-most stack values using the 'op' operator. 
*/ - private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { - if (targetIfNoJump == success) genCJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) - else { - if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT - bc.emitIF_ICMP(op, success) - } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) - bc.emitIF_ACMP(op, success) - } else { - import Primitives._ - def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE - (tk: @unchecked) match { - case LONG => emit(asm.Opcodes.LCMP) - case FLOAT => emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) - case DOUBLE => emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) - } - bc.emitIF(op, success) - } - if (targetIfNoJump != failure) bc goTo failure - } - } - - /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */ - private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { - import Primitives._ - if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) - else { - if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT - bc.emitIF(op, success) - } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) - (op: @unchecked) match { // references are only compared with EQ and NE - case EQ => bc emitIFNULL success - case NE => bc emitIFNONNULL success - } - } else { - def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE - (tk: @unchecked) match { - case LONG => - emit(asm.Opcodes.LCONST_0) - emit(asm.Opcodes.LCMP) - case FLOAT => - emit(asm.Opcodes.FCONST_0) - emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) - case DOUBLE => - emit(asm.Opcodes.DCONST_0) - emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) - } - bc.emitIF(op, success) - } - if (targetIfNoJump != failure) bc goTo failure - } - } - - def testOpForPrimitive(primitiveCode: Int) = (primitiveCode: @switch) match { - case ScalaPrimitivesOps.ID => Primitives.EQ - case ScalaPrimitivesOps.NI => Primitives.NE - case ScalaPrimitivesOps.EQ => Primitives.EQ - case ScalaPrimitivesOps.NE => Primitives.NE - case ScalaPrimitivesOps.LT => Primitives.LT - case ScalaPrimitivesOps.LE => Primitives.LE - case ScalaPrimitivesOps.GT => Primitives.GT - case ScalaPrimitivesOps.GE => Primitives.GE - } - - /* - * Generate code for conditional expressions. - * The jump targets success/failure of the test are `then-target` and `else-target` resp. 
- */ - private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { - - def genComparisonOp(l: Tree, r: Tree, code: Int): Unit = { - val op = testOpForPrimitive(code) - def isNull(t: Tree): Boolean = t match { - case Literal(Constant(null)) => true - case _ => false - } - def ifOneIsNull(l: Tree, r: Tree): Tree = if (isNull(l)) r else if (isNull(r)) l else null - val nonNullSide = if (ScalaPrimitivesOps.isReferenceEqualityOp(code)) ifOneIsNull(l, r) else null - if (nonNullSide != null) { - // special-case reference (in)equality test for null (null eq x, x eq null) - genLoad(nonNullSide, ObjectRef) - genCZJUMP(success, failure, op, ObjectRef, targetIfNoJump) - } else { - val tk = tpeTK(l).maxType(tpeTK(r)) - genLoad(l, tk) - genLoad(r, tk) - genCJUMP(success, failure, op, tk, targetIfNoJump) - } - } - - def loadAndTestBoolean() = { - genLoad(tree, BOOL) - genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } - - lineNumber(tree) - tree match { - - case tree @ Apply(fun, args) if primitives.isPrimitive(fun.symbol) => - import ScalaPrimitivesOps.{ ZNOT, ZAND, ZOR, EQ } - - // lhs and rhs of test - lazy val DesugaredSelect(lhs, _) = fun: @unchecked - val rhs = if (args.isEmpty) tpd.EmptyTree else args.head // args.isEmpty only for ZNOT - - def genZandOrZor(and: Boolean): Unit = { - // reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited). - val keepGoing = new asm.Label - - if (and) genCond(lhs, keepGoing, failure, targetIfNoJump = keepGoing) - else genCond(lhs, success, keepGoing, targetIfNoJump = keepGoing) - - markProgramPoint(keepGoing) - genCond(rhs, success, failure, targetIfNoJump) - } - - primitives.getPrimitive(fun.symbol) match { - case ZNOT => genCond(lhs, failure, success, targetIfNoJump) - case ZAND => genZandOrZor(and = true) - case ZOR => genZandOrZor(and = false) - case code => - if (ScalaPrimitivesOps.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) { - // rewrite `==` to null tests and `equals`. not needed for arrays (`equals` is reference equality). - if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure, targetIfNoJump) - else genEqEqPrimitive(lhs, rhs, failure, success, targetIfNoJump) - } else if (ScalaPrimitivesOps.isComparisonOp(code)) { - genComparisonOp(lhs, rhs, code) - } else - loadAndTestBoolean() - } - - case Block(stats, expr) => - /* Push the decision further down the `expr`. - * This is particularly effective for the shape of do..while loops. - */ - val savedScope = varsInScope - varsInScope = Nil - stats foreach genStat - genCond(expr, success, failure, targetIfNoJump) - emitLocalVarScopes() - varsInScope = savedScope - - case If(condp, thenp, elsep) => - val innerSuccess = new asm.Label - val innerFailure = new asm.Label - genCond(condp, innerSuccess, innerFailure, targetIfNoJump = innerSuccess) - markProgramPoint(innerSuccess) - genCond(thenp, success, failure, targetIfNoJump = innerFailure) - markProgramPoint(innerFailure) - genCond(elsep, success, failure, targetIfNoJump) - - case _ => loadAndTestBoolean() - } - - } // end of genCond() - - /* - * Generate the "==" code for object references. 
It is equivalent of - * if (l eq null) r eq null else l.equals(r); - * - * @param l left-hand-side of the '==' - * @param r right-hand-side of the '==' - */ - def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { - - /* True if the equality comparison is between values that require the use of the rich equality - * comparator (scala.runtime.Comparator.equals). This is the case when either side of the - * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character. - * When it is statically known that both sides are equal and subtypes of Number of Character, - * not using the rich equality is possible (their own equals method will do ok.) - */ - val mustUseAnyComparator: Boolean = { - val areSameFinals = l.tpe.typeSymbol.is(Final) && r.tpe.typeSymbol.is(Final) && (l.tpe =:= r.tpe) - // todo: remove - def isMaybeBoxed(sym: Symbol): Boolean = { - (sym == defn.ObjectClass) || - (sym == defn.JavaSerializableClass) || - (sym == defn.ComparableClass) || - (sym derivesFrom defn.BoxedNumberClass) || - (sym derivesFrom defn.BoxedCharClass) || - (sym derivesFrom defn.BoxedBooleanClass) - } - !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol) - } - def isNull(t: Tree): Boolean = t match { - case Literal(Constant(null)) => true - case _ => false - } - def isNonNullExpr(t: Tree): Boolean = t.isInstanceOf[Literal] || ((t.symbol ne null) && t.symbol.is(Module)) - - if (mustUseAnyComparator) { - val equalsMethod: Symbol = { - if (l.tpe <:< defn.BoxedNumberClass.info) { - if (r.tpe <:< defn.BoxedNumberClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) - else if (r.tpe <:< defn.BoxedCharClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumChar) - else defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) - } else defn.BoxesRunTimeModule_externalEquals - } - - genLoad(l, ObjectRef) - genLoad(r, ObjectRef) - genCallMethod(equalsMethod, InvokeStyle.Static) - genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } - else { - if (isNull(l)) { - // null == expr -> expr eq null - genLoad(r, ObjectRef) - genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump) - } else if (isNull(r)) { - // expr == null -> expr eq null - genLoad(l, ObjectRef) - genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump) - } else if (isNonNullExpr(l)) { - // SI-7852 Avoid null check if L is statically non-null. 
- genLoad(l, ObjectRef) - genLoad(r, ObjectRef) - genCallMethod(defn.Any_equals, InvokeStyle.Virtual) - genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } else { - // l == r -> if (l eq null) r eq null else l.equals(r) - val eqEqTempLocal = locals.makeLocal(ObjectRef, nme.EQEQ_LOCAL_VAR.mangledString, defn.ObjectType, r.span) - val lNull = new asm.Label - val lNonNull = new asm.Label - - genLoad(l, ObjectRef) - genLoad(r, ObjectRef) - locals.store(eqEqTempLocal) - bc dup ObjectRef - genCZJUMP(lNull, lNonNull, Primitives.EQ, ObjectRef, targetIfNoJump = lNull) - - markProgramPoint(lNull) - bc drop ObjectRef - locals.load(eqEqTempLocal) - genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump = lNonNull) - - markProgramPoint(lNonNull) - locals.load(eqEqTempLocal) - genCallMethod(defn.Any_equals, InvokeStyle.Virtual) - genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } - } - } - - - def genSynchronized(tree: Apply, expectedType: BType): BType - def genLoadTry(tree: Try): BType - - def genInvokeDynamicLambda(ctor: Symbol, lambdaTarget: Symbol, environmentSize: Int, functionalInterface: Symbol): BType = { - import java.lang.invoke.LambdaMetafactory.{FLAG_BRIDGES, FLAG_SERIALIZABLE} - - report.debuglog(s"Using invokedynamic rather than `new ${ctor.owner}`") - val generatedType = classBTypeFromSymbol(functionalInterface) - // Lambdas should be serializable if they implement a SAM that extends Serializable or if they - // implement a scala.Function* class. - val isSerializable = functionalInterface.isSerializable || defn.isFunctionClass(functionalInterface) - val isInterface = isEmittedInterface(lambdaTarget.owner) - val invokeStyle = - if (lambdaTarget.isStaticMember) asm.Opcodes.H_INVOKESTATIC - else if (lambdaTarget.is(Private) || lambdaTarget.isClassConstructor) asm.Opcodes.H_INVOKESPECIAL - else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE - else asm.Opcodes.H_INVOKEVIRTUAL - - val targetHandle = - new asm.Handle(invokeStyle, - classBTypeFromSymbol(lambdaTarget.owner).internalName, - lambdaTarget.javaSimpleName, - asmMethodType(lambdaTarget).descriptor, - /* itf = */ isInterface) - - val (a,b) = lambdaTarget.info.firstParamTypes.splitAt(environmentSize) - var (capturedParamsTypes, lambdaParamTypes) = (a,b) - - if (invokeStyle != asm.Opcodes.H_INVOKESTATIC) capturedParamsTypes = lambdaTarget.owner.info :: capturedParamsTypes - - // Requires https://github.com/scala/scala-java8-compat on the runtime classpath - val returnUnit = lambdaTarget.info.resultType.typeSymbol == defn.UnitClass - val functionalInterfaceDesc: String = generatedType.descriptor - val desc = capturedParamsTypes.map(tpe => toTypeKind(tpe)).mkString(("("), "", ")") + functionalInterfaceDesc - // TODO specialization - val instantiatedMethodType = new MethodBType(lambdaParamTypes.map(p => toTypeKind(p)), toTypeKind(lambdaTarget.info.resultType)).toASMType - - val samMethod = atPhase(erasurePhase) { - val samMethods = toDenot(functionalInterface).info.possibleSamMethods.toList - samMethods match { - case x :: Nil => x.symbol - case Nil => abort(s"${functionalInterface.show} is not a functional interface. It doesn't have abstract methods") - case xs => abort(s"${functionalInterface.show} is not a functional interface. 
" + - s"It has the following abstract methods: ${xs.map(_.name).mkString(", ")}") - } - } - - val methodName = samMethod.javaSimpleName - val samMethodType = asmMethodType(samMethod).toASMType - // scala/bug#10334: make sure that a lambda object for `T => U` has a method `apply(T)U`, not only the `(Object)Object` - // version. Using the lambda a structural type `{def apply(t: T): U}` causes a reflective lookup for this method. - val needsGenericBridge = samMethodType != instantiatedMethodType - val bridgeMethods = atPhase(erasurePhase){ - samMethod.allOverriddenSymbols.toList - } - val overriddenMethodTypes = bridgeMethods.map(b => asmMethodType(b).toASMType) - - // any methods which `samMethod` overrides need bridges made for them - // this is done automatically during erasure for classes we generate, but LMF needs to have them explicitly mentioned - // so we have to compute them at this relatively late point. - val bridgeTypes = ( - if (needsGenericBridge) - instantiatedMethodType +: overriddenMethodTypes - else - overriddenMethodTypes - ).distinct.filterNot(_ == samMethodType) - - val needsBridges = bridgeTypes.nonEmpty - - def flagIf(b: Boolean, flag: Int): Int = if (b) flag else 0 - val flags = flagIf(isSerializable, FLAG_SERIALIZABLE) | flagIf(needsBridges, FLAG_BRIDGES) - - val bsmArgs0 = Seq(samMethodType, targetHandle, instantiatedMethodType) - val bsmArgs1 = if (flags != 0) Seq(Int.box(flags)) else Seq.empty - val bsmArgs2 = if needsBridges then bridgeTypes.length +: bridgeTypes else Seq.empty - - val bsmArgs = bsmArgs0 ++ bsmArgs1 ++ bsmArgs2 - - val metafactory = - if (flags != 0) - jliLambdaMetaFactoryAltMetafactoryHandle // altMetafactory required to be able to pass the flags and additional arguments if needed - else - jliLambdaMetaFactoryMetafactoryHandle - - bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs: _*) - - generatedType - } - } - - /** Does this symbol actually correspond to an interface that will be emitted? - * In the backend, this should be preferred over `isInterface` because it - * also returns true for the symbols of the fake companion objects we - * create for Java-defined classes as well as for Java annotations - * which we represent as classes. 
- */ - private def isEmittedInterface(sym: Symbol): Boolean = sym.isInterface || - sym.is(JavaDefined) && (toDenot(sym).isAnnotation || sym.is(ModuleClass) && (sym.companionClass.is(PureInterface)) || sym.companionClass.is(Trait)) - - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala deleted file mode 100644 index 5ad6a99f6055..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala +++ /dev/null @@ -1,960 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.annotation.threadUnsafe -import scala.tools.asm -import scala.tools.asm.AnnotationVisitor -import scala.tools.asm.ClassWriter -import scala.collection.mutable - -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.ast.Trees -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Constants._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Names.Name -import dotty.tools.dotc.core.NameKinds.ExpandedName -import dotty.tools.dotc.core.Signature -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.NameKinds -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.TypeErasure -import dotty.tools.dotc.transform.GenericSignatures -import dotty.tools.dotc.transform.ElimErasedValueType -import dotty.tools.io.AbstractFile -import dotty.tools.dotc.report - -import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions - -/* - * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes. - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded - * @version 1.0 - * - */ -trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { - // for some reason singleton types aren't allowed in constructor calls. 
will need several casts in code to enforce - - //import global._ - //import bTypes._ - //import coreBTypes._ - import bTypes._ - import tpd._ - import coreBTypes._ - import int.{_, given} - import DottyBackendInterface._ - - def ScalaATTRName: String = "Scala" - def ScalaSignatureATTRName: String = "ScalaSig" - - @threadUnsafe lazy val AnnotationRetentionAttr: ClassSymbol = requiredClass("java.lang.annotation.Retention") - @threadUnsafe lazy val AnnotationRetentionSourceAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("SOURCE") - @threadUnsafe lazy val AnnotationRetentionClassAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("CLASS") - @threadUnsafe lazy val AnnotationRetentionRuntimeAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("RUNTIME") - - val bCodeAsmCommon: BCodeAsmCommon[int.type] = new BCodeAsmCommon(int) - - /* - * must-single-thread - */ - def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - getFile(base, clsName, suffix) - } - - /* - * must-single-thread - */ - def getOutFolder(csym: Symbol, cName: String): AbstractFile = { - try { - outputDirectory - } catch { - case ex: Throwable => - report.error(em"Couldn't create file for class $cName\n${ex.getMessage}", ctx.source.atSpan(csym.span)) - null - } - } - - final def traitSuperAccessorName(sym: Symbol): String = { - val nameString = sym.javaSimpleName.toString - if (sym.name == nme.TRAIT_CONSTRUCTOR) nameString - else nameString + "$" - } - - // ----------------------------------------------------------------------------------------- - // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) - // Background: - // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf - // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - // https://issues.scala-lang.org/browse/SI-3872 - // ----------------------------------------------------------------------------------------- - - /* An `asm.ClassWriter` that uses `jvmWiseLUB()` - * The internal name of the least common ancestor of the types given by inameA and inameB. - * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow - */ - final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) { - - /** - * This method is thread-safe: it depends only on the BTypes component, which does not depend - * on global. TODO @lry move to a different place where no global is in scope, on bTypes. - */ - override def getCommonSuperClass(inameA: String, inameB: String): String = { - val a = classBTypeFromInternalName(inameA) - val b = classBTypeFromInternalName(inameB) - val lub = a.jvmWiseLUB(b) - val lubName = lub.internalName - assert(lubName != "scala/Any") - lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. - } - } - - /* - * must-single-thread - */ - def initBytecodeWriter(): BytecodeWriter = { - (None: Option[AbstractFile] /*getSingleOutput*/) match { // todo: implement - case Some(f) if f.hasExtension("jar") => - new DirectToJarfileWriter(f.file) - case _ => - factoryNonJarBytecodeWriter() - } - } - - /* - * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner - * classes in BTypes.scala. 
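> Editor's aside: `CClassWriter.getCommonSuperClass` above answers the verifier's question "what is the least upper bound of these two reference types?" when computing stack map frames. A minimal stand-in for that computation, using only core reflection (a sketch under the assumption that both arguments are classes, not interfaces; `CommonSuperSketch` is an invented name, and `jvmWiseLUB` itself works on `ClassBType`s and is more refined):

```scala
object CommonSuperSketch {
  // Walk a's superclass chain until we reach a class that b is assignable to.
  // This is roughly what ASM's default getCommonSuperClass does via Class.isAssignableFrom.
  def commonSuper(a: Class[_], b: Class[_]): Class[_] = {
    var c: Class[_] = a
    while (!c.isAssignableFrom(b)) c = c.getSuperclass
    c
  }

  def main(args: Array[String]): Unit = {
    println(commonSuper(classOf[java.lang.Integer], classOf[java.lang.Long]).getName) // java.lang.Number
    println(commonSuper(classOf[String], classOf[java.lang.Integer]).getName)         // java.lang.Object
  }
}
```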
- * - * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of - * each inner class it lists (those are looked up and included). - * - * This method serializes in the InnerClasses JVM attribute in an appropriate order, - * not necessarily that given by `refedInnerClasses`. - * - * can-multi-thread - */ - final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { - // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler - val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) - allNestedClasses ++= declaredInnerClasses - refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) - for nestedClass <- allNestedClasses - do { - // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. - val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked - jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) - } - } - - /* - * can-multi-thread - */ - def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { - new asm.Attribute(name) { - override def write(classWriter: ClassWriter, code: Array[Byte], - codeLength: Int, maxStack: Int, maxLocals: Int): asm.ByteVector = { - val byteVector = new asm.ByteVector(len) - byteVector.putByteArray(b, offset, len) - byteVector - } - } - } - - /* - * Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only - * i.e., the pickle is contained in a custom annotation, see: - * (1) `addAnnotations()`, - * (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10 - * (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5 - * That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9) - * other than both ending up encoded as attributes (JVMS 4.7) - * (with the caveat that the "ScalaSig" attribute is associated to some classes, - * while the "Signature" attribute can be associated to classes, methods, and fields.) - * - */ - trait BCPickles { - - import dotty.tools.dotc.core.unpickleScala2.{ PickleFormat, PickleBuffer } - - val versionPickle = { - val vp = new PickleBuffer(new Array[Byte](16), -1, 0) - assert(vp.writeIndex == 0, vp) - vp writeNat PickleFormat.MajorVersion - vp writeNat PickleFormat.MinorVersion - vp writeNat 0 - vp - } - - /* - * can-multi-thread - */ - def pickleMarkerLocal = { - createJAttribute(ScalaSignatureATTRName, versionPickle.bytes, 0, versionPickle.writeIndex) - } - - /* - * can-multi-thread - */ - def pickleMarkerForeign = { - createJAttribute(ScalaATTRName, new Array[Byte](0), 0, 0) - } - } // end of trait BCPickles - - trait BCInnerClassGen extends Pure { - - def debugLevel = 3 // 0 -> no debug info; 1-> filename; 2-> lines; 3-> varnames - - final val emitSource = debugLevel >= 1 - final val emitLines = debugLevel >= 2 - final val emitVars = debugLevel >= 3 - - /** - * The class internal name for a given class symbol. - */ - final def internalName(sym: Symbol): String = { - // For each java class, the scala compiler creates a class and a module (thus a module class). - // If the `sym` is a java module class, we use the java class instead. This ensures that the - // ClassBType is created from the main class (instead of the module class). - // The two symbols have the same name, so the resulting internalName is the same. 
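> Editor's aside: `addInnerClasses` above relies on a `TreeSet` ordered by JVM internal name so that every nested class is listed after its enclosing class (e.g. `Outer` before `Outer$Inner`), which is the ordering the Eclipse Java compiler expects. A tiny self-contained sketch of that ordering trick, with an invented `NestedEntry` standing in for `ClassBType`:

```scala
import scala.collection.mutable

object InnerClassOrderSketch {
  // Only the internal name matters for the ordering used above.
  final case class NestedEntry(internalName: String)

  def main(args: Array[String]): Unit = {
    val all = new mutable.TreeSet[NestedEntry]()(Ordering.by[NestedEntry, String](_.internalName))
    all ++= List(
      NestedEntry("p/Outer$Inner$Deep"),
      NestedEntry("p/Outer"),
      NestedEntry("p/Outer$Inner")
    )
    // Prints p/Outer, p/Outer$Inner, p/Outer$Inner$Deep: each nested class after its enclosing class.
    all.foreach(e => println(e.internalName))
  }
}
```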
- val classSym = if (sym.is(JavaDefined) && sym.is(ModuleClass)) sym.linkedClass else sym - getClassBType(classSym).internalName - } - - private def assertClassNotArray(sym: Symbol): Unit = { - assert(sym.isClass, sym) - assert(sym != defn.ArrayClass || compilingArray, sym) - } - - private def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = { - assertClassNotArray(sym) - assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym) - } - - /** - * The ClassBType for a class symbol. - * - * The class symbol scala.Nothing is mapped to the class scala.runtime.Nothing$. Similarly, - * scala.Null is mapped to scala.runtime.Null$. This is because there exist no class files - * for the Nothing / Null. If used for example as a parameter type, we use the runtime classes - * in the classfile method signature. - * - * Note that the referenced class symbol may be an implementation class. For example when - * compiling a mixed-in method that forwards to the static method in the implementation class, - * the class descriptor of the receiver (the implementation class) is obtained by creating the - * ClassBType. - */ - final def getClassBType(sym: Symbol): ClassBType = { - assertClassNotArrayNotPrimitive(sym) - - if (sym == defn.NothingClass) srNothingRef - else if (sym == defn.NullClass) srNullRef - else classBTypeFromSymbol(sym) - } - - /* - * must-single-thread - */ - final def asmMethodType(msym: Symbol): MethodBType = { - assert(msym.is(Method), s"not a method-symbol: $msym") - val resT: BType = - if (msym.isClassConstructor || msym.isConstructor) UNIT - else toTypeKind(msym.info.resultType) - MethodBType(msym.info.firstParamTypes map toTypeKind, resT) - } - - /** - * The jvm descriptor of a type. - */ - final def typeDescriptor(t: Type): String = { toTypeKind(t).descriptor } - - /** - * The jvm descriptor for a symbol. 
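> Editor's aside: `asmMethodType` and `typeDescriptor` above ultimately produce JVM descriptor strings. If the exact shape of those descriptors is unfamiliar, the JDK can print them without any compiler internals; a small sketch (`DescriptorSketch` is an invented name):

```scala
import java.lang.invoke.MethodType

object DescriptorSketch {
  def main(args: Array[String]): Unit = {
    // A method descriptor of the kind MethodBType produces: (I)Ljava/lang/String;
    println(MethodType.methodType(classOf[String], classOf[Int]).toMethodDescriptorString)
    // Array classes expose descriptor-like names at runtime: [I and [Ljava.lang.String;
    println(classOf[Array[Int]].getName)
    println(classOf[Array[String]].getName)
  }
}
```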
- */ - final def symDescriptor(sym: Symbol): String = getClassBType(sym).descriptor - - final def toTypeKind(tp: Type): BType = typeToTypeKind(tp)(BCodeHelpers.this)(this) - - } // end of trait BCInnerClassGen - - trait BCAnnotGen extends BCInnerClassGen { - - /* - * must-single-thread - */ - def emitAnnotations(cw: asm.ClassVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { - val typ = annot.tree.tpe - val assocs = assocsFromApply(annot.tree) - val av = cw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) - emitAssocs(av, assocs, BCodeHelpers.this)(this) - } - - /* - * must-single-thread - */ - def emitAnnotations(mw: asm.MethodVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { - val typ = annot.tree.tpe - val assocs = assocsFromApply(annot.tree) - val av = mw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) - emitAssocs(av, assocs, BCodeHelpers.this)(this) - } - - /* - * must-single-thread - */ - def emitAnnotations(fw: asm.FieldVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { - val typ = annot.tree.tpe - val assocs = assocsFromApply(annot.tree) - val av = fw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) - emitAssocs(av, assocs, BCodeHelpers.this)(this) - } - - /* - * must-single-thread - */ - def emitParamNames(jmethod: asm.MethodVisitor, params: List[Symbol]) = - for param <- params do - var access = asm.Opcodes.ACC_FINAL - if param.is(Artifact) then access |= asm.Opcodes.ACC_SYNTHETIC - jmethod.visitParameter(param.name.mangledString, access) - - /* - * must-single-thread - */ - def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[Annotation]]): Unit = - val annotationss = pannotss map (_ filter shouldEmitAnnotation) - if (annotationss forall (_.isEmpty)) return - for ((annots, idx) <- annotationss.zipWithIndex; - annot <- annots) { - val typ = annot.tree.tpe - val assocs = assocsFromApply(annot.tree) - val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, typeDescriptor(typ.asInstanceOf[Type]), isRuntimeVisible(annot)) - emitAssocs(pannVisitor, assocs, BCodeHelpers.this)(this) - } - - - private def shouldEmitAnnotation(annot: Annotation): Boolean = { - annot.symbol.is(JavaDefined) && - retentionPolicyOf(annot) != AnnotationRetentionSourceAttr - } - - private def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, Object)], bcodeStore: BCodeHelpers) - (innerClasesStore: bcodeStore.BCInnerClassGen) = { - for ((name, value) <- assocs) - emitArgument(av, name.mangledString, value.asInstanceOf[Tree], bcodeStore)(innerClasesStore) - av.visitEnd() - } - - private def emitArgument(av: AnnotationVisitor, - name: String, - arg: Tree, bcodeStore: BCodeHelpers)(innerClasesStore: bcodeStore.BCInnerClassGen): Unit = { - val narg = normalizeArgument(arg) - // Transformation phases are not run on annotation trees, so we need to run - // `constToLiteral` at this point. 
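> Editor's aside: `shouldEmitAnnotation` above only emits Java-defined annotations whose retention is not `SOURCE`, and annotations with `RUNTIME` retention (or no explicit retention) end up in the runtime-visible attribute, so plain reflection can observe them. A minimal sketch of that effect, assuming nothing beyond the JDK (`RetentionSketch` is an invented name):

```scala
object RetentionSketch {
  // java.lang.Deprecated is a Java annotation with RUNTIME retention, so the backend
  // writes it as a runtime-visible annotation on the class file.
  // A Scala annotation such as @deprecated is not Java-defined and is pickled instead.
  @Deprecated
  def old(): Int = 1

  def main(args: Array[String]): Unit = {
    val m = getClass.getMethod("old")
    println(m.isAnnotationPresent(classOf[Deprecated])) // true
  }
}
```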
- val t = atPhase(erasurePhase)(constToLiteral(narg)) - t match { - case Literal(const @ Constant(_)) => - const.tag match { - case BooleanTag | ByteTag | ShortTag | CharTag | IntTag | LongTag | FloatTag | DoubleTag => av.visit(name, const.value) - case StringTag => - assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant` - av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag - case ClazzTag => av.visit(name, typeToTypeKind(TypeErasure.erasure(const.typeValue))(bcodeStore)(innerClasesStore).toASMType) - } - case Ident(nme.WILDCARD) => - // An underscore argument indicates that we want to use the default value for this parameter, so do not emit anything - case t: tpd.RefTree if t.symbol.owner.linkedClass.isAllOf(JavaEnum) => - val edesc = innerClasesStore.typeDescriptor(t.tpe) // the class descriptor of the enumeration class. - val evalue = t.symbol.javaSimpleName // value the actual enumeration value. - av.visitEnum(name, edesc, evalue) - case t: SeqLiteral => - val arrAnnotV: AnnotationVisitor = av.visitArray(name) - for (arg <- t.elems) { emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) } - arrAnnotV.visitEnd() - - case Apply(fun, args) if fun.symbol == defn.ArrayClass.primaryConstructor || - toDenot(fun.symbol).owner == defn.ArrayClass.linkedClass && fun.symbol.name == nme.apply => - val arrAnnotV: AnnotationVisitor = av.visitArray(name) - - var actualArgs = if (fun.tpe.isImplicitMethod) { - // generic array method, need to get implicit argument out of the way - fun.asInstanceOf[Apply].args - } else args - - val flatArgs = actualArgs.flatMap { arg => - normalizeArgument(arg) match { - case t: tpd.SeqLiteral => t.elems - case e => List(e) - } - } - for(arg <- flatArgs) { - emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) - } - arrAnnotV.visitEnd() - /* - case sb @ ScalaSigBytes(bytes) => - // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files) - // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure. - if (sb.fitsInOneString) { - av.visit(name, BCodeAsmCommon.strEncode(sb)) - } else { - val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name) - for(arg <- BCodeAsmCommon.arrEncode(sb)) { arrAnnotV.visit(name, arg) } - arrAnnotV.visitEnd() - } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape. 
- */ - case t @ Apply(constr, args) if t.tpe.classSymbol.is(JavaAnnotation) => - val typ = t.tpe.classSymbol.denot.info - val assocs = assocsFromApply(t) - val desc = innerClasesStore.typeDescriptor(typ) // the class descriptor of the nested annotation class - val nestedVisitor = av.visitAnnotation(name, desc) - emitAssocs(nestedVisitor, assocs, bcodeStore)(innerClasesStore) - - case t => - report.error(em"Annotation argument is not a constant", t.sourcePos) - } - } - - private def normalizeArgument(arg: Tree): Tree = arg match { - case Trees.NamedArg(_, arg1) => normalizeArgument(arg1) - case Trees.Typed(arg1, _) => normalizeArgument(arg1) - case _ => arg - } - - private def isRuntimeVisible(annot: Annotation): Boolean = - if (toDenot(annot.tree.tpe.typeSymbol).hasAnnotation(AnnotationRetentionAttr)) - retentionPolicyOf(annot) == AnnotationRetentionRuntimeAttr - else { - // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the - // annotation is emitted with visibility `RUNTIME` - // dotty bug: #389 - true - } - - private def retentionPolicyOf(annot: Annotation): Symbol = - annot.tree.tpe.typeSymbol.getAnnotation(AnnotationRetentionAttr). - flatMap(_.argument(0).map(_.tpe.termSymbol)).getOrElse(AnnotationRetentionClassAttr) - - private def assocsFromApply(tree: Tree): List[(Name, Tree)] = { - tree match { - case Block(_, expr) => assocsFromApply(expr) - case Apply(fun, args) => - fun.tpe.widen match { - case MethodType(names) => - (names zip args).filter { - case (_, t: tpd.Ident) if (t.tpe.normalizedPrefix eq NoPrefix) => false - case _ => true - } - } - } - } - } // end of trait BCAnnotGen - - trait BCJGenSigGen { - import int.given - - def getCurrentCUnit(): CompilationUnit - - /** - * Generates the generic signature for `sym` before erasure. - * - * @param sym The symbol for which to generate a signature. - * @param owner The owner of `sym`. - * @return The generic signature of `sym` before erasure, as specified in the Java Virtual - * Machine Specification, §4.3.4, or `null` if `sym` doesn't need a generic signature. - * @see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.3.4 - */ - def getGenericSignature(sym: Symbol, owner: Symbol): String = { - atPhase(erasurePhase) { - val memberTpe = - if (sym.is(Method)) sym.denot.info - else owner.denot.thisType.memberInfo(sym) - getGenericSignatureHelper(sym, owner, memberTpe).orNull - } - } - - } // end of trait BCJGenSigGen - - trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen { - - /* Add a forwarder for method m. Used only from addForwarders(). - * - * must-single-thread - */ - private def addForwarder(jclass: asm.ClassVisitor, module: Symbol, m: Symbol, isSynthetic: Boolean): Unit = { - val moduleName = internalName(module) - val methodInfo = module.thisType.memberInfo(m) - val paramJavaTypes: List[BType] = methodInfo.firstParamTypes map toTypeKind - // val paramNames = 0 until paramJavaTypes.length map ("x_" + _) - - /* Forwarders must not be marked final, - * as the JVM will not allow redefinition of a final static method, - * and we don't know what classes might be subclassing the companion class. See SI-4827. - */ - // TODO: evaluate the other flags we might be dropping on the floor here. - val flags = GenBCodeOps.PublicStatic | ( - if (m.is(JavaVarargs)) asm.Opcodes.ACC_VARARGS else 0 - ) | ( - if (isSynthetic) asm.Opcodes.ACC_SYNTHETIC else 0 - ) - - // TODO needed? 
for(ann <- m.annotations) { ann.symbol.initialize } - val jgensig = getStaticForwarderGenericSignature(m, module) - val (throws, others) = m.annotations.partition(_.symbol eq defn.ThrowsAnnot) - val thrownExceptions: List[String] = getExceptions(throws) - - val jReturnType = toTypeKind(methodInfo.resultType) - val mdesc = MethodBType(paramJavaTypes, jReturnType).descriptor - val mirrorMethodName = m.javaSimpleName - val mirrorMethod: asm.MethodVisitor = jclass.visitMethod( - flags, - mirrorMethodName, - mdesc, - jgensig, - mkArrayS(thrownExceptions) - ) - - emitAnnotations(mirrorMethod, others) - val params: List[Symbol] = Nil // backend uses this to emit annotations on parameter lists of forwarders - // to static methods of companion class - // Old assumption: in Dotty this link does not exists: there is no way to get from method type - // to inner symbols of DefDef - // TODO: now we have paramSymss and could use it here. - emitParamAnnotations(mirrorMethod, params.map(_.annotations)) - - mirrorMethod.visitCode() - - mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, str.MODULE_INSTANCE_FIELD, symDescriptor(module)) - - var index = 0 - for(jparamType <- paramJavaTypes) { - mirrorMethod.visitVarInsn(jparamType.typedOpcode(asm.Opcodes.ILOAD), index) - assert(!jparamType.isInstanceOf[MethodBType], jparamType) - index += jparamType.size - } - - mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).descriptor, false) - mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN)) - - mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments - mirrorMethod.visitEnd() - - } - - /* Add forwarders for all methods defined in `module` that don't conflict - * with methods in the companion class of `module`. A conflict arises when - * a method with the same name is defined both in a class and its companion object: - * method signature is not taken into account. - * - * must-single-thread - */ - def addForwarders(jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol): Unit = { - assert(moduleClass.is(ModuleClass), moduleClass) - report.debuglog(s"Dumping mirror class for object: $moduleClass") - - val linkedClass = moduleClass.companionClass - lazy val conflictingNames: Set[Name] = { - (linkedClass.info.allMembers.collect { case d if d.name.isTermName => d.name }).toSet - } - report.debuglog(s"Potentially conflicting names for forwarders: $conflictingNames") - - for (m0 <- sortedMembersBasedOnFlags(moduleClass.info, required = Method, excluded = ExcludedForwarder)) { - val m = if (m0.is(Bridge)) m0.nextOverriddenSymbol else m0 - if (m == NoSymbol) - report.log(s"$m0 is a bridge method that overrides nothing, something went wrong in a previous phase.") - else if (m.isType || m.is(Deferred) || (m.owner eq defn.ObjectClass) || m.isConstructor || m.name.is(ExpandedName)) - report.debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'") - else if (conflictingNames(m.name)) - report.log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}") - else if (m.accessBoundary(defn.RootClass) ne defn.RootClass) - report.log(s"No forwarder for non-public member $m") - else { - report.log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'") - // It would be simpler to not generate forwarders for these methods, - // but that wouldn't be binary-compatible with Scala 3.0.0, so instead - // we generate ACC_SYNTHETIC forwarders so Java compilers ignore them. 
- val isSynthetic = - m0.name.is(NameKinds.SyntheticSetterName) || - // Only hide bridges generated at Erasure, mixin forwarders are also - // marked as bridge but shouldn't be hidden since they don't have a - // non-bridge overload. - m0.is(Bridge) && m0.initial.validFor.firstPhaseId == erasurePhase.next.id - addForwarder(jclass, moduleClass, m, isSynthetic) - } - } - } - - /** The members of this type that have all of `required` flags but none of `excluded` flags set. - * The members are sorted by name and signature to guarantee a stable ordering. - */ - private def sortedMembersBasedOnFlags(tp: Type, required: Flag, excluded: FlagSet): List[Symbol] = { - // The output of `memberNames` is a Set, sort it to guarantee a stable ordering. - val names = tp.memberNames(takeAllFilter).toSeq.sorted - val buffer = mutable.ListBuffer[Symbol]() - names.foreach { name => - buffer ++= tp.memberBasedOnFlags(name, required, excluded) - .alternatives.sortBy(_.signature)(Signature.lexicographicOrdering).map(_.symbol) - } - buffer.toList - } - - /* - * Quoting from JVMS 4.7.5 The Exceptions Attribute - * "The Exceptions attribute indicates which checked exceptions a method may throw. - * There may be at most one Exceptions attribute in each method_info structure." - * - * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod() - * This method returns such list of internal names. - * - * must-single-thread - */ - def getExceptions(excs: List[Annotation]): List[String] = { - for (case ThrownException(exc) <- excs.distinct) - yield internalName(TypeErasure.erasure(exc).classSymbol) - } - } // end of trait BCForwardersGen - - trait BCClassGen extends BCInnerClassGen { - - // Used as threshold above which a tableswitch bytecode instruction is preferred over a lookupswitch. - // There's a space tradeoff between these multi-branch instructions (details in the JVM spec). - // The particular value in use for `MIN_SWITCH_DENSITY` reflects a heuristic. - val MIN_SWITCH_DENSITY = 0.7 - - /* - * Add public static final field serialVersionUID with value `id` - * - * can-multi-thread - */ - def addSerialVUID(id: Long, jclass: asm.ClassVisitor): Unit = { - // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)` - jclass.visitField( - GenBCodeOps.PrivateStaticFinal, - "serialVersionUID", - "J", - null, // no java-generic-signature - java.lang.Long.valueOf(id) - ).visitEnd() - } - } // end of trait BCClassGen - - /* functionality for building plain and mirror classes */ - abstract class JCommonBuilder - extends BCInnerClassGen - with BCAnnotGen - with BCForwardersGen - with BCPickles { } - - /* builder of mirror classes */ - class JMirrorBuilder extends JCommonBuilder { - - private var cunit: CompilationUnit = _ - def getCurrentCUnit(): CompilationUnit = cunit; - - /* Generate a mirror class for a top-level module. A mirror class is a class - * containing only static methods that forward to the corresponding method - * on the MODULE instance of the given Scala object. It will only be - * generated if there is no companion class: if there is, an attempt will - * instead be made to add the forwarder methods to the companion class. 
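> Editor's aside: the effect of `addForwarders`/`genMirrorClass` above can be observed from the outside: for a top-level object with no companion class, the backend emits a mirror class carrying static forwarders, which is what lets Java code call the object's methods statically. A reflection-based sketch (object names are invented; it assumes both objects are compiled together in the default, empty package):

```scala
object Echo {
  def greet(name: String): String = s"hello, $name"
}

object ForwarderSketch {
  def main(args: Array[String]): Unit = {
    // `Echo` (no trailing $) is the mirror class; `Echo$` is the module class holding MODULE$.
    val mirror = Class.forName("Echo")
    val m = mirror.getMethod("greet", classOf[String])
    println(java.lang.reflect.Modifier.isStatic(m.getModifiers)) // true: a static forwarder
    println(m.invoke(null, "backend"))                           // hello, backend
  }
}
```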
- * - * must-single-thread - */ - def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = { - assert(moduleClass.is(ModuleClass)) - assert(moduleClass.companionClass == NoSymbol, moduleClass) - this.cunit = cunit - val bType = mirrorClassBTypeFromSymbol(moduleClass) - val moduleName = internalName(moduleClass) // + "$" - val mirrorName = bType.internalName - - val mirrorClass = new asm.tree.ClassNode - mirrorClass.visit( - classfileVersion, - bType.info.flags, - mirrorName, - null /* no java-generic-signature */, - ObjectRef.internalName, - EMPTY_STRING_ARRAY - ) - - if (emitSource) { - mirrorClass.visitSource("" + cunit.source.file.name, - null /* SourceDebugExtension */) - } - - val ssa = None // getAnnotPickle(mirrorName, if (moduleClass.is(Module)) moduleClass.companionClass else moduleClass.companionModule) - mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) - emitAnnotations(mirrorClass, moduleClass.annotations ++ ssa) - - addForwarders(mirrorClass, mirrorName, moduleClass) - mirrorClass.visitEnd() - - moduleClass.name // this side-effect is necessary, really. - - mirrorClass - } - - } // end of class JMirrorBuilder - - trait JAndroidBuilder { - self: BCInnerClassGen => - - /* From the reference documentation of the Android SDK: - * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`. - * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`, - * which is an object implementing the `Parcelable.Creator` interface. - */ - val androidFieldName = "CREATOR".toTermName - - lazy val AndroidParcelableInterface : Symbol = NoSymbol // getClassIfDefined("android.os.Parcelable") - lazy val AndroidCreatorClass : Symbol = NoSymbol // getClassIfDefined("android.os.Parcelable$Creator") - - /* - * must-single-thread - */ - def isAndroidParcelableClass(sym: Symbol) = - (AndroidParcelableInterface != NoSymbol) && - (sym.info.parents.map(_.typeSymbol) contains AndroidParcelableInterface) - - /* - * must-single-thread - */ - def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String): Unit = { - val androidCreatorType = getClassBType(AndroidCreatorClass) - val tdesc_creator = androidCreatorType.descriptor - - cnode.visitField( - GenBCodeOps.PublicStaticFinal, - "CREATOR", - tdesc_creator, - null, // no java-generic-signature - null // no initial value - ).visitEnd() - - val moduleName = (thisName + "$") - - // GETSTATIC `moduleName`.MODULE$ : `moduleName`; - clinit.visitFieldInsn( - asm.Opcodes.GETSTATIC, - moduleName, - str.MODULE_INSTANCE_FIELD, - "L" + moduleName + ";" - ) - - // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator; - val bt = MethodBType(Nil, androidCreatorType) - clinit.visitMethodInsn( - asm.Opcodes.INVOKEVIRTUAL, - moduleName, - "CREATOR", - bt.descriptor, - false - ) - - // PUTSTATIC `thisName`.CREATOR; - clinit.visitFieldInsn( - asm.Opcodes.PUTSTATIC, - thisName, - "CREATOR", - tdesc_creator - ) - } - - } // end of trait JAndroidBuilder - - /** - * This method returns the BType for a type reference, for example a parameter type. - * - * If the result is a ClassBType for a nested class, it is added to the innerClassBufferASM. - * - * If `t` references a class, toTypeKind ensures that the class is not an implementation class. - * See also comment on getClassBTypeAndRegisterInnerClass, which is invoked for implementation - * classes. 
- */ - private def typeToTypeKind(tp: Type)(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = { - import ct.bTypes._ - val defn = ctx.definitions - import coreBTypes._ - import Types._ - /** - * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int. - * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType. - */ - def primitiveOrClassToBType(sym: Symbol): BType = { - assert(sym.isClass, sym) - assert(sym != defn.ArrayClass || compilingArray, sym) - primitiveTypeMap.getOrElse(sym, storage.getClassBType(sym)).asInstanceOf[BType] - } - - /** - * When compiling Array.scala, the type parameter T is not erased and shows up in method - * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference. - */ - def nonClassTypeRefToBType(sym: Symbol): ClassBType = { - assert(sym.isType && compilingArray, sym) - ObjectRef.asInstanceOf[ct.bTypes.ClassBType] - } - - tp.widenDealias match { - case JavaArrayType(el) =>ArrayBType(typeToTypeKind(el)(ct)(storage)) // Array type such as Array[Int] (kept by erasure) - case t: TypeRef => - t.info match { - - case _ => - if (!t.symbol.isClass) nonClassTypeRefToBType(t.symbol) // See comment on nonClassTypeRefToBType - else primitiveOrClassToBType(t.symbol) // Common reference to a type such as scala.Int or java.lang.String - } - case Types.ClassInfo(_, sym, _, _, _) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes toTypeKind(moduleClassSymbol.info) - - /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for - * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning. - * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala. - */ - case a @ AnnotatedType(t, _) => - report.debuglog(s"typeKind of annotated type $a") - typeToTypeKind(t)(ct)(storage) - - /* The cases below should probably never occur. They are kept for now to avoid introducing - * new compiler crashes, but we added a warning. The compiler / library bootstrap and the - * test suite don't produce any warning. - */ - - case tp => - report.warning( - s"an unexpected type representation reached the compiler backend while compiling ${ctx.compilationUnit}: $tp. " + - "If possible, please file a bug on https://github.com/scala/scala3/issues") - - tp match { - case tp: ThisType if tp.cls == defn.ArrayClass => ObjectRef.asInstanceOf[ct.bTypes.ClassBType] // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test - case tp: ThisType => storage.getClassBType(tp.cls) - // case t: SingletonType => primitiveOrClassToBType(t.classSymbol) - case t: SingletonType => typeToTypeKind(t.underlying)(ct)(storage) - case t: RefinedType => typeToTypeKind(t.parent)(ct)(storage) //parents.map(_.toTypeKind(ct)(storage).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b)) - } - } - } - - private def getGenericSignatureHelper(sym: Symbol, owner: Symbol, memberTpe: Type)(using Context): Option[String] = { - if (needsGenericSignature(sym)) { - val erasedTypeSym = TypeErasure.fullErasure(sym.denot.info).typeSymbol - if (erasedTypeSym.isPrimitiveValueClass) { - // Suppress signatures for symbols whose types erase in the end to primitive - // value types. This is needed to fix #7416. 
- None - } else { - val jsOpt = GenericSignatures.javaSig(sym, memberTpe) - if (ctx.settings.XverifySignatures.value) { - jsOpt.foreach(verifySignature(sym, _)) - } - - jsOpt - } - } else { - None - } - } - - private def verifySignature(sym: Symbol, sig: String)(using Context): Unit = { - import scala.tools.asm.util.CheckClassAdapter - def wrap(body: => Unit): Unit = { - try body - catch { - case ex: Throwable => - report.error( - em"""|compiler bug: created invalid generic signature for $sym in ${sym.denot.owner.showFullName} - |signature: $sig - |if this is reproducible, please report bug at https://github.com/scala/scala3/issues - """, sym.sourcePos) - throw ex - } - } - - wrap { - if (sym.is(Method)) { - CheckClassAdapter.checkMethodSignature(sig) - } - else if (sym.isTerm) { - CheckClassAdapter.checkFieldSignature(sig) - } - else { - CheckClassAdapter.checkClassSignature(sig) - } - } - } - - // @M don't generate java generics sigs for (members of) implementation - // classes, as they are monomorphic (TODO: ok?) - private final def needsGenericSignature(sym: Symbol): Boolean = !( - // pp: this condition used to include sym.hasexpandedname, but this leads - // to the total loss of generic information if a private member is - // accessed from a closure: both the field and the accessor were generated - // without it. This is particularly bad because the availability of - // generic information could disappear as a consequence of a seemingly - // unrelated change. - ctx.base.settings.YnoGenericSig.value - || sym.is(Artifact) - || sym.isAllOf(LiftedMethod) - || sym.is(Bridge) - ) - - private def getStaticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol): String = { - // scala/bug#3452 Static forwarder generation uses the same erased signature as the method if forwards to. - // By rights, it should use the signature as-seen-from the module class, and add suitable - // primitive and value-class boxing/unboxing. - // But for now, just like we did in mixin, we just avoid writing a wrong generic signature - // (one that doesn't erase to the actual signature). See run/t3452b for a test case. - - val memberTpe = atPhase(erasurePhase) { moduleClass.denot.thisType.memberInfo(sym) } - val erasedMemberType = ElimErasedValueType.elimEVT(TypeErasure.transformInfo(sym, memberTpe)) - if (erasedMemberType =:= sym.denot.info) - getGenericSignatureHelper(sym, moduleClass, memberTpe).orNull - else null - } - - def abort(msg: String): Nothing = { - report.error(msg) - throw new RuntimeException(msg) - } - - private def compilingArray(using Context) = - ctx.compilationUnit.source.file.name == "Array.scala" -} - -object BCodeHelpers { - - class InvokeStyle(val style: Int) extends AnyVal { - import InvokeStyle._ - def isVirtual: Boolean = this == Virtual - def isStatic : Boolean = this == Static - def isSpecial: Boolean = this == Special - def isSuper : Boolean = this == Super - - def hasInstance = this != Static - } - - object InvokeStyle { - val Virtual = new InvokeStyle(0) // InvokeVirtual or InvokeInterface - val Static = new InvokeStyle(1) // InvokeStatic - val Special = new InvokeStyle(2) // InvokeSpecial (private methods, constructors) - val Super = new InvokeStyle(3) // InvokeSpecial (super calls) - } - - /** An attachment on Apply nodes indicating that it should be compiled with - * `invokespecial` instead of `invokevirtual`. This is used for static - * forwarders. - * See BCodeSkelBuilder.makeStaticForwarder for more details. 
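> Editor's aside: the generic-signature machinery above (`getGenericSignatureHelper`, `verifySignature`) writes the JVMS 4.7.9 `Signature` attribute, which is what `java.lang.reflect` parses back into type variables. A quick way to inspect the result is `Method.toGenericString`; a sketch with invented names (`SigSketch`, `Box`):

```scala
object SigSketch {
  class Box[A](val value: A) {
    def map[B](f: A => B): Box[B] = new Box(f(value))
  }

  def main(args: Array[String]): Unit = {
    // Without the Signature attribute this would only show the erased shape
    // (scala.Function1 => Box); with it, the type parameters A and B are recovered.
    val m = classOf[Box[_]].getMethods.find(_.getName == "map").get
    println(m.toGenericString)
  }
}
```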
- */ - val UseInvokeSpecial = new dotc.util.Property.Key[Unit] - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala deleted file mode 100644 index 9b8d81bbdbd1..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala +++ /dev/null @@ -1,727 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.tools.asm -import scala.annotation.switch -import Primitives.{NE, EQ, TestOp, ArithmeticOp} -import scala.tools.asm.tree.MethodInsnNode -import dotty.tools.dotc.report - -/* - * A high-level facade to the ASM API for bytecode generation. - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded - * @version 1.0 - * - */ -trait BCodeIdiomatic extends Pure { - val int: DottyBackendInterface - final lazy val bTypes = new BTypesFromSymbols[int.type](int) - - import int.{_, given} - import bTypes._ - import coreBTypes._ - - - - lazy val target = - val releaseValue = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - val targetValue = Option(ctx.settings.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) - val defaultTarget = "8" - (releaseValue, targetValue) match - case (Some(release), None) => release - case (None, Some(target)) => target - case (Some(release), Some(_)) => - report.warning(s"The value of ${ctx.settings.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") - release - case (None, None) => "8" // least supported version by default - - - // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings - lazy val classfileVersion: Int = target match { - case "8" => asm.Opcodes.V1_8 - case "9" => asm.Opcodes.V9 - case "10" => asm.Opcodes.V10 - case "11" => asm.Opcodes.V11 - case "12" => asm.Opcodes.V12 - case "13" => asm.Opcodes.V13 - case "14" => asm.Opcodes.V14 - case "15" => asm.Opcodes.V15/* - case "16" => asm.Opcodes.V16 - case "17" => asm.Opcodes.V17 - case "18" => asm.Opcodes.V18 - case "19" => asm.Opcodes.V19 - case "20" => asm.Opcodes.V20 - case "21" => asm.Opcodes.V21 - case "22" => asm.Opcodes.V22*/ - } - - lazy val majorVersion: Int = (classfileVersion & 0xFF) - lazy val emitStackMapFrame = (majorVersion >= 50) - - val extraProc: Int = - import GenBCodeOps.addFlagIf - asm.ClassWriter.COMPUTE_MAXS - .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) - - lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName - - val CLASS_CONSTRUCTOR_NAME = "" - val INSTANCE_CONSTRUCTOR_NAME = "" - - val EMPTY_STRING_ARRAY = Array.empty[String] - val EMPTY_INT_ARRAY = Array.empty[Int] - val EMPTY_LABEL_ARRAY = Array.empty[asm.Label] - val EMPTY_BTYPE_ARRAY = Array.empty[BType] - - /* can-multi-thread */ - final def mkArrayB(xs: List[BType]): Array[BType] = { - if (xs.isEmpty) { return EMPTY_BTYPE_ARRAY } - val a = new Array[BType](xs.size); xs.copyToArray(a); a - } - /* can-multi-thread */ - final def mkArrayS(xs: List[String]): Array[String] = { - if (xs.isEmpty) { return EMPTY_STRING_ARRAY } - val a = new Array[String](xs.size); xs.copyToArray(a); a - } - /* can-multi-thread */ - final def mkArrayL(xs: List[asm.Label]): Array[asm.Label] = { - if (xs.isEmpty) { return EMPTY_LABEL_ARRAY } - val a = new Array[asm.Label](xs.size); xs.copyToArray(a); a - } - - /* - * can-multi-thread - */ - final def mkArrayReverse(xs: List[String]): Array[String] = { - val len = xs.size - if (len == 0) { return 
EMPTY_STRING_ARRAY } - val a = new Array[String](len) - var i = len - 1 - var rest = xs - while (!rest.isEmpty) { - a(i) = rest.head - rest = rest.tail - i -= 1 - } - a - } - - /* - * can-multi-thread - */ - final def mkArrayReverse(xs: List[Int]): Array[Int] = { - val len = xs.size - if (len == 0) { return EMPTY_INT_ARRAY } - val a = new Array[Int](len) - var i = len - 1 - var rest = xs - while (!rest.isEmpty) { - a(i) = rest.head - rest = rest.tail - i -= 1 - } - a - } - - /* Just a namespace for utilities that encapsulate MethodVisitor idioms. - * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role, - * but the methods here allow choosing when to transition from ICode to ASM types - * (including not at all, e.g. for performance). - */ - abstract class JCodeMethodN { - - def jmethod: asm.tree.MethodNode - - import asm.Opcodes; - - final def emit(opc: Int): Unit = { jmethod.visitInsn(opc) } - - /* - * can-multi-thread - */ - final def genPrimitiveArithmetic(op: ArithmeticOp, kind: BType): Unit = { - - import Primitives.{ ADD, SUB, MUL, DIV, REM, NOT } - - op match { - - case ADD => add(kind) - case SUB => sub(kind) - case MUL => mul(kind) - case DIV => div(kind) - case REM => rem(kind) - - case NOT => - if (kind.isIntSizedType) { - emit(Opcodes.ICONST_M1) - emit(Opcodes.IXOR) - } else if (kind == LONG) { - jmethod.visitLdcInsn(java.lang.Long.valueOf(-1)) - jmethod.visitInsn(Opcodes.LXOR) - } else { - abort(s"Impossible to negate an $kind") - } - - case _ => - abort(s"Unknown arithmetic primitive $op") - } - - } // end of method genPrimitiveArithmetic() - - /* - * can-multi-thread - */ - final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType): Unit = { - - import ScalaPrimitivesOps.{ AND, OR, XOR } - - ((op, kind): @unchecked) match { - case (AND, LONG) => emit(Opcodes.LAND) - case (AND, INT) => emit(Opcodes.IAND) - case (AND, _) => - emit(Opcodes.IAND) - if (kind != BOOL) { emitT2T(INT, kind) } - - case (OR, LONG) => emit(Opcodes.LOR) - case (OR, INT) => emit(Opcodes.IOR) - case (OR, _) => - emit(Opcodes.IOR) - if (kind != BOOL) { emitT2T(INT, kind) } - - case (XOR, LONG) => emit(Opcodes.LXOR) - case (XOR, INT) => emit(Opcodes.IXOR) - case (XOR, _) => - emit(Opcodes.IXOR) - if (kind != BOOL) { emitT2T(INT, kind) } - } - - } // end of method genPrimitiveLogical() - - /* - * can-multi-thread - */ - final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType): Unit = { - - import ScalaPrimitivesOps.{ LSL, ASR, LSR } - - ((op, kind): @unchecked) match { - case (LSL, LONG) => emit(Opcodes.LSHL) - case (LSL, INT) => emit(Opcodes.ISHL) - case (LSL, _) => - emit(Opcodes.ISHL) - emitT2T(INT, kind) - - case (ASR, LONG) => emit(Opcodes.LSHR) - case (ASR, INT) => emit(Opcodes.ISHR) - case (ASR, _) => - emit(Opcodes.ISHR) - emitT2T(INT, kind) - - case (LSR, LONG) => emit(Opcodes.LUSHR) - case (LSR, INT) => emit(Opcodes.IUSHR) - case (LSR, _) => - emit(Opcodes.IUSHR) - emitT2T(INT, kind) - } - - } // end of method genPrimitiveShift() - - /* Creates a new `StringBuilder` instance with the requested capacity - * - * can-multi-thread - */ - final def genNewStringBuilder(size: Int): Unit = { - jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName) - jmethod.visitInsn(Opcodes.DUP) - jmethod.visitLdcInsn(Integer.valueOf(size)) - invokespecial( - JavaStringBuilderClassName, - INSTANCE_CONSTRUCTOR_NAME, - "(I)V", - itf = false - ) - } - - /* Issue a call to `StringBuilder#append` for the right element type - * - * can-multi-thread - */ - final def 
genStringBuilderAppend(elemType: BType): Unit = { - val paramType = elemType match { - case ct: ClassBType if ct.isSubtypeOf(StringRef) => StringRef - case ct: ClassBType if ct.isSubtypeOf(jlStringBufferRef) => jlStringBufferRef - case ct: ClassBType if ct.isSubtypeOf(jlCharSequenceRef) => jlCharSequenceRef - // Don't match for `ArrayBType(CHAR)`, even though StringBuilder has such an overload: - // `"a" + Array('b')` should NOT be "ab", but "a[C@...". - case _: RefBType => ObjectRef - // jlStringBuilder does not have overloads for byte and short, but we can just use the int version - case BYTE | SHORT => INT - case pt: PrimitiveBType => pt - } - val bt = MethodBType(List(paramType), jlStringBuilderRef) - invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor) - } - - /* Extract the built `String` from the `StringBuilder` - * - * can-multi-thread - */ - final def genStringBuilderEnd: Unit = { - invokevirtual(JavaStringBuilderClassName, "toString", genStringBuilderEndDesc) - } - // Use ClassBType refs instead of plain string literal to make sure that needed ClassBTypes are initialized and reachable - private lazy val genStringBuilderEndDesc = MethodBType(Nil, StringRef).descriptor - - /* Concatenate top N arguments on the stack with `StringConcatFactory#makeConcatWithConstants` - * (only works for JDK 9+) - * - * can-multi-thread - */ - final def genIndyStringConcat( - recipe: String, - argTypes: Seq[asm.Type], - constants: Seq[String] - ): Unit = { - jmethod.visitInvokeDynamicInsn( - "makeConcatWithConstants", - asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), - coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle, - (recipe +: constants):_* - ) - } - - /* - * Emits one or more conversion instructions based on the types given as arguments. - * - * @param from The type of the value to be converted into another type. - * @param to The type the value will be converted into. 
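> Editor's aside: `genNewStringBuilder`, `genStringBuilderAppend` and `genIndyStringConcat` above are the two lowering strategies for string concatenation. At the source level the `StringBuilder` strategy corresponds to roughly the chain below; on JDK 9+ targets the backend can instead emit a single `invokedynamic` to `StringConcatFactory.makeConcatWithConstants`. A sketch only (`ConcatSketch` is an invented name):

```scala
object ConcatSketch {
  def main(args: Array[String]): Unit = {
    val x = 42
    // Roughly what `"x = " + x + "!"` lowers to with the StringBuilder strategy;
    // as noted above, the (I) append overload is reused for Byte and Short arguments too.
    val s = new java.lang.StringBuilder(16)
      .append("x = ")
      .append(x)
      .append("!")
      .toString
    println(s)
  }
}
```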
- * - * can-multi-thread - */ - final def emitT2T(from: BType, to: BType): Unit = { - - assert( - from.isNonVoidPrimitiveType && to.isNonVoidPrimitiveType, - s"Cannot emit primitive conversion from $from to $to" - ) - - def pickOne(opcs: Array[Int]): Unit = { // TODO index on to.sort - val chosen = (to: @unchecked) match { - case BYTE => opcs(0) - case SHORT => opcs(1) - case CHAR => opcs(2) - case INT => opcs(3) - case LONG => opcs(4) - case FLOAT => opcs(5) - case DOUBLE => opcs(6) - } - if (chosen != -1) { emit(chosen) } - } - - if (from == to) { return } - // the only conversion involving BOOL that is allowed is (BOOL -> BOOL) - assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to") - - // We're done with BOOL already - from match { - - // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - - case BYTE => pickOne(JCodeMethodN.fromByteT2T) - case SHORT => pickOne(JCodeMethodN.fromShortT2T) - case CHAR => pickOne(JCodeMethodN.fromCharT2T) - case INT => pickOne(JCodeMethodN.fromIntT2T) - - case FLOAT => - import asm.Opcodes.{ F2L, F2D, F2I } - to match { - case LONG => emit(F2L) - case DOUBLE => emit(F2D) - case _ => emit(F2I); emitT2T(INT, to) - } - - case LONG => - import asm.Opcodes.{ L2F, L2D, L2I } - to match { - case FLOAT => emit(L2F) - case DOUBLE => emit(L2D) - case _ => emit(L2I); emitT2T(INT, to) - } - - case DOUBLE => - import asm.Opcodes.{ D2L, D2F, D2I } - to match { - case FLOAT => emit(D2F) - case LONG => emit(D2L) - case _ => emit(D2I); emitT2T(INT, to) - } - } - } // end of emitT2T() - - // can-multi-thread - final def boolconst(b: Boolean): Unit = { iconst(if (b) 1 else 0) } - - // can-multi-thread - final def iconst(cst: Int): Unit = { - if (cst >= -1 && cst <= 5) { - emit(Opcodes.ICONST_0 + cst) - } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) { - jmethod.visitIntInsn(Opcodes.BIPUSH, cst) - } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) { - jmethod.visitIntInsn(Opcodes.SIPUSH, cst) - } else { - jmethod.visitLdcInsn(Integer.valueOf(cst)) - } - } - - // can-multi-thread - final def lconst(cst: Long): Unit = { - if (cst == 0L || cst == 1L) { - emit(Opcodes.LCONST_0 + cst.asInstanceOf[Int]) - } else { - jmethod.visitLdcInsn(java.lang.Long.valueOf(cst)) - } - } - - // can-multi-thread - final def fconst(cst: Float): Unit = { - val bits: Int = java.lang.Float.floatToIntBits(cst) - if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2 - emit(Opcodes.FCONST_0 + cst.asInstanceOf[Int]) - } else { - jmethod.visitLdcInsn(java.lang.Float.valueOf(cst)) - } - } - - // can-multi-thread - final def dconst(cst: Double): Unit = { - val bits: Long = java.lang.Double.doubleToLongBits(cst) - if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d - emit(Opcodes.DCONST_0 + cst.asInstanceOf[Int]) - } else { - jmethod.visitLdcInsn(java.lang.Double.valueOf(cst)) - } - } - - // can-multi-thread - final def newarray(elem: BType): Unit = { - elem match { - case c: RefBType => - /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which isObject. 
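> Editor's aside: `iconst` above picks the smallest instruction that can push a given `Int` constant. The same selection rule as a pure-Scala sketch that returns mnemonics instead of emitting bytecode (`IconstSketch` is an invented name):

```scala
object IconstSketch {
  def pushInsn(cst: Int): String =
    if (cst == -1) "ICONST_M1"
    else if (cst >= 0 && cst <= 5) s"ICONST_$cst"
    else if (cst >= Byte.MinValue && cst <= Byte.MaxValue) s"BIPUSH $cst"   // one-byte operand
    else if (cst >= Short.MinValue && cst <= Short.MaxValue) s"SIPUSH $cst" // two-byte operand
    else s"LDC $cst"                                                        // constant-pool load

  def main(args: Array[String]): Unit =
    List(-1, 0, 5, 6, 100, 20000, 70000).foreach(c => println(s"$c -> ${pushInsn(c)}"))
}
```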
*/ - jmethod.visitTypeInsn(Opcodes.ANEWARRAY, c.classOrArrayType) - case _ => - assert(elem.isNonVoidPrimitiveType) - val rand = { - // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - elem match { - case BOOL => Opcodes.T_BOOLEAN - case BYTE => Opcodes.T_BYTE - case SHORT => Opcodes.T_SHORT - case CHAR => Opcodes.T_CHAR - case INT => Opcodes.T_INT - case LONG => Opcodes.T_LONG - case FLOAT => Opcodes.T_FLOAT - case DOUBLE => Opcodes.T_DOUBLE - } - } - jmethod.visitIntInsn(Opcodes.NEWARRAY, rand) - } - } - - - final def load( idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread - final def store(idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread - final def iinc( idx: Int, increment: Int): Unit = jmethod.visitIincInsn(idx, increment) // can-multi-thread - - final def aload( tk: BType): Unit = { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread - final def astore(tk: BType): Unit = { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread - - final def neg(tk: BType): Unit = { emitPrimitive(JCodeMethodN.negOpcodes, tk) } // can-multi-thread - final def add(tk: BType): Unit = { emitPrimitive(JCodeMethodN.addOpcodes, tk) } // can-multi-thread - final def sub(tk: BType): Unit = { emitPrimitive(JCodeMethodN.subOpcodes, tk) } // can-multi-thread - final def mul(tk: BType): Unit = { emitPrimitive(JCodeMethodN.mulOpcodes, tk) } // can-multi-thread - final def div(tk: BType): Unit = { emitPrimitive(JCodeMethodN.divOpcodes, tk) } // can-multi-thread - final def rem(tk: BType): Unit = { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread - - // can-multi-thread - final def invokespecial(owner: String, name: String, desc: String, itf: Boolean): Unit = { - emitInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, itf) - } - // can-multi-thread - final def invokestatic(owner: String, name: String, desc: String, itf: Boolean): Unit = { - emitInvoke(Opcodes.INVOKESTATIC, owner, name, desc, itf) - } - // can-multi-thread - final def invokeinterface(owner: String, name: String, desc: String): Unit = { - emitInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, itf = true) - } - // can-multi-thread - final def invokevirtual(owner: String, name: String, desc: String): Unit = { - emitInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, itf = false) - } - - def emitInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean): Unit = { - val node = new MethodInsnNode(opcode, owner, name, desc, itf) - jmethod.instructions.add(node) - } - - - // can-multi-thread - final def goTo(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.GOTO, label) } - // can-multi-thread - final def emitIF(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIF(), label) } - // can-multi-thread - final def emitIF_ICMP(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) } - // can-multi-thread - final def emitIF_ACMP(cond: TestOp, label: asm.Label): Unit = { - assert((cond == EQ) || (cond == NE), cond) - val opc = (if (cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE) - jmethod.visitJumpInsn(opc, label) - } - // can-multi-thread - final def emitIFNONNULL(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) } - // can-multi-thread - final def emitIFNULL (label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNULL, label) } - - // 
can-multi-thread - final def emitRETURN(tk: BType): Unit = { - if (tk == UNIT) { emit(Opcodes.RETURN) } - else { emitTypeBased(JCodeMethodN.returnOpcodes, tk) } - } - - /* Emits one of tableswitch or lookoupswitch. - * - * can-multi-thread - */ - final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double): Unit = { - assert(keys.length == branches.length) - - // For empty keys, it makes sense emitting LOOKUPSWITCH with defaultBranch only. - // Similar to what javac emits for a switch statement consisting only of a default case. - if (keys.length == 0) { - jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) - return - } - - // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort - var i = 1 - while (i < keys.length) { - var j = 1 - while (j <= keys.length - i) { - if (keys(j) < keys(j - 1)) { - val tmp = keys(j) - keys(j) = keys(j - 1) - keys(j - 1) = tmp - val tmpL = branches(j) - branches(j) = branches(j - 1) - branches(j - 1) = tmpL - } - j += 1 - } - i += 1 - } - - // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011) - i = 1 - while (i < keys.length) { - if (keys(i-1) == keys(i)) { - abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.") - } - i += 1 - } - - val keyMin = keys(0) - val keyMax = keys(keys.length - 1) - - val isDenseEnough: Boolean = { - /* Calculate in long to guard against overflow. TODO what overflow? */ - val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double] - val klenD: Double = keys.length - val kdensity: Double = (klenD / keyRangeD) - - kdensity >= minDensity - } - - if (isDenseEnough) { - // use a table in which holes are filled with defaultBranch. 
- val keyRange = (keyMax - keyMin + 1) - val newBranches = new Array[asm.Label](keyRange) - var oldPos = 0 - var i = 0 - while (i < keyRange) { - val key = keyMin + i; - if (keys(oldPos) == key) { - newBranches(i) = branches(oldPos) - oldPos += 1 - } else { - newBranches(i) = defaultBranch - } - i += 1 - } - assert(oldPos == keys.length, "emitSWITCH") - jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*) - } else { - jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) - } - } - - // internal helpers -- not part of the public API of `jcode` - // don't make private otherwise inlining will suffer - - // can-multi-thread - final def emitVarInsn(opc: Int, idx: Int, tk: BType): Unit = { - assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc) - jmethod.visitVarInsn(tk.typedOpcode(opc), idx) - } - - // ---------------- array load and store ---------------- - - // can-multi-thread - final def emitTypeBased(opcs: Array[Int], tk: BType): Unit = { - assert(tk != UNIT, tk) - val opc = { - if (tk.isRef) { opcs(0) } - else if (tk.isIntSizedType) { - (tk: @unchecked) match { - case BOOL | BYTE => opcs(1) - case SHORT => opcs(2) - case CHAR => opcs(3) - case INT => opcs(4) - } - } else { - (tk: @unchecked) match { - case LONG => opcs(5) - case FLOAT => opcs(6) - case DOUBLE => opcs(7) - } - } - } - emit(opc) - } - - // ---------------- primitive operations ---------------- - - // can-multi-thread - final def emitPrimitive(opcs: Array[Int], tk: BType): Unit = { - val opc = { - // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - tk match { - case LONG => opcs(1) - case FLOAT => opcs(2) - case DOUBLE => opcs(3) - case _ => opcs(0) - } - } - emit(opc) - } - - // can-multi-thread - final def drop(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) } - - // can-multi-thread - final def dup(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) } - - // ---------------- type checks and casts ---------------- - - // can-multi-thread - final def isInstance(tk: RefBType): Unit = { - jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.classOrArrayType) - } - - // can-multi-thread - final def checkCast(tk: RefBType): Unit = { - // TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk), "checkcast on boxed type: " + tk) - jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.classOrArrayType) - } - - def abort(msg: String): Nothing = { - report.error(msg) - throw new RuntimeException(msg) - } - - } // end of class JCodeMethodN - - /* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. 
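> Editor's aside: `emitSWITCH` above decides between TABLESWITCH and LOOKUPSWITCH by comparing the case density against `MIN_SWITCH_DENSITY` (0.7). The decision rule in isolation, as a runnable sketch (`SwitchDensitySketch` is an invented name):

```scala
object SwitchDensitySketch {
  def chooseSwitch(keys: Array[Int], minDensity: Double = 0.7): String =
    if (keys.isEmpty) "LOOKUPSWITCH (default branch only)"
    else {
      val sorted = keys.sorted
      // Compute the range in Long, as above, to guard against Int overflow.
      val keyRange = sorted.last.toLong - sorted.head.toLong + 1
      val density  = sorted.length.toDouble / keyRange.toDouble
      if (density >= minDensity) "TABLESWITCH" else "LOOKUPSWITCH"
    }

  def main(args: Array[String]): Unit = {
    println(chooseSwitch(Array(1, 2, 3, 5)))    // 4 keys over a range of 5 -> TABLESWITCH
    println(chooseSwitch(Array(1, 100, 10000))) // sparse keys -> LOOKUPSWITCH
  }
}
```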
*/ - object JCodeMethodN { - - import asm.Opcodes._ - - // ---------------- conversions ---------------- - - val fromByteT2T = { Array( -1, -1, I2C, -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT) - val fromCharT2T = { Array(I2B, I2S, -1, -1, I2L, I2F, I2D) } // for (CHAR -> INT) do nothing - val fromShortT2T = { Array(I2B, -1, I2C, -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing - val fromIntT2T = { Array(I2B, I2S, I2C, -1, I2L, I2F, I2D) } - - // ---------------- array load and store ---------------- - - val aloadOpcodes = { Array(AALOAD, BALOAD, SALOAD, CALOAD, IALOAD, LALOAD, FALOAD, DALOAD) } - val astoreOpcodes = { Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) } - val returnOpcodes = { Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) } - - // ---------------- primitive operations ---------------- - - val negOpcodes: Array[Int] = { Array(INEG, LNEG, FNEG, DNEG) } - val addOpcodes: Array[Int] = { Array(IADD, LADD, FADD, DADD) } - val subOpcodes: Array[Int] = { Array(ISUB, LSUB, FSUB, DSUB) } - val mulOpcodes: Array[Int] = { Array(IMUL, LMUL, FMUL, DMUL) } - val divOpcodes: Array[Int] = { Array(IDIV, LDIV, FDIV, DDIV) } - val remOpcodes: Array[Int] = { Array(IREM, LREM, FREM, DREM) } - - } // end of object JCodeMethodN - - // ---------------- adapted from scalaPrimitives ---------------- - - /* Given `code` reports the src TypeKind of the coercion indicated by `code`. - * To find the dst TypeKind, `ScalaPrimitivesOps.generatedKind(code)` can be used. - * - * can-multi-thread - */ - final def coercionFrom(code: Int): BType = { - import ScalaPrimitivesOps._ - (code: @switch) match { - case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE - case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT - case C2B | C2S | C2C | C2I | C2L | C2F | C2D => CHAR - case I2B | I2S | I2C | I2I | I2L | I2F | I2D => INT - case L2B | L2S | L2C | L2I | L2L | L2F | L2D => LONG - case F2B | F2S | F2C | F2I | F2L | F2F | F2D => FLOAT - case D2B | D2S | D2C | D2I | D2L | D2F | D2D => DOUBLE - } - } - - /* If code is a coercion primitive, the result type. 
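
The `from*T2T` tables above are indexed by the target kind in the fixed order BYTE, SHORT, CHAR, INT, LONG, FLOAT, DOUBLE, and -1 marks a conversion that needs no instruction. A self-contained illustration of that lookup, assuming the standard org.objectweb.asm artifact is on the classpath (the compiler itself uses a shaded copy of ASM):

    import org.objectweb.asm.Opcodes._

    object ConversionTableDemo {
      // Target kinds in the order the tables are laid out.
      val targets = Vector("BYTE", "SHORT", "CHAR", "INT", "LONG", "FLOAT", "DOUBLE")

      // Same layout as fromIntT2T above: one slot per target kind, -1 = no-op.
      val fromInt = Array(I2B, I2S, I2C, -1, I2L, I2F, I2D)

      def conversionOpcode(table: Array[Int], target: String): Option[Int] = {
        val opc = table(targets.indexOf(target))
        if (opc == -1) None else Some(opc) // -1: the value already fits the target kind
      }

      def main(args: Array[String]): Unit = {
        println(conversionOpcode(fromInt, "LONG")) // Some(133), i.e. Some(I2L)
        println(conversionOpcode(fromInt, "INT"))  // None: INT -> INT needs nothing
      }
    }
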
- * - * can-multi-thread - */ - final def coercionTo(code: Int): BType = { - import ScalaPrimitivesOps._ - (code: @switch) match { - case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE - case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR - case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT - case B2I | C2I | S2I | I2I | L2I | F2I | D2I => INT - case B2L | C2L | S2L | I2L | L2L | F2L | D2L => LONG - case B2F | C2F | S2F | I2F | L2F | F2F | D2F => FLOAT - case B2D | C2D | S2D | I2D | L2D | F2D | D2D => DOUBLE - } - } - - implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode) { - @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { mnode.instructions.foreachInsn(f) } - } - - implicit class InsnIterInsnList(lst: asm.tree.InsnList) { - - @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { - val insnIter = lst.iterator() - while (insnIter.hasNext) { - f(insnIter.next()) - } - } - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala deleted file mode 100644 index 125ee26b0528..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala +++ /dev/null @@ -1,908 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.annotation.tailrec - -import scala.collection.{ mutable, immutable } - -import scala.tools.asm -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.ast.TreeTypeMap -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.NameKinds._ -import dotty.tools.dotc.core.Names.TermName -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.util.Spans._ -import dotty.tools.dotc.report -import dotty.tools.dotc.transform.SymUtils._ - -/* - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 - * - */ -trait BCodeSkelBuilder extends BCodeHelpers { - import int.{_, given} - import DottyBackendInterface.{symExtensions, _} - import tpd._ - import bTypes._ - import coreBTypes._ - import bCodeAsmCommon._ - - lazy val NativeAttr: Symbol = requiredClass[scala.native] - - /** The destination of a value generated by `genLoadTo`. */ - enum LoadDestination: - /** The value is put on the stack, and control flows through to the next opcode. */ - case FallThrough - /** The value is put on the stack, and control flow is transferred to the given `label`. */ - case Jump(label: asm.Label) - /** The value is RETURN'ed from the enclosing method. */ - case Return - /** The value is ATHROW'n. */ - case Throw - end LoadDestination - - /* - * There's a dedicated PlainClassBuilder for each CompilationUnit, - * which simplifies the initialization of per-class data structures in `genPlainClass()` which in turn delegates to `initJClass()` - * - * The entry-point to emitting bytecode instructions is `genDefDef()` where the per-method data structures are initialized, - * including `resetMethodBookkeeping()` and `initJMethod()`. - * Once that's been done, and assuming the method being visited isn't abstract, `emitNormalMethodBody()` populates - * the ASM MethodNode instance with ASM AbstractInsnNodes. 
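
The `LoadDestination` enum above tells `genLoadTo` what to do with a value once it has been produced on the stack. A toy model of the same dispatch, with a plain String standing in for asm.Label and strings standing in for the emitted instructions:

    object LoadDestinationDemo {
      // Simplified stand-in for the backend's enum.
      enum Dest {
        case FallThrough         // leave the value on the stack, continue with the next opcode
        case Jump(label: String) // leave the value on the stack, then jump to `label`
        case Return              // return the value from the enclosing method
        case Throw               // throw the value
      }

      // What a generator would emit *after* the value-producing instructions.
      def epilogue(dest: Dest): List[String] = dest match {
        case Dest.FallThrough => Nil
        case Dest.Jump(label) => List(s"GOTO $label")
        case Dest.Return      => List("xRETURN")
        case Dest.Throw       => List("ATHROW")
      }

      def main(args: Array[String]): Unit =
        println(epilogue(Dest.Jump("afterLabeledBlock"))) // List(GOTO afterLabeledBlock)
    }
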
- * - * Given that CleanUp delivers trees that produce values on the stack, - * the entry-point to all-things instruction-emit is `genLoad()`. - * There, an operation taking N arguments results in recursively emitting instructions to lead each of them, - * followed by emitting instructions to process those arguments (to be found at run-time on the operand-stack). - * - * In a few cases the above recipe deserves more details, as provided in the documentation for: - * - `genLoadTry()` - * - `genSynchronized() - * - `jumpDest` , `cleanups` , `labelDefsAtOrUnder` - */ - abstract class PlainSkelBuilder(cunit: CompilationUnit) - extends BCClassGen - with BCAnnotGen - with BCInnerClassGen - with JAndroidBuilder - with BCForwardersGen - with BCPickles - with BCJGenSigGen { - - // Strangely I can't find this in the asm code 255, but reserving 1 for "this" - inline val MaximumJvmParameters = 254 - - // current class - var cnode: ClassNode1 = null - var thisName: String = null // the internal name of the class being emitted - - var claszSymbol: Symbol = null - var isCZParcelable = false - var isCZStaticModule = false - - /* ---------------- idiomatic way to ask questions to typer ---------------- */ - - def paramTKs(app: Apply, take: Int = -1): List[BType] = app match { - case Apply(fun, _) => - val funSym = fun.symbol - (funSym.info.firstParamTypes map toTypeKind) // this tracks mentioned inner classes (in innerClassBufferASM) - } - - def symInfoTK(sym: Symbol): BType = { - toTypeKind(sym.info) // this tracks mentioned inner classes (in innerClassBufferASM) - } - - def tpeTK(tree: Tree): BType = { toTypeKind(tree.tpe) } - - override def getCurrentCUnit(): CompilationUnit = { cunit } - - /* ---------------- helper utils for generating classes and fields ---------------- */ - - def genPlainClass(cd0: TypeDef) = cd0 match { - case TypeDef(_, impl: Template) => - assert(cnode == null, "GenBCode detected nested methods.") - - claszSymbol = cd0.symbol - isCZParcelable = isAndroidParcelableClass(claszSymbol) - isCZStaticModule = claszSymbol.isStaticModuleClass - thisName = internalName(claszSymbol) - - cnode = new ClassNode1() - - initJClass(cnode) - - val cd = if (isCZStaticModule) { - // Move statements from the primary constructor following the superclass constructor call to - // a newly synthesised tree representing the "", which also assigns the MODULE$ field. - // Because the assigments to both the module instance fields, and the fields of the module itself - // are in the , these fields can be static + final. - - // Should we do this transformation earlier, say in Constructors? Or would that just cause - // pain for scala-{js, native}? - // - // @sjrd (https://github.com/scala/scala3/pull/9181#discussion_r457458205): - // moving that before the back-end would make things significantly more complicated for - // Scala.js and Native. Both have a first-class concept of ModuleClass, and encode the - // singleton pattern of MODULE$ in a completely different way. In the Scala.js IR, there - // even isn't anything that corresponds to MODULE$ per se. - // - // So if you move this before the back-end, then Scala.js and Scala Native will have to - // reverse all the effects of this transformation, which would be counter-productive. - - - // TODO: remove `!f.name.is(LazyBitMapName)` once we change lazy val encoding - // https://github.com/scala/scala3/issues/7140 - // - // Lazy val encoding assumes bitmap fields are non-static - // - // See `tests/run/given-var.scala` - // - - // !!! 
Part of this logic is duplicated in JSCodeGen.genCompilationUnit - claszSymbol.info.decls.foreach { f => - if f.isField && !f.name.is(LazyBitMapName) then - f.setFlag(JavaStatic) - } - - val (clinits, body) = impl.body.partition(stat => stat.isInstanceOf[DefDef] && stat.symbol.isStaticConstructor) - - val (uptoSuperStats, remainingConstrStats) = splitAtSuper(impl.constr.rhs.asInstanceOf[Block].stats) - val clInitSymbol: TermSymbol = - if (clinits.nonEmpty) clinits.head.symbol.asTerm - else newSymbol( - claszSymbol, - nme.STATIC_CONSTRUCTOR, - JavaStatic | Method, - MethodType(Nil)(_ => Nil, _ => defn.UnitType), - privateWithin = NoSymbol, - coord = claszSymbol.coord - ) - - val moduleField = newSymbol( - claszSymbol, - str.MODULE_INSTANCE_FIELD.toTermName, - JavaStatic | Final, - claszSymbol.typeRef, - privateWithin = NoSymbol, - coord = claszSymbol.coord - ).entered - - val thisMap = new TreeMap { - override def transform(tree: Tree)(using Context) = { - val tp = tree.tpe.substThis(claszSymbol.asClass, claszSymbol.sourceModule.termRef) - tree.withType(tp) match { - case tree: This if tree.symbol == claszSymbol => - ref(claszSymbol.sourceModule) - case tree => - super.transform(tree) - } - } - } - - def rewire(stat: Tree) = thisMap.transform(stat).changeOwner(claszSymbol.primaryConstructor, clInitSymbol) - - val callConstructor = New(claszSymbol.typeRef).select(claszSymbol.primaryConstructor).appliedToTermArgs(Nil) - val assignModuleField = Assign(ref(moduleField), callConstructor) - val remainingConstrStatsSubst = remainingConstrStats.map(rewire) - val clinit = clinits match { - case (ddef: DefDef) :: _ => - cpy.DefDef(ddef)(rhs = Block(ddef.rhs :: assignModuleField :: remainingConstrStatsSubst, unitLiteral)) - case _ => - DefDef(clInitSymbol, Block(assignModuleField :: remainingConstrStatsSubst, unitLiteral)) - } - - val constr2 = { - val rhs = Block(uptoSuperStats, impl.constr.rhs.asInstanceOf[Block].expr) - cpy.DefDef(impl.constr)(rhs = rhs) - } - - val impl2 = cpy.Template(impl)(constr = constr2, body = clinit :: body) - cpy.TypeDef(cd0)(rhs = impl2) - } else cd0 - - val hasStaticCtor = isCZStaticModule || cd.symbol.info.decls.exists(_.isStaticConstructor) - if (!hasStaticCtor && isCZParcelable) fabricateStaticInitAndroid() - - val optSerial: Option[Long] = - claszSymbol.getAnnotation(defn.SerialVersionUIDAnnot).flatMap { annot => - if (claszSymbol.is(Trait)) { - report.warning("@SerialVersionUID does nothing on a trait", annot.tree.sourcePos) - None - } else { - val vuid = annot.argumentConstant(0).map(_.longValue) - if (vuid.isEmpty) - report.error("The argument passed to @SerialVersionUID must be a constant", - annot.argument(0).getOrElse(annot.tree).sourcePos) - vuid - } - } - if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)} - - addClassFields() - gen(cd.rhs) - - if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern)) - AsmUtils.traceClass(cnode) - - cnode.innerClasses - assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().") - - } // end of method genPlainClass() - - /* - * must-single-thread - */ - private def initJClass(jclass: asm.ClassVisitor): Unit = { - - val ps = claszSymbol.info.parents - val superClass: String = if (ps.isEmpty) ObjectRef.internalName else internalName(ps.head.typeSymbol) - val interfaceNames0 = classBTypeFromSymbol(claszSymbol).info.interfaces.map(_.internalName) - /* To avoid deadlocks when combining objects, lambdas and multi-threading, - * lambdas in objects are 
compiled to instance methods of the module class - * instead of static methods (see tests/run/deadlock.scala and - * https://github.com/scala/scala-dev/issues/195 for details). - * This has worked well for us so far but this is problematic for - * serialization: serializing a lambda requires serializing all the values - * it captures, if this lambda is in an object, this means serializing the - * enclosing object, which fails if the object does not extend - * Serializable. - * Because serializing objects is basically free since #5775, it seems like - * the simplest solution is to simply make all objects Serializable, this - * certainly seems preferable to deadlocks. - * This cannot be done earlier because Scala.js would not like it (#9596). - */ - val interfaceNames = - if (claszSymbol.is(ModuleClass) && !interfaceNames0.contains("java/io/Serializable")) - interfaceNames0 :+ "java/io/Serializable" - else - interfaceNames0 - - val flags = javaFlags(claszSymbol) - - val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner) - cnode.visit(classfileVersion, flags, - thisName, thisSignature, - superClass, interfaceNames.toArray) - - if (emitSource) { - cnode.visitSource(cunit.source.file.name, null /* SourceDebugExtension */) - } - - enclosingMethodAttribute(claszSymbol, internalName, asmMethodType(_).descriptor) match { - case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) => - cnode.visitOuterClass(className, methodName, methodDescriptor) - case _ => () - } - - val ssa = None // TODO: inlined form `getAnnotPickle(thisName, claszSymbol)`. Should something be done on Dotty? - cnode.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) - emitAnnotations(cnode, claszSymbol.annotations ++ ssa) - - if (!isCZStaticModule && !isCZParcelable) { - val skipStaticForwarders = (claszSymbol.is(Module) || ctx.settings.XnoForwarders.value) - if (!skipStaticForwarders) { - val lmoc = claszSymbol.companionModule - // add static forwarders if there are no name conflicts; see bugs #363 and #1735 - if (lmoc != NoSymbol) { - // it must be a top level class (name contains no $s) - val isCandidateForForwarders = (lmoc.is(Module)) && lmoc.isStatic - if (isCandidateForForwarders) { - report.log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'") - addForwarders(cnode, thisName, lmoc.moduleClass) - } - } - } - - } - - // the invoker is responsible for adding a class-static constructor. - - } // end of method initJClass - - /* - * must-single-thread - */ - private def fabricateStaticInitAndroid(): Unit = { - - val clinit: asm.MethodVisitor = cnode.visitMethod( - GenBCodeOps.PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED - CLASS_CONSTRUCTOR_NAME, - "()V", - null, // no java-generic-signature - null // no throwable exceptions - ) - clinit.visitCode() - - legacyAddCreatorCode(clinit, cnode, thisName) - - clinit.visitInsn(asm.Opcodes.RETURN) - clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments - clinit.visitEnd() - } - - def addClassFields(): Unit = { - /* Non-method term members are fields, except for module members. Module - * members can only happen on .NET (no flatten) for inner traits. There, - * a module symbol is generated (transformInfo in mixin) which is used - * as owner for the members of the implementation class (so that the - * backend emits them as static). - * No code is needed for this module symbol. 
- */ - for (f <- claszSymbol.info.decls.filter(p => p.isTerm && !p.is(Method))) { - val javagensig = getGenericSignature(f, claszSymbol) - val flags = javaFieldFlags(f) - - assert(!f.isStaticMember || !claszSymbol.isInterface || !f.is(Mutable), - s"interface $claszSymbol cannot have non-final static field $f") - - val jfield = new asm.tree.FieldNode( - flags, - f.javaSimpleName, - symInfoTK(f).descriptor, - javagensig, - null // no initial value - ) - cnode.fields.add(jfield) - emitAnnotations(jfield, f.annotations) - } - - } // end of method addClassFields() - - // current method - var mnode: MethodNode1 = null - var jMethodName: String = null - var isMethSymStaticCtor = false - var returnType: BType = null - var methSymbol: Symbol = null - // used by genLoadTry() and genSynchronized() - var earlyReturnVar: Symbol = null - var shouldEmitCleanup = false - // line numbers - var lastEmittedLineNr = -1 - - object bc extends JCodeMethodN { - override def jmethod = PlainSkelBuilder.this.mnode - } - - /* ---------------- Part 1 of program points, ie Labels in the ASM world ---------------- */ - - /* - * A jump is represented as a Return node whose `from` symbol denotes a Labeled's Bind node, the target of the jump. - * The `jumpDest` map is used to find the `LoadDestination` at the end of the `Labeled` block, as well as the - * corresponding expected type. The `LoadDestination` can never be `FallThrough` here. - */ - var jumpDest: immutable.Map[ /* Labeled */ Symbol, (BType, LoadDestination) ] = null - def registerJumpDest(labelSym: Symbol, expectedType: BType, dest: LoadDestination): Unit = { - assert(labelSym.is(Label), s"trying to register a jump-dest for a non-label symbol, at: ${labelSym.span}") - assert(dest != LoadDestination.FallThrough, s"trying to register a FallThrough dest for label, at: ${labelSym.span}") - assert(!jumpDest.contains(labelSym), s"trying to register a second jump-dest for label, at: ${labelSym.span}") - jumpDest += (labelSym -> (expectedType, dest)) - } - def findJumpDest(labelSym: Symbol): (BType, LoadDestination) = { - assert(labelSym.is(Label), s"trying to map a non-label symbol to an asm.Label, at: ${labelSym.span}") - jumpDest.getOrElse(labelSym, { - abort(s"unknown label symbol, for label at: ${labelSym.span}") - }) - } - - /* - * A program point may be lexically nested (at some depth) - * (a) in the try-clause of a try-with-finally expression - * (b) in a synchronized block. - * Each of the constructs above establishes a "cleanup block" to execute upon - * both normal-exit, early-return, and abrupt-termination of the instructions it encloses. - * - * The `cleanups` LIFO queue represents the nesting of active (for the current program point) - * pending cleanups. For each such cleanup an asm.Label indicates the start of its cleanup-block. - * At any given time during traversal of the method body, - * the head of `cleanups` denotes the cleanup-block for the closest enclosing try-with-finally or synchronized-expression. - * - * `cleanups` is used: - * - * (1) upon visiting a Return statement. - * In case of pending cleanups, we can't just emit a RETURN instruction, but must instead: - * - store the result (if any) in `earlyReturnVar`, and - * - jump to the next pending cleanup. - * See `genReturn()` - * - * (2) upon emitting a try-with-finally or a synchronized-expr, - * In these cases, the targets of the above jumps are emitted, - * provided an early exit was actually encountered somewhere in the protected clauses. 
- * See `genLoadTry()` and `genSynchronized()` - * - * The code thus emitted for jumps and targets covers the early-return case. - * The case of abrupt (ie exceptional) termination is covered by exception handlers - * emitted for that purpose as described in `genLoadTry()` and `genSynchronized()`. - */ - var cleanups: List[asm.Label] = Nil - def registerCleanup(finCleanup: asm.Label): Unit = { - if (finCleanup != null) { cleanups = finCleanup :: cleanups } - } - def unregisterCleanup(finCleanup: asm.Label): Unit = { - if (finCleanup != null) { - assert(cleanups.head eq finCleanup, - s"Bad nesting of cleanup operations: $cleanups trying to unregister: $finCleanup") - cleanups = cleanups.tail - } - } - - /* ---------------- local variables and params ---------------- */ - - case class Local(tk: BType, name: String, idx: Int, isSynth: Boolean) - - /* - * Bookkeeping for method-local vars and method-params. - * - * TODO: use fewer slots. local variable slots are never re-used in separate blocks. - * In the following example, x and y could use the same slot. - * def foo() = { - * { val x = 1 } - * { val y = "a" } - * } - */ - object locals { - - private val slots = mutable.AnyRefMap.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth)) - - private var nxtIdx = -1 // next available index for local-var - - def reset(isStaticMethod: Boolean): Unit = { - slots.clear() - nxtIdx = if (isStaticMethod) 0 else 1 - } - - def contains(locSym: Symbol): Boolean = { slots.contains(locSym) } - - def apply(locSym: Symbol): Local = { slots.apply(locSym) } - - /* Make a fresh local variable, ensuring a unique name. - * The invoker must make sure inner classes are tracked for the sym's tpe. - */ - def makeLocal(tk: BType, name: String, tpe: Type, pos: Span): Symbol = { - - val locSym = newSymbol(methSymbol, name.toTermName, Synthetic, tpe, NoSymbol, pos) - makeLocal(locSym, tk) - locSym - } - - def makeLocal(locSym: Symbol): Local = { - makeLocal(locSym, symInfoTK(locSym)) - } - - def getOrMakeLocal(locSym: Symbol): Local = { - // `getOrElse` below has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `locals` map. - slots.getOrElse(locSym, makeLocal(locSym)) - } - - def reuseLocal(sym: Symbol, loc: Local): Unit = - val existing = slots.put(sym, loc) - if (existing.isDefined) - report.error("attempt to create duplicate local var.", ctx.source.atSpan(sym.span)) - - def reuseThisSlot(sym: Symbol): Unit = - reuseLocal(sym, Local(symInfoTK(sym), sym.javaSimpleName, 0, sym.is(Synthetic))) - - private def makeLocal(sym: Symbol, tk: BType): Local = { - assert(nxtIdx != -1, "not a valid start index") - val loc = Local(tk, sym.javaSimpleName, nxtIdx, sym.is(Synthetic)) - val existing = slots.put(sym, loc) - if (existing.isDefined) - report.error("attempt to create duplicate local var.", ctx.source.atSpan(sym.span)) - assert(tk.size > 0, "makeLocal called for a symbol whose type is Unit.") - nxtIdx += tk.size - loc - } - - // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol. - def store(locSym: Symbol): Unit = { - val Local(tk, _, idx, _) = slots(locSym) - bc.store(idx, tk) - } - - def load(locSym: Symbol): Unit = { - val Local(tk, _, idx, _) = slots(locSym) - bc.load(idx, tk) - } - - } - - /* ---------------- Part 2 of program points, ie Labels in the ASM world ---------------- */ - - // bookkeeping the scopes of non-synthetic local vars, to emit debug info (`emitVars`). 
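
The `locals` bookkeeping above hands out JVM local-variable slots: slot 0 is reserved for `this` in instance methods, and wide values (long, double) occupy two consecutive slots, which is why `nxtIdx` advances by `tk.size`. A minimal standalone allocator with the same slot arithmetic:

    object SlotAllocatorDemo {
      final case class Local(name: String, size: Int, idx: Int)

      final class Slots(isStaticMethod: Boolean) {
        private var nxtIdx = if (isStaticMethod) 0 else 1 // slot 0 holds `this` in instance methods
        private val slots  = scala.collection.mutable.LinkedHashMap.empty[String, Local]

        /** size is 2 for long/double, 1 otherwise; Unit-typed locals get no slot at all. */
        def makeLocal(name: String, size: Int): Local = {
          require(size > 0, "no slot for a Unit-typed local")
          val loc = Local(name, size, nxtIdx)
          slots += name -> loc
          nxtIdx += size // wide values consume two slots
          loc
        }
      }

      def main(args: Array[String]): Unit = {
        val s = new Slots(isStaticMethod = false)
        println(s.makeLocal("x", 1)) // Local(x,1,1)
        println(s.makeLocal("d", 2)) // Local(d,2,2) -- also claims slot 3
        println(s.makeLocal("y", 1)) // Local(y,1,4)
      }
    }
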
- var varsInScope: List[(Symbol, asm.Label)] = null // (local-var-sym -> start-of-scope) - - // helpers around program-points. - def lastInsn: asm.tree.AbstractInsnNode = mnode.instructions.getLast - def currProgramPoint(): asm.Label = { - lastInsn match { - case labnode: asm.tree.LabelNode => labnode.getLabel - case _ => - val pp = new asm.Label - mnode visitLabel pp - pp - } - } - def markProgramPoint(lbl: asm.Label): Unit = { - val skip = (lbl == null) || isAtProgramPoint(lbl) - if (!skip) { mnode visitLabel lbl } - } - def isAtProgramPoint(lbl: asm.Label): Boolean = { - def getNonLineNumberNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match { - case a: asm.tree.LineNumberNode => getNonLineNumberNode(a.getPrevious) // line numbers aren't part of code itself - case _ => a - } - (getNonLineNumberNode(lastInsn) match { - case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); - case _ => false } ) - } - def lineNumber(tree: Tree): Unit = { - if (!emitLines || !tree.span.exists) return; - val nr = ctx.source.offsetToLine(tree.span.point) + 1 - if (nr != lastEmittedLineNr) { - lastEmittedLineNr = nr - lastInsn match { - case lnn: asm.tree.LineNumberNode => - // overwrite previous landmark as no instructions have been emitted for it - lnn.line = nr - case _ => - mnode.visitLineNumber(nr, currProgramPoint()) - } - } - } - - // on entering a method - def resetMethodBookkeeping(dd: DefDef) = { - val rhs = dd.rhs - locals.reset(isStaticMethod = methSymbol.isStaticMember) - jumpDest = immutable.Map.empty - - // check previous invocation of genDefDef exited as many varsInScope as it entered. - assert(varsInScope == null, "Unbalanced entering/exiting of GenBCode's genBlock().") - // check previous invocation of genDefDef unregistered as many cleanups as it registered. - assert(cleanups == Nil, "Previous invocation of genDefDef didn't unregister as many cleanups as it registered.") - earlyReturnVar = null - shouldEmitCleanup = false - - lastEmittedLineNr = -1 - } - - /* ---------------- top-down traversal invoking ASM Tree API along the way ---------------- */ - - def gen(tree: Tree): Unit = { - tree match { - case tpd.EmptyTree => () - - case ValDef(name, tpt, rhs) => () // fields are added in `genPlainClass()`, via `addClassFields()` - - case dd: DefDef => - /* First generate a static forwarder if this is a non-private trait - * trait method. This is required for super calls to this method, which - * go through the static forwarder in order to work around limitations - * of the JVM. - * - * For the $init$ method, we must not leave it as a default method, but - * instead we must put the whole body in the static method. If we leave - * it as a default method, Java classes cannot extend Scala classes that - * extend several Scala traits, since they then inherit unrelated default - * $init$ methods. See #8599. scalac does the same thing. - * - * In theory, this would go in a separate MiniPhase, but it would have to - * sit in a MegaPhase of its own between GenSJSIR and GenBCode, so the cost - * is not worth it. We directly do it in this back-end instead, which also - * kind of makes sense because it is JVM-specific. 
- */ - val sym = dd.symbol - val needsStaticImplMethod = - claszSymbol.isInterface && !dd.rhs.isEmpty && !sym.isPrivate && !sym.isStaticMember - if needsStaticImplMethod then - if sym.name == nme.TRAIT_CONSTRUCTOR then - genTraitConstructorDefDef(dd) - else - genStaticForwarderForDefDef(dd) - genDefDef(dd) - else - genDefDef(dd) - - case tree: Template => - val body = - if (tree.constr.rhs.isEmpty) tree.body - else tree.constr :: tree.body - body foreach gen - - case _ => abort(s"Illegal tree in gen: $tree") - } - } - - /* - * must-single-thread - */ - def initJMethod(flags: Int, params: List[Symbol]): Unit = { - - val jgensig = getGenericSignature(methSymbol, claszSymbol) - val (excs, others) = methSymbol.annotations.partition(_.symbol eq defn.ThrowsAnnot) - val thrownExceptions: List[String] = getExceptions(excs) - - val bytecodeName = - if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME - else jMethodName - - val mdesc = asmMethodType(methSymbol).descriptor - mnode = cnode.visitMethod( - flags, - bytecodeName, - mdesc, - jgensig, - mkArrayS(thrownExceptions) - ).asInstanceOf[MethodNode1] - - // TODO param names: (m.params map (p => javaName(p.sym))) - - emitAnnotations(mnode, others) - emitParamNames(mnode, params) - emitParamAnnotations(mnode, params.map(_.annotations)) - - } // end of method initJMethod - - private def genTraitConstructorDefDef(dd: DefDef): Unit = - val statifiedDef = makeStatifiedDefDef(dd) - genDefDef(statifiedDef) - - /** Creates a copy of the given DefDef that is static and where an explicit - * self parameter represents the original `this` value. - * - * Example: from - * {{{ - * trait Enclosing { - * def foo(x: Int): String = this.toString() + x - * } - * }}} - * the statified version of `foo` would be - * {{{ - * static def foo($self: Enclosing, x: Int): String = $self.toString() + x - * }}} - */ - private def makeStatifiedDefDef(dd: DefDef): DefDef = - val origSym = dd.symbol.asTerm - val newSym = makeStatifiedDefSymbol(origSym, origSym.name) - tpd.DefDef(newSym, { paramRefss => - val selfParamRef :: regularParamRefs = paramRefss.head: @unchecked - val enclosingClass = origSym.owner.asClass - new TreeTypeMap( - typeMap = _.substThis(enclosingClass, selfParamRef.symbol.termRef) - .subst(dd.termParamss.head.map(_.symbol), regularParamRefs.map(_.symbol.termRef)), - treeMap = { - case tree: This if tree.symbol == enclosingClass => selfParamRef - case tree => tree - }, - oldOwners = origSym :: Nil, - newOwners = newSym :: Nil - ).transform(dd.rhs) - }) - - private def genStaticForwarderForDefDef(dd: DefDef): Unit = - val forwarderDef = makeStaticForwarder(dd) - genDefDef(forwarderDef) - - /* Generates a synthetic static forwarder for a trait method. - * For a method such as - * def foo(...args: Ts): R - * in trait X, we generate the following method: - * static def foo$($this: X, ...args: Ts): R = - * invokespecial $this.X::foo(...args) - * We force an invokespecial with the attachment UseInvokeSpecial. It is - * necessary to make sure that the call will not follow overrides of foo() - * in subtraits and subclasses, since the whole point of this forward is to - * encode super calls. 
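
The statified form documented above can be written out by hand: the former `this` becomes an explicit self parameter, so the method no longer needs an instance. A small sketch of the idea (`fooStatic` is an illustrative name, not the name the backend actually generates):

    object StatifiedDemo {
      trait Enclosing {
        def foo(x: Int): String = this.toString + x
      }

      // Hand-written equivalent of the statified method: `this` is now `self`.
      def fooStatic(self: Enclosing, x: Int): String = self.toString + x

      def main(args: Array[String]): Unit = {
        val e = new Enclosing {}
        println(e.foo(1) == fooStatic(e, 1)) // true: both routes compute the same result
      }
    }
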
- */ - private def makeStaticForwarder(dd: DefDef): DefDef = - val origSym = dd.symbol.asTerm - val name = traitSuperAccessorName(origSym).toTermName - val sym = makeStatifiedDefSymbol(origSym, name) - tpd.DefDef(sym, { paramss => - val params = paramss.head - tpd.Apply(params.head.select(origSym), params.tail) - .withAttachment(BCodeHelpers.UseInvokeSpecial, ()) - }) - - private def makeStatifiedDefSymbol(origSym: TermSymbol, name: TermName): TermSymbol = - val info = origSym.info match - case mt: MethodType => - MethodType(nme.SELF :: mt.paramNames, origSym.owner.typeRef :: mt.paramInfos, mt.resType) - origSym.copy( - name = name.toTermName, - flags = Method | JavaStatic, - info = info - ).asTerm - - def genDefDef(dd: DefDef): Unit = { - val rhs = dd.rhs - val vparamss = dd.termParamss - // the only method whose implementation is not emitted: getClass() - if (dd.symbol eq defn.Any_getClass) { return } - assert(mnode == null, "GenBCode detected nested method.") - - methSymbol = dd.symbol - jMethodName = methSymbol.javaSimpleName - returnType = asmMethodType(dd.symbol).returnType - isMethSymStaticCtor = methSymbol.isStaticConstructor - - resetMethodBookkeeping(dd) - - // add method-local vars for params - - assert(vparamss.isEmpty || vparamss.tail.isEmpty, s"Malformed parameter list: $vparamss") - val params = if (vparamss.isEmpty) Nil else vparamss.head - for (p <- params) { locals.makeLocal(p.symbol) } - // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug") - - if (params.size > MaximumJvmParameters) { - // SI-7324 - report.error(em"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) - return - } - - val isNative = methSymbol.hasAnnotation(NativeAttr) - val isAbstractMethod = (methSymbol.is(Deferred) || (methSymbol.owner.isInterface && ((methSymbol.is(Deferred)) || methSymbol.isClassConstructor))) - val flags = - import GenBCodeOps.addFlagIf - javaFlags(methSymbol) - .addFlagIf(isAbstractMethod, asm.Opcodes.ACC_ABSTRACT) - .addFlagIf(false /*methSymbol.isStrictFP*/, asm.Opcodes.ACC_STRICT) - .addFlagIf(isNative, asm.Opcodes.ACC_NATIVE) // native methods of objects are generated in mirror classes - - // TODO needed? 
for(ann <- m.symbol.annotations) { ann.symbol.initialize } - val paramSyms = params.map(_.symbol) - initJMethod(flags, paramSyms) - - - if (!isAbstractMethod && !isNative) { - // #14773 Reuse locals slots for tailrec-generated mutable vars - val trimmedRhs: Tree = - @tailrec def loop(stats: List[Tree]): List[Tree] = - stats match - case (tree @ ValDef(TailLocalName(_, _), _, _)) :: rest if tree.symbol.isAllOf(Mutable | Synthetic) => - tree.rhs match - case This(_) => - locals.reuseThisSlot(tree.symbol) - loop(rest) - case rhs: Ident if paramSyms.contains(rhs.symbol) => - locals.reuseLocal(tree.symbol, locals(rhs.symbol)) - loop(rest) - case _ => - stats - case _ => - stats - end loop - - rhs match - case Block(stats, expr) => - val trimmedStats = loop(stats) - if trimmedStats eq stats then - rhs - else - Block(trimmedStats, expr) - case _ => - rhs - end trimmedRhs - - def emitNormalMethodBody(): Unit = { - val veryFirstProgramPoint = currProgramPoint() - - if trimmedRhs == tpd.EmptyTree then - report.error( - em"Concrete method has no definition: $dd${ - if (ctx.settings.Ydebug.value) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" - else ""}", - ctx.source.atSpan(NoSpan) - ) - else - genLoadTo(trimmedRhs, returnType, LoadDestination.Return) - - if (emitVars) { - // add entries to LocalVariableTable JVM attribute - val onePastLastProgramPoint = currProgramPoint() - val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0) - if (!hasStaticBitSet) { - mnode.visitLocalVariable( - "this", - "L" + thisName + ";", - null, - veryFirstProgramPoint, - onePastLastProgramPoint, - 0 - ) - } - for (p <- params) { emitLocalVarScope(p.symbol, veryFirstProgramPoint, onePastLastProgramPoint, force = true) } - } - - if (isMethSymStaticCtor) { appendToStaticCtor(dd) } - } // end of emitNormalMethodBody() - - lineNumber(rhs) - emitNormalMethodBody() - - // Note we don't invoke visitMax, thus there are no FrameNode among mnode.instructions. - // The only non-instruction nodes to be found are LabelNode and LineNumberNode. - } - - if (AsmUtils.traceMethodEnabled && mnode.name.contains(AsmUtils.traceMethodPattern)) - AsmUtils.traceMethod(mnode) - - mnode = null - } // end of method genDefDef() - - /* - * must-single-thread - * - * TODO document, explain interplay with `fabricateStaticInitAndroid()` - */ - private def appendToStaticCtor(dd: DefDef): Unit = { - - def insertBefore( - location: asm.tree.AbstractInsnNode, - i0: asm.tree.AbstractInsnNode, - i1: asm.tree.AbstractInsnNode): Unit = { - if (i0 != null) { - mnode.instructions.insertBefore(location, i0.clone(null)) - mnode.instructions.insertBefore(location, i1.clone(null)) - } - } - - // collect all return instructions - var rets: List[asm.tree.AbstractInsnNode] = Nil - mnode foreachInsn { i => if (i.getOpcode() == asm.Opcodes.RETURN) { rets ::= i } } - if (rets.isEmpty) { return } - - var insnParcA: asm.tree.AbstractInsnNode = null - var insnParcB: asm.tree.AbstractInsnNode = null - // android creator code - if (isCZParcelable) { - // add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator - val andrFieldDescr = classBTypeFromSymbol(AndroidCreatorClass).descriptor - cnode.visitField( - asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL, - "CREATOR", - andrFieldDescr, - null, - null - ) - // INVOKESTATIC CREATOR(): android.os.Parcelable$Creator; -- TODO where does this Android method come from? 
- val callee = claszSymbol.companionModule.info.member(androidFieldName).symbol - val jowner = internalName(callee.owner) - val jname = callee.javaSimpleName - val jtype = asmMethodType(callee).descriptor - insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype, false) - // PUTSTATIC `thisName`.CREATOR; - insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr) - } - - // insert a few instructions for initialization before each return instruction - for(r <- rets) { - insertBefore(r, insnParcA, insnParcB) - } - - } - - def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false): Unit = { - val Local(tk, name, idx, isSynth) = locals(sym) - if (force || !isSynth) { - mnode.visitLocalVariable(name, tk.descriptor, null, start, end, idx) - } - } - - def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination): Unit - - } // end of class PlainSkelBuilder - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala deleted file mode 100644 index b5ed27511e7e..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala +++ /dev/null @@ -1,426 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.collection.immutable -import scala.tools.asm - -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.StdNames.nme -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.ast.tpd - -/* - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 - * - */ -trait BCodeSyncAndTry extends BCodeBodyBuilder { - import int.given - import tpd._ - import bTypes._ - import coreBTypes._ - /* - * Functionality to lower `synchronized` and `try` expressions. - */ - abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) { - - def genSynchronized(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { - case Apply(TypeApply(fun, _), args) => - val monitor = locals.makeLocal(ObjectRef, "monitor", defn.ObjectType, tree.span) - val monCleanup = new asm.Label - - // if the synchronized block returns a result, store it in a local variable. - // Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks). - val hasResult = (expectedType != UNIT) - val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult", defn.ObjectType, tree.span) else null - - /* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */ - genLoadQualifier(fun) - bc dup ObjectRef - locals.store(monitor) - emit(asm.Opcodes.MONITORENTER) - - /* ------ (2) Synchronized block. - * Reached by fall-through from (1). - * Protected by: - * (2.a) the EH-version of the monitor-exit, and - * (2.b) whatever protects the whole synchronized expression. - * ------ - */ - val startProtected = currProgramPoint() - registerCleanup(monCleanup) - genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */) - unregisterCleanup(monCleanup) - if (hasResult) { locals.store(monitorResult) } - nopIfNeeded(startProtected) - val endProtected = currProgramPoint() - - /* ------ (3) monitor-exit after normal, non-early-return, termination of (2). - * Reached by fall-through from (2). - * Protected by whatever protects the whole synchronized expression. 
- * ------ - */ - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - if (hasResult) { locals.load(monitorResult) } - val postHandler = new asm.Label - bc goTo postHandler - - /* ------ (4) exception-handler version of monitor-exit code. - * Reached upon abrupt termination of (2). - * Protected by whatever protects the whole synchronized expression. - * null => "any" exception in bytecode, like we emit for finally. - * Important not to use j/l/Throwable which dooms the method to a life of interpretation! (SD-233) - * ------ - */ - protect(startProtected, endProtected, currProgramPoint(), null) - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - emit(asm.Opcodes.ATHROW) - - /* ------ (5) cleanup version of monitor-exit code. - * Reached upon early-return from (2). - * Protected by whatever protects the whole synchronized expression. - * ------ - */ - if (shouldEmitCleanup) { - markProgramPoint(monCleanup) - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - pendingCleanups() - } - - /* ------ (6) normal exit of the synchronized expression. - * Reached after normal, non-early-return, termination of (3). - * Protected by whatever protects the whole synchronized expression. - * ------ - */ - mnode visitLabel postHandler - - lineNumber(tree) - - expectedType - } - - /* - * Detects whether no instructions have been emitted since label `lbl` and if so emits a NOP. - * Useful to avoid emitting an empty try-block being protected by exception handlers, - * which results in "java.lang.ClassFormatError: Illegal exception table range". See SI-6102. - */ - def nopIfNeeded(lbl: asm.Label): Unit = { - val noInstructionEmitted = isAtProgramPoint(lbl) - if (noInstructionEmitted) { emit(asm.Opcodes.NOP) } - } - - /* - * Emitting try-catch is easy, emitting try-catch-finally not quite so. - * A finally-block (which always has type Unit, thus leaving the operand stack unchanged) - * affects control-transfer from protected regions, as follows: - * - * (a) `return` statement: - * - * First, the value to return (if any) is evaluated. - * Afterwards, all enclosing finally-blocks are run, from innermost to outermost. - * Only then is the return value (if any) returned. - * - * Some terminology: - * (a.1) Executing a return statement that is protected - * by one or more finally-blocks is called "early return" - * (a.2) the chain of code sections (a code section for each enclosing finally-block) - * to run upon early returns is called "cleanup chain" - * - * As an additional spin, consider a return statement in a finally-block. - * In this case, the value to return depends on how control arrived at that statement: - * in case it arrived via a previous return, the previous return enjoys priority: - * the value to return is given by that statement. - * - * (b) A finally-block protects both the try-clause and the catch-clauses. - * - * Sidenote: - * A try-clause may contain an empty block. On CLR, a finally-block has special semantics - * regarding Abort interruptions; but on the JVM it's safe to elide an exception-handler - * that protects an "empty" range ("empty" as in "containing NOPs only", - * see `asm.optimiz.DanglingExcHandlers` and SI-6720). - * - * This means a finally-block indicates instructions that can be reached: - * (b.1) Upon normal (non-early-returning) completion of the try-clause or a catch-clause - * In this case, the next-program-point is that following the try-catch-finally expression. 
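
At the source level, the six regions of `genSynchronized` amount to "enter the monitor, run the body, release on every exit path". The sketch below uses an explicit java.util.concurrent lock, since MONITORENTER/MONITOREXIT cannot be written directly in Scala source; it only illustrates the region structure, not the emitted bytecode:

    import java.util.concurrent.locks.ReentrantLock

    object SyncLoweringShape {
      def lowered[A](lock: ReentrantLock)(body: => A): A = {
        lock.lock()           // (1) "MONITORENTER"
        try body              // (2) protected region; the result is kept for the caller
        finally lock.unlock() // (3)-(5) "MONITOREXIT" on normal, early-return and abrupt exits
      }

      def main(args: Array[String]): Unit = {
        val lock = new ReentrantLock
        println(lowered(lock)(21 * 2)) // 42, lock released afterwards
      }
    }
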
- * (b.2) Upon early-return initiated in the try-clause or a catch-clause - * In this case, the next-program-point is the enclosing cleanup section (if any), otherwise return. - * (b.3) Upon abrupt termination (due to unhandled exception) of the try-clause or a catch-clause - * In this case, the unhandled exception must be re-thrown after running the finally-block. - * - * (c) finally-blocks are implicit to `synchronized` (a finally-block is added to just release the lock) - * that's why `genSynchronized()` too emits cleanup-sections. - * - * A number of code patterns can be emitted to realize the intended semantics. - * - * A popular alternative (GenICode, javac) consists in duplicating the cleanup-chain at each early-return position. - * The principle at work being that once control is transferred to a cleanup-section, - * control will always stay within the cleanup-chain. - * That is, barring an exception being thrown in a cleanup-section, in which case the enclosing try-block - * (reached via abrupt termination) takes over. - * - * The observations above hint at another code layout, less verbose, for the cleanup-chain. - * - * The code layout that GenBCode emits takes into account that once a cleanup section has been reached, - * jumping to the next cleanup-section (and so on, until the outermost one) realizes the correct semantics. - * - * There is still code duplication in that two cleanup-chains are needed (but this is unavoidable, anyway): - * one for normal control flow and another chain consisting of exception handlers. - * The in-line comments below refer to them as - * - "early-return-cleanups" and - * - "exception-handler-version-of-finally-block" respectively. - * - */ - def genLoadTry(tree: Try): BType = tree match { - case Try(block, catches, finalizer) => - val kind = tpeTK(tree) - - val caseHandlers: List[EHClause] = - for (CaseDef(pat, _, caseBody) <- catches) yield { - pat match { - case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody) - case Ident(nme.WILDCARD) => NamelessEH(jlThrowableRef, caseBody) - case Bind(_, _) => BoundEH (pat.symbol, caseBody) - } - } - - // ------ (0) locals used later ------ - - /* - * `postHandlers` is a program point denoting: - * (a) the finally-clause conceptually reached via fall-through from try-catch-finally - * (in case a finally-block is present); or - * (b) the program point right after the try-catch - * (in case there's no finally-block). - * The name choice emphasizes that the code section lies "after all exception handlers", - * where "all exception handlers" includes those derived from catch-clauses as well as from finally-blocks. - */ - val postHandlers = new asm.Label - - val hasFinally = (finalizer != tpd.EmptyTree) - - /* - * used in the finally-clause reached via fall-through from try-catch, if any. - */ - val guardResult = hasFinally && (kind != UNIT) && mayCleanStack(finalizer) - - /* - * please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type. - * Because those two types can be different, dedicated vars are needed. - */ - val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp", tree.tpe, tree.span) else null - - /* - * upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause) - * AND hasFinally, a cleanup is needed. 
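
Point (b) above is easy to observe from plain Scala: a finalizer runs after a catch clause just as it runs after the try body. A small runnable check:

    object FinallyCoversCatch {
      def demo(): String =
        try throw new IllegalStateException("boom")
        catch { case e: IllegalStateException => "handled: " + e.getMessage }
        finally println("the finalizer also covers the catch clause")

      def main(args: Array[String]): Unit =
        println(demo()) // prints the finalizer message, then "handled: boom"
    }
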
- */ - val finCleanup = if (hasFinally) new asm.Label else null - - /* ------ (1) try-block, protected by: - * (1.a) the EHs due to case-clauses, emitted in (2), - * (1.b) the EH due to finally-clause, emitted in (3.A) - * (1.c) whatever protects the whole try-catch-finally expression. - * ------ - */ - - val startTryBody = currProgramPoint() - registerCleanup(finCleanup) - genLoad(block, kind) - unregisterCleanup(finCleanup) - nopIfNeeded(startTryBody) - val endTryBody = currProgramPoint() - bc goTo postHandlers - - /** - * A return within a `try` or `catch` block where a `finally` is present ("early return") - * emits a store of the result to a local, jump to a "cleanup" version of the `finally` block, - * and sets `shouldEmitCleanup = true` (see [[PlainBodyBuilder.genReturn]]). - * - * If the try-catch is nested, outer `finally` blocks need to be emitted in a cleanup version - * as well, so the `shouldEmitCleanup` variable remains `true` until the outermost `finally`. - * Nested cleanup `finally` blocks jump to the next enclosing one. For the outermost, we emit - * a read of the local variable, a return, and we set `shouldEmitCleanup = false` (see - * [[pendingCleanups]]). - * - * Now, assume we have - * - * try { return 1 } finally { - * try { println() } finally { println() } - * } - * - * Here, the outer `finally` needs a cleanup version, but the inner one does not. The method - * here makes sure that `shouldEmitCleanup` is only propagated outwards, not inwards to - * nested `finally` blocks. - */ - def withFreshCleanupScope(body: => Unit) = { - val savedShouldEmitCleanup = shouldEmitCleanup - shouldEmitCleanup = false - body - shouldEmitCleanup = savedShouldEmitCleanup || shouldEmitCleanup - } - - /* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause) - * An EH in (2) is reached upon abrupt termination of (1). - * An EH in (2) is protected by: - * (2.a) the EH-version of the finally-clause, if any. - * (2.b) whatever protects the whole try-catch-finally expression. - * ------ - */ - - for (ch <- caseHandlers) withFreshCleanupScope { - - // (2.a) emit case clause proper - val startHandler = currProgramPoint() - var endHandler: asm.Label = null - var excType: ClassBType = null - registerCleanup(finCleanup) - ch match { - case NamelessEH(typeToDrop, caseBody) => - bc drop typeToDrop - genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`. - nopIfNeeded(startHandler) - endHandler = currProgramPoint() - excType = typeToDrop - - case BoundEH (patSymbol, caseBody) => - // test/files/run/contrib674.scala , a local-var already exists for patSymbol. - // rather than creating on first-access, we do it right away to emit debug-info for the created local var. - val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol) - bc.store(patIdx, patTK) - genLoad(caseBody, kind) - nopIfNeeded(startHandler) - endHandler = currProgramPoint() - emitLocalVarScope(patSymbol, startHandler, endHandler) - excType = patTK.asClassBType - } - unregisterCleanup(finCleanup) - // (2.b) mark the try-body as protected by this case clause. - protect(startTryBody, endTryBody, startHandler, excType) - // (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given. 
- bc goTo postHandlers - - } - - // Need to save the state of `shouldEmitCleanup` at this point: while emitting the first - // version of the `finally` block below, the variable may become true. But this does not mean - // that we need a cleanup version for the current block, only for the enclosing ones. - val currentFinallyBlockNeedsCleanup = shouldEmitCleanup - - /* ------ (3.A) The exception-handler-version of the finally-clause. - * Reached upon abrupt termination of (1) or one of the EHs in (2). - * Protected only by whatever protects the whole try-catch-finally expression. - * ------ - */ - - // a note on terminology: this is not "postHandlers", despite appearances. - // "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts. - if (hasFinally) withFreshCleanupScope { - nopIfNeeded(startTryBody) - val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception. - protect(startTryBody, finalHandler, finalHandler, null) - val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(jlThrowableRef, "exc", defn.ThrowableType, finalizer.span)) - bc.store(eIdx, eTK) - emitFinalizer(finalizer, null, isDuplicate = true) - bc.load(eIdx, eTK) - emit(asm.Opcodes.ATHROW) - } - - /* ------ (3.B) Cleanup-version of the finally-clause. - * Reached upon early RETURN from (1) or upon early RETURN from one of the EHs in (2) - * (and only from there, ie reached only upon early RETURN from - * program regions bracketed by registerCleanup/unregisterCleanup). - * Protected only by whatever protects the whole try-catch-finally expression. - * - * Given that control arrives to a cleanup section only upon early RETURN, - * the value to return (if any) is always available. Therefore, a further RETURN - * found in a cleanup section is always ignored (a warning is displayed, @see `genReturn()`). - * In order for `genReturn()` to know whether the return statement is enclosed in a cleanup section, - * the variable `insideCleanupBlock` is used. - * ------ - */ - - // this is not "postHandlers" either. - // `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause. - // In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid. - if (hasFinally && currentFinallyBlockNeedsCleanup) { - markProgramPoint(finCleanup) - // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. - emitFinalizer(finalizer, null, isDuplicate = true) - pendingCleanups() - } - - /* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit - * Reached upon normal, non-early-return termination of (1) or of an EH in (2). - * Protected only by whatever protects the whole try-catch-finally expression. - * TODO explain what happens upon RETURN contained in (4) - * ------ - */ - - markProgramPoint(postHandlers) - if (hasFinally) { - emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false` - } - - kind - } // end of genLoadTry() - - /* if no more pending cleanups, all that remains to do is return. Otherwise jump to the next (outer) pending cleanup. 
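
The early-return protocol described in the comments above is also visible at the language level: the value of a `return` is stashed and every enclosing finalizer runs, innermost first, before it is actually returned. A small runnable check:

    object EarlyReturnDemo {
      def demo(): Int =
        try {
          try return 1                     // early return out of two enclosing finalizers
          finally println("inner finally") // runs first
        } finally println("outer finally") // runs second; only then is 1 returned

      def main(args: Array[String]): Unit =
        println(demo()) // inner finally, outer finally, 1
    }
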
*/ - private def pendingCleanups(): Unit = { - cleanups match { - case Nil => - if (earlyReturnVar != null) { - locals.load(earlyReturnVar) - bc.emitRETURN(locals(earlyReturnVar).tk) - } else { - bc emitRETURN UNIT - } - shouldEmitCleanup = false - - case nextCleanup :: _ => - bc goTo nextCleanup - } - } - - def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType): Unit = { - val excInternalName: String = - if (excType == null) null - else excType.internalName - assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.") - mnode.visitTryCatchBlock(start, end, handler, excInternalName) - } - - /* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */ - def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean): Unit = { - var saved: immutable.Map[ /* Labeled */ Symbol, (BType, LoadDestination) ] = null - if (isDuplicate) { - saved = jumpDest - } - // when duplicating, the above guarantees new asm.Labels are used for LabelDefs contained in the finalizer (their vars are reused, that's ok) - if (tmp != null) { locals.store(tmp) } - genLoad(finalizer, UNIT) - if (tmp != null) { locals.load(tmp) } - if (isDuplicate) { - jumpDest = saved - } - } - - /* Does this tree have a try-catch block? */ - def mayCleanStack(tree: Tree): Boolean = tree.find { t => t match { // TODO: use existsSubTree - case Try(_, _, _) => true - case _ => false - } - }.isDefined - - trait EHClause - case class NamelessEH(typeToDrop: ClassBType, caseBody: Tree) extends EHClause - case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause - - } - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala b/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala deleted file mode 100644 index f9a3a3aae105..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala +++ /dev/null @@ -1,864 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.tools.asm - -/** - * The BTypes component defines The BType class hierarchy. BTypes encapsulates all type information - * that is required after building the ASM nodes. This includes optimizations, geneartion of - * InnerClass attributes and generation of stack map frames. - * - * This representation is immutable and independent of the compiler data structures, hence it can - * be queried by concurrent threads. - */ -abstract class BTypes extends Pure { - - val int: DottyBackendInterface - import int.given - /** - * A map from internal names to ClassBTypes. Every ClassBType is added to this map on its - * construction. - * - * This map is used when computing stack map frames. The asm.ClassWriter invokes the method - * `getCommonSuperClass`. In this method we need to obtain the ClassBType for a given internal - * name. The method assumes that every class type that appears in the bytecode exists in the map. - * - * Concurrent because stack map frames are computed when in the class writer, which might run - * on multiple classes concurrently. - */ - protected def classBTypeFromInternalNameMap: collection.concurrent.Map[String, ClassBType] - // NOTE: Should be a lazy val but scalac does not allow abstract lazy vals (dotty does) - - /** - * Obtain a previously constructed ClassBType for a given internal name. 
- */ - def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName) - - // Some core BTypes are required here, in class BType, where no Global instance is available. - // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual - // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol. - val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type] - import coreBTypes._ - - /** - * A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType - * referring to BTypes. - */ - /*sealed*/ trait BType extends Pure { // Not sealed for now due to SI-8546 - final override def toString: String = this match { - case UNIT => "V" - case BOOL => "Z" - case CHAR => "C" - case BYTE => "B" - case SHORT => "S" - case INT => "I" - case FLOAT => "F" - case LONG => "J" - case DOUBLE => "D" - case ClassBType(internalName) => "L" + internalName + ";" - case ArrayBType(component) => "[" + component - case MethodBType(args, res) => args.mkString("(", "", ")" + res) - } - - /** - * @return The Java descriptor of this type. Examples: - * - int: I - * - java.lang.String: Ljava/lang/String; - * - int[]: [I - * - Object m(String s, double d): (Ljava/lang/String;D)Ljava/lang/Object; - */ - final def descriptor = toString - - /** - * @return 0 for void, 2 for long and double, 1 otherwise - */ - final def size: Int = this match { - case UNIT => 0 - case LONG | DOUBLE => 2 - case _ => 1 - } - - final def isPrimitive: Boolean = this.isInstanceOf[PrimitiveBType] - final def isRef: Boolean = this.isInstanceOf[RefBType] - final def isArray: Boolean = this.isInstanceOf[ArrayBType] - final def isClass: Boolean = this.isInstanceOf[ClassBType] - final def isMethod: Boolean = this.isInstanceOf[MethodBType] - - final def isNonVoidPrimitiveType = isPrimitive && this != UNIT - - final def isNullType = this == srNullRef - final def isNothingType = this == srNothingRef - - final def isBoxed = this.isClass && boxedClasses(this.asClassBType) - - final def isIntSizedType = this == BOOL || this == CHAR || this == BYTE || - this == SHORT || this == INT - final def isIntegralType = this == INT || this == BYTE || this == LONG || - this == CHAR || this == SHORT - final def isRealType = this == FLOAT || this == DOUBLE - final def isNumericType = isIntegralType || isRealType - final def isWideType = size == 2 - - /* - * Subtype check `this <:< other` on BTypes that takes into account the JVM built-in numeric - * promotions (e.g. BYTE to INT). Its operation can be visualized more easily in terms of the - * Java bytecode type hierarchy. 
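
The descriptor grammar documented above can be cross-checked against ASM's own Type class, assuming the standard org.objectweb.asm artifact is on the classpath (the compiler uses a shaded copy):

    import org.objectweb.asm.Type

    object DescriptorDemo {
      def main(args: Array[String]): Unit = {
        println(Type.INT_TYPE.getDescriptor)             // I
        println(Type.getDescriptor(classOf[String]))     // Ljava/lang/String;
        println(Type.getDescriptor(classOf[Array[Int]])) // [I
        // Object m(String s, double d) -> (Ljava/lang/String;D)Ljava/lang/Object;
        println(Type.getMethodDescriptor(
          Type.getType(classOf[Object]),
          Type.getType(classOf[String]), Type.DOUBLE_TYPE))
      }
    }
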
- */ - final def conformsTo(other: BType): Boolean = { - assert(isRef || isPrimitive, s"conformsTo cannot handle $this") - assert(other.isRef || other.isPrimitive, s"conformsTo cannot handle $other") - - this match { - case ArrayBType(component) => - if (other == ObjectRef || other == jlCloneableRef || other == jiSerializableRef) true - else other match { - case ArrayBType(otherComponoent) => component.conformsTo(otherComponoent) - case _ => false - } - - case classType: ClassBType => - if (isBoxed) { - if (other.isBoxed) this == other - else if (other == ObjectRef) true - else other match { - case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) // e.g., java/lang/Double conforms to java/lang/Number - case _ => false - } - } else if (isNullType) { - if (other.isNothingType) false - else if (other.isPrimitive) false - else true // Null conforms to all classes (except Nothing) and arrays. - } else if (isNothingType) { - true - } else other match { - case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) - // case ArrayBType(_) => this.isNullType // documentation only, because `if (isNullType)` above covers this case - case _ => - // isNothingType || // documentation only, because `if (isNothingType)` above covers this case - false - } - - case UNIT => - other == UNIT - case BOOL | BYTE | SHORT | CHAR => - this == other || other == INT || other == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt(). - case _ => - assert(isPrimitive && other.isPrimitive, s"Expected primitive types $this - $other") - this == other - } - } - - /** - * Compute the upper bound of two types. - * Takes promotions of numeric primitives into account. - */ - final def maxType(other: BType): BType = this match { - case pt: PrimitiveBType => pt.maxValueType(other) - - case _: ArrayBType | _: ClassBType => - if (isNothingType) return other - if (other.isNothingType) return this - if (this == other) return this - - assert(other.isRef, s"Cannot compute maxType: $this, $other") - // Approximate `lub`. The common type of two references is always ObjectReference. - ObjectRef - } - - /** - * See documentation of [[typedOpcode]]. - * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 8. - */ - private def loadStoreOpcodeOffset: Int = this match { - case UNIT | INT => 0 - case BOOL | BYTE => 5 - case CHAR => 6 - case SHORT => 7 - case FLOAT => 2 - case LONG => 1 - case DOUBLE => 3 - case _ => 4 - } - - /** - * See documentation of [[typedOpcode]]. - * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 16. - */ - private def typedOpcodeOffset: Int = this match { - case UNIT => 5 - case BOOL | CHAR | BYTE | SHORT | INT => 0 - case FLOAT => 2 - case LONG => 1 - case DOUBLE => 3 - case _ => 4 - } - - /** - * Some JVM opcodes have typed variants. This method returns the correct opcode according to - * the type. - * - * @param opcode A JVM instruction opcode. This opcode must be one of ILOAD, ISTORE, IALOAD, - * IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL, ISHR, IUSHR, IAND, IOR - * IXOR and IRETURN. - * @return The opcode adapted to this java type. For example, if this type is `float` and - * `opcode` is `IRETURN`, this method returns `FRETURN`. - */ - final def typedOpcode(opcode: Int): Int = { - if (opcode == asm.Opcodes.IALOAD || opcode == asm.Opcodes.IASTORE) - opcode + loadStoreOpcodeOffset - else - opcode + typedOpcodeOffset - } - - /** - * The asm.Type corresponding to this BType. 
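For instance, with the opcode numbers from the JVM specification (IRETURN = 172, LRETURN = 173, FRETURN = 174, DRETURN = 175), the offsets above select the typed return instruction; a minimal check, independent of the compiler:

object TypedOpcodeDemo {
  val IRETURN = 172; val LRETURN = 173; val FRETURN = 174; val DRETURN = 175

  // typedOpcodeOffset: LONG -> 1, FLOAT -> 2, DOUBLE -> 3 (see the match above).
  def typedReturn(offset: Int): Int = IRETURN + offset

  def main(args: Array[String]): Unit = {
    assert(typedReturn(1) == LRETURN)    // long   return
    assert(typedReturn(2) == FRETURN)    // float  return
    assert(typedReturn(3) == DRETURN)    // double return
  }
}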
- * - * Note about asm.Type.getObjectType (*): For class types, the method expects the internal - * name, i.e. without the surrounding 'L' and ';'. For array types on the other hand, the - * method expects a full descriptor, for example "[Ljava/lang/String;". - * - * See method asm.Type.getType that creates a asm.Type from a type descriptor - * - for an OBJECT type, the 'L' and ';' are not part of the range of the created Type - * - for an ARRAY type, the full descriptor is part of the range - */ - def toASMType: asm.Type = this match { - case UNIT => asm.Type.VOID_TYPE - case BOOL => asm.Type.BOOLEAN_TYPE - case CHAR => asm.Type.CHAR_TYPE - case BYTE => asm.Type.BYTE_TYPE - case SHORT => asm.Type.SHORT_TYPE - case INT => asm.Type.INT_TYPE - case FLOAT => asm.Type.FLOAT_TYPE - case LONG => asm.Type.LONG_TYPE - case DOUBLE => asm.Type.DOUBLE_TYPE - case ClassBType(internalName) => asm.Type.getObjectType(internalName) // see (*) above - case a: ArrayBType => asm.Type.getObjectType(a.descriptor) - case m: MethodBType => asm.Type.getMethodType(m.descriptor) - } - - def asRefBType : RefBType = this.asInstanceOf[RefBType] - def asArrayBType : ArrayBType = this.asInstanceOf[ArrayBType] - def asClassBType : ClassBType = this.asInstanceOf[ClassBType] - def asPrimitiveBType : PrimitiveBType = this.asInstanceOf[PrimitiveBType] - } - - sealed trait PrimitiveBType extends BType { - - /** - * The upper bound of two primitive types. The `other` type has to be either a primitive - * type or Nothing. - * - * The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative - * values of Byte and Short. See ticket #2087. - */ - final def maxValueType(other: BType): BType = { - - def uncomparable: Nothing = throw new AssertionError(s"Cannot compute maxValueType: $this, $other") - - if (!other.isPrimitive && !other.isNothingType) uncomparable - - if (other.isNothingType) return this - if (this == other) return this - - this match { - case BYTE => - if (other == CHAR) INT - else if (other.isNumericType) other - else uncomparable - - case SHORT => - other match { - case BYTE => SHORT - case CHAR => INT - case INT | LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case CHAR => - other match { - case BYTE | SHORT => INT - case INT | LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case INT => - other match { - case BYTE | SHORT | CHAR => INT - case LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case LONG => - other match { - case INT | BYTE | LONG | CHAR | SHORT => LONG - case DOUBLE => DOUBLE - case FLOAT => FLOAT - case _ => uncomparable - } - - case FLOAT => - if (other == DOUBLE) DOUBLE - else if (other.isNumericType) FLOAT - else uncomparable - - case DOUBLE => - if (other.isNumericType) DOUBLE - else uncomparable - - case UNIT | BOOL => uncomparable - } - } - } - - case object UNIT extends PrimitiveBType - case object BOOL extends PrimitiveBType - case object CHAR extends PrimitiveBType - case object BYTE extends PrimitiveBType - case object SHORT extends PrimitiveBType - case object INT extends PrimitiveBType - case object FLOAT extends PrimitiveBType - case object LONG extends PrimitiveBType - case object DOUBLE extends PrimitiveBType - - sealed trait RefBType extends BType { - /** - * The class or array type of this reference type. Used for ANEWARRAY, MULTIANEWARRAY, - * INSTANCEOF and CHECKCAST instructions. Also used for emitting invokevirtual calls to - * (a: Array[T]).clone() for any T, see genApply. 
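The getObjectType note above is easy to check against the standard ASM distribution (org.objectweb.asm is assumed on the classpath here, rather than the compiler's bundled scala.tools.asm fork):

import org.objectweb.asm.Type

object AsmTypeDemo {
  def main(args: Array[String]): Unit = {
    val cls = Type.getObjectType("java/lang/String")        // class: bare internal name
    val arr = Type.getObjectType("[Ljava/lang/String;")     // array: full descriptor
    assert(cls.getDescriptor == "Ljava/lang/String;")
    assert(arr.getDescriptor == "[Ljava/lang/String;")
  }
}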
- * - * In contrast to the descriptor, this string does not contain the surrounding 'L' and ';' for - * class types, for example "java/lang/String". - * However, for array types, the full descriptor is used, for example "[Ljava/lang/String;". - * - * This can be verified for example using javap or ASMifier. - */ - def classOrArrayType: String = this match { - case ClassBType(internalName) => internalName - case a: ArrayBType => a.descriptor - } - } - - /** - * InnerClass and EnclosingMethod attributes (EnclosingMethod is displayed as OUTERCLASS in asm). - * - * In this summary, "class" means "class or interface". - * - * JLS: http://docs.oracle.com/javase/specs/jls/se8/html/index.html - * JVMS: http://docs.oracle.com/javase/specs/jvms/se8/html/index.html - * - * Terminology - * ----------- - * - * - Nested class (JLS 8): class whose declaration occurs within the body of another class - * - * - Top-level class (JLS 8): non-nested class - * - * - Inner class (JLS 8.1.3): nested class that is not (explicitly or implicitly) static - * - * - Member class (JLS 8.5): class directly enclosed in the body of a class (and not, for - * example, defined in a method). Member classes cannot be anonymous. May be static. - * - * - Local class (JLS 14.3): nested, non-anonymous class that is not a member of a class - * - cannot be static (therefore they are "inner" classes) - * - can be defined in a method, a constructor or in an initializer block - * - * - Initializer block (JLS 8.6 / 8.7): block of statements in a java class - * - static initializer: executed before constructor body - * - instance initializer: executed when class is initialized (instance creation, static - * field access, ...) - * - * - A static nested class can be defined as - * - a static member class (explicitly static), or - * - a member class of an interface (implicitly static) - * - local classes are never static, even if they are defined in a static method. - * - * Note: it is NOT the case that all inner classes (non-static) have an outer pointer. Example: - * class C { static void foo { class D {} } } - * The class D is an inner class (non-static), but javac does not add an outer pointer to it. - * - * InnerClass - * ---------- - * - * The JVMS 4.7.6 requires an entry for every class mentioned in a CONSTANT_Class_info in the - * constant pool (CP) that is not a member of a package (JLS 7.1). - * - * The JLS 13.1, points 9. / 10. requires: a class must reference (in the CP) - * - its immediately enclosing class - * - all of its member classes - * - all local and anonymous classes that are referenced (or declared) elsewhere (method, - * constructor, initializer block, field initializer) - * - * In a comment, the 4.7.6 spec says: this implies an entry in the InnerClass attribute for - * - All enclosing classes (except the outermost, which is top-level) - * - My comment: not sure how this is implied, below (*) a Java counter-example. - * In any case, the Java compiler seems to add all enclosing classes, even if they are not - * otherwise mentioned in the CP. So we should do the same. - * - All nested classes (including anonymous and local, but not transitively) - * - * Fields in the InnerClass entries: - * - inner class: the (nested) class C we are talking about - * - outer class: the class of which C is a member. Has to be null for non-members, i.e. for - * local and anonymous classes. 
NOTE: this co-incides with the presence of an - * EnclosingMethod attribute (see below) - * - inner name: A string with the simple name of the inner class. Null for anonymous classes. - * - flags: access property flags, details in JVMS, table in 4.7.6. Static flag: see - * discussion below. - * - * - * Note 1: when a nested class is present in the InnerClass attribute, all of its enclosing - * classes have to be present as well (by the rules above). Example: - * - * class Outer { class I1 { class I2 { } } } - * class User { Outer.I1.I2 foo() { } } - * - * The return type "Outer.I1.I2" puts "Outer$I1$I2" in the CP, therefore the class is added to the - * InnerClass attribute. For this entry, the "outer class" field will be "Outer$I1". This in turn - * adds "Outer$I1" to the CP, which requires adding that class to the InnerClass attribute. - * (For local / anonymous classes this would not be the case, since the "outer class" attribute - * would be empty. However, no class (other than the enclosing class) can refer to them, as they - * have no name.) - * - * In the current implementation of the Scala compiler, when adding a class to the InnerClass - * attribute, all of its enclosing classes will be added as well. Javac seems to do the same, - * see (*). - * - * - * Note 2: If a class name is mentioned only in a CONSTANT_Utf8_info, but not in a - * CONSTANT_Class_info, the JVMS does not require an entry in the InnerClass attribute. However, - * the Java compiler seems to add such classes anyway. For example, when using an annotation, the - * annotation class is stored as a CONSTANT_Utf8_info in the CP: - * - * @O.Ann void foo() { } - * - * adds "const #13 = Asciz LO$Ann;;" in the constant pool. The "RuntimeInvisibleAnnotations" - * attribute refers to that constant pool entry. Even though there is no other reference to - * `O.Ann`, the java compiler adds an entry for that class to the InnerClass attribute (which - * entails adding a CONSTANT_Class_info for the class). - * - * - * - * EnclosingMethod - * --------------- - * - * JVMS 4.7.7: the attribute must be present "if and only if it represents a local class - * or an anonymous class" (i.e. not for member classes). - * - * The attribute is mis-named, it should be called "EnclosingClass". It has to be defined for all - * local and anonymous classes, no matter if there is an enclosing method or not. Accordingly, the - * "class" field (see below) must be always defined, while the "method" field may be null. - * - * NOTE: When a EnclosingMethod attribute is required (local and anonymous classes), the "outer" - * field in the InnerClass table must be null. - * - * Fields: - * - class: the enclosing class - * - method: the enclosing method (or constructor). Null if the class is not enclosed by a - * method, i.e. for - * - local or anonymous classes defined in (static or non-static) initializer blocks - * - anonymous classes defined in initializer blocks or field initializers - * - * Note: the field is required for anonymous classes defined within local variable - * initializers (within a method), Java example below (**). - * - * For local and anonymous classes in initializer blocks or field initializers, and - * class-level anonymous classes, the scala compiler sets the "method" field to null. 
- * - * - * (*) - * public class Test { - * void foo() { - * class Foo1 { - * // constructor statement block - * { - * class Foo2 { - * class Foo3 { } - * } - * } - * } - * } - * } - * - * The class file Test$1Foo1$1Foo2$Foo3 has no reference to the class Test$1Foo1, however it - * still contains an InnerClass attribute for Test$1Foo1. - * Maybe this is just because the Java compiler follows the JVMS comment ("InnerClasses - * information for each enclosing class"). - * - * - * (**) - * void foo() { - * // anonymous class defined in local variable initializer expression. - * Runnable x = true ? (new Runnable() { - * public void run() { return; } - * }) : null; - * } - * - * The EnclosingMethod attribute of the anonymous class mentions "foo" in the "method" field. - * - * - * Java Compatibility - * ------------------ - * - * In the InnerClass entry for classes in top-level modules, the "outer class" is emitted as the - * mirror class (or the existing companion class), i.e. C1 is nested in T (not T$). - * For classes nested in a nested object, the "outer class" is the module class: C2 is nested in T$N$ - * object T { - * class C1 - * object N { class C2 } - * } - * - * Reason: java compat. It's a "best effort" "solution". If you want to use "C1" from Java, you - * can write "T.C1", and the Java compiler will translate that to the classfile T$C1. - * - * If we would emit the "outer class" of C1 as "T$", then in Java you'd need to write "T$.C1" - * because the java compiler looks at the InnerClass attribute to find if an inner class exists. - * However, the Java compiler would then translate the '.' to '$' and you'd get the class name - * "T$$C1". This class file obviously does not exist. - * - * Directly using the encoded class name "T$C1" in Java does not work: since the classfile - * describes a nested class, the Java compiler hides it from the classpath and will report - * "cannot find symbol T$C1". This means that the class T.N.C2 cannot be referenced from a - * Java source file in any way. - * - * - * STATIC flag - * ----------- - * - * Java: static member classes have the static flag in the InnerClass attribute, for example B in - * class A { static class B { } } - * - * The spec is not very clear about when the static flag should be emitted. It says: "Marked or - * implicitly static in source." - * - * The presence of the static flag does NOT coincide with the absence of an "outer" field in the - * class. The java compiler never puts the static flag for local classes, even if they don't have - * an outer pointer: - * - * class A { - * void f() { class B {} } - * static void g() { calss C {} } - * } - * - * B has an outer pointer, C doesn't. Both B and C are NOT marked static in the InnerClass table. - * - * It seems sane to follow the same principle in the Scala compiler. So: - * - * package p - * object O1 { - * class C1 // static inner class - * object O2 { // static inner module - * def f = { - * class C2 { // non-static inner class, even though there's no outer pointer - * class C3 // non-static, has an outer pointer - * } - * } - * } - * } - * - * Mirror Classes - * -------------- - * - * TODO: innerclass attributes on mirror class - */ - - /** - * A ClassBType represents a class or interface type. The necessary information to build a - * ClassBType is extracted from compiler symbols and types, see BTypesFromSymbols. - * - * The `offset` and `length` fields are used to represent the internal name of the class. They - * are indices into some character array. 
The internal name can be obtained through the method - * `internalNameString`, which is abstract in this component. Name creation is assumed to be - * hash-consed, so if two ClassBTypes have the same internal name, they NEED to have the same - * `offset` and `length`. - * - * The actual implementation in subclass BTypesFromSymbols uses the global `chrs` array from the - * name table. This representation is efficient because the JVM class name is obtained through - * `classSymbol.javaBinaryName`. This already adds the necessary string to the `chrs` array, - * so it makes sense to reuse the same name table in the backend. - * - * ClassBType is not a case class because we want a custom equals method, and because the - * extractor extracts the internalName, which is what you typically need. - */ - final class ClassBType(val internalName: String) extends RefBType { - /** - * Write-once variable allows initializing a cyclic graph of infos. This is required for - * nested classes. Example: for the definition `class A { class B }` we have - * - * B.info.nestedInfo.outerClass == A - * A.info.memberClasses contains B - */ - private var _info: ClassInfo = null - - def info: ClassInfo = { - assert(_info != null, s"ClassBType.info not yet assigned: $this") - _info - } - - def info_=(i: ClassInfo): Unit = { - assert(_info == null, s"Cannot set ClassBType.info multiple times: $this") - _info = i - checkInfoConsistency() - } - - classBTypeFromInternalNameMap(internalName) = this - - private def checkInfoConsistency(): Unit = { - // we assert some properties. however, some of the linked ClassBType (members, superClass, - // interfaces) may not yet have an `_info` (initialization of cyclic structures). so we do a - // best-effort verification. - def ifInit(c: ClassBType)(p: ClassBType => Boolean): Boolean = c._info == null || p(c) - - def isJLO(t: ClassBType) = t.internalName == "java/lang/Object" - - assert(!ClassBType.isInternalPhantomType(internalName), s"Cannot create ClassBType for phantom type $this") - - assert( - if (info.superClass.isEmpty) { isJLO(this) || (DottyBackendInterface.isCompilingPrimitive && ClassBType.hasNoSuper(internalName)) } - else if (isInterface) isJLO(info.superClass.get) - else !isJLO(this) && ifInit(info.superClass.get)(!_.isInterface), - s"Invalid superClass in $this: ${info.superClass}" - ) - assert( - info.interfaces.forall(c => ifInit(c)(_.isInterface)), - s"Invalid interfaces in $this: ${info.interfaces}" - ) - - assert(info.memberClasses.forall(c => ifInit(c)(_.isNestedClass)), info.memberClasses) - } - - /** - * The internal name of a class is the string returned by java.lang.Class.getName, with all '.' - * replaced by '/'. For example "java/lang/String". 
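The write-once discipline of `_info` can be sketched in isolation (hypothetical `Node` type; the real field holds a ClassInfo): construct the nodes first, then tie the cycle by assigning each `info` exactly once.

final class Node(val name: String) {
  private var _info: Option[List[Node]] = None              // e.g. member classes

  def info: List[Node] =
    _info.getOrElse(throw new AssertionError(s"info not yet assigned: $name"))

  def info_=(i: List[Node]): Unit = {
    require(_info.isEmpty, s"info already assigned: $name")
    _info = Some(i)
  }
}

@main def writeOnceDemo(): Unit =
  val a = new Node("A")
  val b = new Node("B")
  a.info = List(b)      // A lists B as a member ...
  b.info = List(a)      // ... and B points back to A, closing the cycle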
- */ - //def internalName: String = internalNameString(offset, length) - - /** - * @return The class name without the package prefix - */ - def simpleName: String = internalName.split("/").last - - def isInterface = (info.flags & asm.Opcodes.ACC_INTERFACE) != 0 - - def superClassesTransitive: List[ClassBType] = info.superClass match { - case None => Nil - case Some(sc) => sc :: sc.superClassesTransitive - } - - def isNestedClass = info.nestedInfo.isDefined - - def enclosingNestedClassesChain: List[ClassBType] = - if (isNestedClass) this :: info.nestedInfo.get.enclosingClass.enclosingNestedClassesChain - else Nil - - def innerClassAttributeEntry: Option[InnerClassEntry] = info.nestedInfo map { - case NestedInfo(_, outerName, innerName, isStaticNestedClass) => - import GenBCodeOps.addFlagIf - InnerClassEntry( - internalName, - outerName.orNull, - innerName.orNull, - info.flags.addFlagIf(isStaticNestedClass, asm.Opcodes.ACC_STATIC) - & ClassBType.INNER_CLASSES_FLAGS - ) - } - - def isSubtypeOf(other: ClassBType): Boolean = { - if (this == other) return true - - if (isInterface) { - if (other == ObjectRef) return true // interfaces conform to Object - if (!other.isInterface) return false // this is an interface, the other is some class other than object. interfaces cannot extend classes, so the result is false. - // else: this and other are both interfaces. continue to (*) - } else { - val sc = info.superClass - if (sc.isDefined && sc.get.isSubtypeOf(other)) return true // the superclass of this class conforms to other - if (!other.isInterface) return false // this and other are both classes, and the superclass of this does not conform - // else: this is a class, the other is an interface. continue to (*) - } - - // (*) check if some interface of this class conforms to other. - info.interfaces.exists(_.isSubtypeOf(other)) - } - - /** - * Finding the least upper bound in agreement with the bytecode verifier - * Background: - * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf - * http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - * https://issues.scala-lang.org/browse/SI-3872 - */ - def jvmWiseLUB(other: ClassBType): ClassBType = { - def isNotNullOrNothing(c: ClassBType) = !c.isNullType && !c.isNothingType - assert(isNotNullOrNothing(this) && isNotNullOrNothing(other), s"jvmWiseLub for null or nothing: $this - $other") - - val res: ClassBType = (this.isInterface, other.isInterface) match { - case (true, true) => - // exercised by test/files/run/t4761.scala - if (other.isSubtypeOf(this)) this - else if (this.isSubtypeOf(other)) other - else ObjectRef - - case (true, false) => - if (other.isSubtypeOf(this)) this else ObjectRef - - case (false, true) => - if (this.isSubtypeOf(other)) other else ObjectRef - - case _ => - // TODO @lry I don't really understand the reasoning here. - // Both this and other are classes. The code takes (transitively) all superclasses and - // finds the first common one. 
- // MOST LIKELY the answer can be found here, see the comments and links by Miguel: - // - https://issues.scala-lang.org/browse/SI-3872 - firstCommonSuffix(this :: this.superClassesTransitive, other :: other.superClassesTransitive) - } - - assert(isNotNullOrNothing(res), s"jvmWiseLub computed: $res") - res - } - - private def firstCommonSuffix(as: List[ClassBType], bs: List[ClassBType]): ClassBType = { - var chainA = as - var chainB = bs - var fcs: ClassBType = null - while { - if (chainB contains chainA.head) fcs = chainA.head - else if (chainA contains chainB.head) fcs = chainB.head - else { - chainA = chainA.tail - chainB = chainB.tail - } - fcs == null - } do () - fcs - } - - /** - * Custom equals / hashCode: we only compare the name (offset / length) - */ - override def equals(o: Any): Boolean = (this eq o.asInstanceOf[Object]) || (o match { - case c: ClassBType @unchecked => c.internalName == this.internalName - case _ => false - }) - - override def hashCode: Int = { - import scala.runtime.Statics - var acc: Int = -889275714 - acc = Statics.mix(acc, internalName.hashCode) - Statics.finalizeHash(acc, 2) - } - } - - object ClassBType { - /** - * Pattern matching on a ClassBType extracts the `internalName` of the class. - */ - def unapply(c: ClassBType): Some[String] = Some(c.internalName) - - /** - * Valid flags for InnerClass attribute entry. - * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 - */ - private val INNER_CLASSES_FLAGS = { - asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | - asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE | - asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION | - asm.Opcodes.ACC_ENUM - } - - // Primitive classes have no super class. A ClassBType for those is only created when - // they are actually being compiled (e.g., when compiling scala/Boolean.scala). - private val hasNoSuper = Set( - "scala/Unit", - "scala/Boolean", - "scala/Char", - "scala/Byte", - "scala/Short", - "scala/Int", - "scala/Float", - "scala/Long", - "scala/Double" - ) - - private val isInternalPhantomType = Set( - "scala/Null", - "scala/Nothing" - ) - } - - /** - * The type info for a class. Used for symboltable-independent subtype checks in the backend. - * - * @param superClass The super class, not defined for class java/lang/Object. - * @param interfaces All transitively implemented interfaces, except for those inherited - * through the superclass. - * @param flags The java flags, obtained through `javaFlags`. Used also to derive - * the flags for InnerClass entries. - * @param memberClasses Classes nested in this class. Those need to be added to the - * InnerClass table, see the InnerClass spec summary above. - * @param nestedInfo If this describes a nested class, information for the InnerClass table. - */ - case class ClassInfo(superClass: Option[ClassBType], interfaces: List[ClassBType], flags: Int, - memberClasses: List[ClassBType], nestedInfo: Option[NestedInfo]) - - /** - * Information required to add a class to an InnerClass table. - * The spec summary above explains what information is required for the InnerClass entry. - * - * @param enclosingClass The enclosing class, if it is also nested. When adding a class - * to the InnerClass table, enclosing nested classes are also added. - * @param outerName The outerName field in the InnerClass entry, may be None. - * @param innerName The innerName field, may be None. 
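A stand-alone rendering of the firstCommonSuffix walk used by jvmWiseLUB above (plain strings instead of ClassBTypes; each chain lists the class first and java/lang/Object last):

object CommonSuper {
  def firstCommonSuffix(as: List[String], bs: List[String]): String = {
    var chainA = as
    var chainB = bs
    var fcs: String = null
    while (fcs == null) {
      if (chainB.contains(chainA.head)) fcs = chainA.head
      else if (chainA.contains(chainB.head)) fcs = chainB.head
      else { chainA = chainA.tail; chainB = chainB.tail }
    }
    fcs
  }

  def main(args: Array[String]): Unit = {
    val a = List("java/util/ArrayList", "java/util/AbstractList", "java/util/AbstractCollection", "java/lang/Object")
    val b = List("java/util/Vector", "java/util/AbstractList", "java/util/AbstractCollection", "java/lang/Object")
    assert(firstCommonSuffix(a, b) == "java/util/AbstractList")
  }
}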
- * @param isStaticNestedClass True if this is a static nested class (not inner class) (*) - * - * (*) Note that the STATIC flag in ClassInfo.flags, obtained through javaFlags(classSym), is not - * correct for the InnerClass entry, see javaFlags. The static flag in the InnerClass describes - * a source-level propety: if the class is in a static context (does not have an outer pointer). - * This is checked when building the NestedInfo. - */ - case class NestedInfo(enclosingClass: ClassBType, - outerName: Option[String], - innerName: Option[String], - isStaticNestedClass: Boolean) - - /** - * This class holds the data for an entry in the InnerClass table. See the InnerClass summary - * above in this file. - * - * There's some overlap with the class NestedInfo, but it's not exactly the same and cleaner to - * keep separate. - * @param name The internal name of the class. - * @param outerName The internal name of the outer class, may be null. - * @param innerName The simple name of the inner class, may be null. - * @param flags The flags for this class in the InnerClass entry. - */ - case class InnerClassEntry(name: String, outerName: String, innerName: String, flags: Int) - - case class ArrayBType(componentType: BType) extends RefBType { - def dimension: Int = componentType match { - case a: ArrayBType => 1 + a.dimension - case _ => 1 - } - - def elementType: BType = componentType match { - case a: ArrayBType => a.elementType - case t => t - } - } - - case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType - - /* Some definitions that are required for the implementation of BTypes. They are abstract because - * initializing them requires information from types / symbols, which is not accessible here in - * BTypes. - * - * They are defs (not vals) because they are implemented using vars (see comment on CoreBTypes). - */ - - /** - * Just a named pair, used in CoreBTypes.asmBoxTo/asmUnboxTo. - */ - /*final*/ case class MethodNameAndType(name: String, methodType: MethodBType) -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala b/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala deleted file mode 100644 index d78008d65cc6..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala +++ /dev/null @@ -1,348 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.tools.asm -import scala.annotation.threadUnsafe -import scala.collection.mutable -import scala.collection.mutable.Clearable - -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.core.StdNames - -/** - * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary - * information from a symbol and its type to create the corresponding ClassBType. It requires - * access to the compiler (global parameter). - * - * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes - * uses classBTypeFromSymbol, hence requires access to the compiler (global). - * - * BTypesFromSymbols extends BTypes because the implementation of BTypes requires access to some - * of the core btypes. They are declared in BTypes as abstract members. Note that BTypes does - * not have access to the compiler instance. 
- */ -class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { - import int.{_, given} - import DottyBackendInterface.{symExtensions, _} - - lazy val TransientAttr = requiredClass[scala.transient] - lazy val VolatileAttr = requiredClass[scala.volatile] - - val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) - import bCodeAsmCommon._ - - // Why the proxy, see documentation of class [[CoreBTypes]]. - val coreBTypes: CoreBTypesProxy[this.type] = new CoreBTypesProxy[this.type](this) - import coreBTypes._ - - final def intializeCoreBTypes(): Unit = { - coreBTypes.setBTypes(new CoreBTypes[this.type](this)) - } - - private[this] val perRunCaches: Caches = new Caches { - def newAnyRefMap[K <: AnyRef, V](): mutable.AnyRefMap[K, V] = new mutable.AnyRefMap[K, V]() - def newWeakMap[K, V](): mutable.WeakHashMap[K, V] = new mutable.WeakHashMap[K, V]() - def recordCache[T <: Clearable](cache: T): T = cache - def newMap[K, V](): mutable.HashMap[K, V] = new mutable.HashMap[K, V]() - def newSet[K](): mutable.Set[K] = new mutable.HashSet[K] - } - - // TODO remove abstraction - private abstract class Caches { - def recordCache[T <: Clearable](cache: T): T - def newWeakMap[K, V](): collection.mutable.WeakHashMap[K, V] - def newMap[K, V](): collection.mutable.HashMap[K, V] - def newSet[K](): collection.mutable.Set[K] - def newAnyRefMap[K <: AnyRef, V](): collection.mutable.AnyRefMap[K, V] - } - - @threadUnsafe protected lazy val classBTypeFromInternalNameMap = { - perRunCaches.recordCache(collection.concurrent.TrieMap.empty[String, ClassBType]) - } - - /** - * Cache for the method classBTypeFromSymbol. - */ - @threadUnsafe private lazy val convertedClasses = perRunCaches.newMap[Symbol, ClassBType]() - - /** - * The ClassBType for a class symbol `sym`. - */ - final def classBTypeFromSymbol(classSym: Symbol): ClassBType = { - assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") - assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") - assert( - (!primitiveTypeMap.contains(classSym) || isCompilingPrimitive) && - (classSym != defn.NothingClass && classSym != defn.NullClass), - s"Cannot create ClassBType for special class symbol ${classSym.showFullName}") - - convertedClasses.getOrElse(classSym, { - val internalName = classSym.javaBinaryName - // We first create and add the ClassBType to the hash map before computing its info. This - // allows initializing cylic dependencies, see the comment on variable ClassBType._info. 
- val classBType = new ClassBType(internalName) - convertedClasses(classSym) = classBType - setClassInfo(classSym, classBType) - }) - } - - final def mirrorClassBTypeFromSymbol(moduleClassSym: Symbol): ClassBType = { - assert(moduleClassSym.isTopLevelModuleClass, s"not a top-level module class: $moduleClassSym") - val internalName = moduleClassSym.javaBinaryName.stripSuffix(StdNames.str.MODULE_SUFFIX) - val bType = ClassBType(internalName) - bType.info = ClassInfo( - superClass = Some(ObjectRef), - interfaces = Nil, - flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, - memberClasses = getMemberClasses(moduleClassSym).map(classBTypeFromSymbol), - nestedInfo = None - ) - bType - } - - private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = { - val superClassSym: Symbol = { - val t = classSym.asClass.superClass - if (t.exists) t - else if (classSym.is(ModuleClass)) { - // workaround #371 - - println(s"Warning: mocking up superclass for $classSym") - defn.ObjectClass - } - else t - } - assert( - if (classSym == defn.ObjectClass) - superClassSym == NoSymbol - else if (classSym.isInterface) - superClassSym == defn.ObjectClass - else - // A ClassBType for a primitive class (scala.Boolean et al) is only created when compiling these classes. - ((superClassSym != NoSymbol) && !superClassSym.isInterface) || (isCompilingPrimitive && primitiveTypeMap.contains(classSym)), - s"Bad superClass for $classSym: $superClassSym" - ) - val superClass = if (superClassSym == NoSymbol) None - else Some(classBTypeFromSymbol(superClassSym)) - - /** - * All interfaces implemented by a class, except for those inherited through the superclass. - * Redundant interfaces are removed unless there is a super call to them. - */ - extension (sym: Symbol) def superInterfaces: List[Symbol] = { - val directlyInheritedTraits = sym.directlyInheritedTraits - val directlyInheritedTraitsSet = directlyInheritedTraits.toSet - val allBaseClasses = directlyInheritedTraits.iterator.flatMap(_.asClass.baseClasses.drop(1)).toSet - val superCalls = superCallsMap.getOrElse(sym, Set.empty) - val additional = (superCalls -- directlyInheritedTraitsSet).filter(_.is(Trait)) -// if (additional.nonEmpty) -// println(s"$fullName: adding supertraits $additional") - directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCalls(t)) ++ additional - } - - val interfaces = classSym.superInterfaces.map(classBTypeFromSymbol) - - val flags = javaFlags(classSym) - - /* The InnerClass table of a class C must contain all nested classes of C, even if they are only - * declared but not otherwise referenced in C (from the bytecode or a method / field signature). - * We collect them here. - */ - val nestedClassSymbols = { - // The lambdalift phase lifts all nested classes to the enclosing class, so if we collect - // member classes right after lambdalift, we obtain all nested classes, including local and - // anonymous ones. - val nestedClasses = getNestedClasses(classSym) - - // If this is a top-level class, and it has a companion object, the member classes of the - // companion are added as members of the class. For example: - // class C { } - // object C { - // class D - // def f = { class E } - // } - // The class D is added as a member of class C. The reason is that the InnerClass attribute - // for D will containt class "C" and NOT the module class "C$" as the outer class of D. - // This is done by buildNestedInfo, the reason is Java compatibility, see comment in BTypes. 
- // For consistency, the InnerClass entry for D needs to be present in C - to Java it looks - // like D is a member of C, not C$. - val linkedClass = classSym.linkedClass - val companionModuleMembers = { - if (classSym.linkedClass.isTopLevelModuleClass) getMemberClasses(classSym.linkedClass) - else Nil - } - - nestedClasses ++ companionModuleMembers - } - - /** - * For nested java classes, the scala compiler creates both a class and a module (and therefore - * a module class) symbol. For example, in `class A { class B {} }`, the nestedClassSymbols - * for A contain both the class B and the module class B. - * Here we get rid of the module class B, making sure that the class B is present. - */ - val nestedClassSymbolsNoJavaModuleClasses = nestedClassSymbols.filter(s => { - if (s.is(JavaDefined) && s.is(ModuleClass)) { - // We could also search in nestedClassSymbols for s.linkedClassOfClass, but sometimes that - // returns NoSymbol, so it doesn't work. - val nb = nestedClassSymbols.count(mc => mc.name == s.name && mc.owner == s.owner) - // this assertion is specific to how ScalaC works. It doesn't apply to dotty, as n dotty there will be B & B$ - // assert(nb == 2, s"Java member module without member class: $s - $nestedClassSymbols") - false - } else true - }) - - val memberClasses = nestedClassSymbolsNoJavaModuleClasses.map(classBTypeFromSymbol) - - val nestedInfo = buildNestedInfo(classSym) - - classBType.info = ClassInfo(superClass, interfaces, flags, memberClasses, nestedInfo) - classBType - } - - /** For currently compiled classes: All locally defined classes including local classes. - * The empty list for classes that are not currently compiled. - */ - private def getNestedClasses(sym: Symbol): List[Symbol] = definedClasses(sym, flattenPhase) - - /** For currently compiled classes: All classes that are declared as members of this class - * (but not inherited ones). The empty list for classes that are not currently compiled. - */ - private def getMemberClasses(sym: Symbol): List[Symbol] = definedClasses(sym, lambdaLiftPhase) - - private def definedClasses(sym: Symbol, phase: Phase) = - if (sym.isDefinedInCurrentRun) - atPhase(phase) { - toDenot(sym).info.decls.filter(sym => sym.isClass && !sym.isEffectivelyErased) - } - else Nil - - private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = { - assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym") - - val isNested = !innerClassSym.originalOwner.originalLexicallyEnclosingClass.is(PackageClass) - if (!isNested) None - else { - // See comment in BTypes, when is a class marked static in the InnerClass table. - val isStaticNestedClass = innerClassSym.originalOwner.originalLexicallyEnclosingClass.isOriginallyStaticOwner - - // After lambdalift (which is where we are), the rawowoner field contains the enclosing class. 
- val enclosingClassSym = { - if (innerClassSym.isClass) { - atPhase(flattenPhase.prev) { - toDenot(innerClassSym).owner.enclosingClass - } - } - else atPhase(flattenPhase.prev)(innerClassSym.enclosingClass) - } //todo is handled specially for JavaDefined symbols in scalac - - val enclosingClass: ClassBType = classBTypeFromSymbol(enclosingClassSym) - - val outerName: Option[String] = { - if (isAnonymousOrLocalClass(innerClassSym)) { - None - } else { - val outerName = innerClassSym.originalOwner.originalLexicallyEnclosingClass.javaBinaryName - def dropModule(str: String): String = - if (!str.isEmpty && str.last == '$') str.take(str.length - 1) else str - // Java compatibility. See the big comment in BTypes that summarizes the InnerClass spec. - val outerNameModule = - if (innerClassSym.originalOwner.originalLexicallyEnclosingClass.isTopLevelModuleClass) dropModule(outerName) - else outerName - Some(outerNameModule.toString) - } - } - - val innerName: Option[String] = { - if (innerClassSym.isAnonymousClass || innerClassSym.isAnonymousFunction) None - else { - val original = innerClassSym.initial - Some(atPhase(original.validFor.phaseId)(innerClassSym.name).mangledString) // moduleSuffix for module classes - } - } - - Some(NestedInfo(enclosingClass, outerName, innerName, isStaticNestedClass)) - } - } - - /** - * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain. - * - * The problem is that we are interested in a source-level property. Various phases changed the - * symbol's properties in the meantime, mostly lambdalift modified (destructively) the owner. - * Therefore, `sym.isStatic` is not what we want. For example, in - * object T { def f { object U } } - * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here. - */ - extension (sym: Symbol) - private def isOriginallyStaticOwner: Boolean = - sym.is(PackageClass) || sym.is(ModuleClass) && sym.originalOwner.originalLexicallyEnclosingClass.isOriginallyStaticOwner - - /** - * Return the Java modifiers for the given symbol. - * Java modifiers for classes: - * - public, abstract, final, strictfp (not used) - * for interfaces: - * - the same as for classes, without 'final' - * for fields: - * - public, private (*) - * - static, final - * for methods: - * - the same as for fields, plus: - * - abstract, synchronized (not used), strictfp (not used), native (not used) - * for all: - * - deprecated - * - * (*) protected cannot be used, since inner classes 'see' protected members, - * and they would fail verification after lifted. - */ - final def javaFlags(sym: Symbol): Int = { - - // Classes are always emitted as public. This matches the behavior of Scala 2 - // and is necessary for object deserialization to work properly, otherwise - // ModuleSerializationProxy may fail with an accessiblity error (see - // tests/run/serialize.scala and https://github.com/typelevel/cats-effect/pull/2360). 
- val privateFlag = !sym.isClass && (sym.is(Private) || (sym.isPrimaryConstructor && sym.owner.isTopLevelModuleClass)) - - val finalFlag = sym.is(Final) && !toDenot(sym).isClassConstructor && !sym.is(Mutable, butNot = Accessor) && !sym.enclosingClass.is(Trait) - - import asm.Opcodes._ - import GenBCodeOps.addFlagIf - 0 .addFlagIf(privateFlag, ACC_PRIVATE) - .addFlagIf(!privateFlag, ACC_PUBLIC) - .addFlagIf(sym.is(Deferred) || sym.isOneOf(AbstractOrTrait), ACC_ABSTRACT) - .addFlagIf(sym.isInterface, ACC_INTERFACE) - .addFlagIf(finalFlag - // Primitives are "abstract final" to prohibit instantiation - // without having to provide any implementations, but that is an - // illegal combination of modifiers at the bytecode level so - // suppress final if abstract if present. - && !sym.isOneOf(AbstractOrTrait) - // Mixin forwarders are bridges and can be final, but final bridges confuse some frameworks - && !sym.is(Bridge), ACC_FINAL) - .addFlagIf(sym.isStaticMember, ACC_STATIC) - .addFlagIf(sym.is(Bridge), ACC_BRIDGE | ACC_SYNTHETIC) - .addFlagIf(sym.is(Artifact), ACC_SYNTHETIC) - .addFlagIf(sym.isClass && !sym.isInterface, ACC_SUPER) - .addFlagIf(sym.isAllOf(JavaEnum), ACC_ENUM) - .addFlagIf(sym.is(JavaVarargs), ACC_VARARGS) - .addFlagIf(sym.is(Synchronized), ACC_SYNCHRONIZED) - .addFlagIf(sym.isDeprecated, ACC_DEPRECATED) - .addFlagIf(sym.is(Enum), ACC_ENUM) - } - - def javaFieldFlags(sym: Symbol) = { - import asm.Opcodes._ - import GenBCodeOps.addFlagIf - javaFlags(sym) - .addFlagIf(sym.hasAnnotation(TransientAttr), ACC_TRANSIENT) - .addFlagIf(sym.hasAnnotation(VolatileAttr), ACC_VOLATILE) - .addFlagIf(!sym.is(Mutable), ACC_FINAL) - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala b/tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala deleted file mode 100644 index 551d4f8d809e..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala +++ /dev/null @@ -1,147 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import java.io.{ DataOutputStream, FileOutputStream, IOException, File as JFile } -import java.nio.channels.ClosedByInterruptException -import dotty.tools.io._ -import dotty.tools.dotc.report - - -/** Can't output a file due to the state of the file system. */ -class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) - -/** For the last mile: turning generated bytecode in memory into - * something you can use. Has implementations for writing to class - * files, jars, and disassembled/javap output. 
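The addFlagIf chain above just ORs bits in conditionally; a tiny self-contained version (the extension here mirrors GenBCodeOps.addFlagIf but is written out for illustration, with flag values taken from the class file format):

object FlagsDemo {
  extension (flags: Int)
    def addFlagIf(cond: Boolean, flag: Int): Int = if (cond) flags | flag else flags

  val ACC_PUBLIC = 0x0001
  val ACC_STATIC = 0x0008
  val ACC_FINAL  = 0x0010

  def main(args: Array[String]): Unit = {
    val isFinal  = true
    val isStatic = false
    val acc = 0
      .addFlagIf(true, ACC_PUBLIC)
      .addFlagIf(isStatic, ACC_STATIC)
      .addFlagIf(isFinal, ACC_FINAL)
    assert(acc == (ACC_PUBLIC | ACC_FINAL))
  }
}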
- */ -trait BytecodeWriters { - val int: DottyBackendInterface - import int.{_, given} - - /** - * @param clsName cls.getName - */ - def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - def ensureDirectory(dir: AbstractFile): AbstractFile = - if (dir.isDirectory) dir - else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) - var dir = base - val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part - ensureDirectory(dir) fileNamed pathParts.last + suffix - } - def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile = - getFile(outputDirectory, clsName, suffix) - - def factoryNonJarBytecodeWriter(): BytecodeWriter = { - val emitAsmp = None - val doDump = dumpClasses - (emitAsmp.isDefined, doDump.isDefined) match { - case (false, false) => new ClassBytecodeWriter { } - case (false, true ) => new ClassBytecodeWriter with DumpBytecodeWriter { } - case (true, false) => new ClassBytecodeWriter with AsmpBytecodeWriter - case (true, true ) => new ClassBytecodeWriter with AsmpBytecodeWriter with DumpBytecodeWriter { } - } - } - - trait BytecodeWriter { - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit - def close(): Unit = () - } - - class DirectToJarfileWriter(jfile: JFile) extends BytecodeWriter { - val writer = new Jar(jfile).jarWriter() - - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - assert(outfile == null, - "The outfile formal param is there just because ClassBytecodeWriter overrides this method and uses it.") - val path = jclassName + ".class" - val out = writer.newOutputStream(path) - - try out.write(jclassBytes, 0, jclassBytes.length) - finally out.flush() - - report.informProgress("added " + label + path + " to jar") - } - override def close() = writer.close() - } - - /* - * The ASM textual representation for bytecode overcomes disadvantages of javap output in three areas: - * (a) pickle dingbats undecipherable to the naked eye; - * (b) two constant pools, while having identical contents, are displayed differently due to physical layout. - * (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, - * their expansion by ASM is more readable. - * - * */ - trait AsmpBytecodeWriter extends BytecodeWriter { - import scala.tools.asm - - private val baseDir = new Directory(None.get).createDirectory() // FIXME missing directoy - // new needed here since resolution of user-defined `apply` methods is ambiguous, and we want the constructor. 
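The writer returned by factoryNonJarBytecodeWriter is assembled from stackable traits; the shape of that pattern, with made-up names, looks like this:

trait Writer { def write(name: String): Unit }

class BaseWriter extends Writer {
  def write(name: String): Unit = println(s"class $name")
}

trait AsmpWriter extends Writer {
  abstract override def write(name: String): Unit = { super.write(name); println(s"asmp  $name") }
}

trait DumpWriter extends Writer {
  abstract override def write(name: String): Unit = { super.write(name); println(s"dump  $name") }
}

@main def writersDemo(): Unit =
  val w = new BaseWriter with AsmpWriter with DumpWriter
  w.write("p/C")          // emits the class file, then the asmp text, then the dump copy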
- - private def emitAsmp(jclassBytes: Array[Byte], asmpFile: dotty.tools.io.File): Unit = { - val pw = asmpFile.printWriter() - try { - val cnode = new ClassNode1() - val cr = new asm.ClassReader(jclassBytes) - cr.accept(cnode, 0) - val trace = new scala.tools.asm.util.TraceClassVisitor(new java.io.PrintWriter(new java.io.StringWriter())) - cnode.accept(trace) - trace.p.print(pw) - } - finally pw.close() - } - - abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - super.writeClass(label, jclassName, jclassBytes, outfile) - - val segments = jclassName.split("[./]") - val asmpFile = segments.foldLeft(baseDir: Path)(_ / _).changeExtension("asmp").toFile - - asmpFile.parent.createDirectory() - emitAsmp(jclassBytes, asmpFile) - } - } - - trait ClassBytecodeWriter extends BytecodeWriter { - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - assert(outfile != null, - "Precisely this override requires its invoker to hand out a non-null AbstractFile.") - val outstream = new DataOutputStream(outfile.bufferedOutput) - - try outstream.write(jclassBytes, 0, jclassBytes.length) - catch case ex: ClosedByInterruptException => - try - outfile.delete() // don't leave an empty or half-written classfile around after an interrupt - catch - case _: Throwable => - throw ex - finally outstream.close() - report.informProgress("wrote '" + label + "' to " + outfile) - } - } - - trait DumpBytecodeWriter extends BytecodeWriter { - val baseDir = Directory(dumpClasses.get).createDirectory() - - abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - super.writeClass(label, jclassName, jclassBytes, outfile) - - val pathName = jclassName - val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _).changeExtension("class").toFile - dumpFile.parent.createDirectory() - val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path)) - - try outstream.write(jclassBytes, 0, jclassBytes.length) - finally outstream.close() - } - } - - private def dumpClasses: Option[String] = - if (ctx.settings.Ydumpclasses.isDefault) None - else Some(ctx.settings.Ydumpclasses.value) -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java b/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java deleted file mode 100644 index c5594ae3dea6..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package dotty.tools.backend.jvm; - -import scala.tools.asm.MethodVisitor; -import scala.tools.asm.Opcodes; -import scala.tools.asm.tree.ClassNode; -import scala.tools.asm.tree.MethodNode; - -/** - * A subclass of {@link ClassNode} to customize the representation of - * label nodes with {@link LabelNode1}. 
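What emitAsmp does can be reproduced outside the compiler with the standard ASM distribution (org.objectweb.asm plus asm-util assumed on the classpath; the compiler itself uses its bundled scala.tools.asm fork):

import java.io.PrintWriter
import java.nio.file.{Files, Paths}
import org.objectweb.asm.ClassReader
import org.objectweb.asm.util.TraceClassVisitor

object AsmpDump {
  def main(args: Array[String]): Unit = {
    val bytes = Files.readAllBytes(Paths.get(args(0)))       // path to a .class file
    val out   = new PrintWriter(Console.out)
    new ClassReader(bytes).accept(new TraceClassVisitor(out), 0)
    out.flush()
  }
}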
- */ -public class ClassNode1 extends ClassNode { - public ClassNode1() { - this(Opcodes.ASM6); - } - - public ClassNode1(int api) { - super(api); - } - - @Override - public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { - MethodNode method = new MethodNode1(access, name, descriptor, signature, exceptions); - methods.add(method); - return method; - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala b/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala deleted file mode 100644 index 299c1c75d6cf..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala +++ /dev/null @@ -1,48 +0,0 @@ -package dotty.tools.backend.jvm - -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Flags.Trait -import dotty.tools.dotc.transform.MegaPhase.MiniPhase - -/** Collect all super calls to trait members. - * - * For each super reference to trait member, register a call from the current class to the - * owner of the referenced member. - * - * This information is used to know if it is safe to remove a redundant mixin class. - * A redundant mixin class is one that is implemented by another mixin class. As the - * methods in a redundant mixin class could be implemented with a default abstract method, - * the redundant mixin class could be required as a parent by the JVM. - */ -class CollectSuperCalls extends MiniPhase { - import tpd._ - - override def phaseName: String = CollectSuperCalls.name - - override def description: String = CollectSuperCalls.description - - override def transformSelect(tree: Select)(using Context): Tree = { - tree.qualifier match { - case sup: Super => - if (tree.symbol.owner.is(Trait)) - registerSuperCall(ctx.owner.enclosingClass.asClass, tree.symbol.owner.asClass) - case _ => - } - tree - } - - private def registerSuperCall(sym: ClassSymbol, calls: ClassSymbol)(using Context) = { - genBCodePhase match { - case genBCodePhase: GenBCode => - genBCodePhase.registerSuperCall(sym, calls) - case _ => - } - } -} - -object CollectSuperCalls: - val name: String = "collectSuperCalls" - val description: String = "find classes that are called with super" diff --git a/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala b/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala deleted file mode 100644 index d5fce3f53627..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala +++ /dev/null @@ -1,294 +0,0 @@ -package dotty.tools -package backend -package jvm - - -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.transform.Erasure -import scala.tools.asm.{Handle, Opcodes} -import dotty.tools.dotc.core.StdNames - -/** - * Core BTypes and some other definitions. The initialization of these definitions requies access - * to symbols / types (global). - * - * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To - * make sure the definitions are consistent with the symbols in the current run, the - * `intializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each - * compiler run. - * - * The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The - * reason is that having a `var bTypes: CoreBTypes` would not allow `import bTypes._`. 
Instead, the - * proxy class holds a `CoreBTypes` in a variable field and forwards to this instance. - * - * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When - * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the - * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned - * in the proxy before the fields are initialized. - * - * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap - * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are - * added when the ClassBTypes are created. The per run cache removes them, so they would be missing - * in the second run. - */ -class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) { - import bTypes._ - import int.given - import DottyBackendInterface._ - - //import global._ - //import rootMirror.{requiredClass, getClassIfDefined} - //import definitions._ - - /** - * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above - * the first use of `classBTypeFromSymbol` because that method looks at the map. - */ - lazy val primitiveTypeMap: Map[Symbol, PrimitiveBType] = Map( - defn.UnitClass -> UNIT, - defn.BooleanClass -> BOOL, - defn.CharClass -> CHAR, - defn.ByteClass -> BYTE, - defn.ShortClass -> SHORT, - defn.IntClass -> INT, - defn.LongClass -> LONG, - defn.FloatClass -> FLOAT, - defn.DoubleClass -> DOUBLE - ) - - private lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void]) - private lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Boolean]) - private lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Byte]) - private lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Short]) - private lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Character]) - private lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Integer]) - private lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Long]) - private lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Float]) - private lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Double]) - - /** - * Map from primitive types to their boxed class type. Useful when pushing class literals onto the - * operand stack (ldc instruction taking a class literal), see genConstant. - */ - lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map( - UNIT -> BOXED_UNIT, - BOOL -> BOXED_BOOLEAN, - BYTE -> BOXED_BYTE, - SHORT -> BOXED_SHORT, - CHAR -> BOXED_CHAR, - INT -> BOXED_INT, - LONG -> BOXED_LONG, - FLOAT -> BOXED_FLOAT, - DOUBLE -> BOXED_DOUBLE - ) - - lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet - - /** - * Maps the method symbol for a box method to the boxed type of the result. For example, the - * method symbol for `Byte.box()` is mapped to the ClassBType `java/lang/Byte`. - */ - lazy val boxResultType: Map[Symbol, ClassBType] = { - val boxMethods = defn.ScalaValueClasses().map{x => // @darkdimius Are you sure this should be a def? 
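A runtime analogue of boxedClassOfPrimitive, using plain java.lang classes (illustrative only; the backend works with ClassBTypes, not Class values):

object BoxTable {
  val boxedClassOf: Map[Class[?], Class[?]] = Map(
    java.lang.Integer.TYPE   -> classOf[java.lang.Integer],
    java.lang.Boolean.TYPE   -> classOf[java.lang.Boolean],
    java.lang.Character.TYPE -> classOf[java.lang.Character],
    java.lang.Double.TYPE    -> classOf[java.lang.Double]
  )

  def main(args: Array[String]): Unit =
    assert(boxedClassOf(java.lang.Integer.TYPE) == classOf[java.lang.Integer])
}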
- (x, Erasure.Boxing.boxMethod(x.asClass)) - }.toMap - for ((valueClassSym, boxMethodSym) <- boxMethods) - yield boxMethodSym -> boxedClassOfPrimitive(primitiveTypeMap(valueClassSym)) - } - - /** - * Maps the method symbol for an unbox method to the primitive type of the result. - * For example, the method symbol for `Byte.unbox()`) is mapped to the PrimitiveBType BYTE. */ - lazy val unboxResultType: Map[Symbol, PrimitiveBType] = { - val unboxMethods: Map[Symbol, Symbol] = - defn.ScalaValueClasses().map(x => (x, Erasure.Boxing.unboxMethod(x.asClass))).toMap - for ((valueClassSym, unboxMethodSym) <- unboxMethods) - yield unboxMethodSym -> primitiveTypeMap(valueClassSym) - } - - /* - * srNothingRef and srNullRef exist at run-time only. They are the bytecode-level manifestation (in - * method signatures only) of what shows up as NothingClass (scala.Nothing) resp. NullClass (scala.Null) in Scala ASTs. - * - * Therefore, when srNothingRef or srNullRef are to be emitted, a mapping is needed: the internal - * names of NothingClass and NullClass can't be emitted as-is. - * TODO @lry Once there's a 2.11.3 starr, use the commented argument list. The current starr crashes on the type literal `scala.runtime.Nothing$` - */ - lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) // (requiredClass[scala.runtime.Nothing$]) - lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) // (requiredClass[scala.runtime.Null$]) - - lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) - lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) - lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) - lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) - lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) - lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) - lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) - lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) // java/lang/Cloneable - lazy val jioSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) // java/io/Serializable - lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) // java/lang/ClassCastException - lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) - lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) - - lazy val srBoxesRunTimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) - - private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) - private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) - private lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(defn.MethodHandleClass) - private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(defn.MethodHandlesLookupClass) - private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) - private lazy val jliStringConcatFactoryRef : ClassBType = 
classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 - private lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) - - lazy val jliLambdaMetaFactoryMetafactoryHandle: Handle = new Handle( - Opcodes.H_INVOKESTATIC, - jliLambdaMetafactoryRef.internalName, - "metafactory", - MethodBType( - List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, jliMethodTypeRef, jliMethodHandleRef, jliMethodTypeRef), - jliCallSiteRef - ).descriptor, - /* itf = */ false) - - lazy val jliLambdaMetaFactoryAltMetafactoryHandle: Handle = new Handle( - Opcodes.H_INVOKESTATIC, - jliLambdaMetafactoryRef.internalName, - "altMetafactory", - MethodBType( - List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, ArrayBType(ObjectRef)), - jliCallSiteRef - ).descriptor, - /* itf = */ false) - - lazy val jliLambdaDeserializeBootstrapHandle: Handle = new Handle( - Opcodes.H_INVOKESTATIC, - srLambdaDeserialize.internalName, - "bootstrap", - MethodBType( - List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, ArrayBType(jliMethodHandleRef)), - jliCallSiteRef - ).descriptor, - /* itf = */ false) - - lazy val jliStringConcatFactoryMakeConcatWithConstantsHandle = new Handle( - Opcodes.H_INVOKESTATIC, - jliStringConcatFactoryRef.internalName, - "makeConcatWithConstants", - MethodBType( - List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, StringRef, ArrayBType(ObjectRef)), - jliCallSiteRef - ).descriptor, - /* itf = */ false) - - /** - * Methods in scala.runtime.BoxesRuntime - */ - lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map( - BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)), - BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)), - CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)), - SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)), - INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)), - LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)), - FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)), - DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE)) - ) - - lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map( - BOOL -> MethodNameAndType("unboxToBoolean", MethodBType(List(ObjectRef), BOOL)), - BYTE -> MethodNameAndType("unboxToByte", MethodBType(List(ObjectRef), BYTE)), - CHAR -> MethodNameAndType("unboxToChar", MethodBType(List(ObjectRef), CHAR)), - SHORT -> MethodNameAndType("unboxToShort", MethodBType(List(ObjectRef), SHORT)), - INT -> MethodNameAndType("unboxToInt", MethodBType(List(ObjectRef), INT)), - LONG -> MethodNameAndType("unboxToLong", MethodBType(List(ObjectRef), LONG)), - FLOAT -> MethodNameAndType("unboxToFloat", MethodBType(List(ObjectRef), FLOAT)), - DOUBLE -> MethodNameAndType("unboxToDouble", MethodBType(List(ObjectRef), DOUBLE)) - ) - - lazy val typeOfArrayOp: Map[Int, BType] = { - import dotty.tools.backend.ScalaPrimitivesOps._ - Map( - (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++ - (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++ - (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++ - (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++ - (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++ - (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++ - 
(List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++ - (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++ - (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) : _* - ) - } -} - -/** - * This trait make some core BTypes availalbe that don't depend on a Global instance. Some core - * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. - * - * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example - * the type Symbol in - * def primitiveTypeMap: Map[Symbol, PrimitiveBType] - */ -trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { - val bTypes: BTS - import bTypes._ - - def boxedClasses: Set[ClassBType] - - def srNothingRef : ClassBType - def srNullRef : ClassBType - - def ObjectRef : ClassBType - def jlCloneableRef : ClassBType - def jiSerializableRef : ClassBType -} - -/** - * See comment in class [[CoreBTypes]]. - */ -final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) extends CoreBTypesProxyGlobalIndependent[BTFS] { - import bTypes._ - - private var _coreBTypes: CoreBTypes[bTypes.type] = _ - def setBTypes(coreBTypes: CoreBTypes[BTFS]): Unit = { - _coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]] - } - - def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap - - def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses - - def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive - - def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType - - def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType - - def srNothingRef : ClassBType = _coreBTypes.srNothingRef - def srNullRef : ClassBType = _coreBTypes.srNullRef - - def ObjectRef : ClassBType = _coreBTypes.ObjectRef - def StringRef : ClassBType = _coreBTypes.StringRef - def jlStringBuilderRef : ClassBType = _coreBTypes.jlStringBuilderRef - def jlStringBufferRef : ClassBType = _coreBTypes.jlStringBufferRef - def jlCharSequenceRef : ClassBType = _coreBTypes.jlCharSequenceRef - def jlClassRef : ClassBType = _coreBTypes.jlClassRef - def jlThrowableRef : ClassBType = _coreBTypes.jlThrowableRef - def jlCloneableRef : ClassBType = _coreBTypes.jlCloneableRef - def jiSerializableRef : ClassBType = _coreBTypes.jioSerializableRef - def jlClassCastExceptionRef : ClassBType = _coreBTypes.jlClassCastExceptionRef - def jlIllegalArgExceptionRef : ClassBType = _coreBTypes.jlIllegalArgExceptionRef - def jliSerializedLambdaRef : ClassBType = _coreBTypes.jliSerializedLambdaRef - - def srBoxesRuntimeRef: ClassBType = _coreBTypes.srBoxesRunTimeRef - - def jliLambdaMetaFactoryMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryMetafactoryHandle - def jliLambdaMetaFactoryAltMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - def jliLambdaDeserializeBootstrapHandle : Handle = _coreBTypes.jliLambdaDeserializeBootstrapHandle - def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle = _coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle - - def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo - def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo - - def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala b/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala deleted file mode 
100644 index 6ce434015b8c..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala +++ /dev/null @@ -1,204 +0,0 @@ -package dotty.tools.backend.jvm - -import scala.language.unsafeNulls - -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.transform.SymUtils._ -import java.io.{File => _} - -import scala.reflect.ClassTag -import dotty.tools.io.AbstractFile -import dotty.tools.dotc.core._ -import Contexts._ -import Types._ -import Symbols._ -import Phases._ -import Decorators.em - -import dotty.tools.dotc.util.ReadOnlyMap -import dotty.tools.dotc.report - -import tpd._ - -import StdNames.nme -import NameKinds.LazyBitMapName -import Names.Name - -class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: DetachedContext) { - - private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] - - def cachedDesugarIdent(i: Ident): Option[tpd.Select] = { - var found = desugared.get(i.tpe) - if (found == null) { - tpd.desugarIdent(i) match { - case sel: tpd.Select => - desugared.put(i.tpe, sel) - found = sel - case _ => - } - } - if (found == null) None else Some(found) - } - - object DesugaredSelect extends DeconstructorCommon[tpd.Tree] { - - var desugared: tpd.Select = null - - override def isEmpty: Boolean = - desugared eq null - - def _1: Tree = desugared.qualifier - - def _2: Name = desugared.name - - override def unapply(s: tpd.Tree): this.type = { - s match { - case t: tpd.Select => desugared = t - case t: Ident => - cachedDesugarIdent(t) match { - case Some(t) => desugared = t - case None => desugared = null - } - case _ => desugared = null - } - - this - } - } - - object ArrayValue extends DeconstructorCommon[tpd.JavaSeqLiteral] { - def _1: Type = field.tpe match { - case JavaArrayType(elem) => elem - case _ => - report.error(em"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) - UnspecifiedErrorType - } - def _2: List[Tree] = field.elems - } - - abstract class DeconstructorCommon[T >: Null <: AnyRef] { - var field: T = null - def get: this.type = this - def isEmpty: Boolean = field eq null - def isDefined = !isEmpty - def unapply(s: T): this.type ={ - field = s - this - } - } - -} - -object DottyBackendInterface { - - private def erasureString(clazz: Class[_]): String = { - if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]" - else clazz.getName - } - - def requiredClass(str: String)(using Context): ClassSymbol = - Symbols.requiredClass(str) - - def requiredClass[T](using evidence: ClassTag[T], ctx: Context): Symbol = - requiredClass(erasureString(evidence.runtimeClass)) - - def requiredModule(str: String)(using Context): Symbol = - Symbols.requiredModule(str) - - def requiredModule[T](using evidence: ClassTag[T], ctx: Context): Symbol = { - val moduleName = erasureString(evidence.runtimeClass) - val className = if (moduleName.endsWith("$")) moduleName.dropRight(1) else moduleName - requiredModule(className) - } - - given symExtensions: AnyRef with - extension (sym: Symbol) - - def isInterface(using Context): Boolean = (sym.is(PureInterface)) || sym.is(Trait) - - def isStaticConstructor(using Context): Boolean = (sym.isStaticMember && sym.isClassConstructor) || (sym.name eq nme.STATIC_CONSTRUCTOR) - - /** Fields of static modules will be static at backend - * - * Note that lazy val encoding assumes bitmap fields are non-static. 
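For readers unfamiliar with the module encoding that `isStaticModuleField` and the surrounding predicates reason about, a rough illustration in ordinary user code (not compiler code; the exact field layout depends on the lazy-val encoding in use):

    object Counter {
      // Compiles to a module class Counter$ holding a static MODULE$ instance;
      // `start` becomes an instance field of Counter$ that the backend can expose
      // through static forwarders on the mirror class Counter.
      val start: Int = 0

      // The state needed to initialize a lazy val (bitmap or flag field) is
      // per-instance, i.e. non-static, which is why fields named by
      // LazyBitMapName are excluded from isStaticModuleField.
      lazy val expensive: Int = (1 to 1000).sum
    }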
- * See also `genPlainClass` in `BCodeSkelBuilder.scala`. - * - * TODO: remove the special handing of `LazyBitMapName` once we swtich to - * the new lazy val encoding: https://github.com/scala/scala3/issues/7140 - */ - def isStaticModuleField(using Context): Boolean = - sym.owner.isStaticModuleClass && sym.isField && !sym.name.is(LazyBitMapName) - - def isStaticMember(using Context): Boolean = (sym ne NoSymbol) && - (sym.is(JavaStatic) || sym.isScalaStatic || sym.isStaticModuleField) - // guard against no sumbol cause this code is executed to select which call type(static\dynamic) to use to call array.clone - - /** - * True for module classes of modules that are top-level or owned only by objects. Module classes - * for such objects will get a MODULE$ flag and a corresponding static initializer. - */ - def isStaticModuleClass(using Context): Boolean = - (sym.is(Module)) && { - // scalac uses atPickling here - // this would not work if modules are created after pickling - // for example by specialization - val original = toDenot(sym).initial - val validity = original.validFor - atPhase(validity.phaseId) { - toDenot(sym).isStatic - } - } - - - - def originalLexicallyEnclosingClass(using Context): Symbol = - // used to populate the EnclosingMethod attribute. - // it is very tricky in presence of classes(and annonymous classes) defined inside supper calls. - if (sym.exists) { - val validity = toDenot(sym).initial.validFor - atPhase(validity.phaseId) { - toDenot(sym).lexicallyEnclosingClass - } - } else NoSymbol - - /** - * True for module classes of package level objects. The backend will generate a mirror class for - * such objects. - */ - def isTopLevelModuleClass(using Context): Boolean = - sym.is(ModuleClass) && - atPhase(flattenPhase) { - toDenot(sym).owner.is(PackageClass) - } - - def javaSimpleName(using Context): String = toDenot(sym).name.mangledString - def javaClassName(using Context): String = toDenot(sym).fullName.mangledString - def javaBinaryName(using Context): String = javaClassName.replace('.', '/') - - end extension - - end symExtensions - - private val primitiveCompilationUnits = Set( - "Unit.scala", - "Boolean.scala", - "Char.scala", - "Byte.scala", - "Short.scala", - "Int.scala", - "Float.scala", - "Long.scala", - "Double.scala" - ) - - /** - * True if the current compilation unit is of a primitive class (scala.Boolean et al). - * Used only in assertions. 
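The name forms used above (`javaClassName` versus `javaBinaryName`) differ only in the package separator. A tiny hypothetical helper, not the compiler's extension method, makes the relationship concrete:

    // JVM "binary" (internal) names use '/' where source-level names use '.'.
    def javaBinaryNameOf(fullClassName: String): String =
      fullClassName.replace('.', '/')

    // javaBinaryNameOf("scala.collection.immutable.List") == "scala/collection/immutable/List"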
- */ - def isCompilingPrimitive(using Context) = { - primitiveCompilationUnits(ctx.compilationUnit.source.file.name) - } - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala deleted file mode 100644 index 3bc9fb5592ee..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala +++ /dev/null @@ -1,672 +0,0 @@ -package dotty.tools.backend.jvm - -import scala.language.unsafeNulls - -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Phases.Phase - -import scala.collection.mutable -import scala.jdk.CollectionConverters._ -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.interfaces -import dotty.tools.dotc.report - -import dotty.tools.dotc.util.SourceFile -import java.util.Optional - -import dotty.tools.dotc.core._ -import dotty.tools.dotc.sbt.ExtractDependencies -import Contexts._ -import Phases._ -import Symbols._ -import Decorators.em - -import java.io.DataOutputStream -import java.nio.channels.ClosedByInterruptException - -import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler, UnpicklerConfig } -import dotty.tools.tasty.core.TastyUnpickler - -import scala.tools.asm -import scala.tools.asm.Handle -import scala.tools.asm.tree._ -import tpd._ -import StdNames._ -import dotty.tools.io._ -import scala.tools.asm.MethodTooLargeException -import scala.tools.asm.ClassTooLargeException - -class GenBCode extends Phase { - - override def phaseName: String = GenBCode.name - - override def description: String = GenBCode.description - - private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] - def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { - val old = superCallsMap.getOrElse(sym, Set.empty) - superCallsMap.update(sym, old + calls) - } - - private val entryPoints = new mutable.HashSet[String]() - def registerEntryPoint(s: String): Unit = entryPoints += s - - private var myOutput: AbstractFile = _ - - private def outputDir(using Context): AbstractFile = { - if (myOutput eq null) - myOutput = ctx.settings.outputDir.value - myOutput - } - - private var myPrimitives: DottyPrimitives = null - - override def run(using Context): Unit = - inDetachedContext: ctx ?=> - if myPrimitives == null then myPrimitives = new DottyPrimitives(ctx) - new GenBCodePipeline( - DottyBackendInterface(outputDir, superCallsMap), - myPrimitives - ).run(ctx.compilationUnit.tpdTree) - - - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - outputDir match - case jar: JarArchive => - updateJarManifestWithMainClass(jar, entryPoints.toList) - case _ => - try super.runOn(units) - finally outputDir match { - case jar: JarArchive => - if (ctx.run.nn.suspendedUnits.nonEmpty) - // If we close the jar the next run will not be able to write on the jar. - // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. - report.error("Can not suspend and output to a jar at the same time. 
See suspension with -Xprint-suspension.") - - jar.close() - case _ => - } - } - - private def updateJarManifestWithMainClass(jarArchive: JarArchive, entryPoints: List[String])(using Context): Unit = - val mainClass = Option.when(!ctx.settings.XmainClass.isDefault)(ctx.settings.XmainClass.value).orElse { - entryPoints match - case List(mainClass) => - Some(mainClass) - case Nil => - report.warning("No Main-Class designated or discovered.") - None - case mcs => - report.warning(s"No Main-Class due to multiple entry points:\n ${mcs.mkString("\n ")}") - None - } - mainClass.map { mc => - val manifest = Jar.WManifest() - manifest.mainClass = mc - val file = jarArchive.subdirectoryNamed("META-INF").fileNamed("MANIFEST.MF") - val os = file.output - manifest.underlying.write(os) - os.close() - } - end updateJarManifestWithMainClass -} - -object GenBCode { - val name: String = "genBCode" - val description: String = "generate JVM bytecode" -} - -class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrimitives)(using DetachedContext) extends BCodeSyncAndTry { - import DottyBackendInterface.symExtensions - - private var tree: Tree = _ - - private val sourceFile: SourceFile = ctx.compilationUnit.source - - /** Convert a `dotty.tools.io.AbstractFile` into a - * `dotty.tools.dotc.interfaces.AbstractFile`. - */ - private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = - new interfaces.AbstractFile { - override def name = absfile.name - override def path = absfile.path - override def jfile = Optional.ofNullable(absfile.file) - } - - final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit) - -// class BCodePhase() { - - private var bytecodeWriter : BytecodeWriter = null - private var mirrorCodeGen : JMirrorBuilder = null - - /* ---------------- q1 ---------------- */ - - case class Item1(arrivalPos: Int, cd: TypeDef, cunit: CompilationUnit) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - private val poison1 = Item1(Int.MaxValue, null, ctx.compilationUnit) - private val q1 = new java.util.LinkedList[Item1] - - /* ---------------- q2 ---------------- */ - - case class SubItem2(classNode: asm.tree.ClassNode, - file: dotty.tools.io.AbstractFile) - - case class Item2(arrivalPos: Int, - mirror: SubItem2, - plain: SubItem2) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - - private val poison2 = Item2(Int.MaxValue, null, null) - private val q2 = new _root_.java.util.LinkedList[Item2] - - /* ---------------- q3 ---------------- */ - - /* - * An item of queue-3 (the last queue before serializing to disk) contains three of these - * (one for each of mirror and plain classes). 
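The queues q1/q2/q3 introduced here form a hand-rolled pipeline terminated by poison items whose arrivalPos is Int.MaxValue. A minimal standalone sketch of that pattern, with an invented item type and stage function (it assumes the producer always ends the input queue with a poison item):

    import java.util.LinkedList

    case class Item(arrivalPos: Int, payload: String) {
      def isPoison: Boolean = arrivalPos == Int.MaxValue
    }

    // Drain the input queue, transform each item, and forward the poison pill so
    // the next stage also knows when to stop.
    def stage(in: LinkedList[Item], out: LinkedList[Item])(transform: String => String): Unit = {
      var done = false
      while (!done) {
        val item = in.poll()
        if (item.isPoison) { out.add(item); done = true }
        else out.add(item.copy(payload = transform(item.payload)))
      }
    }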
- * - * @param jclassName internal name of the class - * @param jclassBytes bytecode emitted for the class SubItem3 represents - */ - case class SubItem3( - jclassName: String, - jclassBytes: Array[Byte], - jclassFile: dotty.tools.io.AbstractFile - ) - - case class Item3(arrivalPos: Int, - mirror: SubItem3, - plain: SubItem3) { - - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - private val i3comparator = new java.util.Comparator[Item3] { - override def compare(a: Item3, b: Item3) = { - if (a.arrivalPos < b.arrivalPos) -1 - else if (a.arrivalPos == b.arrivalPos) 0 - else 1 - } - } - private val poison3 = Item3(Int.MaxValue, null, null) - private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator) - - /* - * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2 - */ - class Worker1(needsOutFolder: Boolean) { - - private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] - private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { - val lowerCaseName = javaClassName.toLowerCase - lowerCaseNames.get(lowerCaseName) match { - case None => - lowerCaseNames.put(lowerCaseName, classSymbol) - case Some(dupClassSym) => - // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting - val (cl1, cl2) = - if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) - else (dupClassSym, classSymbol) - val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString - atPhase(typerPhase) { - if (same) - report.error( - em"$cl1 and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) - else - report.warning( - em"""$cl1 differs only in case from ${cl2.showLocated}. - |uch classes will overwrite one another on case-insensitive filesystems.""", cl1.sourcePos) - } - } - } - - def run(): Unit = { - while (true) { - val item = q1.poll - if (item.isPoison) { - q2 add poison2 - return - } - else { - try { /*withCurrentUnit(item.cunit)*/(visit(item)) } - catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.cunit.source.file.name}") - throw ex - } - } - } - } - - /* - * Checks for duplicate internal names case-insensitively, - * builds ASM ClassNodes for mirror and plain classes; - * enqueues them in queue-2. - * - */ - def visit(item: Item1): Boolean = { - val Item1(arrivalPos, cd, cunit) = item - val claszSymbol = cd.symbol - - // -------------- mirror class, if needed -------------- - val mirrorC = - if (claszSymbol.isTopLevelModuleClass) { - if (claszSymbol.companionClass == NoSymbol) { - mirrorCodeGen.genMirrorClass(claszSymbol, cunit) - } else { - report.log(s"No mirror class for module with linked class: ${claszSymbol.showFullName}") - null - } - } else null - - // -------------- "plain" class -------------- - val pcb = new PlainClassBuilder(cunit) - pcb.genPlainClass(cd) - val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName) else null; - val plainC = pcb.cnode - - if (claszSymbol.isClass) // @DarkDimius is this test needed here? 
- for (binary <- ctx.compilationUnit.pickled.get(claszSymbol.asClass)) { - val store = if (mirrorC ne null) mirrorC else plainC - val tasty = - val outTastyFile = getFileForClassfile(outF, store.name, ".tasty") - val outstream = new DataOutputStream(outTastyFile.bufferedOutput) - try outstream.write(binary()) - catch case ex: ClosedByInterruptException => - try - outTastyFile.delete() // don't leave an empty or half-written tastyfile around after an interrupt - catch - case _: Throwable => - throw ex - finally outstream.close() - - val uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary()).readHeader() - val lo = uuid.getMostSignificantBits - val hi = uuid.getLeastSignificantBits - - // TASTY attribute is created but only the UUID bytes are stored in it. - // A TASTY attribute has length 16 if and only if the .tasty file exists. - val buffer = new TastyBuffer(16) - buffer.writeUncompressedLong(lo) - buffer.writeUncompressedLong(hi) - buffer.bytes - - val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) - store.visitAttribute(dataAttr) - } - - - // ----------- create files - - val classNodes = List(mirrorC, plainC) - val classFiles = classNodes.map(cls => - if (outF != null && cls != null) { - try { - checkForCaseConflict(cls.name, claszSymbol) - getFileForClassfile(outF, cls.name, ".class") - } catch { - case e: FileConflictException => - report.error(em"error writing ${cls.name}: ${e.getMessage}") - null - } - } else null - ) - - // ----------- compiler and sbt's callbacks - - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { - (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) - } - - for ((cls, clsFile) <- classNodes.zip(classFiles)) { - if (cls != null) { - val className = cls.name.replace('/', '.') - if (ctx.compilerCallback != null) - ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - if (ctx.sbtCallback != null) { - if (isLocal) - ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), clsFile.file) - else { - ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), clsFile.file, - className, fullClassName) - } - } - } - } - - // ----------- hand over to pipeline-2 - - val item2 = - Item2(arrivalPos, - SubItem2(mirrorC, classFiles(0)), - SubItem2(plainC, classFiles(1))) - - q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. - - } // end of method visit(Item1) - - } // end of class BCodePhase.Worker1 - - /* - * Pipeline that takes ClassNodes from queue-2. 
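The TASTY classfile attribute written above carries only the 16-byte UUID of the sibling .tasty file, as two longs with the most significant half first. A rough equivalent using plain java.nio instead of the TastyBuffer API:

    import java.nio.ByteBuffer
    import java.util.UUID

    def tastyAttributePayload(uuid: UUID): Array[Byte] = {
      val buf = ByteBuffer.allocate(16) // big-endian by default
      buf.putLong(uuid.getMostSignificantBits)
      buf.putLong(uuid.getLeastSignificantBits)
      buf.array()
    }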
The unit of work depends on the optimization level: - * - * (a) no optimization involves: - * - converting the plain ClassNode to byte array and placing it on queue-3 - */ - class Worker2 { - import bTypes.ClassBType - import bTypes.coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - // lazy val localOpt = new LocalOpt(new Settings()) - - private def localOptimizations(classNode: ClassNode): Unit = { - // BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode)) - } - - - /* Return an array of all serializable lambdas in this class */ - private def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { - val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] - for (m <- classNode.methods.asScala) { - val iter = m.instructions.iterator - while (iter.hasNext) { - val insn = iter.next() - insn match { - case indy: InvokeDynamicInsnNode - if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => - import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE - val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt - val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 - if isSerializable then - val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] - indyLambdaBodyMethods += implMethod - case _ => - } - } - } - indyLambdaBodyMethods.toArray - } - - /* - * Add: - * - * private static Object $deserializeLambda$(SerializedLambda l) { - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) - * catch { - * case i: IllegalArgumentException => - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) - * catch { - * case i: IllegalArgumentException => - * ... - * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) - * } - * - * We use invokedynamic here to enable caching within the deserializer without needing to - * host a static field in the enclosing class. This allows us to add this method to interfaces - * that define lambdas in default methods. - * - * SI-10232 we can't pass arbitrary number of method handles to the final varargs parameter of the bootstrap - * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target - * methods. - */ - private def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { - import asm.Opcodes._ - import bTypes._ - import coreBTypes._ - - val cw = classNode - - // Make sure to reference the ClassBTypes of all types that are used in the code generated - // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to - // `classBTypeFromInternalNameMap`. When writing the classfile, the asm ClassWriter computes - // stack map frames and invokes the `getCommonSuperClass` method. This method expects all - // ClassBTypes mentioned in the source code to exist in the map. - - val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor - - val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) - def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { - mv.visitVarInsn(ALOAD, 0) - mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) - } - - val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. 
See See MAX_MH_ARITY in CallSite.java - val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray - val numGroups = groups.length - - import scala.tools.asm.Label - val initialLabels = Array.fill(numGroups - 1)(new Label()) - val terminalLabel = new Label - def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) - - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) - } - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitLabel(label) - emitLambdaDeserializeIndy(groups(i).toIndexedSeq) - mv.visitInsn(ARETURN) - } - mv.visitLabel(terminalLabel) - emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) - mv.visitInsn(ARETURN) - } - - private def setInnerClasses(classNode: ClassNode): Unit = if (classNode != null) { - classNode.innerClasses.clear() - val (declared, referred) = collectNestedClasses(classNode) - addInnerClasses(classNode, declared, referred) - } - - /** - * Visit the class node and collect all referenced nested classes. - */ - private def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { - // type InternalName = String - val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { - def declaredNestedClasses(internalName: InternalName): List[ClassBType] = - bTypes.classBTypeFromInternalName(internalName).info.memberClasses - - def getClassIfNested(internalName: InternalName): Option[ClassBType] = { - val c = bTypes.classBTypeFromInternalName(internalName) - Option.when(c.isNestedClass)(c) - } - - def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { - // don't crash on invalid generic signatures - } - } - c.visit(classNode) - (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) - } - - def run(): Unit = { - while (true) { - val item = q2.poll - if (item.isPoison) { - q3 add poison3 - return - } - else { - try { - val plainNode = item.plain.classNode - localOptimizations(plainNode) - val serializableLambdas = collectSerializableLambdas(plainNode) - if (serializableLambdas.nonEmpty) - addLambdaDeserialize(plainNode, serializableLambdas) - setInnerClasses(plainNode) - setInnerClasses(item.mirror.classNode) - addToQ3(item) - } catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.plain.classNode.name}") - throw ex - } - } - } - } - - private def addToQ3(item: Item2) = { - - def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = { - val cw = new CClassWriter(extraProc) - cn.accept(cw) - cw.toByteArray - } - - val Item2(arrivalPos, SubItem2(mirror, mirrorFile), SubItem2(plain, plainFile)) = item - - val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror), mirrorFile) - val plainC = SubItem3(plain.name, getByteArray(plain), plainFile) - - if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { - if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) - AsmUtils.traceClass(plainC.jclassBytes) - } - - q3 add Item3(arrivalPos, mirrorC, plainC) - } - - } // end of class BCodePhase.Worker2 - - var arrivalPos: Int = 0 - - /* - * A run of the BCodePhase phase comprises: - * - * (a) set-up steps (most notably supporting maps in `BCodeTypes`, - * but also "the" writer where class files in byte-array form go) - * - * (b) building of ASM ClassNodes, their optimization and serialization. 
- * - * (c) tear down (closing the classfile-writer and clearing maps) - * - */ - def run(t: Tree)(using Context): Unit = { - this.tree = t - - // val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer) - - // val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer) - arrivalPos = 0 // just in case - // scalaPrimitives.init() - bTypes.intializeCoreBTypes() - // Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart) - - // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated. - bytecodeWriter = initBytecodeWriter() - mirrorCodeGen = new JMirrorBuilder - - val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter] - buildAndSendToDisk(needsOutfileForSymbol) - - // closing output files. - bytecodeWriter.close() - // Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart) - - if (ctx.compilerCallback != null) - ctx.compilerCallback.onSourceCompiled(sourceFile) - - /* TODO Bytecode can be verified (now that all classfiles have been written to disk) - * - * (1) asm.util.CheckAdapter.verify() - * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw) - * passing a custom ClassLoader to verify inter-dependent classes. - * Alternatively, - * - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool). - * - -Xverify:all - * - * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()` - * - */ - } - - /* - * Sequentially: - * (a) place all ClassDefs in queue-1 - * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2 - * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3 - * (d) serialize to disk by draining queue-3. - */ - private def buildAndSendToDisk(needsOutFolder: Boolean)(using Context) = { - try - feedPipeline1() - // val genStart = Statistics.startTimer(BackendStats.bcodeGenStat) - (new Worker1(needsOutFolder)).run() - // Statistics.stopTimer(BackendStats.bcodeGenStat, genStart) - - (new Worker2).run() - - // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) - drainQ3() - // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) - catch - case e: MethodTooLargeException => - val method = - s"${e.getClassName.replaceAll("/", ".")}.${e.getMethodName}" - val msg = - em"Generated bytecode for method '$method' is too large. Size: ${e.getCodeSize} bytes. Limit is 64KB" - report.error(msg) - case e: ClassTooLargeException => - val msg = - em"Class '${e.getClassName.replaceAll("/", ".")}' is too large. Constant pool size: ${e.getConstantPoolCount}. Limit is 64K entries" - report.error(msg) - - } - - /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */ - private def feedPipeline1() = { - def gen(tree: Tree): Unit = { - tree match { - case EmptyTree => () - case PackageDef(_, stats) => stats foreach gen - case ValDef(name, tpt, rhs) => () // module val not emitted - case cd: TypeDef => - q1 add Item1(arrivalPos, cd, int.ctx.compilationUnit) - arrivalPos += 1 - } - } - gen(tree) - q1 add poison1 - } - - /* Pipeline that writes classfile representations to disk. 
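feedPipeline1 above is a shallow traversal of the compilation unit: it recurses only into package definitions, skips module vals, and assigns each class definition an arrival position. A simplified standalone sketch over an invented mini-AST, not the compiler's Trees:

    sealed trait MiniTree
    case class Pkg(stats: List[MiniTree]) extends MiniTree
    case class ModuleVal(name: String)    extends MiniTree // "module val not emitted"
    case class ClassDef(name: String)     extends MiniTree

    def collectClasses(tree: MiniTree): List[(Int, ClassDef)] = {
      val out = List.newBuilder[(Int, ClassDef)]
      var arrivalPos = 0
      def gen(t: MiniTree): Unit = t match {
        case Pkg(stats)   => stats.foreach(gen)
        case ModuleVal(_) => ()                            // skipped, like the ValDef case
        case c: ClassDef  => out += ((arrivalPos, c)); arrivalPos += 1
      }
      gen(tree)
      out.result()
    }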
*/ - private def drainQ3() = { - - def sendToDisk(cfr: SubItem3): Unit = { - if (cfr != null){ - val SubItem3(jclassName, jclassBytes, jclassFile) = cfr - bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, jclassFile) - } - } - - var moreComing = true - // `expected` denotes the arrivalPos whose Item3 should be serialized next - var expected = 0 - - while (moreComing) { - val incoming = q3.poll - moreComing = !incoming.isPoison - if (moreComing) { - val item = incoming - sendToDisk(item.mirror) - sendToDisk(item.plain) - expected += 1 - } - } - - // we're done - assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1") - assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2") - assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3") - - } - //} // end of class BCodePhase -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala deleted file mode 100644 index 210e47566cb9..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala +++ /dev/null @@ -1,16 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.tools.asm - -object GenBCodeOps extends GenBCodeOps - -class GenBCodeOps { - extension (flags: Int) - def addFlagIf(cond: Boolean, flag: Int): Int = if cond then flags | flag else flags - - final val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC - final val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL - final val PrivateStaticFinal = asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala b/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala deleted file mode 100644 index e9e532933290..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala +++ /dev/null @@ -1,326 +0,0 @@ -package dotty.tools.backend.jvm - -import scala.language.unsafeNulls - -import scala.tools.asm.{ClassReader, Type, Handle } -import scala.tools.asm.tree._ - -import scala.collection.mutable -import scala.util.control.{NoStackTrace, NonFatal} -import scala.annotation._ -import scala.jdk.CollectionConverters._ - -// Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf -// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L928 -abstract class GenericSignatureVisitor(nestedOnly: Boolean) { - // For performance (`Char => Boolean` is not specialized) - private trait CharBooleanFunction { def apply(c: Char): Boolean } - - final def visitInternalName(internalName: String): Unit = visitInternalName(internalName, 0, if (internalName eq null) 0 else internalName.length) - def visitInternalName(internalName: String, offset: Int, length: Int): Unit - - def raiseError(msg: String, sig: String, e: Option[Throwable] = None): Unit - - def visitClassSignature(sig: String): Unit = if (sig != null) { - val p = new Parser(sig, nestedOnly) - p.safely { p.classSignature() } - } - - def visitMethodSignature(sig: String): Unit = if (sig != null) { - val p = new Parser(sig, nestedOnly) - p.safely { p.methodSignature() } - } - - def visitFieldSignature(sig: String): Unit = if (sig != null) { - val p = new Parser(sig, nestedOnly) - p.safely { p.fieldSignature() } - } - - private final class Parser(sig: String, nestedOnly: Boolean) { - - private var 
index = 0 - private val end = sig.length - - private val Aborted: Throwable = new NoStackTrace { } - private def abort(): Nothing = throw Aborted - - @inline def safely(f: => Unit): Unit = try f catch { - case Aborted => - case NonFatal(e) => raiseError(s"Exception thrown during signature parsing", sig, Some(e)) - } - - private def current = { - if (index >= end) { - raiseError(s"Out of bounds, $index >= $end", sig) - abort() // Don't continue, even if `notifyInvalidSignature` returns - } - sig.charAt(index) - } - - private def accept(c: Char): Unit = { - if (current != c) { - raiseError(s"Expected $c at $index, found $current", sig) - abort() - } - index += 1 - } - - private def skip(): Unit = { index += 1 } - private def getCurrentAndSkip(): Char = { val c = current; skip(); c } - - private def skipUntil(isDelimiter: CharBooleanFunction): Unit = { - while (!isDelimiter(current)) { index += 1 } - } - private def skipUntilDelimiter(delimiter: Char): Unit = { - sig.indexOf(delimiter, index) match { - case -1 => - raiseError(s"Out of bounds", sig) - abort() // Don't continue, even if `notifyInvalidSignature` returns - case i => - index = i - } - } - - private def appendUntil(builder: java.lang.StringBuilder, isDelimiter: CharBooleanFunction): Unit = { - val start = index - skipUntil(isDelimiter) - builder.append(sig, start, index) - } - - def isBaseType(c: Char): Boolean = c match { - case 'B' | 'C' | 'D' | 'F' | 'I' | 'J' | 'S' | 'Z' => true - case _ => false - } - - private val isClassNameEnd: CharBooleanFunction = (c: Char) => c == '<' || c == '.' || c == ';' - - private def typeArguments(): Unit = if (current == '<') { - skip() - while (current != '>') current match { - case '*' | '+' | '-' => - skip() - case _ => - referenceTypeSignature() - } - accept('>') - } - - @tailrec private def referenceTypeSignature(): Unit = getCurrentAndSkip() match { - case 'L' => - var names: java.lang.StringBuilder = null - - val start = index - var seenDollar = false - while (!isClassNameEnd(current)) { - seenDollar ||= current == '$' - index += 1 - } - if ((current == '.' || seenDollar) || !nestedOnly) { - // OPT: avoid allocations when only a top-level class is encountered - names = new java.lang.StringBuilder(32) - names.append(sig, start, index) - visitInternalName(names.toString) - } - typeArguments() - - while (current == '.') { - skip() - names.append('$') - appendUntil(names, isClassNameEnd) - visitInternalName(names.toString) - typeArguments() - } - accept(';') - - case 'T' => - skipUntilDelimiter(';') - skip() - - case '[' => - if (isBaseType(current)) skip() - else referenceTypeSignature() - } - - private def typeParameters(): Unit = if (current == '<') { - skip() - while (current != '>') { - skipUntilDelimiter(':'); skip() - val c = current - // The ClassBound can be missing, but only if there's an InterfaceBound after. 
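A worked example for the signature grammar this parser handles, illustrating both the missing-class-bound case noted above and the nestedOnly filtering; the signature string is hypothetical but well formed:

    // Class signature for something like `class C[A <: Comparable[A]] extends p.Outer.Inner[A]`:
    val exampleSig = "<A::Ljava/lang/Comparable<TA;>;>Lp/Outer$Inner<TA;>;"
    // - "A::..." has an empty class bound followed directly by an interface bound,
    //   which is the case the comment above refers to.
    // - With nestedOnly = true, only "p/Outer$Inner" is reported as a referenced
    //   class; java/lang/Comparable is top level and "TA;" is a type variable.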
- // This is an assumption that's not in the spec, see https://stackoverflow.com/q/44284928 - if (c != ':' && c != '>') { referenceTypeSignature() } - while (current == ':') { skip(); referenceTypeSignature() } - } - accept('>') - } - - def classSignature(): Unit = { - typeParameters() - while (index < end) referenceTypeSignature() - } - - def methodSignature(): Unit = { - typeParameters() - - accept('(') - while (current != ')') { - if (isBaseType(current)) skip() - else referenceTypeSignature() - } - accept(')') - - if (current == 'V' || isBaseType(current)) skip() - else referenceTypeSignature() - - while (index < end) { - accept('^') - referenceTypeSignature() - } - } - - def fieldSignature(): Unit = if (sig != null) safely { - referenceTypeSignature() - } - } -} - -// Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf -// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 -abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly) { - type InternalName = String - - def declaredNestedClasses(internalName: InternalName): List[T] - def getClassIfNested(internalName: InternalName): Option[T] - - val declaredInnerClasses = mutable.Set.empty[T] - val referredInnerClasses = mutable.Set.empty[T] - - def innerClasses: collection.Set[T] = declaredInnerClasses ++ referredInnerClasses - def clear(): Unit = { - declaredInnerClasses.clear() - referredInnerClasses.clear() - } - - def visit(classNode: ClassNode): Unit = { - visitInternalName(classNode.name) - declaredInnerClasses ++= declaredNestedClasses(classNode.name) - - visitInternalName(classNode.superName) - classNode.interfaces.asScala foreach visitInternalName - visitInternalName(classNode.outerClass) - - visitAnnotations(classNode.visibleAnnotations) - visitAnnotations(classNode.visibleTypeAnnotations) - visitAnnotations(classNode.invisibleAnnotations) - visitAnnotations(classNode.invisibleTypeAnnotations) - - visitClassSignature(classNode.signature) - - for (f <- classNode.fields.asScala) { - visitDescriptor(f.desc) - visitAnnotations(f.visibleAnnotations) - visitAnnotations(f.visibleTypeAnnotations) - visitAnnotations(f.invisibleAnnotations) - visitAnnotations(f.invisibleTypeAnnotations) - visitFieldSignature(f.signature) - } - - for (m <- classNode.methods.asScala) { - visitDescriptor(m.desc) - - visitAnnotations(m.visibleAnnotations) - visitAnnotations(m.visibleTypeAnnotations) - visitAnnotations(m.invisibleAnnotations) - visitAnnotations(m.invisibleTypeAnnotations) - visitAnnotationss(m.visibleParameterAnnotations) - visitAnnotationss(m.invisibleParameterAnnotations) - visitAnnotations(m.visibleLocalVariableAnnotations) - visitAnnotations(m.invisibleLocalVariableAnnotations) - - m.exceptions.asScala foreach visitInternalName - for (tcb <- m.tryCatchBlocks.asScala) visitInternalName(tcb.`type`) - - val iter = m.instructions.iterator - while (iter.hasNext) iter.next() match { - case ti: TypeInsnNode => visitInternalNameOrArrayReference(ti.desc) - case fi: FieldInsnNode => visitInternalNameOrArrayReference(fi.owner); visitDescriptor(fi.desc) - case mi: MethodInsnNode => visitInternalNameOrArrayReference(mi.owner); visitDescriptor(mi.desc) - case id: InvokeDynamicInsnNode => visitDescriptor(id.desc); visitHandle(id.bsm); id.bsmArgs foreach visitConstant - case ci: LdcInsnNode => visitConstant(ci.cst) - case ma: MultiANewArrayInsnNode => visitDescriptor(ma.desc) - 
case _ => - } - - visitMethodSignature(m.signature) - } - } - - private def containsChar(s: String, offset: Int, length: Int, char: Char): Boolean = { - val ix = s.indexOf(char, offset) - !(ix == -1 || ix >= offset + length) - } - - def visitInternalName(internalName: String, offset: Int, length: Int): Unit = if (internalName != null && containsChar(internalName, offset, length, '$')) { - for (c <- getClassIfNested(internalName.substring(offset, length))) - if (!declaredInnerClasses.contains(c)) - referredInnerClasses += c - } - - // either an internal/Name or [[Linternal/Name; -- there are certain references in classfiles - // that are either an internal name (without the surrounding `L;`) or an array descriptor - // `[Linternal/Name;`. - def visitInternalNameOrArrayReference(ref: String): Unit = if (ref != null) { - val bracket = ref.lastIndexOf('[') - if (bracket == -1) visitInternalName(ref) - else if (ref.charAt(bracket + 1) == 'L') visitInternalName(ref, bracket + 2, ref.length - 1) - } - - // we are only interested in the class references in the descriptor, so we can skip over - // primitives and the brackets of array descriptors - def visitDescriptor(desc: String): Unit = (desc.charAt(0): @switch) match { - case '(' => - var i = 1 - while (i < desc.length) { - if (desc.charAt(i) == 'L') { - val start = i + 1 // skip the L - var seenDollar = false - while ({val ch = desc.charAt(i); seenDollar ||= (ch == '$'); ch != ';'}) i += 1 - if (seenDollar) - visitInternalName(desc, start, i) - } - // skips over '[', ')', primitives - i += 1 - } - - case 'L' => - visitInternalName(desc, 1, desc.length - 1) - - case '[' => - visitInternalNameOrArrayReference(desc) - - case _ => // skip over primitive types - } - - def visitConstant(const: AnyRef): Unit = const match { - case t: Type => visitDescriptor(t.getDescriptor) - case _ => - } - - // in principle we could references to annotation types, as they only end up as strings in the - // constant pool, not as class references. however, the java compiler still includes nested - // annotation classes in the innerClass table, so we do the same. explained in detail in the - // large comment in class BTypes. - def visitAnnotation(annot: AnnotationNode): Unit = { - visitDescriptor(annot.desc) - if (annot.values != null) annot.values.asScala foreach visitConstant - } - - def visitAnnotations(annots: java.util.List[_ <: AnnotationNode]) = if (annots != null) annots.asScala foreach visitAnnotation - def visitAnnotationss(annotss: Array[java.util.List[AnnotationNode]]) = if (annotss != null) annotss foreach visitAnnotations - - def visitHandle(handle: Handle): Unit = { - visitInternalNameOrArrayReference(handle.getOwner) - visitDescriptor(handle.getDesc) - } -} - diff --git a/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java b/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java deleted file mode 100644 index cf91fe619f5d..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package dotty.tools.backend.jvm; - -import scala.tools.asm.Label; -import scala.tools.asm.tree.ClassNode; -import scala.tools.asm.tree.LabelNode; - -/** - * A subclass of {@link LabelNode} to add user-definable flags. 
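visitDescriptor above walks raw JVM descriptors, skipping primitives and array brackets and visiting only class references. A self-contained, simplified version of the same scan (invented helper name, and without the '$'-based nested-class filtering):

    def classRefsInDescriptor(desc: String): List[String] = {
      val refs = List.newBuilder[String]
      var i = 0
      while (i < desc.length) {
        desc.charAt(i) match {
          case 'L' => // a class reference Lpkg/Name; record it and jump past the ';'
            val end = desc.indexOf(';', i)
            refs += desc.substring(i + 1, end)
            i = end + 1
          case _ => // '(', ')', '[' and primitive descriptors carry no class reference
            i += 1
        }
      }
      refs.result()
    }

    // classRefsInDescriptor("(ILjava/lang/String;[J)Lscala/Option;")
    //   == List("java/lang/String", "scala/Option")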
- */ -public class LabelNode1 extends LabelNode { - public LabelNode1() { - } - - public LabelNode1(Label label) { - super(label); - } - - public int flags; -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java b/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java deleted file mode 100644 index bfa4401830ba..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package dotty.tools.backend.jvm; - -import scala.tools.asm.Label; -import scala.tools.asm.Opcodes; -import scala.tools.asm.tree.LabelNode; -import scala.tools.asm.tree.MethodNode; -/** - * A subclass of {@link MethodNode} to customize the representation of - * label nodes with {@link LabelNode1}. - */ -public class MethodNode1 extends MethodNode { - public MethodNode1(int api, int access, String name, String descriptor, String signature, String[] exceptions) { - super(api, access, name, descriptor, signature, exceptions); - } - - public MethodNode1(int access, String name, String descriptor, String signature, String[] exceptions) { - this(Opcodes.ASM6, access, name, descriptor, signature, exceptions); - } - - public MethodNode1(int api) { - super(api); - } - - public MethodNode1() { - this(Opcodes.ASM6); - } - - @Override - protected LabelNode getLabelNode(Label label) { - if (!(label.info instanceof LabelNode)) { - label.info = new LabelNode1(label); - } - return (LabelNode) label.info; - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala b/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala deleted file mode 100644 index c9ddfeab24e1..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala +++ /dev/null @@ -1,191 +0,0 @@ -package dotty.tools -package backend -package jvm - -import java.io.PrintWriter - -object Primitives { - /** This class represents a primitive operation. */ - class Primitive { - } - - /** This class represents a test operation. */ - sealed abstract class TestOp { - - /** Returns the negation of this operation. */ - def negate(): TestOp - - /** Returns a string representation of this operation. 
*/ - override def toString(): String - - /** used only from GenASM */ - def opcodeIF(): Int - - /** used only from GenASM */ - def opcodeIFICMP(): Int - - } - - /** An equality test */ - case object EQ extends TestOp { - def negate() = NE - override def toString() = "EQ" - override def opcodeIF() = scala.tools.asm.Opcodes.IFEQ - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPEQ - } - - /** A non-equality test */ - case object NE extends TestOp { - def negate() = EQ - override def toString() = "NE" - override def opcodeIF() = scala.tools.asm.Opcodes.IFNE - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPNE - } - - /** A less-than test */ - case object LT extends TestOp { - def negate() = GE - override def toString() = "LT" - override def opcodeIF() = scala.tools.asm.Opcodes.IFLT - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLT - } - - /** A greater-than-or-equal test */ - case object GE extends TestOp { - def negate() = LT - override def toString() = "GE" - override def opcodeIF() = scala.tools.asm.Opcodes.IFGE - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGE - } - - /** A less-than-or-equal test */ - case object LE extends TestOp { - def negate() = GT - override def toString() = "LE" - override def opcodeIF() = scala.tools.asm.Opcodes.IFLE - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLE - } - - /** A greater-than test */ - case object GT extends TestOp { - def negate() = LE - override def toString() = "GT" - override def opcodeIF() = scala.tools.asm.Opcodes.IFGT - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGT - } - - /** This class represents an arithmetic operation. */ - class ArithmeticOp { - - /** Returns a string representation of this operation. */ - override def toString(): String = this match { - case ADD => "ADD" - case SUB => "SUB" - case MUL => "MUL" - case DIV => "DIV" - case REM => "REM" - case NOT => "NOT" - case _ => throw new RuntimeException("ArithmeticOp unknown case") - } - } - - /** An arithmetic addition operation */ - case object ADD extends ArithmeticOp - - /** An arithmetic subtraction operation */ - case object SUB extends ArithmeticOp - - /** An arithmetic multiplication operation */ - case object MUL extends ArithmeticOp - - /** An arithmetic division operation */ - case object DIV extends ArithmeticOp - - /** An arithmetic remainder operation */ - case object REM extends ArithmeticOp - - /** Bitwise negation. */ - case object NOT extends ArithmeticOp - - /** This class represents a shift operation. */ - class ShiftOp { - - /** Returns a string representation of this operation. */ - override def toString(): String = this match { - case LSL => "LSL" - case ASR => "ASR" - case LSR => "LSR" - case _ => throw new RuntimeException("ShitOp unknown case") - } - } - - /** A logical shift to the left */ - case object LSL extends ShiftOp - - /** An arithmetic shift to the right */ - case object ASR extends ShiftOp - - /** A logical shift to the right */ - case object LSR extends ShiftOp - - /** This class represents a logical operation. */ - class LogicalOp { - - /** Returns a string representation of this operation. 
*/ - override def toString(): String = this match { - case AND => "AND" - case OR => "OR" - case XOR => "XOR" - case _ => throw new RuntimeException("LogicalOp unknown case") - } - } - - /** A bitwise AND operation */ - case object AND extends LogicalOp - - /** A bitwise OR operation */ - case object OR extends LogicalOp - - /** A bitwise XOR operation */ - case object XOR extends LogicalOp - - /** Signals the beginning of a series of concatenations. - * On the JVM platform, it should create a new StringBuffer - */ - case object StartConcat extends Primitive - - /** - * type: (buf) => STR - * jvm : It should turn the StringBuffer into a String. - */ - case object EndConcat extends Primitive - - /** Pretty printer for primitives */ - class PrimitivePrinter(out: PrintWriter) { - def print(s: String): PrimitivePrinter = { - out.print(s) - this - } - } - - /** This class represents a comparison operation. */ - class ComparisonOp { - - /** Returns a string representation of this operation. */ - override def toString(): String = this match { - case CMPL => "CMPL" - case CMP => "CMP" - case CMPG => "CMPG" - case _ => throw new RuntimeException("ComparisonOp unknown case") - } - } - - /** A comparison operation with -1 default for NaNs */ - case object CMPL extends ComparisonOp - - /** A comparison operation with no default for NaNs */ - case object CMP extends ComparisonOp - - /** A comparison operation with +1 default for NaNs */ - case object CMPG extends ComparisonOp -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala b/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala deleted file mode 100644 index 420ff7b20423..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala +++ /dev/null @@ -1,412 +0,0 @@ -package dotty.tools -package backend.jvm - -import dotc.ast.Trees.Select -import dotc.ast.tpd._ -import dotc.core._ -import Contexts._ -import Names.TermName, StdNames._ -import Types.{JavaArrayType, UnspecifiedErrorType, Type} -import Symbols.{Symbol, NoSymbol} -import Decorators.em -import dotc.report -import dotc.util.ReadOnlyMap - -import scala.annotation.threadUnsafe - -/** Scala primitive operations are represented as methods in `Any` and - * `AnyVal` subclasses. Here we demultiplex them by providing a mapping - * from their symbols to integers. Different methods exist for - * different value types, but with the same meaning (like plus, minus, - * etc.). They will all be mapped to the same int. - * - * Note: The three equal methods have the following semantics: - * - `"=="` checks for `null`, and if non-null, calls - * `java.lang.Object.equals` - * `(class: Any; modifier: final)`. Primitive: `EQ` - * - `"eq"` usual reference comparison - * `(class: AnyRef; modifier: final)`. Primitive: `ID` - * - `"equals"` user-defined equality (Java semantics) - * `(class: Object; modifier: none)`. Primitive: `EQUALS` - * - * Inspired from the `scalac` compiler. - */ -class DottyPrimitives(ictx: DetachedContext) { - import dotty.tools.backend.ScalaPrimitivesOps._ - - @threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init - - /** Return the code for the given symbol. */ - def getPrimitive(sym: Symbol): Int = { - primitives(sym) - } - - /** - * Return the primitive code of the given operation. If the - * operation is an array get/set, we inspect the type of the receiver - * to demux the operation. - * - * @param fun The method symbol - * @param tpe The type of the receiver object. 
It is used only for array - * operations - */ - def getPrimitive(app: Apply, tpe: Type)(using Context): Int = { - val fun = app.fun.symbol - val defn = ctx.definitions - val code = app.fun match { - case Select(_, nme.primitive.arrayLength) => - LENGTH - case Select(_, nme.primitive.arrayUpdate) => - UPDATE - case Select(_, nme.primitive.arrayApply) => - APPLY - case _ => getPrimitive(fun) - } - - def elementType: Type = tpe.widenDealias match { - case defn.ArrayOf(el) => el - case JavaArrayType(el) => el - case _ => - report.error(em"expected Array $tpe") - UnspecifiedErrorType - } - - code match { - - case APPLY => - defn.scalaClassName(elementType) match { - case tpnme.Boolean => ZARRAY_GET - case tpnme.Byte => BARRAY_GET - case tpnme.Short => SARRAY_GET - case tpnme.Char => CARRAY_GET - case tpnme.Int => IARRAY_GET - case tpnme.Long => LARRAY_GET - case tpnme.Float => FARRAY_GET - case tpnme.Double => DARRAY_GET - case _ => OARRAY_GET - } - - case UPDATE => - defn.scalaClassName(elementType) match { - case tpnme.Boolean => ZARRAY_SET - case tpnme.Byte => BARRAY_SET - case tpnme.Short => SARRAY_SET - case tpnme.Char => CARRAY_SET - case tpnme.Int => IARRAY_SET - case tpnme.Long => LARRAY_SET - case tpnme.Float => FARRAY_SET - case tpnme.Double => DARRAY_SET - case _ => OARRAY_SET - } - - case LENGTH => - defn.scalaClassName(elementType) match { - case tpnme.Boolean => ZARRAY_LENGTH - case tpnme.Byte => BARRAY_LENGTH - case tpnme.Short => SARRAY_LENGTH - case tpnme.Char => CARRAY_LENGTH - case tpnme.Int => IARRAY_LENGTH - case tpnme.Long => LARRAY_LENGTH - case tpnme.Float => FARRAY_LENGTH - case tpnme.Double => DARRAY_LENGTH - case _ => OARRAY_LENGTH - } - - case _ => - code - } - } - - /** Initialize the primitive map */ - private def init: ReadOnlyMap[Symbol, Int] = { - - given Context = ictx - - import Symbols.defn - val primitives = Symbols.MutableSymbolMap[Int](512) - - /** Add a primitive operation to the map */ - def addPrimitive(s: Symbol, code: Int): Unit = { - assert(!(primitives contains s), "Duplicate primitive " + s) - primitives(s) = code - } - - def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { - val alts = cls.info.member(method).alternatives.map(_.symbol) - if (alts.isEmpty) - report.error(em"Unknown primitive method $cls.$method") - else alts foreach (s => - addPrimitive(s, - s.info.paramInfoss match { - case List(tp :: _) if code == ADD && tp =:= ctx.definitions.StringType => CONCAT - case _ => code - } - ) - ) - } - - // scala.Any - addPrimitive(defn.Any_==, EQ) - addPrimitive(defn.Any_!=, NE) - addPrimitive(defn.Any_isInstanceOf, IS) - addPrimitive(defn.Any_asInstanceOf, AS) - addPrimitive(defn.Any_##, HASH) - - // java.lang.Object - addPrimitive(defn.Object_eq, ID) - addPrimitive(defn.Object_ne, NI) - /* addPrimitive(defn.Any_==, EQ) - addPrimitive(defn.Any_!=, NE)*/ - addPrimitive(defn.Object_synchronized, SYNCHRONIZED) - /*addPrimitive(defn.Any_isInstanceOf, IS) - addPrimitive(defn.Any_asInstanceOf, AS)*/ - - // java.lang.String - addPrimitive(defn.String_+, CONCAT) - - // scala.Array - lazy val ArrayClass = defn.ArrayClass - addPrimitives(ArrayClass, nme.length, LENGTH) - addPrimitives(ArrayClass, nme.apply, APPLY) - addPrimitives(ArrayClass, nme.update, UPDATE) - - // scala.Boolean - lazy val BooleanClass = defn.BooleanClass - addPrimitives(BooleanClass, nme.EQ, EQ) - addPrimitives(BooleanClass, nme.NE, NE) - addPrimitives(BooleanClass, nme.UNARY_!, ZNOT) - addPrimitives(BooleanClass, nme.ZOR, ZOR) - 
addPrimitives(BooleanClass, nme.ZAND, ZAND) - addPrimitives(BooleanClass, nme.OR, OR) - addPrimitives(BooleanClass, nme.AND, AND) - addPrimitives(BooleanClass, nme.XOR, XOR) - - // scala.Byte - lazy val ByteClass = defn.ByteClass - addPrimitives(ByteClass, nme.EQ, EQ) - addPrimitives(ByteClass, nme.NE, NE) - addPrimitives(ByteClass, nme.ADD, ADD) - addPrimitives(ByteClass, nme.SUB, SUB) - addPrimitives(ByteClass, nme.MUL, MUL) - addPrimitives(ByteClass, nme.DIV, DIV) - addPrimitives(ByteClass, nme.MOD, MOD) - addPrimitives(ByteClass, nme.LT, LT) - addPrimitives(ByteClass, nme.LE, LE) - addPrimitives(ByteClass, nme.GT, GT) - addPrimitives(ByteClass, nme.GE, GE) - addPrimitives(ByteClass, nme.XOR, XOR) - addPrimitives(ByteClass, nme.OR, OR) - addPrimitives(ByteClass, nme.AND, AND) - addPrimitives(ByteClass, nme.LSL, LSL) - addPrimitives(ByteClass, nme.LSR, LSR) - addPrimitives(ByteClass, nme.ASR, ASR) - // conversions - addPrimitives(ByteClass, nme.toByte, B2B) - addPrimitives(ByteClass, nme.toShort, B2S) - addPrimitives(ByteClass, nme.toChar, B2C) - addPrimitives(ByteClass, nme.toInt, B2I) - addPrimitives(ByteClass, nme.toLong, B2L) - // unary methods - addPrimitives(ByteClass, nme.UNARY_+, POS) - addPrimitives(ByteClass, nme.UNARY_-, NEG) - addPrimitives(ByteClass, nme.UNARY_~, NOT) - - addPrimitives(ByteClass, nme.toFloat, B2F) - addPrimitives(ByteClass, nme.toDouble, B2D) - - // scala.Short - lazy val ShortClass = defn.ShortClass - addPrimitives(ShortClass, nme.EQ, EQ) - addPrimitives(ShortClass, nme.NE, NE) - addPrimitives(ShortClass, nme.ADD, ADD) - addPrimitives(ShortClass, nme.SUB, SUB) - addPrimitives(ShortClass, nme.MUL, MUL) - addPrimitives(ShortClass, nme.DIV, DIV) - addPrimitives(ShortClass, nme.MOD, MOD) - addPrimitives(ShortClass, nme.LT, LT) - addPrimitives(ShortClass, nme.LE, LE) - addPrimitives(ShortClass, nme.GT, GT) - addPrimitives(ShortClass, nme.GE, GE) - addPrimitives(ShortClass, nme.XOR, XOR) - addPrimitives(ShortClass, nme.OR, OR) - addPrimitives(ShortClass, nme.AND, AND) - addPrimitives(ShortClass, nme.LSL, LSL) - addPrimitives(ShortClass, nme.LSR, LSR) - addPrimitives(ShortClass, nme.ASR, ASR) - // conversions - addPrimitives(ShortClass, nme.toByte, S2B) - addPrimitives(ShortClass, nme.toShort, S2S) - addPrimitives(ShortClass, nme.toChar, S2C) - addPrimitives(ShortClass, nme.toInt, S2I) - addPrimitives(ShortClass, nme.toLong, S2L) - // unary methods - addPrimitives(ShortClass, nme.UNARY_+, POS) - addPrimitives(ShortClass, nme.UNARY_-, NEG) - addPrimitives(ShortClass, nme.UNARY_~, NOT) - - addPrimitives(ShortClass, nme.toFloat, S2F) - addPrimitives(ShortClass, nme.toDouble, S2D) - - // scala.Char - lazy val CharClass = defn.CharClass - addPrimitives(CharClass, nme.EQ, EQ) - addPrimitives(CharClass, nme.NE, NE) - addPrimitives(CharClass, nme.ADD, ADD) - addPrimitives(CharClass, nme.SUB, SUB) - addPrimitives(CharClass, nme.MUL, MUL) - addPrimitives(CharClass, nme.DIV, DIV) - addPrimitives(CharClass, nme.MOD, MOD) - addPrimitives(CharClass, nme.LT, LT) - addPrimitives(CharClass, nme.LE, LE) - addPrimitives(CharClass, nme.GT, GT) - addPrimitives(CharClass, nme.GE, GE) - addPrimitives(CharClass, nme.XOR, XOR) - addPrimitives(CharClass, nme.OR, OR) - addPrimitives(CharClass, nme.AND, AND) - addPrimitives(CharClass, nme.LSL, LSL) - addPrimitives(CharClass, nme.LSR, LSR) - addPrimitives(CharClass, nme.ASR, ASR) - // conversions - addPrimitives(CharClass, nme.toByte, C2B) - addPrimitives(CharClass, nme.toShort, C2S) - addPrimitives(CharClass, nme.toChar, C2C) - 
addPrimitives(CharClass, nme.toInt, C2I) - addPrimitives(CharClass, nme.toLong, C2L) - // unary methods - addPrimitives(CharClass, nme.UNARY_+, POS) - addPrimitives(CharClass, nme.UNARY_-, NEG) - addPrimitives(CharClass, nme.UNARY_~, NOT) - addPrimitives(CharClass, nme.toFloat, C2F) - addPrimitives(CharClass, nme.toDouble, C2D) - - // scala.Int - lazy val IntClass = defn.IntClass - addPrimitives(IntClass, nme.EQ, EQ) - addPrimitives(IntClass, nme.NE, NE) - addPrimitives(IntClass, nme.ADD, ADD) - addPrimitives(IntClass, nme.SUB, SUB) - addPrimitives(IntClass, nme.MUL, MUL) - addPrimitives(IntClass, nme.DIV, DIV) - addPrimitives(IntClass, nme.MOD, MOD) - addPrimitives(IntClass, nme.LT, LT) - addPrimitives(IntClass, nme.LE, LE) - addPrimitives(IntClass, nme.GT, GT) - addPrimitives(IntClass, nme.GE, GE) - addPrimitives(IntClass, nme.XOR, XOR) - addPrimitives(IntClass, nme.OR, OR) - addPrimitives(IntClass, nme.AND, AND) - addPrimitives(IntClass, nme.LSL, LSL) - addPrimitives(IntClass, nme.LSR, LSR) - addPrimitives(IntClass, nme.ASR, ASR) - // conversions - addPrimitives(IntClass, nme.toByte, I2B) - addPrimitives(IntClass, nme.toShort, I2S) - addPrimitives(IntClass, nme.toChar, I2C) - addPrimitives(IntClass, nme.toInt, I2I) - addPrimitives(IntClass, nme.toLong, I2L) - // unary methods - addPrimitives(IntClass, nme.UNARY_+, POS) - addPrimitives(IntClass, nme.UNARY_-, NEG) - addPrimitives(IntClass, nme.UNARY_~, NOT) - addPrimitives(IntClass, nme.toFloat, I2F) - addPrimitives(IntClass, nme.toDouble, I2D) - - // scala.Long - lazy val LongClass = defn.LongClass - addPrimitives(LongClass, nme.EQ, EQ) - addPrimitives(LongClass, nme.NE, NE) - addPrimitives(LongClass, nme.ADD, ADD) - addPrimitives(LongClass, nme.SUB, SUB) - addPrimitives(LongClass, nme.MUL, MUL) - addPrimitives(LongClass, nme.DIV, DIV) - addPrimitives(LongClass, nme.MOD, MOD) - addPrimitives(LongClass, nme.LT, LT) - addPrimitives(LongClass, nme.LE, LE) - addPrimitives(LongClass, nme.GT, GT) - addPrimitives(LongClass, nme.GE, GE) - addPrimitives(LongClass, nme.XOR, XOR) - addPrimitives(LongClass, nme.OR, OR) - addPrimitives(LongClass, nme.AND, AND) - addPrimitives(LongClass, nme.LSL, LSL) - addPrimitives(LongClass, nme.LSR, LSR) - addPrimitives(LongClass, nme.ASR, ASR) - // conversions - addPrimitives(LongClass, nme.toByte, L2B) - addPrimitives(LongClass, nme.toShort, L2S) - addPrimitives(LongClass, nme.toChar, L2C) - addPrimitives(LongClass, nme.toInt, L2I) - addPrimitives(LongClass, nme.toLong, L2L) - // unary methods - addPrimitives(LongClass, nme.UNARY_+, POS) - addPrimitives(LongClass, nme.UNARY_-, NEG) - addPrimitives(LongClass, nme.UNARY_~, NOT) - addPrimitives(LongClass, nme.toFloat, L2F) - addPrimitives(LongClass, nme.toDouble, L2D) - - // scala.Float - lazy val FloatClass = defn.FloatClass - addPrimitives(FloatClass, nme.EQ, EQ) - addPrimitives(FloatClass, nme.NE, NE) - addPrimitives(FloatClass, nme.ADD, ADD) - addPrimitives(FloatClass, nme.SUB, SUB) - addPrimitives(FloatClass, nme.MUL, MUL) - addPrimitives(FloatClass, nme.DIV, DIV) - addPrimitives(FloatClass, nme.MOD, MOD) - addPrimitives(FloatClass, nme.LT, LT) - addPrimitives(FloatClass, nme.LE, LE) - addPrimitives(FloatClass, nme.GT, GT) - addPrimitives(FloatClass, nme.GE, GE) - // conversions - addPrimitives(FloatClass, nme.toByte, F2B) - addPrimitives(FloatClass, nme.toShort, F2S) - addPrimitives(FloatClass, nme.toChar, F2C) - addPrimitives(FloatClass, nme.toInt, F2I) - addPrimitives(FloatClass, nme.toLong, F2L) - addPrimitives(FloatClass, nme.toFloat, F2F) - 
addPrimitives(FloatClass, nme.toDouble, F2D) - // unary methods - addPrimitives(FloatClass, nme.UNARY_+, POS) - addPrimitives(FloatClass, nme.UNARY_-, NEG) - - // scala.Double - lazy val DoubleClass = defn.DoubleClass - addPrimitives(DoubleClass, nme.EQ, EQ) - addPrimitives(DoubleClass, nme.NE, NE) - addPrimitives(DoubleClass, nme.ADD, ADD) - addPrimitives(DoubleClass, nme.SUB, SUB) - addPrimitives(DoubleClass, nme.MUL, MUL) - addPrimitives(DoubleClass, nme.DIV, DIV) - addPrimitives(DoubleClass, nme.MOD, MOD) - addPrimitives(DoubleClass, nme.LT, LT) - addPrimitives(DoubleClass, nme.LE, LE) - addPrimitives(DoubleClass, nme.GT, GT) - addPrimitives(DoubleClass, nme.GE, GE) - // conversions - addPrimitives(DoubleClass, nme.toByte, D2B) - addPrimitives(DoubleClass, nme.toShort, D2S) - addPrimitives(DoubleClass, nme.toChar, D2C) - addPrimitives(DoubleClass, nme.toInt, D2I) - addPrimitives(DoubleClass, nme.toLong, D2L) - addPrimitives(DoubleClass, nme.toFloat, D2F) - addPrimitives(DoubleClass, nme.toDouble, D2D) - // unary methods - addPrimitives(DoubleClass, nme.UNARY_+, POS) - addPrimitives(DoubleClass, nme.UNARY_-, NEG) - - - primitives - } - - def isPrimitive(sym: Symbol): Boolean = - primitives.contains(sym) - - def isPrimitive(fun: Tree): Boolean = - given Context = ictx - primitives.contains(fun.symbol) - || (fun.symbol == NoSymbol // the only trees that do not have a symbol assigned are array.{update,select,length,clone}} - && { - fun match - case Select(_, StdNames.nme.clone_) => false // but array.clone is NOT a primitive op. - case _ => true - }) -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala b/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala deleted file mode 100644 index 1579b4577933..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala +++ /dev/null @@ -1,23 +0,0 @@ -package dotty.tools.backend.sjs - -import dotty.tools.dotc.core._ -import Contexts._ -import Phases._ - -/** Generates Scala.js IR files for the compilation unit. 
*/ -class GenSJSIR extends Phase { - - override def phaseName: String = GenSJSIR.name - - override def description: String = GenSJSIR.description - - override def isRunnable(using Context): Boolean = - super.isRunnable && ctx.settings.scalajs.value - - def run(using Context): Unit = - new JSCodeGen().run() -} - -object GenSJSIR: - val name: String = "genSJSIR" - val description: String = "generate .sjsir files for Scala.js" diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala deleted file mode 100644 index 81f09b082850..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala +++ /dev/null @@ -1,4897 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import scala.annotation.switch -import scala.collection.mutable - -import dotty.tools.FatalError -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core._ -import Contexts._ -import Decorators._ -import Flags._ -import Names._ -import NameKinds.DefaultGetterName -import Types._ -import Symbols._ -import Phases._ -import StdNames._ -import TypeErasure.ErasedValueType - -import dotty.tools.dotc.transform.{Erasure, ValueClasses} -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.util.SourcePosition -import dotty.tools.dotc.report - -import dotty.tools.sjs.ir -import dotty.tools.sjs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} -import dotty.tools.sjs.ir.Names.{ClassName, MethodName, SimpleMethodName} -import dotty.tools.sjs.ir.OriginalName -import dotty.tools.sjs.ir.OriginalName.NoOriginalName -import dotty.tools.sjs.ir.Trees.OptimizerHints - -import dotty.tools.dotc.transform.sjs.JSSymUtils._ - -import JSEncoding._ -import ScopedVar.withScopedVars -import annotation.retains - -/** Main codegen for Scala.js IR. - * - * [[GenSJSIR]] creates one instance of `JSCodeGen` per compilation unit. - * The `run()` method processes the whole compilation unit and generates - * `.sjsir` files for it. - * - * There are 4 main levels of translation: - * - * - `genCompilationUnit()` iterates through all the type definitions in the - * compilation unit. Each generated `js.ClassDef` is serialized to an - * `.sjsir` file. - * - `genScalaClass()` and other similar methods generate the skeleton of - * classes. - * - `genMethod()` and similar methods generate the declarations of methods. - * - `genStatOrExpr()` and everything else generate the bodies of methods. 
- */ -class JSCodeGen()(using genCtx: DetachedContext) { - import JSCodeGen._ - import tpd._ - - val sjsPlatform = dotty.tools.dotc.config.SJSPlatform.sjsPlatform - val jsdefn = JSDefinitions.jsdefn - private val primitives = new JSPrimitives(genCtx) - - val positionConversions = new JSPositions()(using genCtx) - import positionConversions._ - - private val jsExportsGen = new JSExportsGen(this) - - // Some state -------------------------------------------------------------- - - private val lazilyGeneratedAnonClasses = new MutableSymbolMap[TypeDef] - private val generatedClasses = mutable.ListBuffer.empty[js.ClassDef] - private val generatedStaticForwarderClasses = mutable.ListBuffer.empty[(Symbol, js.ClassDef)] - - val currentClassSym: ScopedVar[Symbol] = new ScopedVar[Symbol] - private val currentMethodSym = new ScopedVar[Symbol] - private val localNames = new ScopedVar[LocalNameGenerator] - private val thisLocalVarIdent = new ScopedVar[Option[js.LocalIdent]] - private val isModuleInitialized = new ScopedVar[ScopedVar.VarBox[Boolean]] - private val undefinedDefaultParams = new ScopedVar[mutable.Set[Symbol]] - - /* Contextual JS class value for some operations of nested JS classes that need one. */ - private val contextualJSClassValue = new ScopedVar[Option[js.Tree]](None) - - /** Resets all of the scoped state in the context of `body`. */ - private def resetAllScopedVars[T](body: => T): T = { - withScopedVars( - currentClassSym := null, - currentMethodSym := null, - localNames := null, - thisLocalVarIdent := null, - isModuleInitialized := null, - undefinedDefaultParams := null - ) { - body - } - } - - private def withPerMethodBodyState[A](methodSym: Symbol)(body: => A): A = { - withScopedVars( - currentMethodSym := methodSym, - thisLocalVarIdent := None, - isModuleInitialized := new ScopedVar.VarBox(false), - undefinedDefaultParams := mutable.Set.empty, - ) { - body - } - } - - private def acquireContextualJSClassValue[A](f: Option[js.Tree] => A): A = { - val jsClassValue = contextualJSClassValue.get - withScopedVars( - contextualJSClassValue := None - ) { - f(jsClassValue) - } - } - - def withNewLocalNameScope[A](body: => A): A = { - withScopedVars(localNames := new LocalNameGenerator) { - body - } - } - - /** Implicitly materializes the current local name generator. */ - implicit def implicitLocalNames: LocalNameGenerator = localNames.get - - def currentThisType: jstpe.Type = { - encodeClassType(currentClassSym) match { - case tpe @ jstpe.ClassType(cls) => - jstpe.BoxedClassToPrimType.getOrElse(cls, tpe) - case tpe => - tpe - } - } - - /** Returns a new fresh local identifier. */ - private def freshLocalIdent()(implicit pos: Position): js.LocalIdent = - localNames.get.freshLocalIdent() - - /** Returns a new fresh local identifier. */ - def freshLocalIdent(base: String)(implicit pos: Position): js.LocalIdent = - localNames.get.freshLocalIdent(base) - - /** Returns a new fresh local identifier. 
*/ - private def freshLocalIdent(base: TermName)(implicit pos: Position): js.LocalIdent = - localNames.get.freshLocalIdent(base) - - private def consumeLazilyGeneratedAnonClass(sym: Symbol): TypeDef = { - val typeDef = lazilyGeneratedAnonClasses.remove(sym) - if (typeDef == null) { - throw new FatalError( - i"Could not find tree for lazily generated anonymous class ${sym.fullName} at ${sym.sourcePos}") - } else { - typeDef - } - } - - // Compilation unit -------------------------------------------------------- - - def run(): Unit = { - try { - genCompilationUnit(ctx.compilationUnit) - } finally { - generatedClasses.clear() - generatedStaticForwarderClasses.clear() - } - } - - /** Generates the Scala.js IR for a compilation unit - * This method iterates over all the class and interface definitions - * found in the compilation unit and emits their IR (.sjsir). - * - * Some classes are never actually emitted: - * - Classes representing primitive types - * - The scala.Array class - * - * TODO Some classes representing anonymous functions are not actually emitted. - * Instead, a temporary representation of their `apply` method is built - * and recorded, so that it can be inlined as a JavaScript anonymous - * function in the method that instantiates it. - * - * Other ClassDefs are emitted according to their nature: - * * Non-native JS class -> `genNonNativeJSClass()` - * * Other JS type (<: js.Any) -> `genRawJSClassData()` - * * Interface -> `genInterface()` - * * Normal class -> `genClass()` - */ - private def genCompilationUnit(cunit: CompilationUnit): Unit = { - def collectTypeDefs(tree: Tree): List[TypeDef] = { - tree match { - case EmptyTree => Nil - case PackageDef(_, stats) => stats.flatMap(collectTypeDefs) - case cd: TypeDef => cd :: Nil - case _: ValDef => Nil // module instance - } - } - val allTypeDefs = collectTypeDefs(cunit.tpdTree) - - /* #13221 Set JavaStatic on all the Module fields of static module classes. - * This is necessary for `desugarIdent` not to crash in some obscure - * scenarios. - * - * !!! Part of this logic is duplicated in BCodeSkelBuilder.genPlainClass - * - * However, here we only do this for Module fields, not all fields. - */ - for (typeDef <- allTypeDefs) { - if (typeDef.symbol.is(ModuleClass)) { - typeDef.symbol.info.decls.foreach { f => - if (f.isField && f.is(Module)) - f.setFlag(JavaStatic) - } - } - } - - val (anonJSClassTypeDefs, otherTypeDefs) = - allTypeDefs.partition(td => td.symbol.isAnonymousClass && td.symbol.isJSType) - - // Record the TypeDefs of anonymous JS classes to be lazily generated - for (td <- anonJSClassTypeDefs) - lazilyGeneratedAnonClasses(td.symbol) = td - - /* Finally, we emit true code for the remaining class defs. */ - for (td <- otherTypeDefs) { - val sym = td.symbol - implicit val pos: Position = sym.span - - /* Do not actually emit code for primitive types nor scala.Array. */ - val isPrimitive = - sym.isPrimitiveValueClass || sym == defn.ArrayClass - - if (!isPrimitive) { - withScopedVars( - currentClassSym := sym - ) { - val tree = if (sym.isJSType) { - if (!sym.is(Trait) && sym.isNonNativeJSClass) - genNonNativeJSClass(td) - else - genRawJSClassData(td) - } else if (sym.is(Trait)) { - genInterface(td) - } else { - genScalaClass(td) - } - - generatedClasses += tree - } - } - } - - for (tree <- generatedClasses) - genIRFile(cunit, tree) - - if (generatedStaticForwarderClasses.nonEmpty) { - /* #4148 Add generated static forwarder classes, except those that - * would collide with regular classes on case insensitive file systems. 
- */ - - /* I could not find any reference anywhere about what locale is used - * by case insensitive file systems to compare case-insensitively. - * In doubt, force the English locale, which is probably going to do - * the right thing in virtually all cases (especially if users stick - * to ASCII class names), and it has the merit of being deterministic, - * as opposed to using the OS' default locale. - * The JVM backend performs a similar test to emit a warning for - * conflicting top-level classes. However, it uses `toLowerCase()` - * without argument, which is not deterministic. - */ - def caseInsensitiveNameOf(classDef: js.ClassDef): String = - classDef.name.name.nameString.toLowerCase(java.util.Locale.ENGLISH) - - val generatedCaseInsensitiveNames = - generatedClasses.map(caseInsensitiveNameOf).toSet - - for ((site, classDef) <- generatedStaticForwarderClasses) { - if (!generatedCaseInsensitiveNames.contains(caseInsensitiveNameOf(classDef))) { - genIRFile(cunit, classDef) - } else { - report.warning( - s"Not generating the static forwarders of ${classDef.name.name.nameString} " + - "because its name differs only in case from the name of another class or trait in this compilation unit.", - site.srcPos) - } - } - } - } - - private def genIRFile(cunit: CompilationUnit, tree: ir.Trees.ClassDef): Unit = { - val outfile = getFileFor(cunit, tree.name.name, ".sjsir") - val output = outfile.bufferedOutput - try { - ir.Serializers.serialize(output, tree) - } finally { - output.close() - } - } - - private def getFileFor(cunit: CompilationUnit, className: ClassName, - suffix: String): dotty.tools.io.AbstractFile = { - val outputDirectory = ctx.settings.outputDir.value - val pathParts = className.nameString.split('.') - val dir = pathParts.init.foldLeft(outputDirectory)(_.subdirectoryNamed(_)) - val filename = pathParts.last - dir.fileNamed(filename + suffix) - } - - // Generate a class -------------------------------------------------------- - - /** Gen the IR ClassDef for a Scala class definition (maybe a module class). 
- */ - private def genScalaClass(td: TypeDef): js.ClassDef = { - val sym = td.symbol.asClass - implicit val pos: SourcePosition = sym.sourcePos - - assert(!sym.is(Trait), - "genScalaClass() must be called only for normal classes: "+sym) - assert(sym.superClass != NoSymbol, sym) - - if (hasDefaultCtorArgsAndJSModule(sym)) { - report.error( - "Implementation restriction: " + - "constructors of Scala classes cannot have default parameters if their companion module is JS native.", - td) - } - - val classIdent = encodeClassNameIdent(sym) - val originalName = originalNameOfClass(sym) - val isHijacked = false //isHijackedBoxedClass(sym) - - // Optimizer hints - - val isDynamicImportThunk = sym.isSubClass(jsdefn.DynamicImportThunkClass) - - def isStdLibClassWithAdHocInlineAnnot(sym: Symbol): Boolean = { - val fullName = sym.fullName.toString - (fullName.startsWith("scala.Tuple") && !fullName.endsWith("$")) || - (fullName.startsWith("scala.collection.mutable.ArrayOps$of")) - } - - val shouldMarkInline = ( - isDynamicImportThunk || - sym.hasAnnotation(jsdefn.InlineAnnot) || - (sym.isAnonymousFunction && !sym.isSubClass(defn.PartialFunctionClass)) || - isStdLibClassWithAdHocInlineAnnot(sym)) - - val optimizerHints = { - OptimizerHints.empty - .withInline(shouldMarkInline) - .withNoinline(sym.hasAnnotation(jsdefn.NoinlineAnnot)) - } - - // Generate members (constructor + methods) - - val generatedNonFieldMembers = new mutable.ListBuffer[js.MemberDef] - - val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { - case EmptyTree => () - - case vd: ValDef => - // fields are added via genClassFields(), but we need to generate the JS native members - val sym = vd.symbol - if (!sym.is(Module) && sym.hasAnnotation(jsdefn.JSNativeAnnot)) - generatedNonFieldMembers += genJSNativeMemberDef(vd) - - case dd: DefDef => - val sym = dd.symbol - if sym.hasAnnotation(jsdefn.JSNativeAnnot) then - if !sym.is(Accessor) then - generatedNonFieldMembers += genJSNativeMemberDef(dd) - else - generatedNonFieldMembers ++= genMethod(dd) - - case _ => - throw new FatalError("Illegal tree in body of genScalaClass(): " + tree) - } - } - - // Generate fields and add to methods + ctors - val generatedMembers = genClassFields(td) ++ generatedNonFieldMembers.toList - - // Generate member exports - val memberExports = jsExportsGen.genMemberExports(sym) - - // Generate top-level export definitions - val topLevelExportDefs = jsExportsGen.genTopLevelExports(sym) - - // Static initializer - val optStaticInitializer = { - // Initialization of reflection data, if required - val reflectInit = { - val enableReflectiveInstantiation = { - sym.baseClasses.exists { ancestor => - ancestor.hasAnnotation(jsdefn.EnableReflectiveInstantiationAnnot) - } - } - if (enableReflectiveInstantiation) - genRegisterReflectiveInstantiation(sym).toList - else - Nil - } - - // Initialization of the module because of field exports - val needsStaticModuleInit = - topLevelExportDefs.exists(_.isInstanceOf[js.TopLevelFieldExportDef]) - val staticModuleInit = - if (!needsStaticModuleInit) Nil - else List(genLoadModule(sym)) - - val staticInitializerStats = reflectInit ::: staticModuleInit - if (staticInitializerStats.nonEmpty) - List(genStaticConstructorWithStats(ir.Names.StaticInitializerName, js.Block(staticInitializerStats))) - else - Nil - } - - val optDynamicImportForwarder = - if (isDynamicImportThunk) List(genDynamicImportForwarder(sym)) - else Nil - - val allMemberDefsExceptStaticForwarders = - generatedMembers ::: 
memberExports ::: optStaticInitializer ::: optDynamicImportForwarder - - // Add static forwarders - val allMemberDefs = if (!isCandidateForForwarders(sym)) { - allMemberDefsExceptStaticForwarders - } else { - if (isStaticModule(sym)) { - /* If the module class has no linked class, we must create one to - * hold the static forwarders. Otherwise, this is going to be handled - * when generating the companion class. - */ - if (!sym.linkedClass.exists) { - val forwarders = genStaticForwardersFromModuleClass(Nil, sym) - if (forwarders.nonEmpty) { - val forwardersClassDef = js.ClassDef( - js.ClassIdent(ClassName(classIdent.name.nameString.stripSuffix("$"))), - originalName, - ClassKind.Class, - None, - Some(js.ClassIdent(ir.Names.ObjectClass)), - Nil, - None, - None, - forwarders, - Nil - )(js.OptimizerHints.empty) - generatedStaticForwarderClasses += sym -> forwardersClassDef - } - } - allMemberDefsExceptStaticForwarders - } else { - val forwarders = genStaticForwardersForClassOrInterface( - allMemberDefsExceptStaticForwarders, sym) - allMemberDefsExceptStaticForwarders ::: forwarders - } - } - - // Hashed definitions of the class - val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) - - // The complete class definition - val kind = - if (isStaticModule(sym)) ClassKind.ModuleClass - else if (isHijacked) ClassKind.HijackedClass - else ClassKind.Class - - val classDefinition = js.ClassDef( - classIdent, - originalName, - kind, - None, - Some(encodeClassNameIdent(sym.superClass)), - genClassInterfaces(sym, forJSClass = false), - None, - None, - hashedDefs, - topLevelExportDefs)( - optimizerHints) - - classDefinition - } - - /** Gen the IR ClassDef for a Scala.js-defined JS class. */ - private def genNonNativeJSClass(td: TypeDef): js.ClassDef = { - val sym = td.symbol.asClass - implicit val pos: SourcePosition = sym.sourcePos - - assert(sym.isNonNativeJSClass, - i"genNonNativeJSClass() must be called only for non-native JS classes: $sym") - assert(sym.superClass != NoSymbol, sym) - - if (hasDefaultCtorArgsAndJSModule(sym)) { - report.error( - "Implementation restriction: " + - "constructors of non-native JS classes cannot have default parameters if their companion module is JS native.", - td) - } - - val classIdent = encodeClassNameIdent(sym) - val originalName = originalNameOfClass(sym) - - // Generate members (constructor + methods) - - val constructorTrees = new mutable.ListBuffer[DefDef] - val generatedMethods = new mutable.ListBuffer[js.MethodDef] - val dispatchMethodNames = new mutable.ListBuffer[JSName] - - val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { - case EmptyTree => () - - case _: ValDef => - () // fields are added via genClassFields() - - case dd: DefDef => - val sym = dd.symbol - val exposed = sym.isJSExposed - - if (sym.isClassConstructor) { - constructorTrees += dd - } else if (exposed && sym.is(Accessor, butNot = Lazy)) { - // Exposed accessors must not be emitted, since the field they access is enough. - } else if (sym.hasAnnotation(jsdefn.JSOptionalAnnot)) { - // Optional methods must not be emitted - } else { - generatedMethods ++= genMethod(dd) - - // Collect the names of the dispatchers we have to create - if (exposed && !sym.is(Deferred)) { - /* We add symbols that we have to expose here. This way we also - * get inherited stuff that is implemented in this class. 
- */ - dispatchMethodNames += sym.jsName - } - } - - case _ => - throw new FatalError("Illegal tree in gen of genNonNativeJSClass(): " + tree) - } - } - - // Static members (exported from the companion object) - val staticMembers = { - val module = sym.companionModule - if (!module.exists) { - Nil - } else { - val companionModuleClass = module.moduleClass - val exports = withScopedVars(currentClassSym := companionModuleClass) { - jsExportsGen.genStaticExports(companionModuleClass) - } - if (exports.exists(_.isInstanceOf[js.JSFieldDef])) { - val classInitializer = - genStaticConstructorWithStats(ir.Names.ClassInitializerName, genLoadModule(companionModuleClass)) - exports :+ classInitializer - } else { - exports - } - } - } - - val topLevelExports = jsExportsGen.genTopLevelExports(sym) - - val (generatedConstructor, jsClassCaptures) = withNewLocalNameScope { - val isNested = sym.isNestedJSClass - - if (isNested) - localNames.reserveLocalName(JSSuperClassParamName) - - val (captures, ctor) = genJSClassCapturesAndConstructor(constructorTrees.toList) - - val jsClassCaptures = if (isNested) { - val superParam = js.ParamDef(js.LocalIdent(JSSuperClassParamName), - NoOriginalName, jstpe.AnyType, mutable = false) - Some(superParam :: captures) - } else { - assert(captures.isEmpty, s"found non nested JS class with captures $captures at $pos") - None - } - - (ctor, jsClassCaptures) - } - - // Generate fields (and add to methods + ctors) - val generatedMembers = { - genClassFields(td) ::: - generatedConstructor :: - jsExportsGen.genJSClassDispatchers(sym, dispatchMethodNames.result().distinct) ::: - generatedMethods.toList ::: - staticMembers - } - - // Hashed definitions of the class - val hashedMemberDefs = ir.Hashers.hashMemberDefs(generatedMembers) - - // The complete class definition - val kind = - if (isStaticModule(sym)) ClassKind.JSModuleClass - else ClassKind.JSClass - - val classDefinition = js.ClassDef( - classIdent, - originalNameOfClass(sym), - kind, - jsClassCaptures, - Some(encodeClassNameIdent(sym.superClass)), - genClassInterfaces(sym, forJSClass = true), - jsSuperClass = jsClassCaptures.map(_.head.ref), - None, - hashedMemberDefs, - topLevelExports)( - OptimizerHints.empty) - - classDefinition - } - - /** Gen the IR ClassDef for a raw JS class or trait. - */ - private def genRawJSClassData(td: TypeDef): js.ClassDef = { - val sym = td.symbol.asClass - implicit val pos: Position = sym.span - - val classIdent = encodeClassNameIdent(sym) - val kind = { - if (sym.is(Trait)) ClassKind.AbstractJSType - else if (sym.is(ModuleClass)) ClassKind.NativeJSModuleClass - else ClassKind.NativeJSClass - } - val superClass = - if (sym.is(Trait)) None - else Some(encodeClassNameIdent(sym.superClass)) - val jsNativeLoadSpec = computeJSNativeLoadSpecOfClass(sym) - - js.ClassDef( - classIdent, - originalNameOfClass(sym), - kind, - None, - superClass, - genClassInterfaces(sym, forJSClass = false), - None, - jsNativeLoadSpec, - Nil, - Nil)( - OptimizerHints.empty) - } - - /** Gen the IR ClassDef for an interface definition. 
- */ - private def genInterface(td: TypeDef): js.ClassDef = { - val sym = td.symbol.asClass - implicit val pos: SourcePosition = sym.sourcePos - - val classIdent = encodeClassNameIdent(sym) - - val generatedMethods = new mutable.ListBuffer[js.MethodDef] - - val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { - case EmptyTree => () - case dd: DefDef => generatedMethods ++= genMethod(dd) - case _ => - throw new FatalError( - i"""Illegal tree in gen of genInterface(): $tree - |class = $td - |in ${ctx.compilationUnit}""") - } - } - - val superInterfaces = genClassInterfaces(sym, forJSClass = false) - - val genMethodsList = generatedMethods.toList - val allMemberDefs = - if (!isCandidateForForwarders(sym)) genMethodsList - else genMethodsList ::: genStaticForwardersForClassOrInterface(genMethodsList, sym) - - // Hashed definitions of the interface - val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) - - js.ClassDef( - classIdent, - originalNameOfClass(sym), - ClassKind.Interface, - None, - None, - superInterfaces, - None, - None, - hashedDefs, - Nil)( - OptimizerHints.empty) - } - - private def genClassInterfaces(sym: ClassSymbol, forJSClass: Boolean)( - implicit pos: Position): List[js.ClassIdent] = { - for { - intf <- sym.directlyInheritedTraits - if !(forJSClass && intf == defn.DynamicClass) - } yield { - encodeClassNameIdent(intf) - } - } - - // Static forwarders ------------------------------------------------------- - - /* This mimics the logic in BCodeHelpers.addForwarders and the code that - * calls it, except that we never have collisions with existing methods in - * the companion class. This is because in the IR, only methods with the - * same `MethodName` (including signature) and that are also - * `PublicStatic` would collide. There should never be an actual collision - * because the only `PublicStatic` methods that are otherwise generated are - * the bodies of SAMs, which have mangled names. If that assumption is - * broken, an error message is emitted asking the user to report a bug. - * - * It is important that we always emit forwarders, because some Java APIs - * actually have a public static method and a public instance method with - * the same name. For example the class `Integer` has a - * `def hashCode(): Int` and a `static def hashCode(Int): Int`. The JVM - * back-end considers them as colliding because they have the same name, - * but we must not. - * - * By default, we only emit forwarders for top-level objects, like the JVM - * back-end. However, if requested via a compiler option, we enable them - * for all static objects. This is important so we can implement static - * methods of nested static classes of JDK APIs (see scala-js/#3950). - */ - - /** Is the given Scala class, interface or module class a candidate for - * static forwarders? - * - * - the flag `-XnoForwarders` is not set to true, and - * - the symbol is static, and - * - either of both of the following is true: - * - the flag `-scalajsGenStaticForwardersForNonTopLevelObjects` is set to true, or - * - the symbol was originally at the package level - * - * Other than the Scala.js-specific flag, and the fact that we also consider - * interfaces, this performs the same tests as the JVM back-end. 
- */ - def isCandidateForForwarders(sym: Symbol): Boolean = { - !ctx.settings.XnoForwarders.value && sym.isStatic && { - ctx.settings.scalajsGenStaticForwardersForNonTopLevelObjects.value || { - atPhase(flattenPhase) { - toDenot(sym).owner.is(PackageClass) - } - } - } - } - - /** Gen the static forwarders to the members of a class or interface for - * methods of its companion object. - * - * This is only done if there exists a companion object and it is not a JS - * type. - * - * Precondition: `isCandidateForForwarders(sym)` is true - */ - def genStaticForwardersForClassOrInterface( - existingMembers: List[js.MemberDef], sym: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { - val module = sym.companionModule - if (!module.exists) { - Nil - } else { - val moduleClass = module.moduleClass - if (!moduleClass.isJSType) - genStaticForwardersFromModuleClass(existingMembers, moduleClass) - else - Nil - } - } - - /** Gen the static forwarders for the methods of a module class. - * - * Precondition: `isCandidateForForwarders(moduleClass)` is true - */ - def genStaticForwardersFromModuleClass(existingMembers: List[js.MemberDef], - moduleClass: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { - - assert(moduleClass.is(ModuleClass), moduleClass) - - val existingPublicStaticMethodNames = existingMembers.collect { - case js.MethodDef(flags, name, _, _, _, _) - if flags.namespace == js.MemberNamespace.PublicStatic => - name.name - }.toSet - - val members = { - moduleClass.info.membersBasedOnFlags(required = Flags.Method, - excluded = Flags.ExcludedForwarder).map(_.symbol) - } - - def isExcluded(m: Symbol): Boolean = { - def hasAccessBoundary = m.accessBoundary(defn.RootClass) ne defn.RootClass - - def isOfJLObject: Boolean = m.owner eq defn.ObjectClass - - def isDefaultParamOfJSNativeDef: Boolean = { - m.name.is(DefaultGetterName) && { - val info = new DefaultParamInfo(m) - !info.isForConstructor && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) - } - } - - m.is(Deferred) - || m.isConstructor - || hasAccessBoundary - || isOfJLObject - || m.hasAnnotation(jsdefn.JSNativeAnnot) || isDefaultParamOfJSNativeDef // #4557 - } - - val forwarders = for { - m <- members - if !isExcluded(m) - } yield { - withNewLocalNameScope { - val flags = js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic) - val methodIdent = encodeMethodSym(m) - val originalName = originalNameOfMethod(m) - val jsParams = for { - (paramName, paramInfo) <- m.info.paramNamess.flatten.zip(m.info.paramInfoss.flatten) - } yield { - js.ParamDef(freshLocalIdent(paramName), NoOriginalName, - toIRType(paramInfo), mutable = false) - } - val resultType = toIRType(m.info.resultType) - - if (existingPublicStaticMethodNames.contains(methodIdent.name)) { - report.error( - "Unexpected situation: found existing public static method " + - s"${methodIdent.name.nameString} in the companion class of " + - s"${moduleClass.fullName}; cannot generate a static forwarder " + - "the method of the same name in the object." + - "Please report this as a bug in the Scala.js support in dotty.", - pos) - } - - js.MethodDef(flags, methodIdent, originalName, jsParams, resultType, Some { - genApplyMethod(genLoadModule(moduleClass), m, jsParams.map(_.ref)) - })(OptimizerHints.empty, None) - } - } - - forwarders.toList - } - - // Generate the fields of a class ------------------------------------------ - - /** Gen definitions for the fields of a class. 
*/ - private def genClassFields(td: TypeDef): List[js.MemberDef] = { - val classSym = td.symbol.asClass - assert(currentClassSym.get == classSym, - "genClassFields called with a ClassDef other than the current one") - - val isJSClass = classSym.isNonNativeJSClass - - // Term members that are neither methods nor modules are fields - classSym.info.decls.filter { f => - !f.isOneOf(MethodOrModule) && f.isTerm - && !f.hasAnnotation(jsdefn.JSNativeAnnot) - && !f.hasAnnotation(jsdefn.JSOptionalAnnot) - && !f.hasAnnotation(jsdefn.JSExportStaticAnnot) - }.flatMap({ f => - implicit val pos = f.span - - val isTopLevelExport = f.hasAnnotation(jsdefn.JSExportTopLevelAnnot) - val isJavaStatic = f.is(JavaStatic) - assert(!(isTopLevelExport && isJavaStatic), - em"found ${f.fullName} which is both a top-level export and a Java static") - val isStaticField = isTopLevelExport || isJavaStatic - - val namespace = if isStaticField then js.MemberNamespace.PublicStatic else js.MemberNamespace.Public - val mutable = isStaticField || f.is(Mutable) - - val flags = js.MemberFlags.empty.withMutable(mutable).withNamespace(namespace) - - val irTpe0 = - if (isJSClass) genExposedFieldIRType(f) - else if (isTopLevelExport) jstpe.AnyType - else toIRType(f.info) - - // scala-js/#4370 Fields cannot have type NothingType - val irTpe = - if (irTpe0 == jstpe.NothingType) encodeClassType(defn.NothingClass) - else irTpe0 - - if (isJSClass && f.isJSExposed) - js.JSFieldDef(flags, genExpr(f.jsName)(f.sourcePos), irTpe) :: Nil - else - val fieldIdent = encodeFieldSym(f) - val originalName = originalNameOfField(f) - val fieldDef = js.FieldDef(flags, fieldIdent, originalName, irTpe) - val optionalStaticFieldGetter = - if isJavaStatic then - // Here we are generating a public static getter for the static field, - // this is its API for other units. This is necessary for singleton - // enum values, which are backed by static fields. - val className = encodeClassName(classSym) - val body = js.Block( - js.LoadModule(className), - js.SelectStatic(className, fieldIdent)(irTpe)) - js.MethodDef(js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), - encodeStaticMemberSym(f), originalName, Nil, irTpe, - Some(body))( - OptimizerHints.empty, None) :: Nil - else - Nil - fieldDef :: optionalStaticFieldGetter - }).toList - } - - def genExposedFieldIRType(f: Symbol): jstpe.Type = { - val tpeEnteringPosterasure = atPhase(elimErasedValueTypePhase)(f.info) - tpeEnteringPosterasure match { - case tpe: ErasedValueType => - /* Here, we must store the field as the boxed representation of - * the value class. The default value of that field, as - * initialized at the time the instance is created, will - * therefore be null. This will not match the behavior we would - * get in a Scala class. To match the behavior, we would need to - * initialized to an instance of the boxed representation, with - * an underlying value set to the zero of its type. However we - * cannot implement that, so we live with the discrepancy. - * - * In dotc this is usually not an issue, because it unboxes `null` to - * the zero of the underlying type, unlike scalac which throws an NPE. - */ - jstpe.ClassType(encodeClassName(tpe.tycon.typeSymbol)) - - case _ => - // Other types are not boxed, so we can initialized them to their true zero. 
- toIRType(f.info) - } - } - - // Static initializers ----------------------------------------------------- - - private def genStaticConstructorWithStats(name: MethodName, stats: js.Tree)( - implicit pos: Position): js.MethodDef = { - js.MethodDef( - js.MemberFlags.empty.withNamespace(js.MemberNamespace.StaticConstructor), - js.MethodIdent(name), - NoOriginalName, - Nil, - jstpe.NoType, - Some(stats))( - OptimizerHints.empty, None) - } - - private def genRegisterReflectiveInstantiation(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { - if (isStaticModule(sym)) - genRegisterReflectiveInstantiationForModuleClass(sym) - else if (sym.is(ModuleClass)) - None // scala-js#3228 - else if (sym.is(Lifted) && !sym.originalOwner.isClass) - None // scala-js#3227 - else - genRegisterReflectiveInstantiationForNormalClass(sym) - } - - private def genRegisterReflectiveInstantiationForModuleClass(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { - val fqcnArg = js.StringLiteral(sym.fullName.toString) - val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) - val loadModuleFunArg = - js.Closure(arrow = true, Nil, Nil, None, genLoadModule(sym), Nil) - - val stat = genApplyMethod( - genLoadModule(jsdefn.ReflectModule), - jsdefn.Reflect_registerLoadableModuleClass, - List(fqcnArg, runtimeClassArg, loadModuleFunArg)) - - Some(stat) - } - - private def genRegisterReflectiveInstantiationForNormalClass(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { - val ctors = - if (sym.is(Abstract)) Nil - else sym.info.member(nme.CONSTRUCTOR).alternatives.map(_.symbol).filter(m => !m.isOneOf(Private | Protected)) - - if (ctors.isEmpty) { - None - } else { - val constructorsInfos = for { - ctor <- ctors - } yield { - withNewLocalNameScope { - val (parameterTypes, formalParams, actualParams) = (for { - (paramName, paramInfo) <- ctor.info.paramNamess.flatten.zip(ctor.info.paramInfoss.flatten) - } yield { - val paramType = js.ClassOf(toTypeRef(paramInfo)) - val paramDef = js.ParamDef(freshLocalIdent(paramName), - NoOriginalName, jstpe.AnyType, mutable = false) - val actualParam = unbox(paramDef.ref, paramInfo) - (paramType, paramDef, actualParam) - }).unzip3 - - val paramTypesArray = js.JSArrayConstr(parameterTypes) - - val newInstanceFun = js.Closure(arrow = true, Nil, formalParams, None, { - js.New(encodeClassName(sym), encodeMethodSym(ctor), actualParams) - }, Nil) - - js.JSArrayConstr(List(paramTypesArray, newInstanceFun)) - } - } - - val fqcnArg = js.StringLiteral(sym.fullName.toString) - val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) - val ctorsInfosArg = js.JSArrayConstr(constructorsInfos) - - val stat = genApplyMethod( - genLoadModule(jsdefn.ReflectModule), - jsdefn.Reflect_registerInstantiatableClass, - List(fqcnArg, runtimeClassArg, ctorsInfosArg)) - - Some(stat) - } - } - - // Constructor of a non-native JS class ------------------------------------ - - def genJSClassCapturesAndConstructor(constructorTrees: List[DefDef])( - implicit pos: SourcePosition): (List[js.ParamDef], js.JSConstructorDef) = { - /* We need to merge all Scala constructors into a single one because the - * IR, like JavaScript, only allows a single one. - * - * We do this by applying: - * 1. Applying runtime type based dispatch, just like exports. - * 2. Splitting secondary ctors into parts before and after the `this` call. - * 3. Topo-sorting all constructor statements and including/excluding - * them based on the overload that was chosen. 
- */ - - val (primaryTree :: Nil, secondaryTrees) = - constructorTrees.partition(_.symbol.isPrimaryConstructor): @unchecked - - val primaryCtor = genPrimaryJSClassCtor(primaryTree) - val secondaryCtors = secondaryTrees.map(genSecondaryJSClassCtor(_)) - - // VarDefs for the parameters of all constructors. - val paramVarDefs = for { - vparam <- constructorTrees.flatMap(_.paramss.flatten) - } yield { - val sym = vparam.symbol - val tpe = toIRType(sym.info) - js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), tpe, mutable = true, jstpe.zeroOf(tpe))(vparam.span) - } - - /* organize constructors in a called-by tree - * (the implicit root is the primary constructor) - */ - val ctorTree = { - val ctorToChildren = secondaryCtors - .groupBy(_.targetCtor) - .withDefaultValue(Nil) - - /* when constructing the call-by tree, we use pre-order traversal to - * assign overload numbers. - * this puts all descendants of a ctor in a range of overloads numbers. - * - * this property is useful, later, when we need to make statements - * conditional based on the chosen overload. - */ - var nextOverloadNum = 0 - def subTree[T <: JSCtor](ctor: T): ConstructorTree[T] = { - val overloadNum = nextOverloadNum - nextOverloadNum += 1 - val subtrees = ctorToChildren(ctor.sym).map(subTree(_)) - new ConstructorTree(overloadNum, ctor, subtrees) - } - - subTree(primaryCtor) - } - - /* prepare overload dispatch for all constructors. - * as a side-product, we retrieve the capture parameters. - */ - val (exports, jsClassCaptures) = { - val exports = List.newBuilder[jsExportsGen.Exported] - val jsClassCaptures = List.newBuilder[js.ParamDef] - - def add(tree: ConstructorTree[_ <: JSCtor]): Unit = { - val (e, c) = genJSClassCtorDispatch(tree.ctor.sym, - tree.ctor.paramsAndInfo, tree.overloadNum) - exports += e - jsClassCaptures ++= c - tree.subCtors.foreach(add(_)) - } - - add(ctorTree) - - (exports.result(), jsClassCaptures.result()) - } - - // The name 'constructor' is used for error reporting here - val (formalArgs, restParam, overloadDispatchBody) = - jsExportsGen.genOverloadDispatch(JSName.Literal("constructor"), exports, jstpe.IntType) - - val overloadVar = js.VarDef(freshLocalIdent("overload"), NoOriginalName, - jstpe.IntType, mutable = false, overloadDispatchBody) - - val constructorBody = wrapJSCtorBody( - paramVarDefs :+ overloadVar, - genJSClassCtorBody(overloadVar.ref, ctorTree), - js.Undefined() :: Nil - ) - - val constructorDef = js.JSConstructorDef( - js.MemberFlags.empty.withNamespace(js.MemberNamespace.Constructor), - formalArgs, restParam, constructorBody)(OptimizerHints.empty, None) - - (jsClassCaptures, constructorDef) - } - - private def genPrimaryJSClassCtor(dd: DefDef): PrimaryJSCtor = { - val sym = dd.symbol - val Block(stats, _) = dd.rhs: @unchecked - assert(sym.isPrimaryConstructor, s"called with non-primary ctor: $sym") - - var jsSuperCall: Option[js.JSSuperConstructorCall] = None - val jsStats = List.newBuilder[js.Tree] - - /* Move all statements after the super constructor call since JS - * cannot access `this` before the super constructor call. - * - * dotc inserts statements before the super constructor call for param - * accessor initializers (including val's and var's declared in the - * params). We move those after the super constructor call, and are - * therefore executed later than for a Scala class. 
- */ - withPerMethodBodyState(sym) { - stats.foreach { - case tree @ Apply(fun @ Select(Super(This(_), _), _), args) - if fun.symbol.isClassConstructor => - assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") - implicit val pos: Position = tree.span - jsSuperCall = Some(js.JSSuperConstructorCall(genActualJSArgs(fun.symbol, args))) - - case stat => - val jsStat = genStat(stat) - assert(jsSuperCall.isDefined || !jsStat.isInstanceOf[js.VarDef], - "Trying to move a local VarDef after the super constructor call of a non-native JS class at " + - dd.sourcePos) - jsStats += jsStat - } - } - - assert(jsSuperCall.isDefined, - s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") - - new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), - js.JSConstructorBody(Nil, jsSuperCall.get, jsStats.result())(dd.span)) - } - - private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { - val sym = dd.symbol - assert(!sym.isPrimaryConstructor, s"called with primary ctor $sym") - - def flattenBlocks(t: Tree): List[Tree] = t match { - case Block(stats, expr) => (stats :+ expr).flatMap(flattenBlocks) - case _ => t :: Nil - } - val stats = flattenBlocks(dd.rhs) - - val beforeThisCall = List.newBuilder[js.Tree] - var thisCall: Option[(Symbol, List[js.Tree])] = None - val afterThisCall = List.newBuilder[js.Tree] - - withPerMethodBodyState(sym) { - stats.foreach { - case tree @ Apply(fun @ Select(This(_), _), args) - if fun.symbol.isClassConstructor => - assert(thisCall.isEmpty, - s"duplicate this() call in secondary JS constructor at ${dd.sourcePos}") - - implicit val pos: Position = tree.span - val sym = fun.symbol - thisCall = Some((sym, genActualArgs(sym, args))) - - case stat => - val jsStat = genStat(stat) - if (thisCall.isEmpty) - beforeThisCall += jsStat - else - afterThisCall += jsStat - } - } - - assert(thisCall.isDefined, - i"could not find the this() call in secondary JS constructor at ${dd.sourcePos}:\n${stats.map(_.show).mkString("\n")}") - val Some((targetCtor, ctorArgs)) = thisCall: @unchecked - - new SplitSecondaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), - beforeThisCall.result(), targetCtor, ctorArgs, afterThisCall.result()) - } - - private def genParamsAndInfo(ctorSym: Symbol, - vparamss: List[ParamClause]): List[(Symbol, JSParamInfo)] = { - implicit val pos: SourcePosition = ctorSym.sourcePos - - val paramSyms = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) - paramSyms.zip(ctorSym.jsParamInfos) - } - - private def genJSClassCtorDispatch(ctorSym: Symbol, - allParamsAndInfos: List[(Symbol, JSParamInfo)], - overloadNum: Int): (jsExportsGen.Exported, List[js.ParamDef]) = { - - implicit val pos: SourcePosition = ctorSym.sourcePos - - /* `allParams` are the parameters as seen from inside the constructor body, - * i.e., the ones generated by the trees in the constructor body. - */ - val (captureParamsAndInfos, normalParamsAndInfos) = - allParamsAndInfos.partition(_._2.capture) - - /* For class captures, we need to generate different names than the ones - * used by the constructor body. This is necessary so that we can forward - * captures properly between constructor delegation calls. 
- */ - val (jsClassCaptures, captureAssigns) = (for { - (param, info) <- captureParamsAndInfos - } yield { - val ident = freshLocalIdent(param.name.toTermName) - val jsClassCapture = - js.ParamDef(ident, originalNameOfLocal(param), toIRType(info.info), mutable = false) - val captureAssign = - js.Assign(genVarRef(param), jsClassCapture.ref) - (jsClassCapture, captureAssign) - }).unzip - - val normalInfos = normalParamsAndInfos.map(_._2).toIndexedSeq - - val jsExport = new jsExportsGen.Exported(ctorSym, normalInfos) { - def genBody(formalArgsRegistry: jsExportsGen.FormalArgsRegistry): js.Tree = { - val paramAssigns = for { - ((param, info), i) <- normalParamsAndInfos.zipWithIndex - } yield { - val rhs = jsExportsGen.genScalaArg(this, i, formalArgsRegistry, info, static = true, - captures = captureParamsAndInfos.map(pi => genVarRef(pi._1)))( - prevArgsCount => normalParamsAndInfos.take(prevArgsCount).map(pi => genVarRef(pi._1))) - - js.Assign(genVarRef(param), rhs) - } - - js.Block(captureAssigns ::: paramAssigns, js.IntLiteral(overloadNum)) - } - } - - (jsExport, jsClassCaptures) - } - - /** Generates a JS constructor body based on a constructor tree. */ - private def genJSClassCtorBody(overloadVar: js.VarRef, - ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.JSConstructorBody = { - - /* generates a statement that conditionally executes body iff the chosen - * overload is any of the descendants of `tree` (including itself). - * - * here we use the property from building the trees, that a set of - * descendants always has a range of overload numbers. - */ - def ifOverload(tree: ConstructorTree[_], body: js.Tree): js.Tree = body match { - case js.Skip() => js.Skip() - - case body => - val x = overloadVar - val cond = { - import tree.{lo, hi} - - if (lo == hi) { - js.BinaryOp(js.BinaryOp.Int_==, js.IntLiteral(lo), x) - } else { - val lhs = js.BinaryOp(js.BinaryOp.Int_<=, js.IntLiteral(lo), x) - val rhs = js.BinaryOp(js.BinaryOp.Int_<=, x, js.IntLiteral(hi)) - js.If(lhs, rhs, js.BooleanLiteral(false))(jstpe.BooleanType) - } - } - - js.If(cond, body, js.Skip())(jstpe.NoType) - } - - /* preStats / postStats use pre/post order traversal respectively to - * generate a topo-sorted sequence of statements. - */ - - def preStats(tree: ConstructorTree[SplitSecondaryJSCtor], - nextParamsAndInfo: List[(Symbol, JSParamInfo)]): js.Tree = { - val inner = tree.subCtors.map(preStats(_, tree.ctor.paramsAndInfo)) - - assert(tree.ctor.ctorArgs.size == nextParamsAndInfo.size, "param count mismatch") - val paramsInfosAndArgs = nextParamsAndInfo.zip(tree.ctor.ctorArgs) - - val (captureParamsInfosAndArgs, normalParamsInfosAndArgs) = - paramsInfosAndArgs.partition(_._1._2.capture) - - val captureAssigns = for { - ((param, _), arg) <- captureParamsInfosAndArgs - } yield { - js.Assign(genVarRef(param), arg) - } - - val normalAssigns = for { - (((param, info), arg), i) <- normalParamsInfosAndArgs.zipWithIndex - } yield { - val newArg = arg match { - case js.Transient(UndefinedParam) => - /* Go full circle: We have ignored the default param getter for - * this, we'll create it again. - * - * This seems not optimal: We could simply not ignore the calls to - * default param getters in the first place. - * - * However, this proves to be difficult: Because of translations in - * earlier phases, calls to default param getters may be assigned - * to temporary variables first (see the undefinedDefaultParams - * ScopedVar). 
If this happens, it becomes increasingly difficult - * to distinguish a default param getter call for a constructor - * call of *this* instance (in which case we would want to keep - * the default param getter call) from one for a *different* - * instance (in which case we would want to discard the default - * param getter call) - * - * Because of this, it ends up being easier to just re-create the - * default param getter call if necessary. - */ - implicit val pos: SourcePosition = tree.ctor.sym.sourcePos - jsExportsGen.genCallDefaultGetter(tree.ctor.sym, i, static = false, - captures = captureParamsInfosAndArgs.map(p => genVarRef(p._1._1)))( - prevArgsCount => normalParamsInfosAndArgs.take(prevArgsCount).map(p => genVarRef(p._1._1))) - - case arg => arg - } - - js.Assign(genVarRef(param), newArg) - } - - ifOverload(tree, js.Block( - inner ++ tree.ctor.beforeCall ++ captureAssigns ++ normalAssigns)) - } - - def postStats(tree: ConstructorTree[SplitSecondaryJSCtor]): js.Tree = { - val inner = tree.subCtors.map(postStats(_)) - ifOverload(tree, js.Block(tree.ctor.afterCall ++ inner)) - } - - val primaryCtor = ctorTree.ctor - val secondaryCtorTrees = ctorTree.subCtors - - wrapJSCtorBody( - secondaryCtorTrees.map(preStats(_, primaryCtor.paramsAndInfo)), - primaryCtor.body, - secondaryCtorTrees.map(postStats(_)) - ) - } - - private def wrapJSCtorBody(before: List[js.Tree], body: js.JSConstructorBody, - after: List[js.Tree]): js.JSConstructorBody = { - js.JSConstructorBody(before ::: body.beforeSuper, body.superCall, - body.afterSuper ::: after)(body.pos) - } - - private sealed trait JSCtor { - val sym: Symbol - val paramsAndInfo: List[(Symbol, JSParamInfo)] - } - - private class PrimaryJSCtor(val sym: Symbol, - val paramsAndInfo: List[(Symbol, JSParamInfo)], - val body: js.JSConstructorBody) extends JSCtor - - private class SplitSecondaryJSCtor(val sym: Symbol, - val paramsAndInfo: List[(Symbol, JSParamInfo)], - val beforeCall: List[js.Tree], - val targetCtor: Symbol, val ctorArgs: List[js.Tree], - val afterCall: List[js.Tree]) extends JSCtor - - private class ConstructorTree[Ctor <: JSCtor]( - val overloadNum: Int, val ctor: Ctor, - val subCtors: List[ConstructorTree[SplitSecondaryJSCtor]]) { - val lo: Int = overloadNum - val hi: Int = subCtors.lastOption.fold(lo)(_.hi) - - assert(lo <= hi, "bad overload range") - } - - // Generate a method ------------------------------------------------------- - - /** Generates the JSNativeMemberDef. */ - def genJSNativeMemberDef(tree: ValOrDefDef): js.JSNativeMemberDef = { - implicit val pos = tree.span - - val sym = tree.symbol - val flags = js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic) - val methodName = encodeJSNativeMemberSym(sym) - val jsNativeLoadSpec = computeJSNativeLoadSpecOfValDef(sym) - js.JSNativeMemberDef(flags, methodName, jsNativeLoadSpec) - } - - private def genMethod(dd: DefDef): Option[js.MethodDef] = { - withScopedVars( - localNames := new LocalNameGenerator - ) { - genMethodWithCurrentLocalNameScope(dd) - } - } - - /** Gen JS code for a method definition in a class or in an impl class. - * On the JS side, method names are mangled to encode the full signature - * of the Scala method, as described in `JSEncoding`, to support - * overloading. - * - * Some methods are not emitted at all: - * - Primitives, since they are never actually called - * - Constructors of hijacked classes - * - * Constructors are emitted by generating their body as a statement. - * - * Other (normal) methods are emitted with `genMethodBody()`. 
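The signature encoding mentioned in the comment above is what keeps overloads apart once they become flat IR method names. A purely hypothetical mangling scheme (the real one lives in JSEncoding) is enough to show the idea:

object ManglingSketch {
  // Hypothetical encoding: join the name, the parameter type refs and the result type ref.
  def mangle(name: String, paramTypeRefs: List[String], resultTypeRef: String): String =
    ((name :: paramTypeRefs) :+ resultTypeRef).mkString("_")

  def main(args: Array[String]): Unit = {
    println(mangle("plus", List("I", "I"), "I")) // plus_I_I_I
    println(mangle("plus", List("T", "T"), "T")) // plus_T_T_T: the overloads no longer collide
  }
}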
- */ - private def genMethodWithCurrentLocalNameScope(dd: DefDef): Option[js.MethodDef] = { - implicit val pos = dd.span - val sym = dd.symbol - val vparamss = dd.termParamss - val rhs = dd.rhs - - /* Is this method a default accessor that should be ignored? - * - * This is the case iff one of the following applies: - * - It is a constructor default accessor and the linked class is a - * native JS class. - * - It is a default accessor for a native JS def, but with the caveat - * that its rhs must be `js.native` because of #4553. - * - * Both of those conditions can only happen if the default accessor is in - * a module class, so we use that as a fast way out. (But omitting that - * condition would not change the result.) - * - * This is different than `isJSDefaultParam` in `genApply`: we do not - * ignore default accessors of *non-native* JS types. Neither for - * constructor default accessor nor regular default accessors. We also - * do not need to worry about non-constructor members of native JS types, - * since for those, the entire member list is ignored in `genJSClassData`. - */ - def isIgnorableDefaultParam: Boolean = { - sym.name.is(DefaultGetterName) && sym.owner.is(ModuleClass) && { - val info = new DefaultParamInfo(sym) - if (info.isForConstructor) { - /* This is a default accessor for a constructor parameter. Check - * whether the attached constructor is a native JS constructor, - * which is the case iff the linked class is a native JS type. - */ - info.constructorOwner.hasAnnotation(jsdefn.JSNativeAnnot) - } else { - /* #4553 We need to ignore default accessors for JS native defs. - * However, because Scala.js <= 1.7.0 actually emitted code calling - * those accessors, we must keep default accessors that would - * compile. The only accessors we can actually get rid of are those - * that are `= js.native`. - */ - !sym.owner.isJSType && - info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) && { - dd.rhs match { - case MaybeAsInstanceOf(Apply(fun, _)) => - fun.symbol == jsdefn.JSPackage_native - case _ => - false - } - } - } - } - } - - withPerMethodBodyState(sym) { - assert(vparamss.isEmpty || vparamss.tail.isEmpty, - "Malformed parameter list: " + vparamss) - val params = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) - - val methodName = encodeMethodSym(sym) - val originalName = originalNameOfMethod(sym) - - def jsParams = params.map(genParamDef(_)) - - if (primitives.isPrimitive(sym)) { - None - } else if (sym.is(Deferred) && currentClassSym.isNonNativeJSClass) { - // scala-js/#4409: Do not emit abstract methods in non-native JS classes - None - } else if (sym.is(Deferred)) { - Some(js.MethodDef(js.MemberFlags.empty, methodName, originalName, - jsParams, toIRType(patchedResultType(sym)), None)( - OptimizerHints.empty, None)) - } else if (isIgnorableDefaultParam) { - // #11592 - None - } else if (sym.is(Bridge) && sym.name.is(DefaultGetterName) && currentClassSym.isNonNativeJSClass) { - /* #12572 Bridges for default accessors in non-native JS classes must not be emitted, - * because they call another default accessor, making their entire body an - * that cannot be eliminated. - * Such methods are never called anyway, because they are filtered out in - * JSExportsGen.defaultGetterDenot(). 
- */ - None - } else /*if (sym.isClassConstructor && isHijackedBoxedClass(sym.owner)) { - None - } else*/ { - /*def isTraitImplForwarder = dd.rhs match { - case app: Apply => foreignIsImplClass(app.symbol.owner) - case _ => false - }*/ - - val shouldMarkInline = { - sym.hasAnnotation(jsdefn.InlineAnnot) || - sym.isAnonymousFunction - } - - val shouldMarkNoinline = { - sym.hasAnnotation(jsdefn.NoinlineAnnot) /*&& - !isTraitImplForwarder*/ - } - - val optimizerHints = { - OptimizerHints.empty - .withInline(shouldMarkInline) - .withNoinline(shouldMarkNoinline) - } - - val methodDef = { - if (sym.isClassConstructor) { - val namespace = js.MemberNamespace.Constructor - js.MethodDef(js.MemberFlags.empty.withNamespace(namespace), - methodName, originalName, jsParams, jstpe.NoType, Some(genStat(rhs)))( - optimizerHints, None) - } else { - val namespace = if (isMethodStaticInIR(sym)) { - if (sym.isPrivate) js.MemberNamespace.PrivateStatic - else js.MemberNamespace.PublicStatic - } else { - if (sym.isPrivate) js.MemberNamespace.Private - else js.MemberNamespace.Public - } - val resultIRType = toIRType(patchedResultType(sym)) - genMethodDef(namespace, methodName, originalName, - params, resultIRType, rhs, optimizerHints) - } - } - - Some(methodDef) - } - } - } - - /** Generates the MethodDef of a (non-constructor) method - * - * Most normal methods are emitted straightforwardly. If the result - * type is Unit, then the body is emitted as a statement. Otherwise, it is - * emitted as an expression. - * - * Instance methods in non-native JS classes are compiled as static methods - * taking an explicit parameter for their `this` value. Static methods in - * non-native JS classes are compiled as is, like methods in Scala classes. - */ - private def genMethodDef(namespace: js.MemberNamespace, methodName: js.MethodIdent, - originalName: OriginalName, paramsSyms: List[Symbol], resultIRType: jstpe.Type, - tree: Tree, optimizerHints: OptimizerHints): js.MethodDef = { - implicit val pos = tree.span - - val jsParams = paramsSyms.map(genParamDef(_)) - - def genBody() = localNames.makeLabeledIfRequiresEnclosingReturn(resultIRType) { - if (resultIRType == jstpe.NoType) genStat(tree) - else genExpr(tree) - } - - if (namespace.isStatic || !currentClassSym.isNonNativeJSClass) { - val flags = js.MemberFlags.empty.withNamespace(namespace) - js.MethodDef(flags, methodName, originalName, jsParams, resultIRType, Some(genBody()))( - optimizerHints, None) - } else { - val thisLocalIdent = freshLocalIdent("this") - withScopedVars( - thisLocalVarIdent := Some(thisLocalIdent) - ) { - val staticNamespace = - if (namespace.isPrivate) js.MemberNamespace.PrivateStatic - else js.MemberNamespace.PublicStatic - val flags = - js.MemberFlags.empty.withNamespace(staticNamespace) - val thisParamDef = js.ParamDef(thisLocalIdent, thisOriginalName, - jstpe.AnyType, mutable = false) - - js.MethodDef(flags, methodName, originalName, - thisParamDef :: jsParams, resultIRType, Some(genBody()))( - optimizerHints, None) - } - } - } - - // ParamDefs --------------------------------------------------------------- - - def genParamDef(sym: Symbol): js.ParamDef = - genParamDef(sym, toIRType(sym.info)) - - private def genParamDef(sym: Symbol, ptpe: jstpe.Type): js.ParamDef = - genParamDef(sym, ptpe, sym.span) - - private def genParamDef(sym: Symbol, pos: Position): js.ParamDef = - genParamDef(sym, toIRType(sym.info), pos) - - private def genParamDef(sym: Symbol, ptpe: jstpe.Type, pos: Position): js.ParamDef = { - 
js.ParamDef(encodeLocalSym(sym)(implicitly, pos, implicitly), - originalNameOfLocal(sym), ptpe, mutable = false)(pos) - } - - // Generate statements and expressions ------------------------------------- - - /** Gen JS code for a tree in statement position (in the IR). - */ - private def genStat(tree: Tree): js.Tree = { - exprToStat(genStatOrExpr(tree, isStat = true)) - } - - /** Turn a JavaScript expression of type Unit into a statement */ - private def exprToStat(tree: js.Tree): js.Tree = { - /* Any JavaScript expression is also a statement, but at least we get rid - * of some pure expressions that come from our own codegen. - */ - implicit val pos = tree.pos - tree match { - case js.Block(stats :+ expr) => - js.Block(stats :+ exprToStat(expr)) - case _:js.Literal | _:js.This | _:js.VarRef => - js.Skip() - case _ => - tree - } - } - - /** Gen JS code for a tree in expression position (in the IR). - */ - private def genExpr(tree: Tree): js.Tree = { - val result = genStatOrExpr(tree, isStat = false) - assert(result.tpe != jstpe.NoType, - s"genExpr($tree) returned a tree with type NoType at pos ${tree.span}") - result - } - - def genExpr(name: JSName)(implicit pos: SourcePosition): js.Tree = name match { - case JSName.Literal(name) => js.StringLiteral(name) - case JSName.Computed(sym) => genComputedJSName(sym) - } - - private def genComputedJSName(sym: Symbol)(implicit pos: SourcePosition): js.Tree = { - /* By construction (i.e. restriction in PrepJSInterop), we know that sym - * must be a static method. - * Therefore, at this point, we can invoke it by loading its owner and - * calling it. - */ - def moduleOrGlobalScope = genLoadModuleOrGlobalScope(sym.owner) - def module = genLoadModule(sym.owner) - - if (sym.owner.isJSType) { - if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) - genApplyJSMethodGeneric(sym, moduleOrGlobalScope, args = Nil, isStat = false) - else - genApplyJSClassMethod(module, sym, arguments = Nil) - } else { - genApplyMethod(module, sym, arguments = Nil) - } - } - - /** Gen JS code for a tree in expression position (in the IR) or the - * global scope. - */ - def genExprOrGlobalScope(tree: Tree): MaybeGlobalScope = { - implicit def pos: SourcePosition = tree.sourcePos - - tree match { - case _: This => - val sym = tree.symbol - if (sym != currentClassSym.get && sym.is(Module)) - genLoadModuleOrGlobalScope(sym) - else - MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - - case _:Ident | _:Select => - val sym = tree.symbol - if (sym.is(Module)) { - assert(!sym.is(PackageClass), "Cannot use package as value: " + tree) - genLoadModuleOrGlobalScope(sym) - } else { - MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - } - - case Apply(fun, _) => - if (fun.symbol == jsdefn.JSDynamic_global) - MaybeGlobalScope.GlobalScope(pos) - else - MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - - case _ => - MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - } - } - - /** Gen JS code for a tree in statement or expression position (in the IR). - * - * This is the main transformation method. Each node of the Scala AST - * is transformed into an equivalent portion of the JS AST. 
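As a rough sketch of the statement-versus-expression split that genStatOrExpr implements, here is the same shape on a toy AST and toy IR (illustrative types only, not the compiler's trees):

object ToyLowering {
  sealed trait Ast
  final case class Lit(value: Int)                  extends Ast
  final case class Blk(stats: List[Ast], expr: Ast) extends Ast
  final case class Cond(c: Ast, t: Ast, e: Ast)     extends Ast

  sealed trait Ir
  case object Skip                            extends Ir
  final case class IntLit(value: Int)         extends Ir
  final case class IrBlock(stats: List[Ir])   extends Ir
  final case class IrIf(c: Ir, t: Ir, e: Ir)  extends Ir

  // Every AST node maps to an equivalent IR node; in statement position a
  // pure expression collapses to Skip, mirroring exprToStat above.
  def gen(tree: Ast, isStat: Boolean): Ir = tree match {
    case Lit(v)        => if (isStat) Skip else IntLit(v)
    case Blk(stats, e) => IrBlock(stats.map(gen(_, isStat = true)) :+ gen(e, isStat))
    case Cond(c, t, e) => IrIf(gen(c, isStat = false), gen(t, isStat), gen(e, isStat))
  }

  def main(args: Array[String]): Unit =
    println(gen(Blk(List(Lit(1)), Cond(Lit(1), Lit(2), Lit(3))), isStat = false))
}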
- */ - private def genStatOrExpr(tree: Tree, isStat: Boolean): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - - report.debuglog(" " + tree) - report.debuglog("") - - tree match { - /** Local val or var declaration */ - case tree @ ValDef(name, _, _) => - val sym = tree.symbol - val rhs = tree.rhs - val rhsTree = genExpr(rhs) - - rhsTree match { - case js.Transient(UndefinedParam) => - /* This is an intermediate assignment for default params on a - * js.Any. Add the symbol to the corresponding set to inform - * the Ident resolver how to replace it and don't emit the symbol. - */ - undefinedDefaultParams += sym - js.Skip() - case _ => - js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), - toIRType(sym.info), sym.is(Mutable), rhsTree) - } - - case If(cond, thenp, elsep) => - val tpe = - if (isStat) jstpe.NoType - else toIRType(tree.tpe) - - js.If(genExpr(cond), genStatOrExpr(thenp, isStat), - genStatOrExpr(elsep, isStat))(tpe) - - case Labeled(bind, expr) => - js.Labeled(encodeLabelSym(bind.symbol), toIRType(tree.tpe), genStatOrExpr(expr, isStat)) - - case Return(expr, from) => - val fromSym = from.symbol - val label = - if (fromSym.is(Label)) encodeLabelSym(fromSym) - else localNames.get.getEnclosingReturnLabel() - js.Return(toIRType(expr.tpe) match { - case jstpe.NoType => js.Block(genStat(expr), js.Undefined()) - case _ => genExpr(expr) - }, label) - - case WhileDo(cond, body) => - val genCond = - if (cond == EmptyTree) js.BooleanLiteral(true) - else genExpr(cond) - js.While(genCond, genStat(body)) - - case t: Try => - genTry(t, isStat) - - case app: Apply => - genApply(app, isStat) - - case app: TypeApply => - genTypeApply(app) - - /*case app: ApplyDynamic => - genApplyDynamic(app)*/ - - case tree: This => - val currentClass = currentClassSym.get - val symIsModuleClass = tree.symbol.is(ModuleClass) - assert(tree.symbol == currentClass || symIsModuleClass, - s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $currentClass") - if (symIsModuleClass && tree.symbol != currentClass) - genLoadModule(tree.symbol) - else - genThis() - - case Select(qualifier, _) => - val sym = tree.symbol - if (sym.is(Module)) { - assert(!sym.is(Package), "Cannot use package as value: " + tree) - genLoadModule(sym) - } else if (sym.is(JavaStatic)) { - genLoadStaticField(sym) - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { - genJSNativeMemberSelect(tree) - } else { - val (field, boxed) = genAssignableField(sym, qualifier) - if (boxed) unbox(field, atPhase(elimErasedValueTypePhase)(sym.info)) - else field - } - - case tree: Ident => - desugarIdent(tree).fold[js.Tree] { - val sym = tree.symbol - assert(!sym.is(Package), "Cannot use package as value: " + tree) - if (sym.is(Module)) { - genLoadModule(sym) - } else if (undefinedDefaultParams.contains(sym)) { - /* This is a default parameter whose assignment was moved to - * a local variable. Put an undefined param instead. 
- */ - js.Transient(UndefinedParam) - } else { - genVarRef(sym) - } - } { select => - genStatOrExpr(select, isStat) - } - - case Literal(value) => - import Constants._ - value.tag match { - case UnitTag => - js.Skip() - case BooleanTag => - js.BooleanLiteral(value.booleanValue) - case ByteTag => - js.ByteLiteral(value.byteValue) - case ShortTag => - js.ShortLiteral(value.shortValue) - case CharTag => - js.CharLiteral(value.charValue) - case IntTag => - js.IntLiteral(value.intValue) - case LongTag => - js.LongLiteral(value.longValue) - case FloatTag => - js.FloatLiteral(value.floatValue) - case DoubleTag => - js.DoubleLiteral(value.doubleValue) - case StringTag => - js.StringLiteral(value.stringValue) - case NullTag => - js.Null() - case ClazzTag => - genClassConstant(value.typeValue) - } - - case Block(stats, expr) => - // #15419 Collapse { ; BoxedUnit } to - val genStatsAndExpr0 = stats.map(genStat(_)) :+ genStatOrExpr(expr, isStat) - val genStatsAndExpr = genStatsAndExpr0 match { - case (undefParam @ js.Transient(UndefinedParam)) :: js.Undefined() :: Nil => - undefParam :: Nil - case _ => - genStatsAndExpr0 - } - js.Block(genStatsAndExpr) - - case Typed(expr, _) => - expr match { - case _: Super => genThis() - case _ => genExpr(expr) - } - - case Assign(lhs0, rhs) => - val sym = lhs0.symbol - if (sym.is(JavaStaticTerm) && sym.source != ctx.compilationUnit.source) - throw new FatalError(s"Assignment to static member ${sym.fullName} not supported") - def genRhs = genExpr(rhs) - val lhs = lhs0 match { - case lhs: Ident => desugarIdent(lhs).getOrElse(lhs) - case lhs => lhs - } - lhs match { - case lhs: Select => - val qualifier = lhs.qualifier - - def ctorAssignment = ( - currentMethodSym.get.name == nme.CONSTRUCTOR && - currentMethodSym.get.owner == qualifier.symbol && - qualifier.isInstanceOf[This] - ) - // TODO This fails for OFFSET$x fields. Re-enable when we can. - /*if (!sym.is(Mutable) && !ctorAssignment) - throw new FatalError(s"Assigning to immutable field ${sym.fullName} at $pos")*/ - - if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { - /* This is an assignment to a @js.native field. Since we reject - * `@js.native var`s as compile errors, this can only happen in - * the constructor of the enclosing object. - * We simply ignore the assignment, since the field will not be - * emitted at all. - */ - js.Skip() - } else { - val (field, boxed) = genAssignableField(sym, qualifier) - if (boxed) { - val genBoxedRhs = box(genRhs, atPhase(elimErasedValueTypePhase)(sym.info)) - js.Assign(field, genBoxedRhs) - } else { - js.Assign(field, genRhs) - } - } - - case _ => - js.Assign(genVarRef(sym), genRhs) - } - - /** Array constructor */ - case javaSeqLiteral: JavaSeqLiteral => - genJavaSeqLiteral(javaSeqLiteral) - - /** A Match reaching the backend is supposed to be optimized as a switch */ - case mtch: Match => - genMatch(mtch, isStat) - - case tree: Closure => - genClosure(tree) - - case EmptyTree => - js.Skip() - - case _ => - throw new FatalError("Unexpected tree in genExpr: " + - tree + "/" + tree.getClass + " at: " + (tree.span: Position)) - } - } // end of genStatOrExpr() - - private def qualifierOf(fun: Tree): Tree = fun match { - case fun: Ident => - fun.tpe match { - case TermRef(prefix: TermRef, _) => tpd.ref(prefix) - case TermRef(prefix: ThisType, _) => tpd.This(prefix.cls) - } - case Select(qualifier, _) => - qualifier - case TypeApply(fun, _) => - qualifierOf(fun) - } - - /** Gen JS this of the current class. - * Normally encoded straightforwardly as a JS this. 
- * But must be replaced by the `thisLocalVarIdent` local variable if there - * is one. - */ - private def genThis()(implicit pos: Position): js.Tree = { - /*if (tryingToGenMethodAsJSFunction) { - throw new CancelGenMethodAsJSFunction( - "Trying to generate `this` inside the body") - }*/ - - thisLocalVarIdent.fold[js.Tree] { - js.This()(currentThisType) - } { thisLocalIdent => - js.VarRef(thisLocalIdent)(currentThisType) - } - } - - /** Gen IR code for a `try..catch` or `try..finally` block. - * - * `try..finally` blocks are compiled straightforwardly to `try..finally` - * blocks of the IR. - * - * `try..catch` blocks are a bit more subtle, as the IR does not have - * type-based selection of exceptions to catch. We thus encode explicitly - * the type tests, like in: - * - * ``` - * try { ... } - * catch (e) { - * if (e.isInstanceOf[IOException]) { ... } - * else if (e.isInstanceOf[Exception]) { ... } - * else { - * throw e; // default, re-throw - * } - * } - * ``` - * - * In addition, there are provisions to handle catching JavaScript - * exceptions (which do not extend `Throwable`) as wrapped in a - * `js.JavaScriptException`. - */ - private def genTry(tree: Try, isStat: Boolean): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - val Try(block, catches, finalizer) = tree - - val blockAST = genStatOrExpr(block, isStat) - - val resultType = - if (isStat) jstpe.NoType - else toIRType(tree.tpe) - - val handled = - if (catches.isEmpty) blockAST - else genTryCatch(blockAST, catches, resultType, isStat) - - genStat(finalizer) match { - case js.Skip() => handled - case ast => js.TryFinally(handled, ast) - } - } - - private def genTryCatch(body: js.Tree, catches: List[CaseDef], - resultType: jstpe.Type, - isStat: Boolean)(implicit pos: SourcePosition): js.Tree = { - val exceptIdent = freshLocalIdent("e") - val origExceptVar = js.VarRef(exceptIdent)(jstpe.AnyType) - - val mightCatchJavaScriptException = catches.exists { caseDef => - caseDef.pat match { - case Typed(Ident(nme.WILDCARD), tpt) => - isMaybeJavaScriptException(tpt.tpe) - case Ident(nme.WILDCARD) => - true - case pat @ Bind(_, _) => - isMaybeJavaScriptException(pat.symbol.info) - } - } - - val (exceptValDef, exceptVar) = if (mightCatchJavaScriptException) { - val valDef = js.VarDef(freshLocalIdent("e"), NoOriginalName, - encodeClassType(defn.ThrowableClass), mutable = false, js.WrapAsThrowable(origExceptVar)) - (valDef, valDef.ref) - } else { - (js.Skip(), origExceptVar) - } - - val elseHandler: js.Tree = js.Throw(origExceptVar) - - val handler = catches.foldRight(elseHandler) { (caseDef, elsep) => - implicit val pos: SourcePosition = caseDef.sourcePos - val CaseDef(pat, _, body) = caseDef - - // Extract exception type and variable - val (tpe, boundVar) = (pat match { - case Typed(Ident(nme.WILDCARD), tpt) => - (tpt.tpe, None) - case Ident(nme.WILDCARD) => - (defn.ThrowableType, None) - case Bind(_, _) => - val ident = encodeLocalSym(pat.symbol) - val origName = originalNameOfLocal(pat.symbol) - (pat.symbol.info, Some(ident, origName)) - }) - - // Generate the body that must be executed if the exception matches - val bodyWithBoundVar = (boundVar match { - case None => - genStatOrExpr(body, isStat) - case Some((boundVarIdent, boundVarOriginalName)) => - val castException = genAsInstanceOf(exceptVar, tpe) - js.Block( - js.VarDef(boundVarIdent, boundVarOriginalName, toIRType(tpe), - mutable = false, castException), - genStatOrExpr(body, isStat)) - }) - - // Generate the test - if (tpe =:= defn.ThrowableType) { - 
bodyWithBoundVar - } else { - val cond = genIsInstanceOf(exceptVar, tpe) - js.If(cond, bodyWithBoundVar, elsep)(resultType) - } - } - - js.TryCatch(body, exceptIdent, NoOriginalName, - js.Block(exceptValDef, handler))(resultType) - } - - /** Gen JS code for an Apply node (method call) - * - * There's a whole bunch of varieties of Apply nodes: regular method - * calls, super calls, constructor calls, isInstanceOf/asInstanceOf, - * primitives, JS calls, etc. They are further dispatched in here. - */ - private def genApply(tree: Apply, isStat: Boolean): js.Tree = { - implicit val pos = tree.span - val args = tree.args - val sym = tree.fun.symbol - - /* Is the method a JS default accessor, which should become an - * `UndefinedParam` rather than being compiled normally. - * - * This is true iff one of the following conditions apply: - * - It is a constructor default param for the constructor of a JS class. - * - It is a default param of an instance method of a native JS type. - * - It is a default param of an instance method of a non-native JS type - * and the attached method is exposed. - * - It is a default param for a native JS def. - * - * This is different than `isIgnorableDefaultParam` in - * `genMethodWithCurrentLocalNameScope`: we include here the default - * accessors of *non-native* JS types (unless the corresponding methods are - * not exposed). We also need to handle non-constructor members of native - * JS types. - */ - def isJSDefaultParam: Boolean = { - sym.name.is(DefaultGetterName) && { - val info = new DefaultParamInfo(sym) - if (info.isForConstructor) { - /* This is a default accessor for a constructor parameter. Check - * whether the attached constructor is a JS constructor, which is - * the case iff the linked class is a JS type. - */ - info.constructorOwner.isJSType - } else { - if (sym.owner.isJSType) { - /* The default accessor is in a JS type. It is a JS default - * param iff the enclosing class is native or the attached method - * is exposed. - */ - !sym.owner.isNonNativeJSClass || info.attachedMethod.isJSExposed - } else { - /* The default accessor is in a Scala type. It is a JS default - * param iff the attached method is a native JS def. This can - * only happen if the owner is a module class, which we test - * first as a fast way out. - */ - sym.owner.is(ModuleClass) && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) - } - } - } - } - - tree.fun match { - case _ if isJSDefaultParam => - js.Transient(UndefinedParam) - - case Select(Super(_, _), _) => - genSuperCall(tree, isStat) - - case Select(New(_), nme.CONSTRUCTOR) => - genApplyNew(tree) - - case _ => - if (primitives.isPrimitive(tree)) { - genPrimitiveOp(tree, isStat) - } else if (Erasure.Boxing.isBox(sym)) { - // Box a primitive value (cannot be Unit) - val arg = args.head - makePrimitiveBox(genExpr(arg), arg.tpe) - } else if (Erasure.Boxing.isUnbox(sym)) { - // Unbox a primitive value (cannot be Unit) - val arg = args.head - makePrimitiveUnbox(genExpr(arg), tree.tpe) - } else { - genNormalApply(tree, isStat) - } - } - } - - /** Gen JS code for a super call, of the form Class.super[mix].fun(args). - * - * This does not include calls defined in mixin traits, as these are - * already desugared by the 'mixin' phase. Only calls to super classes - * remain. - * - * Since a class has exactly one direct superclass, and calling a method - * two classes above the current one is invalid in Scala, the `mix` item is - * irrelevant. 
- */ - private def genSuperCall(tree: Apply, isStat: Boolean): js.Tree = { - implicit val pos = tree.span - val Apply(fun @ Select(sup @ Super(qual, _), _), args) = tree: @unchecked - val sym = fun.symbol - - if (sym == defn.Any_getClass) { - // The only primitive that is also callable as super call - js.GetClass(genThis()) - } else if (currentClassSym.isNonNativeJSClass) { - genJSSuperCall(tree, isStat) - } else { - /* #3013 `qual` can be `this.$outer()` in some cases since Scala 2.12, - * so we call `genExpr(qual)`, not just `genThis()`. - */ - val superCall = genApplyMethodStatically( - genExpr(qual), sym, genActualArgs(sym, args)) - - // Initialize the module instance just after the super constructor call. - if (isStaticModule(currentClassSym) && !isModuleInitialized.get.value && - currentMethodSym.get.isClassConstructor) { - isModuleInitialized.get.value = true - val className = encodeClassName(currentClassSym) - val thisType = jstpe.ClassType(className) - val initModule = js.StoreModule(className, js.This()(thisType)) - js.Block(superCall, initModule) - } else { - superCall - } - } - } - - /** Gen JS code for a constructor call (new). - * Further refined into: - * * new String(...) - * * new of a hijacked boxed class - * * new of an anonymous function class that was recorded as JS function - * * new of a raw JS class - * * new Array - * * regular new - */ - private def genApplyNew(tree: Apply): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - - val Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) = tree: @unchecked - val ctor = fun.symbol - val tpe = tpt.tpe - - assert(ctor.isClassConstructor, - "'new' call to non-constructor: " + ctor.name) - - val clsSym = tpe.typeSymbol - - if (isHijackedClass(clsSym)) { - genNewHijackedClass(clsSym, ctor, args.map(genExpr)) - } else /*if (translatedAnonFunctions contains tpe.typeSymbol) { - val functionMaker = translatedAnonFunctions(tpe.typeSymbol) - functionMaker(args map genExpr) - } else*/ if (clsSym.isJSType) { - genNewJSClass(tree) - } else { - toTypeRef(tpe) match { - case jstpe.ClassRef(className) => - js.New(className, encodeMethodSym(ctor), genActualArgs(ctor, args)) - - case other => - throw new FatalError(s"Non ClassRef cannot be instantiated: $other") - } - } - } - - /** Gen JS code for a call to a constructor of a hijacked class. - * Reroute them to the `new` method with the same signature in the - * companion object. - */ - private def genNewHijackedClass(clazz: Symbol, ctor: Symbol, - args: List[js.Tree])(implicit pos: SourcePosition): js.Tree = { - - val className = encodeClassName(clazz) - val initName = encodeMethodSym(ctor).name - val newName = MethodName(newSimpleMethodName, initName.paramTypeRefs, - jstpe.ClassRef(className)) - val newMethodIdent = js.MethodIdent(newName) - - js.ApplyStatic(js.ApplyFlags.empty, className, newMethodIdent, args)( - jstpe.ClassType(className)) - } - - /** Gen JS code for a new of a JS class (subclass of `js.Any`). 
*/ - private def genNewJSClass(tree: Apply): js.Tree = { - acquireContextualJSClassValue { jsClassValue => - implicit val pos: Position = tree.span - - val Apply(fun @ Select(New(tpt), _), args) = tree: @unchecked - val cls = tpt.tpe.typeSymbol - val ctor = fun.symbol - - val nestedJSClass = cls.isNestedJSClass - assert(jsClassValue.isDefined == nestedJSClass, - s"$cls at $pos: jsClassValue.isDefined = ${jsClassValue.isDefined} " + - s"but isInnerNonNativeJSClass = $nestedJSClass") - - def genArgs: List[js.TreeOrJSSpread] = genActualJSArgs(ctor, args) - def genArgsAsClassCaptures: List[js.Tree] = args.map(genExpr) - - jsClassValue.fold { - // Static JS class (by construction, it cannot be a module class, as their News do not reach the back-end) - if (cls == jsdefn.JSObjectClass && args.isEmpty) - js.JSObjectConstr(Nil) - else if (cls == jsdefn.JSArrayClass && args.isEmpty) - js.JSArrayConstr(Nil) - else - js.JSNew(genLoadJSConstructor(cls), genArgs) - } { jsClassVal => - // Nested JS class - if (cls.isAnonymousClass) - genNewAnonJSClass(cls, jsClassVal, genArgsAsClassCaptures)(fun.span) - else if (atPhase(erasurePhase)(cls.is(ModuleClass))) // LambdaLift removes the ModuleClass flag of lifted classes - js.JSNew(js.CreateJSClass(encodeClassName(cls), jsClassVal :: genArgsAsClassCaptures), Nil) - else - js.JSNew(jsClassVal, genArgs) - } - } - } - - /** Generate an instance of an anonymous (non-lambda) JS class inline - * - * @param sym Class to generate the instance of - * @param jsSuperClassValue JS class value of the super class - * @param args Arguments to the Scala constructor, which map to JS class captures - * @param pos Position of the original New tree - */ - private def genNewAnonJSClass(sym: Symbol, jsSuperClassValue: js.Tree, args: List[js.Tree])( - implicit pos: Position): js.Tree = { - assert(sym.isAnonymousClass, - s"Generating AnonJSClassNew of non anonymous JS class ${sym.fullName}") - - // Find the TypeDef for this anonymous class and generate it - val typeDef = consumeLazilyGeneratedAnonClass(sym) - val originalClassDef = resetAllScopedVars { - withScopedVars( - currentClassSym := sym - ) { - genNonNativeJSClass(typeDef) - } - } - - // Partition class members. 
- val privateFieldDefs = mutable.ListBuffer.empty[js.FieldDef] - val classDefMembers = mutable.ListBuffer.empty[js.MemberDef] - val instanceMembers = mutable.ListBuffer.empty[js.MemberDef] - var constructor: Option[js.JSConstructorDef] = None - - originalClassDef.memberDefs.foreach { - case fdef: js.FieldDef => - privateFieldDefs += fdef - - case fdef: js.JSFieldDef => - instanceMembers += fdef - - case mdef: js.MethodDef => - assert(mdef.flags.namespace.isStatic, - "Non-static, unexported method in non-native JS class") - classDefMembers += mdef - - case cdef: js.JSConstructorDef => - assert(constructor.isEmpty, "two ctors in class") - constructor = Some(cdef) - - case mdef: js.JSMethodDef => - assert(!mdef.flags.namespace.isStatic, "Exported static method") - instanceMembers += mdef - - case property: js.JSPropertyDef => - instanceMembers += property - - case nativeMemberDef: js.JSNativeMemberDef => - throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) - } - - assert(originalClassDef.topLevelExportDefs.isEmpty, - "Found top-level exports in anonymous JS class at " + pos) - - // Make new class def with static members - val newClassDef = { - implicit val pos = originalClassDef.pos - val parent = js.ClassIdent(jsNames.ObjectClass) - js.ClassDef(originalClassDef.name, originalClassDef.originalName, - ClassKind.AbstractJSType, None, Some(parent), interfaces = Nil, - jsSuperClass = None, jsNativeLoadSpec = None, - classDefMembers.toList, Nil)( - originalClassDef.optimizerHints) - } - - generatedClasses += newClassDef - - // Construct inline class definition - - val jsClassCaptures = originalClassDef.jsClassCaptures.getOrElse { - throw new AssertionError(s"no class captures for anonymous JS class at $pos") - } - val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { - throw new AssertionError("No ctor found") - } - assert(ctorParams.isEmpty && ctorRestParam.isEmpty, - s"non-empty constructor params for anonymous JS class at $pos") - - /* The first class capture is always a reference to the super class. - * This is enforced by genJSClassCapturesAndConstructor. - */ - def jsSuperClassRef(implicit pos: ir.Position): js.VarRef = - jsClassCaptures.head.ref - - /* The `this` reference. - * FIXME This could clash with a local variable of the constructor or a JS - * class capture. It seems Scala 2 has the same vulnerability. How do we - * avoid this? 
- */ - val selfName = freshLocalIdent("this")(pos) - def selfRef(implicit pos: ir.Position) = - js.VarRef(selfName)(jstpe.AnyType) - - def memberLambda(params: List[js.ParamDef], restParam: Option[js.ParamDef], body: js.Tree)(implicit pos: ir.Position): js.Closure = - js.Closure(arrow = false, captureParams = Nil, params, restParam, body, captureValues = Nil) - - val memberDefinitions0 = instanceMembers.toList.map { - case fdef: js.FieldDef => - throw new AssertionError("unexpected FieldDef") - - case fdef: js.JSFieldDef => - implicit val pos = fdef.pos - js.Assign(js.JSSelect(selfRef, fdef.name), jstpe.zeroOf(fdef.ftpe)) - - case mdef: js.MethodDef => - throw new AssertionError("unexpected MethodDef") - - case cdef: js.JSConstructorDef => - throw new AssertionError("unexpected JSConstructorDef") - - case mdef: js.JSMethodDef => - implicit val pos = mdef.pos - val impl = memberLambda(mdef.args, mdef.restParam, mdef.body) - js.Assign(js.JSSelect(selfRef, mdef.name), impl) - - case pdef: js.JSPropertyDef => - implicit val pos = pdef.pos - val optGetter = pdef.getterBody.map { body => - js.StringLiteral("get") -> memberLambda(params = Nil, restParam = None, body) - } - val optSetter = pdef.setterArgAndBody.map { case (arg, body) => - js.StringLiteral("set") -> memberLambda(params = arg :: Nil, restParam = None, body) - } - val descriptor = js.JSObjectConstr( - optGetter.toList ::: - optSetter.toList ::: - List(js.StringLiteral("configurable") -> js.BooleanLiteral(true)) - ) - js.JSMethodApply(js.JSGlobalRef("Object"), - js.StringLiteral("defineProperty"), - List(selfRef, pdef.name, descriptor)) - - case nativeMemberDef: js.JSNativeMemberDef => - throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) - } - - val memberDefinitions = if (privateFieldDefs.isEmpty) { - memberDefinitions0 - } else { - /* Private fields, declared in FieldDefs, are stored in a separate - * object, itself stored as a non-enumerable field of the `selfRef`. - * The name of that field is retrieved at - * `scala.scalajs.runtime.privateFieldsSymbol()`, and is a Symbol if - * supported, or a randomly generated string that has the same enthropy - * as a UUID (i.e., 128 random bits). - * - * This encoding solves two issues: - * - * - Hide private fields in anonymous JS classes from `JSON.stringify` - * and other cursory inspections in JS (#2748). - * - Get around the fact that abstract JS types cannot declare - * FieldDefs (#3777). - */ - val fieldsObjValue = { - js.JSObjectConstr(privateFieldDefs.toList.map { fdef => - implicit val pos = fdef.pos - js.StringLiteral(fdef.name.name.nameString) -> jstpe.zeroOf(fdef.ftpe) - }) - } - val definePrivateFieldsObj = { - /* Object.defineProperty(selfRef, privateFieldsSymbol, { - * value: fieldsObjValue - * }); - * - * `writable`, `configurable` and `enumerable` are false by default. - */ - js.JSMethodApply( - js.JSGlobalRef("Object"), - js.StringLiteral("defineProperty"), - List( - selfRef, - genPrivateFieldsSymbol()(using sym.sourcePos), - js.JSObjectConstr(List( - js.StringLiteral("value") -> fieldsObjValue - )) - ) - ) - } - definePrivateFieldsObj :: memberDefinitions0 - } - - // Transform the constructor body. 
- val inlinedCtorStats: List[js.Tree] = { - val beforeSuper = ctorBody.beforeSuper - - val superCall = { - implicit val pos = ctorBody.superCall.pos - val js.JSSuperConstructorCall(args) = ctorBody.superCall - - val newTree = { - val ident = originalClassDef.superClass.getOrElse(throw new FatalError("No superclass")) - if (args.isEmpty && ident.name == JSObjectClassName) - js.JSObjectConstr(Nil) - else - js.JSNew(jsSuperClassRef, args) - } - - val selfVarDef = js.VarDef(selfName, thisOriginalName, jstpe.AnyType, mutable = false, newTree) - selfVarDef :: memberDefinitions - } - - // After the super call, substitute `selfRef` for `This()` - val afterSuper = new ir.Transformers.Transformer { - override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match { - case js.This() => - selfRef(tree.pos) - - // Don't traverse closure boundaries - case closure: js.Closure => - val newCaptureValues = closure.captureValues.map(transformExpr) - closure.copy(captureValues = newCaptureValues)(closure.pos) - - case tree => - super.transform(tree, isStat) - } - }.transformStats(ctorBody.afterSuper) - - beforeSuper ::: superCall ::: afterSuper - } - - val closure = js.Closure(arrow = true, jsClassCaptures, Nil, None, - js.Block(inlinedCtorStats, selfRef), jsSuperClassValue :: args) - js.JSFunctionApply(closure, Nil) - } - - /** Gen JS code for a primitive method call. */ - private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ - - implicit val pos = tree.span - - val Apply(fun, args) = tree - val receiver = qualifierOf(fun) - - val code = primitives.getPrimitive(tree, receiver.tpe) - - if (isArithmeticOp(code) || isLogicalOp(code) || isComparisonOp(code)) - genSimpleOp(tree, receiver :: args, code) - else if (code == CONCAT) - genStringConcat(tree, receiver, args) - else if (code == HASH) - genScalaHash(tree, receiver) - else if (isArrayOp(code)) - genArrayOp(tree, code) - else if (code == SYNCHRONIZED) - genSynchronized(tree, isStat) - else if (isCoercion(code)) - genCoercion(tree, receiver, code) - else if (code == JSPrimitives.THROW) - genThrow(tree, args) - else if (JSPrimitives.isJSPrimitive(code)) - genJSPrimitive(tree, args, code, isStat) - else - throw new FatalError(s"Unknown primitive: ${tree.symbol.fullName} at: $pos") - } - - /** Gen JS code for a simple operation (arithmetic, logical, or comparison) */ - private def genSimpleOp(tree: Apply, args: List[Tree], code: Int): js.Tree = { - args match { - case List(arg) => genSimpleUnaryOp(tree, arg, code) - case List(lhs, rhs) => genSimpleBinaryOp(tree, lhs, rhs, code) - case _ => throw new FatalError("Incorrect arity for primitive") - } - } - - /** Gen JS code for a simple unary operation. 
*/ - private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ - - implicit val pos = tree.span - - val resultIRType = toIRType(tree.tpe) - val genArg = adaptPrimitive(genExpr(arg), resultIRType) - - (code: @switch) match { - case POS => - genArg - - case NEG => - (resultIRType: @unchecked) match { - case jstpe.IntType => - js.BinaryOp(js.BinaryOp.Int_-, js.IntLiteral(0), genArg) - case jstpe.LongType => - js.BinaryOp(js.BinaryOp.Long_-, js.LongLiteral(0), genArg) - case jstpe.FloatType => - js.BinaryOp(js.BinaryOp.Float_*, js.FloatLiteral(-1.0f), genArg) - case jstpe.DoubleType => - js.BinaryOp(js.BinaryOp.Double_*, js.DoubleLiteral(-1.0), genArg) - } - - case NOT => - (resultIRType: @unchecked) match { - case jstpe.IntType => - js.BinaryOp(js.BinaryOp.Int_^, js.IntLiteral(-1), genArg) - case jstpe.LongType => - js.BinaryOp(js.BinaryOp.Long_^, js.LongLiteral(-1), genArg) - } - - case ZNOT => - js.UnaryOp(js.UnaryOp.Boolean_!, genArg) - - case _ => - throw new FatalError("Unknown unary operation code: " + code) - } - } - - /** Gen JS code for a simple binary operation. */ - private def genSimpleBinaryOp(tree: Apply, lhs: Tree, rhs: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ - - implicit val pos: SourcePosition = tree.sourcePos - - val lhsIRType = toIRType(lhs.tpe) - val rhsIRType = toIRType(rhs.tpe) - - val isShift = isShiftOp(code) - - val opType = { - if (isShift) { - if (lhsIRType == jstpe.LongType) jstpe.LongType - else jstpe.IntType - } else { - (lhsIRType, rhsIRType) match { - case (jstpe.DoubleType, _) | (_, jstpe.DoubleType) => jstpe.DoubleType - case (jstpe.FloatType, _) | (_, jstpe.FloatType) => jstpe.FloatType - case (jstpe.LongType, _) | (_, jstpe.LongType) => jstpe.LongType - case (jstpe.IntType | jstpe.ByteType | jstpe.ShortType | jstpe.CharType, _) => jstpe.IntType - case (_, jstpe.IntType | jstpe.ByteType | jstpe.ShortType | jstpe.CharType) => jstpe.IntType - case (jstpe.BooleanType, _) | (_, jstpe.BooleanType) => jstpe.BooleanType - case _ => jstpe.AnyType - } - } - } - - val lsrc = - if (opType == jstpe.AnyType) genExpr(lhs) - else adaptPrimitive(genExpr(lhs), opType) - val rsrc = - if (opType == jstpe.AnyType) genExpr(rhs) - else adaptPrimitive(genExpr(rhs), if (isShift) jstpe.IntType else opType) - - if (opType == jstpe.AnyType && isUniversalEqualityOp(code)) { - genUniversalEqualityOp(lhs.tpe, rhs.tpe, lsrc, rsrc, code) - } else if (code == ZOR) { - js.If(lsrc, js.BooleanLiteral(true), rsrc)(jstpe.BooleanType) - } else if (code == ZAND) { - js.If(lsrc, rsrc, js.BooleanLiteral(false))(jstpe.BooleanType) - } else { - import js.BinaryOp._ - - (opType: @unchecked) match { - case jstpe.IntType => - val op = (code: @switch) match { - case ADD => Int_+ - case SUB => Int_- - case MUL => Int_* - case DIV => Int_/ - case MOD => Int_% - case OR => Int_| - case AND => Int_& - case XOR => Int_^ - case LSL => Int_<< - case LSR => Int_>>> - case ASR => Int_>> - - case EQ => Int_== - case NE => Int_!= - case LT => Int_< - case LE => Int_<= - case GT => Int_> - case GE => Int_>= - } - js.BinaryOp(op, lsrc, rsrc) - - case jstpe.FloatType => - def withFloats(op: Int): js.Tree = - js.BinaryOp(op, lsrc, rsrc) - - def toDouble(value: js.Tree): js.Tree = - js.UnaryOp(js.UnaryOp.FloatToDouble, value) - - def withDoubles(op: Int): js.Tree = - js.BinaryOp(op, toDouble(lsrc), toDouble(rsrc)) - - (code: @switch) match { - case ADD => withFloats(Float_+) - case SUB => withFloats(Float_-) - 
case MUL => withFloats(Float_*) - case DIV => withFloats(Float_/) - case MOD => withFloats(Float_%) - - case EQ => withDoubles(Double_==) - case NE => withDoubles(Double_!=) - case LT => withDoubles(Double_<) - case LE => withDoubles(Double_<=) - case GT => withDoubles(Double_>) - case GE => withDoubles(Double_>=) - } - - case jstpe.DoubleType => - val op = (code: @switch) match { - case ADD => Double_+ - case SUB => Double_- - case MUL => Double_* - case DIV => Double_/ - case MOD => Double_% - - case EQ => Double_== - case NE => Double_!= - case LT => Double_< - case LE => Double_<= - case GT => Double_> - case GE => Double_>= - } - js.BinaryOp(op, lsrc, rsrc) - - case jstpe.LongType => - val op = (code: @switch) match { - case ADD => Long_+ - case SUB => Long_- - case MUL => Long_* - case DIV => Long_/ - case MOD => Long_% - case OR => Long_| - case XOR => Long_^ - case AND => Long_& - case LSL => Long_<< - case LSR => Long_>>> - case ASR => Long_>> - - case EQ => Long_== - case NE => Long_!= - case LT => Long_< - case LE => Long_<= - case GT => Long_> - case GE => Long_>= - } - js.BinaryOp(op, lsrc, rsrc) - - case jstpe.BooleanType => - val op = (code: @switch) match { - case EQ => Boolean_== - case NE => Boolean_!= - case OR => Boolean_| - case AND => Boolean_& - case XOR => Boolean_!= - } - js.BinaryOp(op, lsrc, rsrc) - - case jstpe.AnyType => - val op = code match { - case ID => === - case NI => !== - } - js.BinaryOp(op, lsrc, rsrc) - } - } - } - - private def adaptPrimitive(value: js.Tree, to: jstpe.Type)( - implicit pos: Position): js.Tree = { - genConversion(value.tpe, to, value) - } - - /* This method corresponds to the method of the same name in - * BCodeBodyBuilder of the JVM back-end. It ends up calling the method - * BCodeIdiomatic.emitT2T, whose logic we replicate here. - */ - private def genConversion(from: jstpe.Type, to: jstpe.Type, value: js.Tree)( - implicit pos: Position): js.Tree = { - import js.UnaryOp._ - - if (from == to || from == jstpe.NothingType) { - value - } else if (from == jstpe.BooleanType || to == jstpe.BooleanType) { - throw new AssertionError(s"Invalid genConversion from $from to $to") - } else { - def intValue = (from: @unchecked) match { - case jstpe.IntType => value - case jstpe.CharType => js.UnaryOp(CharToInt, value) - case jstpe.ByteType => js.UnaryOp(ByteToInt, value) - case jstpe.ShortType => js.UnaryOp(ShortToInt, value) - case jstpe.LongType => js.UnaryOp(LongToInt, value) - case jstpe.FloatType => js.UnaryOp(DoubleToInt, js.UnaryOp(FloatToDouble, value)) - case jstpe.DoubleType => js.UnaryOp(DoubleToInt, value) - } - - def doubleValue = from match { - case jstpe.DoubleType => value - case jstpe.FloatType => js.UnaryOp(FloatToDouble, value) - case jstpe.LongType => js.UnaryOp(LongToDouble, value) - case _ => js.UnaryOp(IntToDouble, intValue) - } - - (to: @unchecked) match { - case jstpe.CharType => - js.UnaryOp(IntToChar, intValue) - case jstpe.ByteType => - js.UnaryOp(IntToByte, intValue) - case jstpe.ShortType => - js.UnaryOp(IntToShort, intValue) - case jstpe.IntType => - intValue - case jstpe.LongType => - from match { - case jstpe.FloatType | jstpe.DoubleType => - js.UnaryOp(DoubleToLong, doubleValue) - case _ => - js.UnaryOp(IntToLong, intValue) - } - case jstpe.FloatType => - if (from == jstpe.LongType) - js.UnaryOp(js.UnaryOp.LongToFloat, value) - else - js.UnaryOp(js.UnaryOp.DoubleToFloat, doubleValue) - case jstpe.DoubleType => - doubleValue - } - } - } - - /** Gen JS code for a universal equality test. 
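In the conversion routing above, a Float source is funneled through Double (FloatToDouble then DoubleToInt) because Float-to-Double widening is exact, so the detour cannot change the truncated result. A small JVM-side check of the equivalent primitive semantics:

object ConversionRouting {
  def main(args: Array[String]): Unit = {
    val f = -7.9f
    println(f.toInt)          // -7: truncation toward zero
    println(f.toDouble.toInt) // -7: identical, since Float-to-Double loses nothing
  }
}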
*/ - private def genUniversalEqualityOp(ltpe: Type, rtpe: Type, lhs: js.Tree, rhs: js.Tree, code: Int)( - implicit pos: SourcePosition): js.Tree = { - - import dotty.tools.backend.ScalaPrimitivesOps._ - - val bypassEqEq = { - // Do not call equals if we have a literal null at either side. - lhs.isInstanceOf[js.Null] || - rhs.isInstanceOf[js.Null] - } - - if (bypassEqEq) { - js.BinaryOp( - if (code == EQ) js.BinaryOp.=== else js.BinaryOp.!==, - lhs, rhs) - } else { - val body = genEqEqPrimitive(ltpe, rtpe, lhs, rhs) - if (code == EQ) body - else js.UnaryOp(js.UnaryOp.Boolean_!, body) - } - } - - private lazy val externalEqualsNumNum: Symbol = - defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) - private lazy val externalEqualsNumChar: Symbol = - defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumChar) - private lazy val externalEqualsNumObject: Symbol = - defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) - private lazy val externalEquals: Symbol = - defn.BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol - - /** Gen JS code for a call to Any.== */ - private def genEqEqPrimitive(ltpe: Type, rtpe: Type, lsrc: js.Tree, rsrc: js.Tree)( - implicit pos: SourcePosition): js.Tree = { - report.debuglog(s"$ltpe == $rtpe") - val lsym = ltpe.typeSymbol.asClass - val rsym = rtpe.typeSymbol.asClass - - /* True if the equality comparison is between values that require the - * use of the rich equality comparator - * (scala.runtime.BoxesRunTime.equals). - * This is the case when either side of the comparison might have a - * run-time type subtype of java.lang.Number or java.lang.Character, - * **which includes when either is a JS type**. - * When it is statically known that both sides are equal and subtypes of - * Number or Character, not using the rich equality is possible (their - * own equals method will do ok), except for java.lang.Float and - * java.lang.Double: their `equals` have different behavior around `NaN` - * and `-0.0`, see Javadoc (scala-dev#329, scala-js#2799). - */ - val mustUseAnyComparator: Boolean = { - lsym.isJSType || rsym.isJSType || { - val p = ctx.platform - p.isMaybeBoxed(lsym) && p.isMaybeBoxed(rsym) && { - val areSameFinals = lsym.is(Final) && rsym.is(Final) && (ltpe =:= rtpe) - !areSameFinals || lsym == defn.BoxedFloatClass || lsym == defn.BoxedDoubleClass - } - } - } - - if (mustUseAnyComparator) { - val equalsMethod: Symbol = { - val ptfm = ctx.platform - if (lsym.derivesFrom(defn.BoxedNumberClass)) { - if (rsym.derivesFrom(defn.BoxedNumberClass)) externalEqualsNumNum - else if (rsym.derivesFrom(defn.BoxedCharClass)) externalEqualsNumChar - else externalEqualsNumObject - } else externalEquals - } - genApplyStatic(equalsMethod, List(lsrc, rsrc)) - } else { - // if (lsrc eq null) rsrc eq null else lsrc.equals(rsrc) - if (lsym == defn.StringClass) { - // String.equals(that) === (this eq that) - js.BinaryOp(js.BinaryOp.===, lsrc, rsrc) - } else { - /* This requires to evaluate both operands in local values first. - * The optimizer will eliminate them if possible. 
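The java.lang.Float/java.lang.Double caveat behind mustUseAnyComparator above is easy to reproduce on the JVM: boxed equals and primitive == disagree around NaN and -0.0.

object BoxedEqualsCaveat {
  def main(args: Array[String]): Unit = {
    val nan1 = java.lang.Double.valueOf(Double.NaN)
    val nan2 = java.lang.Double.valueOf(Double.NaN)
    println(nan1.equals(nan2))                        // true: boxed equals compares bit patterns
    println(nan1.doubleValue() == nan2.doubleValue()) // false: primitive NaN is never == NaN

    val negZero = java.lang.Double.valueOf(-0.0)
    val posZero = java.lang.Double.valueOf(0.0)
    println(negZero.equals(posZero))                        // false
    println(negZero.doubleValue() == posZero.doubleValue()) // true
  }
}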
- */ - val ltemp = js.VarDef(freshLocalIdent(), NoOriginalName, lsrc.tpe, mutable = false, lsrc) - val rtemp = js.VarDef(freshLocalIdent(), NoOriginalName, rsrc.tpe, mutable = false, rsrc) - js.Block( - ltemp, - rtemp, - js.If(js.BinaryOp(js.BinaryOp.===, ltemp.ref, js.Null()), - js.BinaryOp(js.BinaryOp.===, rtemp.ref, js.Null()), - genApplyMethod(ltemp.ref, defn.Any_equals, List(rtemp.ref)))( - jstpe.BooleanType)) - } - } - } - - /** Gen JS code for string concatenation. - */ - private def genStringConcat(tree: Apply, receiver: Tree, - args: List[Tree]): js.Tree = { - implicit val pos = tree.span - - js.BinaryOp(js.BinaryOp.String_+, genExpr(receiver), genExpr(args.head)) - } - - /** Gen JS code for a call to Any.## */ - private def genScalaHash(tree: Apply, receiver: Tree): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - - genModuleApplyMethod(defn.ScalaRuntimeModule.requiredMethod(nme.hash_), - List(genExpr(receiver))) - } - - /** Gen JS code for an array operation (get, set or length) */ - private def genArrayOp(tree: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ - - implicit val pos = tree.span - - val Apply(fun, args) = tree: @unchecked - val arrayObj = qualifierOf(fun) - - val genArray = genExpr(arrayObj) - val genArgs = args.map(genExpr) - - def elementType: Type = arrayObj.tpe.widenDealias match { - case defn.ArrayOf(el) => el - case JavaArrayType(el) => el - case tpe => - val msg = em"expected Array $tpe" - report.error(msg) - ErrorType(msg) - } - - def genSelect(): js.AssignLhs = - js.ArraySelect(genArray, genArgs(0))(toIRType(elementType)) - - if (isArrayGet(code)) { - // get an item of the array - assert(args.length == 1, - s"Array get requires 1 argument, found ${args.length} in $tree") - genSelect() - } else if (isArraySet(code)) { - // set an item of the array - assert(args.length == 2, - s"Array set requires 2 arguments, found ${args.length} in $tree") - js.Assign(genSelect(), genArgs(1)) - } else { - // length of the array - js.ArrayLength(genArray) - } - } - - /** Gen JS code for a call to AnyRef.synchronized */ - private def genSynchronized(tree: Apply, isStat: Boolean): js.Tree = { - /* JavaScript is single-threaded, so we can drop the - * synchronization altogether. - */ - val Apply(fun, List(arg)) = tree - val receiver = qualifierOf(fun) - - val genReceiver = genExpr(receiver) - val genArg = genStatOrExpr(arg, isStat) - - genReceiver match { - case js.This() => - // common case for which there is no side-effect nor NPE - genArg - case _ => - implicit val pos = tree.span - js.Block( - js.If(js.BinaryOp(js.BinaryOp.===, genReceiver, js.Null()), - js.Throw(js.New(NullPointerExceptionClass, js.MethodIdent(jsNames.NoArgConstructorName), Nil)), - js.Skip())(jstpe.NoType), - genArg) - } - } - - /** Gen JS code for a coercion */ - private def genCoercion(tree: Apply, receiver: Tree, code: Int): js.Tree = { - implicit val pos = tree.span - - val source = genExpr(receiver) - val resultType = toIRType(tree.tpe) - adaptPrimitive(source, resultType) - } - - /** Gen a call to the special `throw` method. 
*/ - private def genThrow(tree: Apply, args: List[Tree]): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - val exception = args.head - val genException = genExpr(exception) - genException match { - case js.New(cls, _, _) if cls != JavaScriptExceptionClassName => - // Common case where ex is neither null nor a js.JavaScriptException - js.Throw(genException) - case _ => - js.Throw(js.UnwrapFromThrowable(genException)) - } - } - - /** Gen a "normal" apply (to a true method). - * - * But even these are further refined into: - * * Methods of java.lang.String, which are redirected to the - * RuntimeString trait implementation. - * * Calls to methods of raw JS types (Scala.js -> JS interop) - * * Calls to methods in impl classes of Scala2 traits. - * * Regular method call - */ - private def genNormalApply(tree: Apply, isStat: Boolean): js.Tree = { - implicit val pos = tree.span - - val fun = tree.fun match { - case fun: Ident => desugarIdent(fun).get - case fun: Select => fun - } - val receiver = fun.qualifier - val args = tree.args - val sym = fun.symbol - - def isStringMethodFromObject: Boolean = sym.name match { - case nme.toString_ | nme.equals_ | nme.hashCode_ => true - case _ => false - } - - if (isMethodStaticInIR(sym)) { - genApplyStatic(sym, genActualArgs(sym, args)) - } else if (sym.owner.isJSType) { - if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) - genApplyJSMethodGeneric(sym, genExprOrGlobalScope(receiver), genActualJSArgs(sym, args), isStat)(tree.sourcePos) - else - genApplyJSClassMethod(genExpr(receiver), sym, genActualArgs(sym, args)) - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { - genJSNativeMemberCall(tree) - } else { - genApplyMethodMaybeStatically(genExpr(receiver), sym, genActualArgs(sym, args)) - } - } - - /** Gen JS code for a call to a JS method (of a subclass of `js.Any`). - * - * Basically it boils down to calling the method as a `JSBracketSelect`, - * without name mangling. 
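`genThrow` above throws a freshly constructed Scala exception directly, and otherwise unwraps the thrown value so that JavaScript code observes the underlying JS error. Roughly, at the user level (a sketch, not taken from the patch):

```scala
import scala.scalajs.js

def failScala(): Nothing =
  // A `new` of an ordinary exception: thrown as-is (first case of genThrow).
  throw new IllegalArgumentException("bad input")

def failJS(): Nothing =
  // Wrapped JS error: unwrapped from the Throwable, so a plain JS try/catch
  // around the calling code catches the raw js.Error value.
  throw js.JavaScriptException(new js.Error("boom"))
```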
But other aspects come into play: - * - * - Operator methods are translated to JS operators (not method calls) - * - `apply` is translated as a function call, i.e., `o()` instead of `o.apply()` - * - Scala varargs are turned into JS varargs (see `genPrimitiveJSArgs()`) - * - Getters and parameterless methods are translated as `JSBracketSelect` - * - Setters are translated to `Assign` to `JSBracketSelect` - */ - private def genApplyJSMethodGeneric(sym: Symbol, - receiver: MaybeGlobalScope, args: List[js.TreeOrJSSpread], isStat: Boolean, - jsSuperClassValue: Option[js.Tree] = None)( - implicit pos: SourcePosition): js.Tree = { - - def argsNoSpread: List[js.Tree] = { - assert(!args.exists(_.isInstanceOf[js.JSSpread]), s"Unexpected spread at $pos") - args.asInstanceOf[List[js.Tree]] - } - - val argc = args.size // meaningful only for methods that don't have varargs - - def requireNotSuper(): Unit = { - if (jsSuperClassValue.isDefined) - report.error("Illegal super call in Scala.js-defined JS class", pos) - } - - def requireNotSpread(arg: js.TreeOrJSSpread): js.Tree = - arg.asInstanceOf[js.Tree] - - def genSuperReference(propName: js.Tree): js.AssignLhs = { - jsSuperClassValue.fold[js.AssignLhs] { - genJSSelectOrGlobalRef(receiver, propName) - } { superClassValue => - js.JSSuperSelect(superClassValue, ruleOutGlobalScope(receiver), propName) - } - } - - def genSelectGet(propName: js.Tree): js.Tree = - genSuperReference(propName) - - def genSelectSet(propName: js.Tree, value: js.Tree): js.Tree = - js.Assign(genSuperReference(propName), value) - - def genCall(methodName: js.Tree, args: List[js.TreeOrJSSpread]): js.Tree = { - jsSuperClassValue.fold[js.Tree] { - genJSMethodApplyOrGlobalRefApply(receiver, methodName, args) - } { superClassValue => - js.JSSuperMethodCall(superClassValue, ruleOutGlobalScope(receiver), methodName, args) - } - } - - val boxedResult = sym.jsCallingConvention match { - case JSCallingConvention.UnaryOp(code) => - requireNotSuper() - assert(argc == 0, s"bad argument count ($argc) for unary op at $pos") - js.JSUnaryOp(code, ruleOutGlobalScope(receiver)) - - case JSCallingConvention.BinaryOp(code) => - requireNotSuper() - assert(argc == 1, s"bad argument count ($argc) for binary op at $pos") - js.JSBinaryOp(code, ruleOutGlobalScope(receiver), requireNotSpread(args.head)) - - case JSCallingConvention.Call => - requireNotSuper() - if (sym.owner.isSubClass(jsdefn.JSThisFunctionClass)) - js.JSMethodApply(ruleOutGlobalScope(receiver), js.StringLiteral("call"), args) - else - js.JSFunctionApply(ruleOutGlobalScope(receiver), args) - - case JSCallingConvention.Property(jsName) => - argsNoSpread match { - case Nil => - genSelectGet(genExpr(jsName)) - case value :: Nil => - genSelectSet(genExpr(jsName), value) - case _ => - throw new AssertionError(s"property methods should have 0 or 1 non-varargs arguments at $pos") - } - - case JSCallingConvention.BracketAccess => - argsNoSpread match { - case keyArg :: Nil => - genSelectGet(keyArg) - case keyArg :: valueArg :: Nil => - genSelectSet(keyArg, valueArg) - case _ => - throw new AssertionError(s"@JSBracketAccess methods should have 1 or 2 non-varargs arguments at $pos") - } - - case JSCallingConvention.BracketCall => - val (methodName, actualArgs) = extractFirstArg(args) - genCall(methodName, actualArgs) - - case JSCallingConvention.Method(jsName) => - genCall(genExpr(jsName), args) - } - - if (isStat) { - boxedResult - } else { - val tpe = atPhase(elimErasedValueTypePhase) { - sym.info.finalResultType - } - if 
(tpe.isRef(defn.BoxedUnitClass) && sym.isGetter) { - /* Work around to reclaim Scala 2 erasure behavior, assumed by the test - * NonNativeJSTypeTest.defaultValuesForFields. - * Scala 2 erases getters of `Unit`-typed fields as returning `Unit` - * (not `BoxedUnit`). Therefore, when called in expression position, - * the call site introduces an explicit `BoxedUnit.UNIT`. Even if the - * field has not been initialized at all (with `= _`), this results in - * an actual `()` value. - * In Scala 3, the same pattern returns `null`, as a `BoxedUnit`, so we - * introduce here an explicit `()` value. - * TODO We should remove this branch if the upstream test is updated - * not to assume such a strict interpretation of erasure. - */ - js.Block(boxedResult, js.Undefined()) - } else { - unbox(boxedResult, tpe) - } - } - } - - /** Extract the first argument in a list of actual arguments. - * - * This is nothing else than decomposing into head and tail, except that - * we assert that the first element is not a JSSpread. - */ - private def extractFirstArg(args: List[js.TreeOrJSSpread]): (js.Tree, List[js.TreeOrJSSpread]) = { - assert(args.nonEmpty, - "Trying to extract the first argument of an empty argument list") - val firstArg = args.head - assert(!firstArg.isInstanceOf[js.JSSpread], - "Trying to extract the first argument of an argument list starting " + - "with a Spread argument: " + firstArg) - (firstArg.asInstanceOf[js.Tree], args.tail) - } - - /** Gen JS code for a call to a native JS def or val. */ - private def genJSNativeMemberSelect(tree: Tree): js.Tree = - genJSNativeMemberSelectOrCall(tree, Nil) - - /** Gen JS code for a call to a native JS def or val. */ - private def genJSNativeMemberCall(tree: Apply): js.Tree = - genJSNativeMemberSelectOrCall(tree, tree.args) - - /** Gen JS code for a call to a native JS def or val. 
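The calling conventions dispatched above correspond to familiar facade shapes. A hypothetical native facade exercising several of them (all names invented, for illustration only):

```scala
import scala.scalajs.js
import scala.scalajs.js.annotation._

@js.native
trait Widget extends js.Object {
  var label: String = js.native            // Property: compiled to a select / an assign
  def refresh(): Unit = js.native          // Method: `w.refresh()`
  def apply(x: Int): Int = js.native       // Call: `w(x)` rather than `w.apply(x)`

  @JSBracketAccess
  def data(key: String): js.Any = js.native               // BracketAccess getter: `w[key]`
  @JSBracketAccess
  def data(key: String, value: js.Any): Unit = js.native  // BracketAccess setter: `w[key] = value`
}

def use(w: Widget): Unit = {
  w.label = "hello"      // assignment to a JS select
  w.refresh()            // JS method apply
  val y = w(42)          // JS function apply
  w.data("k", y)         // bracket-access write
}
```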
*/ - private def genJSNativeMemberSelectOrCall(tree: Tree, args: List[Tree]): js.Tree = { - val sym = tree.symbol - - implicit val pos = tree.span - - val jsNativeMemberValue = - js.SelectJSNativeMember(encodeClassName(sym.owner), encodeJSNativeMemberSym(sym)) - - val boxedResult = - if (sym.isJSGetter) jsNativeMemberValue - else js.JSFunctionApply(jsNativeMemberValue, genActualJSArgs(sym, args)) - - unbox(boxedResult, atPhase(elimErasedValueTypePhase) { - sym.info.resultType - }) - } - - private def genJSSuperCall(tree: Apply, isStat: Boolean): js.Tree = { - acquireContextualJSClassValue { explicitJSSuperClassValue => - implicit val pos = tree.span - val Apply(fun @ Select(sup @ Super(qual, _), _), args) = tree: @unchecked - val sym = fun.symbol - - val genReceiver = genExpr(qual) - def genScalaArgs = genActualArgs(sym, args) - def genJSArgs = genActualJSArgs(sym, args) - - if (sym.owner == defn.ObjectClass) { - // Normal call anyway - assert(!sym.isClassConstructor, - s"Trying to call the super constructor of Object in a non-native JS class at $pos") - genApplyMethod(genReceiver, sym, genScalaArgs) - } else if (sym.isClassConstructor) { - throw new AssertionError( - s"calling a JS super constructor should have happened in genPrimaryJSClassCtor at $pos") - } else if (sym.owner.isNonNativeJSClass && !sym.isJSExposed) { - // Reroute to the static method - genApplyJSClassMethod(genReceiver, sym, genScalaArgs) - } else { - val jsSuperClassValue = explicitJSSuperClassValue.orElse { - Some(genLoadJSConstructor(currentClassSym.get.asClass.superClass)) - } - genApplyJSMethodGeneric(sym, MaybeGlobalScope.NotGlobalScope(genReceiver), - genJSArgs, isStat, jsSuperClassValue)(tree.sourcePos) - } - } - } - - /** Gen JS code for a call to a polymorphic method. - * - * The only methods that reach the back-end as polymorphic are - * `isInstanceOf` and `asInstanceOf`. - * - * (Well, in fact `DottyRunTime.newRefArray` too, but it is handled as a - * primitive instead.) - */ - private def genTypeApply(tree: TypeApply): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - - val TypeApply(fun, targs) = tree - - val sym = fun.symbol - val receiver = qualifierOf(fun) - - val to = targs.head.tpe - - assert(!isPrimitiveValueType(receiver.tpe), - s"Found receiver of type test with primitive type ${receiver.tpe} at $pos") - assert(!isPrimitiveValueType(to), - s"Found target type of type test with primitive type ${receiver.tpe} at $pos") - - val genReceiver = genExpr(receiver) - - if (sym == defn.Any_asInstanceOf) { - genAsInstanceOf(genReceiver, to) - } else if (sym == defn.Any_isInstanceOf) { - genIsInstanceOf(genReceiver, to) - } else { - throw new FatalError( - s"Unexpected type application $fun with symbol ${sym.fullName}") - } - } - - /** Gen JS code for a Java Seq literal. */ - private def genJavaSeqLiteral(tree: JavaSeqLiteral): js.Tree = { - implicit val pos = tree.span - - val genElems = tree.elems.map(genExpr) - val arrayTypeRef = toTypeRef(tree.tpe).asInstanceOf[jstpe.ArrayTypeRef] - js.ArrayValue(arrayTypeRef, genElems) - } - - /** Gen JS code for a switch-`Match`, which is translated into an IR `js.Match`. 
*/ - def genMatch(tree: Tree, isStat: Boolean): js.Tree = { - implicit val pos = tree.span - val Match(selector, cases) = tree: @unchecked - - def abortMatch(msg: String): Nothing = - throw new FatalError(s"$msg in switch-like pattern match at ${tree.span}: $tree") - - val genSelector = genExpr(selector) - - // Sanity check: we can handle Ints and Strings (including `null`s), but nothing else - genSelector.tpe match { - case jstpe.IntType | jstpe.ClassType(jsNames.BoxedStringClass) | jstpe.NullType | jstpe.NothingType => - // ok - case _ => - abortMatch(s"Invalid selector type ${genSelector.tpe}") - } - - val resultType = toIRType(tree.tpe) match { - case jstpe.NothingType => jstpe.NothingType // must take priority over NoType below - case _ if isStat => jstpe.NoType - case resType => resType - } - - var clauses: List[(List[js.MatchableLiteral], js.Tree)] = Nil - var optDefaultClause: Option[js.Tree] = None - - for (caze @ CaseDef(pat, guard, body) <- cases) { - if (guard != EmptyTree) - abortMatch("Found a case guard") - - val genBody = genStatOrExpr(body, isStat) - - def invalidCase(): Nothing = - abortMatch("Invalid case") - - def genMatchableLiteral(tree: Literal): js.MatchableLiteral = { - genExpr(tree) match { - case matchableLiteral: js.MatchableLiteral => matchableLiteral - case otherExpr => invalidCase() - } - } - - pat match { - case lit: Literal => - clauses = (List(genMatchableLiteral(lit)), genBody) :: clauses - case Ident(nme.WILDCARD) => - optDefaultClause = Some(genBody) - case Alternative(alts) => - val genAlts = alts.map { - case lit: Literal => genMatchableLiteral(lit) - case _ => invalidCase() - } - clauses = (genAlts, genBody) :: clauses - case _ => - invalidCase() - } - } - - clauses = clauses.reverse - val defaultClause = optDefaultClause.getOrElse { - throw new AssertionError("No elseClause in pattern match") - } - - /* Builds a `js.Match`, but simplifies it to a `js.If` if there is only - * one case with one alternative, and to a `js.Block` if there is no case - * at all. This happens in practice in the standard library. Having no - * case is a typical product of `match`es that are full of - * `case n if ... =>`, which are used instead of `if` chains for - * convenience and/or readability. - */ - def isInt(tree: js.Tree): Boolean = tree.tpe == jstpe.IntType - - clauses match { - case Nil => - // Completely remove the Match. Preserve the side-effects of `genSelector`. - js.Block(exprToStat(genSelector), defaultClause) - - case (uniqueAlt :: Nil, caseRhs) :: Nil => - /* Simplify the `match` as an `if`, so that the optimizer has less - * work to do, and we emit less code at the end of the day. - * Use `Int_==` instead of `===` if possible, since it is a common case. - */ - val op = - if (isInt(genSelector) && isInt(uniqueAlt)) js.BinaryOp.Int_== - else js.BinaryOp.=== - js.If(js.BinaryOp(op, genSelector, uniqueAlt), caseRhs, defaultClause)(resultType) - - case _ => - // We have more than one case: use a js.Match - js.Match(genSelector, clauses, defaultClause)(resultType) - } - } - - /** Gen JS code for a closure. - * - * Input: a `Closure` tree of the form - * {{{ - * Closure(env, call, functionalInterface) - * }}} - * representing the pseudo-syntax - * {{{ - * { (p1, ..., pm) => call(env1, ..., envn, p1, ..., pm) }: functionInterface - * }}} - * where `envi` are identifiers in the local scope. The qualifier of `call` - * is also implicitly captured. 
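`genMatch` above only accepts switch-like matches: an `Int` or `String` selector, literal alternatives, and an optional wildcard default. A sketch of the shapes it handles, including the single-clause form that is simplified into a `js.If`:

```scala
def describe(status: Int): String = status match {
  case 200       => "ok"
  case 301 | 302 => "redirect"   // Alternative: several matchable literals in one clause
  case _         => "other"      // the wildcard becomes the default clause
}

// A single clause with a single literal is emitted as a js.If using Int_==
// on the selector instead of a full js.Match.
def isZero(x: Int): String = x match {
  case 0 => "zero"
  case _ => "non-zero"
}
```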
- * - * Output: a `js.Closure` tree of the form - * {{{ - * js.Closure(formalCaptures, formalParams, body, actualCaptures) - * }}} - * representing the pseudo-syntax - * {{{ - * lambda( - * formalParam1, ..., formalParamM) = body - * }}} - * where the `actualCaptures` and `body` are, in general, arbitrary - * expressions. But in this case, `actualCaptures` will be identifiers from - * `env`, and the `body` will be of the form - * {{{ - * call(formalCapture1.ref, ..., formalCaptureN.ref, - * formalParam1.ref, ...formalParamM.ref) - * }}} - * - * When the `js.Closure` node is evaluated, i.e., when the closure value is - * created, the expressions of the `actualCaptures` are evaluated, and the - * results of those evaluations is "stored" in the environment of the - * closure as the corresponding `formalCapture`. - * - * When we later *call* the closure, the `formalCaptures` already have their - * values from the environment, and they are available in the `body`. The - * `formalParams` of the created closure receive their values from the - * actual arguments at the call-site of the closure, and they are also - * available in the `body`. - */ - private def genClosure(tree: Closure): js.Tree = { - implicit val pos = tree.span - val Closure(env, call, functionalInterface) = tree - - val envSize = env.size - - val (fun, args) = call match { - // case Apply(fun, args) => (fun, args) // Conjectured not to happen - case t @ Select(_, _) => (t, Nil) - case t @ Ident(_) => (t, Nil) - } - val sym = fun.symbol - val isStaticCall = isMethodStaticInIR(sym) - - val qualifier = qualifierOf(fun) - val allCaptureValues = - if (isStaticCall) env - else qualifier :: env - - val formalAndActualCaptures = allCaptureValues.map { value => - implicit val pos = value.span - val (formalIdent, originalName) = value match { - case Ident(name) => (freshLocalIdent(name.toTermName), OriginalName(name.toString)) - case This(_) => (freshLocalIdent("this"), thisOriginalName) - case _ => (freshLocalIdent(), NoOriginalName) - } - val formalCapture = js.ParamDef(formalIdent, originalName, - toIRType(value.tpe), mutable = false) - val actualCapture = genExpr(value) - (formalCapture, actualCapture) - } - val (formalCaptures, actualCaptures) = formalAndActualCaptures.unzip - - val funInterfaceSym = functionalInterface.tpe.typeSymbol - val hasRepeatedParam = { - funInterfaceSym.exists && { - val Seq(samMethodDenot) = funInterfaceSym.info.possibleSamMethods - val samMethod = samMethodDenot.symbol - atPhase(elimRepeatedPhase)(samMethod.info.paramInfoss.flatten.exists(_.isRepeatedParam)) - } - } - - val formalParamNames = sym.info.paramNamess.flatten.drop(envSize) - val formalParamTypes = sym.info.paramInfoss.flatten.drop(envSize) - val formalParamRepeateds = - if (hasRepeatedParam) (0 until (formalParamTypes.size - 1)).map(_ => false) :+ true - else (0 until formalParamTypes.size).map(_ => false) - - val formalAndActualParams = formalParamNames.lazyZip(formalParamTypes).lazyZip(formalParamRepeateds).map { - (name, tpe, repeated) => - val formalParam = js.ParamDef(freshLocalIdent(name), - OriginalName(name.toString), jstpe.AnyType, mutable = false) - val actualParam = - if (repeated) genJSArrayToVarArgs(formalParam.ref)(tree.sourcePos) - else unbox(formalParam.ref, tpe) - (formalParam, actualParam) - } - val (formalAndRestParams, actualParams) = formalAndActualParams.unzip - - val (formalParams, restParam) = - if (hasRepeatedParam) (formalAndRestParams.init, Some(formalAndRestParams.last)) - else (formalAndRestParams, None) - - val 
genBody = { - val call = if (isStaticCall) { - genApplyStatic(sym, formalCaptures.map(_.ref) ::: actualParams) - } else { - val thisCaptureRef :: argCaptureRefs = formalCaptures.map(_.ref): @unchecked - if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) - genApplyMethodMaybeStatically(thisCaptureRef, sym, argCaptureRefs ::: actualParams) - else - genApplyJSClassMethod(thisCaptureRef, sym, argCaptureRefs ::: actualParams) - } - box(call, sym.info.finalResultType) - } - - val isThisFunction = funInterfaceSym.isSubClass(jsdefn.JSThisFunctionClass) && { - val ok = formalParams.nonEmpty - if (!ok) - report.error("The SAM or apply method for a js.ThisFunction must have a leading non-varargs parameter", tree) - ok - } - - if (isThisFunction) { - val thisParam :: otherParams = formalParams: @unchecked - js.Closure( - arrow = false, - formalCaptures, - otherParams, - restParam, - js.Block( - js.VarDef(thisParam.name, thisParam.originalName, - thisParam.ptpe, mutable = false, - js.This()(thisParam.ptpe)(thisParam.pos))(thisParam.pos), - genBody), - actualCaptures) - } else { - val closure = js.Closure(arrow = true, formalCaptures, formalParams, restParam, genBody, actualCaptures) - - if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) { - assert(!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym), - s"Invalid functional interface $funInterfaceSym reached the back-end") - val formalCount = formalParams.size - val cls = ClassName("scala.scalajs.runtime.AnonFunction" + formalCount) - val ctorName = MethodName.constructor( - jstpe.ClassRef(ClassName("scala.scalajs.js.Function" + formalCount)) :: Nil) - js.New(cls, js.MethodIdent(ctorName), List(closure)) - } else { - assert(funInterfaceSym.isJSType, - s"Invalid functional interface $funInterfaceSym reached the back-end") - closure - } - } - } - - /** Generates a static method instantiating and calling this - * DynamicImportThunk's `apply`: - * - * {{{ - * static def dynamicImport$;;Ljava.lang.Object(): any = { - * new .;:V().apply;Ljava.lang.Object() - * } - * }}} - */ - private def genDynamicImportForwarder(clsSym: Symbol)(using Position): js.MethodDef = { - withNewLocalNameScope { - val ctor = clsSym.primaryConstructor - val paramSyms = ctor.paramSymss.flatten - val paramDefs = paramSyms.map(genParamDef(_)) - - val body = { - val inst = js.New(encodeClassName(clsSym), encodeMethodSym(ctor), paramDefs.map(_.ref)) - genApplyMethod(inst, jsdefn.DynamicImportThunkClass_apply, Nil) - } - - js.MethodDef( - js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), - encodeDynamicImportForwarderIdent(paramSyms), - NoOriginalName, - paramDefs, - jstpe.AnyType, - Some(body))(OptimizerHints.empty, None) - } - } - - /** Boxes a value of the given type before `elimErasedValueType`. - * - * This should be used when sending values to a JavaScript context, which - * is erased/boxed at the IR level, although it is not erased at the - * dotty/JVM level. - * - * @param expr Tree to be boxed if needed. - * @param tpeEnteringElimErasedValueType The type of `expr` as it was - * entering the `elimErasedValueType` phase. 
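At the source level, the `Closure` trees handled above come from lambdas and SAM conversions; captured locals become the formal captures of the emitted `js.Closure`. A small sketch (the function types are from the Scala.js library, other names are invented):

```scala
import scala.scalajs.js

// `offset` is captured from the environment; `x` is the closure's formal parameter.
def makeAdder(offset: Int): js.Function1[Int, Int] =
  (x: Int) => x + offset

// For a js.ThisFunction, the leading parameter receives the JavaScript `this`,
// which is why genClosure insists on at least one non-varargs parameter.
val firstOrZero: js.ThisFunction0[js.Array[Int], Int] =
  (self: js.Array[Int]) => if (self.length > 0) self(0) else 0
```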
- */ - def box(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = { - tpeEnteringElimErasedValueType match { - case tpe if isPrimitiveValueType(tpe) => - makePrimitiveBox(expr, tpe) - - case tpe: ErasedValueType => - val boxedClass = tpe.tycon.typeSymbol - val ctor = boxedClass.primaryConstructor - js.New(encodeClassName(boxedClass), encodeMethodSym(ctor), List(expr)) - - case _ => - expr - } - } - - /** Unboxes a value typed as Any to the given type before `elimErasedValueType`. - * - * This should be used when receiving values from a JavaScript context, - * which is erased/boxed at the IR level, although it is not erased at the - * dotty/JVM level. - * - * @param expr Tree to be extracted. - * @param tpeEnteringElimErasedValueType The type of `expr` as it was - * entering the `elimErasedValueType` phase. - */ - def unbox(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = { - tpeEnteringElimErasedValueType match { - case tpe if isPrimitiveValueType(tpe) => - makePrimitiveUnbox(expr, tpe) - - case tpe: ErasedValueType => - val boxedClass = tpe.tycon.typeSymbol.asClass - val unboxMethod = ValueClasses.valueClassUnbox(boxedClass) - val content = genApplyMethod( - js.AsInstanceOf(expr, encodeClassType(boxedClass)), unboxMethod, Nil) - if (unboxMethod.info.resultType <:< tpe.erasedUnderlying) - content - else - unbox(content, tpe.erasedUnderlying) - - case tpe => - genAsInstanceOf(expr, tpe) - } - } - - /** Gen JS code for an asInstanceOf cast (for reference types only) */ - private def genAsInstanceOf(value: js.Tree, to: Type)(implicit pos: Position): js.Tree = - genAsInstanceOf(value, toIRType(to)) - - /** Gen JS code for an asInstanceOf cast (for reference types only) */ - private def genAsInstanceOf(value: js.Tree, to: jstpe.Type)(implicit pos: Position): js.Tree = { - to match { - case jstpe.AnyType => - value - case jstpe.NullType => - js.If( - js.BinaryOp(js.BinaryOp.===, value, js.Null()), - js.Null(), - genThrowClassCastException())( - jstpe.NullType) - case jstpe.NothingType => - js.Block(value, genThrowClassCastException()) - case _ => - js.AsInstanceOf(value, to) - } - } - - private def genThrowClassCastException()(implicit pos: Position): js.Tree = { - js.Throw(js.New(jsNames.ClassCastExceptionClass, - js.MethodIdent(jsNames.NoArgConstructorName), Nil)) - } - - /** Gen JS code for an isInstanceOf test (for reference types only) */ - def genIsInstanceOf(value: js.Tree, to: Type)( - implicit pos: SourcePosition): js.Tree = { - val sym = to.typeSymbol - - if (sym == defn.ObjectClass) { - js.BinaryOp(js.BinaryOp.!==, value, js.Null()) - } else if (sym.isJSType) { - if (sym.is(Trait)) { - report.error( - em"isInstanceOf[${sym.fullName}] not supported because it is a JS trait", - pos) - js.BooleanLiteral(true) - } else { - js.AsInstanceOf(js.JSBinaryOp( - js.JSBinaryOp.instanceof, value, genLoadJSConstructor(sym)), - jstpe.BooleanType) - } - } else { - // The Scala type system prevents x.isInstanceOf[Null] and ...[Nothing] - assert(sym != defn.NullClass && sym != defn.NothingClass, - s"Found a .isInstanceOf[$sym] at $pos") - js.IsInstanceOf(value, toIRType(to)) - } - } - - /** Gen a statically linked call to an instance method. 
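Two behaviors implemented above are worth seeing from the user side: value classes are boxed again when their values cross into JavaScript, and `isInstanceOf` against a JS class becomes a JavaScript `instanceof` check (while JS traits are rejected). A hypothetical sketch, with invented facade names:

```scala
import scala.scalajs.js
import scala.scalajs.js.annotation._

// A Scala value class: erased to a raw Double inside Scala code, but boxed
// back into a Meters instance when passed to the facade below.
class Meters(val value: Double) extends AnyVal

@js.native
@JSGlobal("metrics")
object Metrics extends js.Object {
  def record(distance: Meters): Unit = js.native
}

@js.native
@JSGlobal("Date")
class NativeDate extends js.Object

def demo(x: Any): Boolean = {
  Metrics.record(new Meters(42.0))
  x.isInstanceOf[NativeDate]  // emitted as `x instanceof Date`
  // isInstanceOf against a JS *trait* is rejected with a compile error.
}
```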
*/ - def genApplyMethodMaybeStatically(receiver: js.Tree, method: Symbol, - arguments: List[js.Tree])(implicit pos: Position): js.Tree = { - if (method.isPrivate || method.isClassConstructor) - genApplyMethodStatically(receiver, method, arguments) - else - genApplyMethod(receiver, method, arguments) - } - - /** Gen a dynamically linked call to a Scala method. */ - def genApplyMethod(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { - assert(!method.isPrivate, - s"Cannot generate a dynamic call to private method $method at $pos") - js.Apply(js.ApplyFlags.empty, receiver, encodeMethodSym(method), arguments)( - toIRType(patchedResultType(method))) - } - - /** Gen a statically linked call to an instance method. */ - def genApplyMethodStatically(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { - val flags = js.ApplyFlags.empty - .withPrivate(method.isPrivate && !method.isClassConstructor) - .withConstructor(method.isClassConstructor) - js.ApplyStatically(flags, receiver, encodeClassName(method.owner), - encodeMethodSym(method), arguments)( - toIRType(patchedResultType(method))) - } - - /** Gen a call to a static method. */ - private def genApplyStatic(method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { - js.ApplyStatic(js.ApplyFlags.empty.withPrivate(method.isPrivate), - encodeClassName(method.owner), encodeMethodSym(method), arguments)( - toIRType(patchedResultType(method))) - } - - /** Gen a call to a non-exposed method of a non-native JS class. */ - def genApplyJSClassMethod(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { - genApplyStatic(method, receiver :: arguments) - } - - /** Gen a call to a method of a Scala top-level module. 
*/ - private def genModuleApplyMethod(methodSym: Symbol, arguments: List[js.Tree])( - implicit pos: SourcePosition): js.Tree = { - genApplyMethod(genLoadModule(methodSym.owner), methodSym, arguments) - } - - /** Gen a boxing operation (tpe is the primitive type) */ - private def makePrimitiveBox(expr: js.Tree, tpe: Type)( - implicit pos: Position): js.Tree = { - toIRType(tpe) match { - case jstpe.NoType => // for JS interop cases - js.Block(expr, js.Undefined()) - case jstpe.BooleanType | jstpe.CharType | jstpe.ByteType | - jstpe.ShortType | jstpe.IntType | jstpe.LongType | jstpe.FloatType | - jstpe.DoubleType => - expr // box is identity for all those primitive types - case typeRef => - throw new FatalError( - s"makePrimitiveBox requires a primitive type, found $typeRef for $tpe at $pos") - } - } - - /** Gen an unboxing operation (tpe is the primitive type) */ - private def makePrimitiveUnbox(expr: js.Tree, tpe: Type)( - implicit pos: Position): js.Tree = { - toIRType(tpe) match { - case jstpe.NoType => expr // for JS interop cases - case irTpe => js.AsInstanceOf(expr, irTpe) - } - } - - /** Gen JS code for a Scala.js-specific primitive method */ - private def genJSPrimitive(tree: Apply, args: List[Tree], code: Int, - isStat: Boolean): js.Tree = { - - import JSPrimitives._ - - implicit val pos = tree.span - - def genArgs1: js.Tree = { - assert(args.size == 1, - s"Expected exactly 1 argument for JS primitive $code but got " + - s"${args.size} at $pos") - genExpr(args.head) - } - - def genArgs2: (js.Tree, js.Tree) = { - assert(args.size == 2, - s"Expected exactly 2 arguments for JS primitive $code but got " + - s"${args.size} at $pos") - (genExpr(args.head), genExpr(args.tail.head)) - } - - def genArgsVarLength: List[js.TreeOrJSSpread] = - genActualJSArgs(tree.symbol, args) - - def resolveReifiedJSClassSym(arg: Tree): Symbol = { - def fail(): Symbol = { - report.error( - tree.symbol.name.toString + " must be called with a constant " + - "classOf[T] representing a class extending js.Any " + - "(not a trait nor an object)", - tree.sourcePos) - NoSymbol - } - arg match { - case Literal(value) if value.tag == Constants.ClazzTag => - val classSym = value.typeValue.typeSymbol - if (classSym.isJSType && !classSym.is(Trait) && !classSym.is(ModuleClass)) - classSym - else - fail() - case _ => - fail() - } - } - - (code: @switch) match { - case DYNNEW => - // js.Dynamic.newInstance(clazz)(actualArgs: _*) - val (jsClass, actualArgs) = extractFirstArg(genArgsVarLength) - js.JSNew(jsClass, actualArgs) - - case ARR_CREATE => - // js.Array(elements: _*) - js.JSArrayConstr(genArgsVarLength) - - case CONSTRUCTOROF => - // runtime.constructorOf(clazz) - val classSym = resolveReifiedJSClassSym(args.head) - if (classSym == NoSymbol) - js.Undefined() // compile error emitted by resolveReifiedJSClassSym - else - genLoadJSConstructor(classSym) - - case CREATE_INNER_JS_CLASS | CREATE_LOCAL_JS_CLASS => - // runtime.createInnerJSClass(clazz, superClass) - // runtime.createLocalJSClass(clazz, superClass, fakeNewInstances) - val classSym = resolveReifiedJSClassSym(args(0)) - val superClassValue = genExpr(args(1)) - if (classSym == NoSymbol) { - js.Undefined() // compile error emitted by resolveReifiedJSClassSym - } else { - val captureValues = { - if (code == CREATE_INNER_JS_CLASS) { - /* Private inner classes that do not actually access their outer - * pointer do not receive an outer argument. 
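The first few primitives above correspond directly to public Scala.js APIs. A short sketch of the source forms that reach `DYNNEW`, `ARR_CREATE` and `CONSTRUCTOROF`:

```scala
import scala.scalajs.js

object JSPrimitiveExamples {
  // DYNNEW: instantiate a constructor obtained dynamically.
  val today = js.Dynamic.newInstance(js.Dynamic.global.Date)()

  // ARR_CREATE: becomes a JS array literal.
  val xs = js.Array(1, 2, 3)

  // CONSTRUCTOROF: reify the constructor function of a statically known JS class.
  val dateCtor: js.Dynamic = js.constructorOf[js.Date]
}
```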
We therefore count - * the number of constructors that have non-empty param list to - * know how many times we need to pass `this`. - */ - val requiredThisParams = - classSym.info.decls.lookupAll(nme.CONSTRUCTOR).count(_.info.paramInfoss.head.nonEmpty) - val outer = genThis() - List.fill(requiredThisParams)(outer) - } else { - val fakeNewInstances = args(2).asInstanceOf[JavaSeqLiteral].elems - fakeNewInstances.flatMap(genCaptureValuesFromFakeNewInstance(_)) - } - } - js.CreateJSClass(encodeClassName(classSym), superClassValue :: captureValues) - } - - case WITH_CONTEXTUAL_JS_CLASS_VALUE => - // withContextualJSClassValue(jsclass, inner) - val jsClassValue = genExpr(args(0)) - withScopedVars( - contextualJSClassValue := Some(jsClassValue) - ) { - genStatOrExpr(args(1), isStat) - } - - case LINKING_INFO => - // runtime.linkingInfo - js.JSLinkingInfo() - - case DEBUGGER => - // js.special.debugger() - js.Debugger() - - case UNITVAL => - // BoxedUnit.UNIT, which is the boxed version of () - js.Undefined() - - case JS_NEW_TARGET => - // js.new.target - val valid = currentMethodSym.get.isClassConstructor && currentClassSym.isNonNativeJSClass - if (!valid) { - report.error( - "Illegal use of js.`new`.target.\n" + - "It can only be used in the constructor of a JS class, " + - "as a statement or in the rhs of a val or var.\n" + - "It cannot be used inside a lambda or by-name parameter, nor in any other location.", - tree.sourcePos) - } - js.JSNewTarget() - - case JS_IMPORT => - // js.import(arg) - val arg = genArgs1 - js.JSImportCall(arg) - - case JS_IMPORT_META => - // js.import.meta - js.JSImportMeta() - - case DYNAMIC_IMPORT => - // runtime.dynamicImport - assert(args.size == 1, - s"Expected exactly 1 argument for JS primitive $code but got " + - s"${args.size} at $pos") - - args.head match { - case Block(stats, expr @ Typed(Apply(fun @ Select(New(tpt), _), args), _)) => - /* stats is always empty if no other compiler plugin is present. - * However, code instrumentation (notably scoverage) might add - * statements here. If this is the case, the thunk anonymous class - * has already been created when the other plugin runs (i.e. the - * plugin ran after jsinterop). - * - * Therefore, it is OK to leave the statements on our side of the - * dynamic loading boundary. 
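The `DYNAMIC_IMPORT` primitive handled above originates from `js.dynamicImport`, whose body the compiler moves into a generated `DynamicImportThunk` subclass so the linker can split it out. Roughly, at the user level:

```scala
import scala.scalajs.js

// Returns a js.Promise that resolves once the dynamically loaded module has
// been fetched and the body has been evaluated.
def loadFeature(): js.Promise[Int] =
  js.dynamicImport {
    // With ES modules enabled, this body ends up in its own JS module.
    41 + 1
  }
```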
- */ - - val clsSym = tpt.symbol - val ctor = fun.symbol - - assert(clsSym.isSubClass(jsdefn.DynamicImportThunkClass), - s"expected subclass of DynamicImportThunk, got: $clsSym at: ${expr.sourcePos}") - assert(ctor.isPrimaryConstructor, - s"expected primary constructor, got: $ctor at: ${expr.sourcePos}") - - js.Block( - stats.map(genStat(_)), - js.ApplyDynamicImport( - js.ApplyFlags.empty, - encodeClassName(clsSym), - encodeDynamicImportForwarderIdent(ctor.paramSymss.flatten), - genActualArgs(ctor, args)) - ) - - case tree => - throw new FatalError( - s"Unexpected argument tree in dynamicImport: $tree/${tree.getClass} at: $pos") - } - - case JS_NATIVE => - // js.native - report.error( - "js.native may only be used as stub implementation in facade types", - tree.sourcePos) - js.Undefined() - - case TYPEOF => - // js.typeOf(arg) - val arg = genArgs1 - val typeofExpr = arg match { - case arg: js.JSGlobalRef => js.JSTypeOfGlobalRef(arg) - case _ => js.JSUnaryOp(js.JSUnaryOp.typeof, arg) - } - js.AsInstanceOf(typeofExpr, jstpe.ClassType(jsNames.BoxedStringClass)) - - case STRICT_EQ => - // js.special.strictEquals(arg1, arg2) - val (arg1, arg2) = genArgs2 - js.JSBinaryOp(js.JSBinaryOp.===, arg1, arg2) - - case IN => - // js.special.in(arg1, arg2) - val (arg1, arg2) = genArgs2 - js.AsInstanceOf(js.JSBinaryOp(js.JSBinaryOp.in, arg1, arg2), - jstpe.BooleanType) - - case INSTANCEOF => - // js.special.instanceof(arg1, arg2) - val (arg1, arg2) = genArgs2 - js.AsInstanceOf(js.JSBinaryOp(js.JSBinaryOp.instanceof, arg1, arg2), - jstpe.BooleanType) - - case DELETE => - // js.special.delete(arg1, arg2) - val (arg1, arg2) = genArgs2 - js.JSDelete(arg1, arg2) - - case FORIN => - /* js.special.forin(arg1, arg2) - * - * We must generate: - * - * val obj = arg1 - * val f = arg2 - * for (val key in obj) { - * f(key) - * } - * - * with temporary vals, because `arg2` must be evaluated only - * once, and after `arg1`. - */ - val (arg1, arg2) = genArgs2 - val objVarDef = js.VarDef(freshLocalIdent("obj"), NoOriginalName, - jstpe.AnyType, mutable = false, arg1) - val fVarDef = js.VarDef(freshLocalIdent("f"), NoOriginalName, - jstpe.AnyType, mutable = false, arg2) - val keyVarIdent = freshLocalIdent("key") - val keyVarRef = js.VarRef(keyVarIdent)(jstpe.AnyType) - js.Block( - objVarDef, - fVarDef, - js.ForIn(objVarDef.ref, keyVarIdent, NoOriginalName, { - js.JSFunctionApply(fVarDef.ref, List(keyVarRef)) - })) - - case JS_THROW => - // js.special.throw(arg) - js.Throw(genArgs1) - - case JS_TRY_CATCH => - /* js.special.tryCatch(arg1, arg2) - * - * We must generate: - * - * val body = arg1 - * val handler = arg2 - * try { - * body() - * } catch (e) { - * handler(e) - * } - * - * with temporary vals, because `arg2` must be evaluated before - * `body` executes. Moreover, exceptions thrown while evaluating - * the function values `arg1` and `arg2` must not be caught. 
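Several of the primitives above are thin wrappers over JavaScript operators exposed through `js.typeOf` and the `js.special` API. A small usage sketch (argument order shown as commonly documented; treat the exact signatures as an assumption):

```scala
import scala.scalajs.js

def demo(obj: js.Object, key: String): Unit = {
  val kind: String     = js.typeOf(obj)                     // TYPEOF: JS `typeof`
  val same: Boolean    = js.special.strictEquals(obj, obj)  // STRICT_EQ: JS `===`
  val present: Boolean = js.special.in(key, obj)            // IN: JS `key in obj`
  js.special.delete(obj, key)                               // DELETE: JS `delete obj[key]`
}
```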
- */ - val (arg1, arg2) = genArgs2 - val bodyVarDef = js.VarDef(freshLocalIdent("body"), NoOriginalName, - jstpe.AnyType, mutable = false, arg1) - val handlerVarDef = js.VarDef(freshLocalIdent("handler"), NoOriginalName, - jstpe.AnyType, mutable = false, arg2) - val exceptionVarIdent = freshLocalIdent("e") - val exceptionVarRef = js.VarRef(exceptionVarIdent)(jstpe.AnyType) - js.Block( - bodyVarDef, - handlerVarDef, - js.TryCatch( - js.JSFunctionApply(bodyVarDef.ref, Nil), - exceptionVarIdent, - NoOriginalName, - js.JSFunctionApply(handlerVarDef.ref, List(exceptionVarRef)) - )(jstpe.AnyType) - ) - - case WRAP_AS_THROWABLE => - // js.special.wrapAsThrowable(arg) - js.WrapAsThrowable(genArgs1) - - case UNWRAP_FROM_THROWABLE => - // js.special.unwrapFromThrowable(arg) - js.UnwrapFromThrowable(genArgs1) - - case UNION_FROM | UNION_FROM_TYPE_CONSTRUCTOR => - /* js.|.from and js.|.fromTypeConstructor - * We should not have to deal with those. They have a perfectly valid - * user-space implementation. However, the Dotty type checker inserts - * way too many of those, even when they are completely unnecessary. - * That still wouldn't be an issue ... if only it did not insert them - * around the default getters to their parameters! But even there it - * does it (although the types are, by construction, *equivalent*!), - * and that kills our `UndefinedParam` treatment. So we have to handle - * those two methods as primitives to completely eliminate them. - * - * Hopefully this will become unnecessary when/if we manage to - * reinterpret js.| as a true Dotty union type. - */ - genArgs2._1 - - case REFLECT_SELECTABLE_SELECTDYN => - // scala.reflect.Selectable.selectDynamic - genReflectiveCall(tree, isSelectDynamic = true) - case REFLECT_SELECTABLE_APPLYDYN => - // scala.reflect.Selectable.applyDynamic - genReflectiveCall(tree, isSelectDynamic = false) - } - } - - /** Gen the SJSIR for a reflective call. - * - * Reflective calls are calls to a structural type field or method that - * involve a reflective Selectable. They look like the following in source - * code: - * {{{ - * import scala.reflect.Selectable.reflectiveSelectable - * - * type Structural = { - * val foo: Int - * def bar(x: Int, y: String): String - * } - * - * val structural: Structural = new { - * val foo: Int = 5 - * def bar(x: Int, y: String): String = x.toString + y - * } - * - * structural.foo - * structural.bar(6, "hello") - * }}} - * - * After expansion by the Scala 3 rules for structural member selections and - * calls, they look like - * - * {{{ - * reflectiveSelectable(structural).selectDynamic("foo") - * reflectiveSelectable(structural).applyDynamic("bar", - * classOf[Int], classOf[String] - * )( - * 6, "hello" - * ) - * }}} - * - * When the original `structural` value is already of a subtype of - * `scala.reflect.Selectable`, there is no conversion involved. There could - * also be any other arbitrary conversion, such as the deprecated bridge for - * Scala 2's `import scala.language.reflectiveCalls`. 
In general, the shape - * is therefore the following, for some `selectable: reflect.Selectable`: - * - * {{{ - * selectable.selectDynamic("foo") - * selectable.applyDynamic("bar", - * classOf[Int], classOf[String] - * )( - * 6, "hello" - * ) - * }}} - * - * and eventually reaches the back-end as - * - * {{{ - * selectable.selectDynamic("foo") // same as above - * selectable.applyDynamic("bar", - * wrapRefArray([ classOf[Int], classOf[String] : jl.Class ] - * )( - * genericWrapArray([ Int.box(6), "hello" : Object ]) - * ) - * }}} - * - * In SJSIR, they must be encoded as follows: - * - * {{{ - * selectable.selectedValue;O().foo;R() - * selectable.selectedValue;O().bar;I;Ljava.lang.String;R( - * Int.box(6).asInstanceOf[int], - * "hello".asInstanceOf[java.lang.String] - * ) - * }}} - * - * where `selectedValue;O()` is declared in `scala.reflect.Selectable` and - * holds the actual instance on which to perform the reflective operations. - * For the typical use case from the first snippet, it returns `structural`. - * - * This means that we must deconstruct the elaborated calls to recover: - * - * - the method name as a compile-time string `foo` or `bar` - * - the `tp: Type`s that have been wrapped in `classOf[tp]`, as a - * compile-time List[Type], from which we'll derive `jstpe.Type`s for the - * `asInstanceOf`s and `jstpe.TypeRef`s for the `MethodName.reflectiveProxy` - * - the actual arguments as a compile-time `List[Tree]` - * - * Virtually all of the code in `genReflectiveCall` deals with recovering - * those elements. Constructing the IR Tree is the easy part after that. - */ - private def genReflectiveCall(tree: Apply, isSelectDynamic: Boolean): js.Tree = { - implicit val pos = tree.span - val Apply(fun @ Select(receiver, _), args) = tree: @unchecked - - val selectedValueTree = js.Apply(js.ApplyFlags.empty, genExpr(receiver), - js.MethodIdent(selectedValueMethodName), Nil)(jstpe.AnyType) - - // Extract the method name as a String - val methodNameStr = args.head match { - case Literal(Constants.Constant(name: String)) => - name - case _ => - report.error( - "The method name given to Selectable.selectDynamic or Selectable.applyDynamic " + - "must be a literal string. " + - "Other uses are not supported in Scala.js.", - args.head.sourcePos) - "erroneous" - } - - val (formalParamTypeRefs, actualArgs) = if (isSelectDynamic) { - (Nil, Nil) - } else { - // Extract the param type refs and actual args from the 2nd and 3rd argument to applyDynamic - args.tail match { - case WrapArray(classOfsArray: JavaSeqLiteral) :: WrapArray(actualArgsAnyArray: JavaSeqLiteral) :: Nil => - // Extract jstpe.Type's and jstpe.TypeRef's from the classOf[_] trees - val formalParamTypesAndTypeRefs = classOfsArray.elems.map { - // classOf[tp] -> tp - case Literal(const) if const.tag == Constants.ClazzTag => - toIRTypeAndTypeRef(const.typeValue) - // Anything else is invalid - case otherTree => - report.error( - "The java.lang.Class[_] arguments passed to Selectable.applyDynamic must be " + - "literal classOf[T] expressions (typically compiler-generated). 
" + - "Other uses are not supported in Scala.js.", - otherTree.sourcePos) - (jstpe.AnyType, jstpe.ClassRef(jsNames.ObjectClass)) - } - - // Gen the actual args, downcasting them to the formal param types - val actualArgs = actualArgsAnyArray.elems.zip(formalParamTypesAndTypeRefs).map { - (actualArgAny, formalParamTypeAndTypeRef) => - val genActualArgAny = genExpr(actualArgAny) - genAsInstanceOf(genActualArgAny, formalParamTypeAndTypeRef._1)(genActualArgAny.pos) - } - - (formalParamTypesAndTypeRefs.map(pair => toParamOrResultTypeRef(pair._2)), actualArgs) - - case _ => - report.error( - "Passing the varargs of Selectable.applyDynamic with `: _*` " + - "is not supported in Scala.js.", - tree.sourcePos) - (Nil, Nil) - } - } - - val methodName = MethodName.reflectiveProxy(methodNameStr, formalParamTypeRefs) - - js.Apply(js.ApplyFlags.empty, selectedValueTree, js.MethodIdent(methodName), actualArgs)(jstpe.AnyType) - } - - /** Gen actual actual arguments to Scala method call. - * Returns a list of the transformed arguments. - * - * This tries to optimize repeated arguments (varargs) by turning them - * into js.WrappedArray instead of Scala wrapped arrays. - */ - private def genActualArgs(sym: Symbol, args: List[Tree])( - implicit pos: Position): List[js.Tree] = { - args.map(genExpr) - /*val wereRepeated = exitingPhase(currentRun.typerPhase) { - sym.tpe.params.map(p => isScalaRepeatedParamType(p.tpe)) - } - - if (wereRepeated.size > args.size) { - // Should not happen, but let's not crash - args.map(genExpr) - } else { - /* Arguments that are in excess compared to the type signature after - * erasure are lambda-lifted arguments. They cannot be repeated, hence - * the extension to `false`. - */ - for ((arg, wasRepeated) <- args.zipAll(wereRepeated, EmptyTree, false)) yield { - if (wasRepeated) { - tryGenRepeatedParamAsJSArray(arg, handleNil = false).fold { - genExpr(arg) - } { genArgs => - genNew(WrappedArrayClass, WrappedArray_ctor, - List(js.JSArrayConstr(genArgs))) - } - } else { - genExpr(arg) - } - } - }*/ - } - - /** Gen actual actual arguments to a JS method call. - * Returns a list of the transformed arguments. - * - * - TODO Repeated arguments (varargs) are expanded - * - Default arguments are omitted or replaced by undefined - * - All arguments are boxed - * - * Repeated arguments that cannot be expanded at compile time (i.e., if a - * Seq is passed to a varargs parameter with the syntax `seq: _*`) will be - * wrapped in a [[js.JSSpread]] node to be expanded at runtime. - */ - private def genActualJSArgs(sym: Symbol, args: List[Tree])( - implicit pos: Position): List[js.TreeOrJSSpread] = { - - var reversedArgs: List[js.TreeOrJSSpread] = Nil - - for ((arg, info) <- args.zip(sym.jsParamInfos)) { - if (info.repeated) { - reversedArgs = genJSRepeatedParam(arg) reverse_::: reversedArgs - } else if (info.capture) { - // Ignore captures - assert(sym.isClassConstructor, - i"Found a capture param in method ${sym.fullName}, which is not a class constructor, at $pos") - } else { - val unboxedArg = genExpr(arg) - val boxedArg = unboxedArg match { - case js.Transient(UndefinedParam) => - unboxedArg - case _ => - box(unboxedArg, info.info) - } - reversedArgs ::= boxedArg - } - } - - /* Remove all consecutive UndefinedParam's at the end of the argument - * list. No check is performed whether they may be there, since they will - * only be placed where default arguments can be anyway. 
- */ - reversedArgs = reversedArgs.dropWhile(_.isInstanceOf[js.Transient]) - - /* Find remaining UndefinedParam and replace by js.Undefined. This can - * happen with named arguments or with multiple argument lists. - */ - reversedArgs = reversedArgs map { - case js.Transient(UndefinedParam) => js.Undefined() - case arg => arg - } - - reversedArgs.reverse - } - - /** Gen JS code for a repeated param of a JS method. - * - * In this case `arg` has type `Seq[T]` for some `T`, but the result should - * be an expanded list of the elements in the sequence. So this method - * takes care of the conversion. - * - * It is specialized for the shapes of tree generated by the desugaring - * of repeated params in Scala, so that these are actually expanded at - * compile-time. - * - * Otherwise, it returns a `JSSpread` with the `Seq` converted to a - * `js.Array`. - */ - private def genJSRepeatedParam(arg: Tree): List[js.TreeOrJSSpread] = { - tryGenRepeatedParamAsJSArray(arg, handleNil = true).getOrElse { - /* Fall back to calling runtime.genTraversableOnce2jsArray - * to perform the conversion to js.Array, then wrap in a Spread - * operator. - */ - implicit val pos: SourcePosition = arg.sourcePos - val jsArrayArg = genModuleApplyMethod( - jsdefn.Runtime_toJSVarArgs, - List(genExpr(arg))) - List(js.JSSpread(jsArrayArg)) - } - } - - /** Try and expand an actual argument to a repeated param `(xs: T*)`. - * - * This method recognizes the shapes of tree generated by the desugaring - * of repeated params in Scala, and expands them. - * If `arg` does not have the shape of a generated repeated param, this - * method returns `None`. - */ - private def tryGenRepeatedParamAsJSArray(arg: Tree, - handleNil: Boolean): Option[List[js.Tree]] = { - implicit val pos = arg.span - - // Given a method `def foo(args: T*)` - arg match { - // foo(arg1, arg2, ..., argN) where N > 0 - case MaybeAsInstanceOf(WrapArray(MaybeAsInstanceOf(array: JavaSeqLiteral))) => - /* Value classes in arrays are already boxed, so no need to use - * the type before erasure. - * TODO Is this true in dotty? - */ - Some(array.elems.map(e => box(genExpr(e), e.tpe))) - - // foo() - case Ident(_) if handleNil && arg.symbol == defn.NilModule => - Some(Nil) - - // foo(argSeq: _*) - cannot be optimized - case _ => - None - } - } - - private object MaybeAsInstanceOf { - def unapply(tree: Tree): Some[Tree] = tree match { - case TypeApply(asInstanceOf_? @ Select(base, _), _) - if asInstanceOf_?.symbol == defn.Any_asInstanceOf => - Some(base) - case _ => - Some(tree) - } - } - - private object WrapArray { - lazy val isWrapArray: Set[Symbol] = { - val names0 = defn.ScalaValueClasses().map(sym => nme.wrapXArray(sym.name)) - val names1 = names0 ++ Set(nme.wrapRefArray, nme.genericWrapArray) - val symsInPredef = names1.map(defn.ScalaPredefModule.requiredMethod(_)) - val symsInScalaRunTime = names1.map(defn.ScalaRuntimeModule.requiredMethod(_)) - (symsInPredef ++ symsInScalaRunTime).toSet - } - - def unapply(tree: Apply): Option[Tree] = tree match { - case Apply(wrapArray_?, List(wrapped)) if isWrapArray(wrapArray_?.symbol) => - Some(wrapped) - case _ => - None - } - } - - /** Wraps a `js.Array` to use as varargs. */ - def genJSArrayToVarArgs(arrayRef: js.Tree)(implicit pos: SourcePosition): js.Tree = - genModuleApplyMethod(jsdefn.Runtime_toScalaVarArgs, List(arrayRef)) - - /** Gen the actual capture values for a JS constructor based on its fake `new` invocation. 
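The repeated-parameter handling above means a literal varargs call to a JS method is expanded at compile time, while passing a `Seq` with `: _*` falls back to a runtime conversion plus a spread. A hypothetical facade illustrating both:

```scala
import scala.scalajs.js
import scala.scalajs.js.annotation._

@js.native
@JSGlobal("console")
object NativeConsole extends js.Object {
  def log(args: js.Any*): Unit = js.native
}

def demo(values: Seq[js.Any]): Unit = {
  NativeConsole.log("a", 1, true)  // expanded at compile time into three JS arguments
  NativeConsole.log(values: _*)    // converted to a js.Array and spread at run time
}
```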
*/ - private def genCaptureValuesFromFakeNewInstance(tree: Tree): List[js.Tree] = { - implicit val pos: Position = tree.span - - val Apply(fun @ Select(New(_), _), args) = tree: @unchecked - val sym = fun.symbol - - /* We use the same strategy as genActualJSArgs to detect which parameters were - * introduced by explicitouter or lambdalift (but reversed, of course). - */ - - val existedBeforeUncurry = atPhase(elimRepeatedPhase) { - sym.info.paramNamess.flatten.toSet - } - - for { - (arg, paramName) <- args.zip(sym.info.paramNamess.flatten) - if !existedBeforeUncurry(paramName) - } yield { - genExpr(arg) - } - } - - private def genVarRef(sym: Symbol)(implicit pos: Position): js.VarRef = - js.VarRef(encodeLocalSym(sym))(toIRType(sym.info)) - - private def genAssignableField(sym: Symbol, qualifier: Tree)(implicit pos: SourcePosition): (js.AssignLhs, Boolean) = { - def qual = genExpr(qualifier) - - if (sym.owner.isNonNativeJSClass) { - val f = if (sym.isJSExposed) { - js.JSSelect(qual, genExpr(sym.jsName)) - } else if (sym.owner.isAnonymousClass) { - js.JSSelect( - js.JSSelect(qual, genPrivateFieldsSymbol()), - encodeFieldSymAsStringLiteral(sym)) - } else { - js.JSPrivateSelect(qual, encodeClassName(sym.owner), - encodeFieldSym(sym)) - } - - (f, true) - } else if (sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot)) { - val f = js.SelectStatic(encodeClassName(sym.owner), encodeFieldSym(sym))(jstpe.AnyType) - (f, true) - } else if (sym.hasAnnotation(jsdefn.JSExportStaticAnnot)) { - val jsName = sym.getAnnotation(jsdefn.JSExportStaticAnnot).get.argumentConstantString(0).getOrElse { - sym.defaultJSName - } - val companionClass = sym.owner.linkedClass - val f = js.JSSelect(genLoadJSConstructor(companionClass), js.StringLiteral(jsName)) - (f, true) - } else { - val className = encodeClassName(sym.owner) - val fieldIdent = encodeFieldSym(sym) - - /* #4370 Fields cannot have type NothingType, so we box them as - * scala.runtime.Nothing$ instead. They will be initialized with - * `null`, and any attempt to access them will throw a - * `ClassCastException` (generated in the unboxing code). - */ - val (irType, boxed) = toIRType(sym.info) match - case jstpe.NothingType => - (encodeClassType(defn.NothingClass), true) - case ftpe => - (ftpe, false) - - val f = - if sym.is(JavaStatic) then - js.SelectStatic(className, fieldIdent)(irType) - else - js.Select(qual, className, fieldIdent)(irType) - - (f, boxed) - } - } - - /** Gen JS code for loading a Java static field. - */ - private def genLoadStaticField(sym: Symbol)(implicit pos: SourcePosition): js.Tree = { - /* Actually, there is no static member in Scala.js. If we come here, that - * is because we found the symbol in a Java-emitted .class in the - * classpath. But the corresponding implementation in Scala.js will - * actually be a val in the companion module. - */ - - if (sym == defn.BoxedUnit_UNIT) { - js.Undefined() - } else if (sym == defn.BoxedUnit_TYPE) { - js.ClassOf(jstpe.VoidRef) - } else { - val className = encodeClassName(sym.owner) - val method = encodeStaticMemberSym(sym) - js.ApplyStatic(js.ApplyFlags.empty, className, method, Nil)(toIRType(sym.info)) - } - } - - /** Generates a call to `runtime.privateFieldsSymbol()` */ - private def genPrivateFieldsSymbol()(implicit pos: SourcePosition): js.Tree = - genModuleApplyMethod(jsdefn.Runtime_privateFieldsSymbol, Nil) - - /** Generate loading of a module value. - * - * Can be given either the module symbol or its module class symbol. 
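`genAssignableField` above chooses between several storages for a field. A sketch of the user-level declarations behind three of those branches (names invented, details hedged):

```scala
import scala.scalajs.js
import scala.scalajs.js.annotation._

// Fields of a non-native JS class live on the JS object itself: exposed fields
// under their JS name, non-exposed ones behind a private-fields symbol.
class Point(val x: Double, val y: Double) extends js.Object

object Point {
  // Stored as a static field on the emitted JS class `Point`.
  @JSExportStatic
  var origin: Point = new Point(0, 0)
}

object Defaults {
  // Stored as a static field in the IR and exported at the top level under the given name.
  @JSExportTopLevel("defaultLabel")
  val defaultLabel: String = "point"
}
```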
- * - * If the module we load refers to the global scope (i.e., it is - * annotated with `@JSGlobalScope`), report a compile error specifying - * that a global scope object should only be used as the qualifier of a - * `.`-selection. - */ - def genLoadModule(sym: Symbol)(implicit pos: SourcePosition): js.Tree = - ruleOutGlobalScope(genLoadModuleOrGlobalScope(sym)) - - /** Generate loading of a module value or the global scope. - * - * Can be given either the module symbol of its module class symbol. - * - * Unlike `genLoadModule`, this method does not fail if the module we load - * refers to the global scope. - */ - def genLoadModuleOrGlobalScope(sym0: Symbol)( - implicit pos: SourcePosition): MaybeGlobalScope = { - - require(sym0.is(Module), - "genLoadModule called with non-module symbol: " + sym0) - val sym = if (sym0.isTerm) sym0.moduleClass else sym0 - - // Does that module refer to the global scope? - if (sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { - MaybeGlobalScope.GlobalScope(pos) - } else { - val cls = encodeClassName(sym) - val tree = - if (sym.isJSType) js.LoadJSModule(cls) - else js.LoadModule(cls) - MaybeGlobalScope.NotGlobalScope(tree) - } - } - - /** Gen JS code representing the constructor of a JS class. */ - private def genLoadJSConstructor(sym: Symbol)( - implicit pos: Position): js.Tree = { - assert(!isStaticModule(sym) && !sym.is(Trait), - s"genLoadJSConstructor called with non-class $sym") - js.LoadJSConstructor(encodeClassName(sym)) - } - - private inline val GenericGlobalObjectInformationMsg = { - "\n " + - "See https://www.scala-js.org/doc/interoperability/global-scope.html " + - "for further information." - } - - /** Rule out the `GlobalScope` case of a `MaybeGlobalScope` and extract the - * value tree. - * - * If `tree` represents the global scope, report a compile error. - */ - private def ruleOutGlobalScope(tree: MaybeGlobalScope): js.Tree = { - tree match { - case MaybeGlobalScope.NotGlobalScope(t) => - t - case MaybeGlobalScope.GlobalScope(pos) => - reportErrorLoadGlobalScope()(pos) - } - } - - /** Report a compile error specifying that the global scope cannot be - * loaded as a value. - */ - private def reportErrorLoadGlobalScope()(implicit pos: SourcePosition): js.Tree = { - report.error( - "Loading the global scope as a value (anywhere but as the " + - "left-hand-side of a `.`-selection) is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.Undefined() - } - - /** Gen a JS bracket select or a `JSGlobalRef`. - * - * If the receiver is a normal value, i.e., not the global scope, then - * emit a `JSSelect`. - * - * Otherwise, if the `item` is a constant string that is a valid - * JavaScript identifier, emit a `JSGlobalRef`. - * - * Otherwise, report a compile error. - */ - private def genJSSelectOrGlobalRef(qual: MaybeGlobalScope, item: js.Tree)( - implicit pos: SourcePosition): js.AssignLhs = { - qual match { - case MaybeGlobalScope.NotGlobalScope(qualTree) => - js.JSSelect(qualTree, item) - - case MaybeGlobalScope.GlobalScope(_) => - item match { - case js.StringLiteral(value) => - if (js.JSGlobalRef.isValidJSGlobalRefName(value)) { - js.JSGlobalRef(value) - } else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) { - report.error( - "Invalid selection in the global scope of the reserved " + - s"identifier name `$value`." 
+ - GenericGlobalObjectInformationMsg, - pos) - js.JSGlobalRef("erroneous") - } else { - report.error( - "Selecting a field of the global scope whose name is " + - "not a valid JavaScript identifier is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.JSGlobalRef("erroneous") - } - - case _ => - report.error( - "Selecting a field of the global scope with a dynamic " + - "name is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.JSGlobalRef("erroneous") - } - } - } - - /** Gen a JS bracket method apply or an apply of a `GlobalRef`. - * - * If the receiver is a normal value, i.e., not the global scope, then - * emit a `JSMethodApply`. - * - * Otherwise, if the `method` is a constant string that is a valid - * JavaScript identifier, emit a `JSFunctionApply(JSGlobalRef(...), ...)`. - * - * Otherwise, report a compile error. - */ - private def genJSMethodApplyOrGlobalRefApply( - receiver: MaybeGlobalScope, method: js.Tree, args: List[js.TreeOrJSSpread])( - implicit pos: SourcePosition): js.Tree = { - receiver match { - case MaybeGlobalScope.NotGlobalScope(receiverTree) => - js.JSMethodApply(receiverTree, method, args) - - case MaybeGlobalScope.GlobalScope(_) => - method match { - case js.StringLiteral(value) => - if (js.JSGlobalRef.isValidJSGlobalRefName(value)) { - js.JSFunctionApply(js.JSGlobalRef(value), args) - } else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) { - report.error( - "Invalid call in the global scope of the reserved " + - s"identifier name `$value`." + - GenericGlobalObjectInformationMsg, - pos) - js.Undefined() - } else { - report.error( - "Calling a method of the global scope whose name is not " + - "a valid JavaScript identifier is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.Undefined() - } - - case _ => - report.error( - "Calling a method of the global scope with a dynamic " + - "name is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.Undefined() - } - } - } - - private def computeJSNativeLoadSpecOfValDef(sym: Symbol): js.JSNativeLoadSpec = { - atPhaseBeforeTransforms { - computeJSNativeLoadSpecOfInPhase(sym) - } - } - - private def computeJSNativeLoadSpecOfClass(sym: Symbol): Option[js.JSNativeLoadSpec] = { - if (sym.is(Trait) || sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { - None - } else { - atPhaseBeforeTransforms { - if (sym.owner.isStaticOwner) - Some(computeJSNativeLoadSpecOfInPhase(sym)) - else - None - } - } - } - - private def computeJSNativeLoadSpecOfInPhase(sym: Symbol)(using Context): js.JSNativeLoadSpec = { - import js.JSNativeLoadSpec._ - - val symOwner = sym.owner - - // Marks a code path as unexpected because it should have been reported as an error in `PrepJSInterop`. 
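The global-scope handling above is the reason a `@JSGlobalScope` object may only appear as the qualifier of a selection. A sketch (the `globalCounter` member is invented; `parseInt` is a real global):

```scala
import scala.scalajs.js
import scala.scalajs.js.annotation._

@js.native
@JSGlobalScope
object Globals extends js.Object {
  def parseInt(s: String): Int = js.native
  var globalCounter: Int = js.native
}

def ok(): Int =
  Globals.parseInt("42") + Globals.globalCounter  // selections on the global scope are allowed

// val g = Globals  // rejected: the global scope cannot be loaded as a value
```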
- def unexpected(msg: String): Nothing = - throw new FatalError(i"$msg for ${sym.fullName} at ${sym.srcPos}") - - if (symOwner.hasAnnotation(jsdefn.JSNativeAnnot)) { - val jsName = sym.jsName match { - case JSName.Literal(jsName) => jsName - case JSName.Computed(_) => unexpected("could not read the simple JS name as a string literal") - } - - if (symOwner.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { - Global(jsName, Nil) - } else { - val ownerLoadSpec = computeJSNativeLoadSpecOfInPhase(symOwner) - ownerLoadSpec match { - case Global(globalRef, path) => - Global(globalRef, path :+ jsName) - case Import(module, path) => - Import(module, path :+ jsName) - case ImportWithGlobalFallback(Import(module, modulePath), Global(globalRef, globalPath)) => - ImportWithGlobalFallback( - Import(module, modulePath :+ jsName), - Global(globalRef, globalPath :+ jsName)) - } - } - } else { - def parsePath(pathName: String): List[String] = - pathName.split('.').toList - - def parseGlobalPath(pathName: String): Global = { - val globalRef :: path = parsePath(pathName): @unchecked - Global(globalRef, path) - } - - val annot = sym.annotations.find { annot => - annot.symbol == jsdefn.JSGlobalAnnot || annot.symbol == jsdefn.JSImportAnnot - }.getOrElse { - unexpected("could not find the JS native load spec annotation") - } - - if (annot.symbol == jsdefn.JSGlobalAnnot) { - val pathName = annot.argumentConstantString(0).getOrElse { - sym.defaultJSName - } - parseGlobalPath(pathName) - } else { // annot.symbol == jsdefn.JSImportAnnot - val module = annot.argumentConstantString(0).getOrElse { - unexpected("could not read the module argument as a string literal") - } - val path = annot.argumentConstantString(1).fold { - if (annot.arguments.sizeIs < 2) - parsePath(sym.defaultJSName) - else - Nil - } { pathName => - parsePath(pathName) - } - val importSpec = Import(module, path) - annot.argumentConstantString(2).fold[js.JSNativeLoadSpec] { - importSpec - } { globalPathName => - ImportWithGlobalFallback(importSpec, parseGlobalPath(globalPathName)) - } - } - } - } - - private def isMethodStaticInIR(sym: Symbol): Boolean = - sym.is(JavaStatic) - - /** Generate a Class[_] value (e.g. coming from classOf[T]) */ - private def genClassConstant(tpe: Type)(implicit pos: Position): js.Tree = - js.ClassOf(toTypeRef(tpe)) - - private def isStaticModule(sym: Symbol): Boolean = - sym.is(Module) && sym.isStatic - - private def isPrimitiveValueType(tpe: Type): Boolean = { - tpe.widenDealias match { - case JavaArrayType(_) => false - case _: ErasedValueType => false - case t => t.typeSymbol.asClass.isPrimitiveValueClass - } - } - - protected lazy val isHijackedClass: Set[Symbol] = { - /* This list is a duplicate of ir.Definitions.HijackedClasses, but - * with global.Symbol's instead of IR encoded names as Strings. - * We also add java.lang.Void, which BoxedUnit "erases" to. 
- */ - Set[Symbol]( - defn.BoxedUnitClass, defn.BoxedBooleanClass, defn.BoxedCharClass, defn.BoxedByteClass, - defn.BoxedShortClass, defn.BoxedIntClass, defn.BoxedLongClass, defn.BoxedFloatClass, - defn.BoxedDoubleClass, defn.StringClass, jsdefn.JavaLangVoidClass - ) - } - - private def isMaybeJavaScriptException(tpe: Type): Boolean = - jsdefn.JavaScriptExceptionClass.isSubClass(tpe.typeSymbol) - - private def hasDefaultCtorArgsAndJSModule(classSym: Symbol): Boolean = { - def hasNativeCompanion = - classSym.companionModule.moduleClass.hasAnnotation(jsdefn.JSNativeAnnot) - def hasDefaultParameters = - classSym.info.decls.exists(sym => sym.isClassConstructor && sym.hasDefaultParams) - - hasNativeCompanion && hasDefaultParameters - } - - // Copied from DottyBackendInterface - - private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] - - def desugarIdent(i: Ident): Option[tpd.Select] = { - var found = desugared.get(i.tpe) - if (found == null) { - tpd.desugarIdent(i) match { - case sel: tpd.Select => - desugared.put(i.tpe, sel) - found = sel - case _ => - } - } - if (found == null) None else Some(found) - } -} - -object JSCodeGen { - - private val NullPointerExceptionClass = ClassName("java.lang.NullPointerException") - private val JSObjectClassName = ClassName("scala.scalajs.js.Object") - private val JavaScriptExceptionClassName = ClassName("scala.scalajs.js.JavaScriptException") - - private val ObjectClassRef = jstpe.ClassRef(ir.Names.ObjectClass) - - private val newSimpleMethodName = SimpleMethodName("new") - - private val selectedValueMethodName = MethodName("selectedValue", Nil, ObjectClassRef) - - private val ObjectArgConstructorName = MethodName.constructor(List(ObjectClassRef)) - - private val thisOriginalName = OriginalName("this") - - sealed abstract class MaybeGlobalScope - - object MaybeGlobalScope { - final case class NotGlobalScope(tree: js.Tree) extends MaybeGlobalScope - - final case class GlobalScope(pos: SourcePosition) extends MaybeGlobalScope - } - - /** Marker object for undefined parameters in JavaScript semantic calls. - * - * To be used inside a `js.Transient` node. - */ - case object UndefinedParam extends js.Transient.Value { - val tpe: jstpe.Type = jstpe.UndefType - - def traverse(traverser: ir.Traversers.Traverser): Unit = () - - def transform(transformer: ir.Transformers.Transformer, isStat: Boolean)( - implicit pos: ir.Position): js.Tree = { - js.Transient(this) - } - - def printIR(out: ir.Printers.IRTreePrinter): Unit = - out.print("") - } - - /** Info about a default param accessor. - * - * The method must have a default getter name for this class to make sense. - */ - private class DefaultParamInfo(sym: Symbol)(using Context) { - private val methodName = sym.name.exclude(DefaultGetterName) - - def isForConstructor: Boolean = methodName == nme.CONSTRUCTOR - - /** When `isForConstructor` is true, returns the owner of the attached - * constructor. - */ - def constructorOwner: Symbol = sym.owner.linkedClass - - /** When `isForConstructor` is false, returns the method attached to the - * specified default accessor. - */ - def attachedMethod: Symbol = { - // If there are overloads, we need to find the one that has default params. 
- val overloads = sym.owner.info.decl(methodName) - if (!overloads.isOverloaded) - overloads.symbol - else - overloads.suchThat(_.is(HasDefaultParams, butNot = Bridge)).symbol - } - } - -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala b/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala deleted file mode 100644 index 964811c69e19..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala +++ /dev/null @@ -1,340 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import scala.annotation.threadUnsafe - -import dotty.tools.dotc.core._ -import Names._ -import Types._ -import Contexts._ -import Symbols._ -import StdNames._ - -import dotty.tools.dotc.config.SJSPlatform - -object JSDefinitions { - /** The Scala.js-specific definitions for the current context. */ - def jsdefn(using Context): JSDefinitions = - ctx.platform.asInstanceOf[SJSPlatform].jsDefinitions -} - -final class JSDefinitions()(using DetachedContext) { - - @threadUnsafe lazy val InlineAnnotType: TypeRef = requiredClassRef("scala.inline") - def InlineAnnot(using Context) = InlineAnnotType.symbol.asClass - @threadUnsafe lazy val NoinlineAnnotType: TypeRef = requiredClassRef("scala.noinline") - def NoinlineAnnot(using Context) = NoinlineAnnotType.symbol.asClass - - @threadUnsafe lazy val JavaLangVoidType: TypeRef = requiredClassRef("java.lang.Void") - def JavaLangVoidClass(using Context) = JavaLangVoidType.symbol.asClass - - @threadUnsafe lazy val ScalaJSJSPackageVal = requiredPackage("scala.scalajs.js") - @threadUnsafe lazy val ScalaJSJSPackageClass = ScalaJSJSPackageVal.moduleClass.asClass - @threadUnsafe lazy val JSPackage_typeOfR = ScalaJSJSPackageClass.requiredMethodRef("typeOf") - def JSPackage_typeOf(using Context) = JSPackage_typeOfR.symbol - @threadUnsafe lazy val JSPackage_constructorOfR = ScalaJSJSPackageClass.requiredMethodRef("constructorOf") - def JSPackage_constructorOf(using Context) = JSPackage_constructorOfR.symbol - @threadUnsafe lazy val JSPackage_nativeR = ScalaJSJSPackageClass.requiredMethodRef("native") - def JSPackage_native(using Context) = JSPackage_nativeR.symbol - @threadUnsafe lazy val JSPackage_undefinedR = ScalaJSJSPackageClass.requiredMethodRef("undefined") - def JSPackage_undefined(using Context) = JSPackage_undefinedR.symbol - @threadUnsafe lazy val JSPackage_dynamicImportR = ScalaJSJSPackageClass.requiredMethodRef("dynamicImport") - def JSPackage_dynamicImport(using Context) = JSPackage_dynamicImportR.symbol - - @threadUnsafe lazy val JSNativeAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.native") - def JSNativeAnnot(using Context) = JSNativeAnnotType.symbol.asClass - - @threadUnsafe lazy val JSAnyType: TypeRef = requiredClassRef("scala.scalajs.js.Any") - def JSAnyClass(using Context) = JSAnyType.symbol.asClass - @threadUnsafe lazy val JSObjectType: TypeRef = requiredClassRef("scala.scalajs.js.Object") - def JSObjectClass(using Context) = JSObjectType.symbol.asClass - @threadUnsafe lazy val JSFunctionType: TypeRef = requiredClassRef("scala.scalajs.js.Function") - def JSFunctionClass(using Context) = JSFunctionType.symbol.asClass - @threadUnsafe lazy val JSThisFunctionType: TypeRef = requiredClassRef("scala.scalajs.js.ThisFunction") - def JSThisFunctionClass(using Context) = JSThisFunctionType.symbol.asClass - - @threadUnsafe lazy val PseudoUnionType: TypeRef = requiredClassRef("scala.scalajs.js.|") - def PseudoUnionClass(using Context) = PseudoUnionType.symbol.asClass - - @threadUnsafe lazy val 
PseudoUnionModuleRef = requiredModuleRef("scala.scalajs.js.|") - def PseudoUnionModule(using Context) = PseudoUnionModuleRef.symbol - @threadUnsafe lazy val PseudoUnion_fromR = PseudoUnionModule.requiredMethodRef("from") - def PseudoUnion_from(using Context) = PseudoUnion_fromR.symbol - @threadUnsafe lazy val PseudoUnion_fromTypeConstructorR = PseudoUnionModule.requiredMethodRef("fromTypeConstructor") - def PseudoUnion_fromTypeConstructor(using Context) = PseudoUnion_fromTypeConstructorR.symbol - - @threadUnsafe lazy val UnionOpsModuleRef = requiredModuleRef("scala.scalajs.js.internal.UnitOps") - - @threadUnsafe lazy val JSArrayType: TypeRef = requiredClassRef("scala.scalajs.js.Array") - def JSArrayClass(using Context) = JSArrayType.symbol.asClass - @threadUnsafe lazy val JSDynamicType: TypeRef = requiredClassRef("scala.scalajs.js.Dynamic") - def JSDynamicClass(using Context) = JSDynamicType.symbol.asClass - - @threadUnsafe lazy val RuntimeExceptionType: TypeRef = requiredClassRef("java.lang.RuntimeException") - def RuntimeExceptionClass(using Context) = RuntimeExceptionType.symbol.asClass - @threadUnsafe lazy val JavaScriptExceptionType: TypeRef = requiredClassRef("scala.scalajs.js.JavaScriptException") - def JavaScriptExceptionClass(using Context) = JavaScriptExceptionType.symbol.asClass - - @threadUnsafe lazy val JSGlobalAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSGlobal") - def JSGlobalAnnot(using Context) = JSGlobalAnnotType.symbol.asClass - @threadUnsafe lazy val JSImportAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSImport") - def JSImportAnnot(using Context) = JSImportAnnotType.symbol.asClass - @threadUnsafe lazy val JSGlobalScopeAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSGlobalScope") - def JSGlobalScopeAnnot(using Context) = JSGlobalScopeAnnotType.symbol.asClass - @threadUnsafe lazy val JSNameAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSName") - def JSNameAnnot(using Context) = JSNameAnnotType.symbol.asClass - @threadUnsafe lazy val JSFullNameAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSFullName") - def JSFullNameAnnot(using Context) = JSFullNameAnnotType.symbol.asClass - @threadUnsafe lazy val JSBracketAccessAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSBracketAccess") - def JSBracketAccessAnnot(using Context) = JSBracketAccessAnnotType.symbol.asClass - @threadUnsafe lazy val JSBracketCallAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSBracketCall") - def JSBracketCallAnnot(using Context) = JSBracketCallAnnotType.symbol.asClass - @threadUnsafe lazy val JSExportTopLevelAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportTopLevel") - def JSExportTopLevelAnnot(using Context) = JSExportTopLevelAnnotType.symbol.asClass - @threadUnsafe lazy val JSExportAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExport") - def JSExportAnnot(using Context) = JSExportAnnotType.symbol.asClass - @threadUnsafe lazy val JSExportStaticAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportStatic") - def JSExportStaticAnnot(using Context) = JSExportStaticAnnotType.symbol.asClass - @threadUnsafe lazy val JSExportAllAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportAll") - def JSExportAllAnnot(using Context) = JSExportAllAnnotType.symbol.asClass - - def JSAnnotPackage(using Context) = JSGlobalAnnot.owner.asClass - - @threadUnsafe lazy val JSTypeAnnotType: 
TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.JSType") - def JSTypeAnnot(using Context) = JSTypeAnnotType.symbol.asClass - @threadUnsafe lazy val JSOptionalAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.JSOptional") - def JSOptionalAnnot(using Context) = JSOptionalAnnotType.symbol.asClass - @threadUnsafe lazy val ExposedJSMemberAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.ExposedJSMember") - def ExposedJSMemberAnnot(using Context) = ExposedJSMemberAnnotType.symbol.asClass - - @threadUnsafe lazy val JSImportNamespaceModuleRef = requiredModuleRef("scala.scalajs.js.annotation.JSImport.Namespace") - def JSImportNamespaceModule(using Context) = JSImportNamespaceModuleRef.symbol - - @threadUnsafe lazy val JSAnyModuleRef = requiredModuleRef("scala.scalajs.js.Any") - def JSAnyModule(using Context) = JSAnyModuleRef.symbol - @threadUnsafe lazy val JSAny_fromFunctionR = (0 to 22).map(n => JSAnyModule.requiredMethodRef("fromFunction" + n)).toArray - def JSAny_fromFunction(n: Int)(using Context) = JSAny_fromFunctionR(n).symbol - - @threadUnsafe lazy val JSDynamicModuleRef = requiredModuleRef("scala.scalajs.js.Dynamic") - def JSDynamicModule(using Context) = JSDynamicModuleRef.symbol - @threadUnsafe lazy val JSDynamic_globalR = JSDynamicModule.requiredMethodRef("global") - def JSDynamic_global(using Context) = JSDynamic_globalR.symbol - @threadUnsafe lazy val JSDynamic_newInstanceR = JSDynamicModule.requiredMethodRef("newInstance") - def JSDynamic_newInstance(using Context) = JSDynamic_newInstanceR.symbol - - @threadUnsafe lazy val JSDynamicLiteralModuleRef = JSDynamicModule.moduleClass.requiredValueRef("literal") - def JSDynamicLiteralModule(using Context) = JSDynamicLiteralModuleRef.symbol - @threadUnsafe lazy val JSDynamicLiteral_applyDynamicNamedR = JSDynamicLiteralModule.requiredMethodRef("applyDynamicNamed") - def JSDynamicLiteral_applyDynamicNamed(using Context) = JSDynamicLiteral_applyDynamicNamedR.symbol - @threadUnsafe lazy val JSDynamicLiteral_applyDynamicR = JSDynamicLiteralModule.requiredMethodRef("applyDynamic") - def JSDynamicLiteral_applyDynamic(using Context) = JSDynamicLiteral_applyDynamicR.symbol - - @threadUnsafe lazy val JSObjectModuleRef = requiredModuleRef("scala.scalajs.js.Object") - def JSObjectModule(using Context) = JSObjectModuleRef.symbol - - @threadUnsafe lazy val JSArrayModuleRef = requiredModuleRef("scala.scalajs.js.Array") - def JSArrayModule(using Context) = JSArrayModuleRef.symbol - @threadUnsafe lazy val JSArray_applyR = JSArrayModule.requiredMethodRef(nme.apply) - def JSArray_apply(using Context) = JSArray_applyR.symbol - - @threadUnsafe lazy val JSThisFunctionModuleRef = requiredModuleRef("scala.scalajs.js.ThisFunction") - def JSThisFunctionModule(using Context) = JSThisFunctionModuleRef.symbol - @threadUnsafe lazy val JSThisFunction_fromFunctionR = (1 to 22).map(n => JSThisFunctionModule.requiredMethodRef("fromFunction" + n)).toArray - def JSThisFunction_fromFunction(n: Int)(using Context) = JSThisFunction_fromFunctionR(n - 1).symbol - - @threadUnsafe lazy val JSConstructorTagModuleRef = requiredModuleRef("scala.scalajs.js.ConstructorTag") - def JSConstructorTagModule(using Context) = JSConstructorTagModuleRef.symbol - @threadUnsafe lazy val JSConstructorTag_materializeR = JSConstructorTagModule.requiredMethodRef("materialize") - def JSConstructorTag_materialize(using Context) = JSConstructorTag_materializeR.symbol - - @threadUnsafe lazy val JSNewModuleRef = 
requiredModuleRef("scala.scalajs.js.new") - def JSNewModule(using Context) = JSNewModuleRef.symbol - @threadUnsafe lazy val JSNew_targetR = JSNewModule.requiredMethodRef("target") - def JSNew_target(using Context) = JSNew_targetR.symbol - - @threadUnsafe lazy val JSImportModuleRef = requiredModuleRef("scala.scalajs.js.import") - def JSImportModule(using Context) = JSImportModuleRef.symbol - @threadUnsafe lazy val JSImport_applyR = JSImportModule.requiredMethodRef(nme.apply) - def JSImport_apply(using Context) = JSImport_applyR.symbol - @threadUnsafe lazy val JSImport_metaR = JSImportModule.requiredMethodRef("meta") - def JSImport_meta(using Context) = JSImport_metaR.symbol - - @threadUnsafe lazy val RuntimePackageVal = requiredPackage("scala.scalajs.runtime") - @threadUnsafe lazy val RuntimePackageClass = RuntimePackageVal.moduleClass.asClass - @threadUnsafe lazy val Runtime_toScalaVarArgsR = RuntimePackageClass.requiredMethodRef("toScalaVarArgs") - def Runtime_toScalaVarArgs(using Context) = Runtime_toScalaVarArgsR.symbol - @threadUnsafe lazy val Runtime_toJSVarArgsR = RuntimePackageClass.requiredMethodRef("toJSVarArgs") - def Runtime_toJSVarArgs(using Context) = Runtime_toJSVarArgsR.symbol - @threadUnsafe lazy val Runtime_privateFieldsSymbolR = RuntimePackageClass.requiredMethodRef("privateFieldsSymbol") - def Runtime_privateFieldsSymbol(using Context) = Runtime_privateFieldsSymbolR.symbol - @threadUnsafe lazy val Runtime_constructorOfR = RuntimePackageClass.requiredMethodRef("constructorOf") - def Runtime_constructorOf(using Context) = Runtime_constructorOfR.symbol - @threadUnsafe lazy val Runtime_newConstructorTagR = RuntimePackageClass.requiredMethodRef("newConstructorTag") - def Runtime_newConstructorTag(using Context) = Runtime_newConstructorTagR.symbol - @threadUnsafe lazy val Runtime_createInnerJSClassR = RuntimePackageClass.requiredMethodRef("createInnerJSClass") - def Runtime_createInnerJSClass(using Context) = Runtime_createInnerJSClassR.symbol - @threadUnsafe lazy val Runtime_createLocalJSClassR = RuntimePackageClass.requiredMethodRef("createLocalJSClass") - def Runtime_createLocalJSClass(using Context) = Runtime_createLocalJSClassR.symbol - @threadUnsafe lazy val Runtime_withContextualJSClassValueR = RuntimePackageClass.requiredMethodRef("withContextualJSClassValue") - def Runtime_withContextualJSClassValue(using Context) = Runtime_withContextualJSClassValueR.symbol - @threadUnsafe lazy val Runtime_linkingInfoR = RuntimePackageClass.requiredMethodRef("linkingInfo") - def Runtime_linkingInfo(using Context) = Runtime_linkingInfoR.symbol - @threadUnsafe lazy val Runtime_dynamicImportR = RuntimePackageClass.requiredMethodRef("dynamicImport") - def Runtime_dynamicImport(using Context) = Runtime_dynamicImportR.symbol - - @threadUnsafe lazy val DynamicImportThunkType: TypeRef = requiredClassRef("scala.scalajs.runtime.DynamicImportThunk") - def DynamicImportThunkClass(using Context) = DynamicImportThunkType.symbol.asClass - @threadUnsafe lazy val DynamicImportThunkClass_applyR = DynamicImportThunkClass.requiredMethodRef(nme.apply) - def DynamicImportThunkClass_apply(using Context) = DynamicImportThunkClass_applyR.symbol - - @threadUnsafe lazy val SpecialPackageVal = requiredPackage("scala.scalajs.js.special") - @threadUnsafe lazy val SpecialPackageClass = SpecialPackageVal.moduleClass.asClass - @threadUnsafe lazy val Special_debuggerR = SpecialPackageClass.requiredMethodRef("debugger") - def Special_debugger(using Context) = Special_debuggerR.symbol - @threadUnsafe lazy val 
Special_deleteR = SpecialPackageClass.requiredMethodRef("delete") - def Special_delete(using Context) = Special_deleteR.symbol - @threadUnsafe lazy val Special_forinR = SpecialPackageClass.requiredMethodRef("forin") - def Special_forin(using Context) = Special_forinR.symbol - @threadUnsafe lazy val Special_inR = SpecialPackageClass.requiredMethodRef("in") - def Special_in(using Context) = Special_inR.symbol - @threadUnsafe lazy val Special_instanceofR = SpecialPackageClass.requiredMethodRef("instanceof") - def Special_instanceof(using Context) = Special_instanceofR.symbol - @threadUnsafe lazy val Special_strictEqualsR = SpecialPackageClass.requiredMethodRef("strictEquals") - def Special_strictEquals(using Context) = Special_strictEqualsR.symbol - @threadUnsafe lazy val Special_throwR = SpecialPackageClass.requiredMethodRef("throw") - def Special_throw(using Context) = Special_throwR.symbol - @threadUnsafe lazy val Special_tryCatchR = SpecialPackageClass.requiredMethodRef("tryCatch") - def Special_tryCatch(using Context) = Special_tryCatchR.symbol - @threadUnsafe lazy val Special_wrapAsThrowableR = SpecialPackageClass.requiredMethodRef("wrapAsThrowable") - def Special_wrapAsThrowable(using Context) = Special_wrapAsThrowableR.symbol - @threadUnsafe lazy val Special_unwrapFromThrowableR = SpecialPackageClass.requiredMethodRef("unwrapFromThrowable") - def Special_unwrapFromThrowable(using Context) = Special_unwrapFromThrowableR.symbol - - @threadUnsafe lazy val WrappedArrayType: TypeRef = requiredClassRef("scala.scalajs.js.WrappedArray") - def WrappedArrayClass(using Context) = WrappedArrayType.symbol.asClass - - @threadUnsafe lazy val ScalaRunTime_isArrayR = defn.ScalaRuntimeModule.requiredMethodRef("isArray", List(???, ???)) - def ScalaRunTime_isArray(using Context): Symbol = ScalaRunTime_isArrayR.symbol - - @threadUnsafe lazy val BoxesRunTime_boxToCharacterR = defn.BoxesRunTimeModule.requiredMethodRef("boxToCharacter") - def BoxesRunTime_boxToCharacter(using Context): Symbol = BoxesRunTime_boxToCharacterR.symbol - @threadUnsafe lazy val BoxesRunTime_unboxToCharR = defn.BoxesRunTimeModule.requiredMethodRef("unboxToChar") - def BoxesRunTime_unboxToChar(using Context): Symbol = BoxesRunTime_unboxToCharR.symbol - - @threadUnsafe lazy val EnableReflectiveInstantiationAnnotType: TypeRef = requiredClassRef("scala.scalajs.reflect.annotation.EnableReflectiveInstantiation") - def EnableReflectiveInstantiationAnnot(using Context) = EnableReflectiveInstantiationAnnotType.symbol.asClass - - @threadUnsafe lazy val ReflectModuleRef = requiredModuleRef("scala.scalajs.reflect.Reflect") - def ReflectModule(using Context) = ReflectModuleRef.symbol - @threadUnsafe lazy val Reflect_registerLoadableModuleClassR = ReflectModule.requiredMethodRef("registerLoadableModuleClass") - def Reflect_registerLoadableModuleClass(using Context) = Reflect_registerLoadableModuleClassR.symbol - @threadUnsafe lazy val Reflect_registerInstantiatableClassR = ReflectModule.requiredMethodRef("registerInstantiatableClass") - def Reflect_registerInstantiatableClass(using Context) = Reflect_registerInstantiatableClassR.symbol - - @threadUnsafe lazy val ReflectSelectableType: TypeRef = requiredClassRef("scala.reflect.Selectable") - def ReflectSelectableClass(using Context) = ReflectSelectableType.symbol.asClass - @threadUnsafe lazy val ReflectSelectable_selectDynamicR = ReflectSelectableClass.requiredMethodRef("selectDynamic") - def ReflectSelectable_selectDynamic(using Context) = ReflectSelectable_selectDynamicR.symbol - @threadUnsafe 
lazy val ReflectSelectable_applyDynamicR = ReflectSelectableClass.requiredMethodRef("applyDynamic") - def ReflectSelectable_applyDynamic(using Context) = ReflectSelectable_applyDynamicR.symbol - - @threadUnsafe lazy val ReflectSelectableModuleRef = requiredModuleRef("scala.reflect.Selectable") - def ReflectSelectableModule(using Context) = ReflectSelectableModuleRef.symbol - @threadUnsafe lazy val ReflectSelectable_reflectiveSelectableR = ReflectSelectableModule.requiredMethodRef("reflectiveSelectable") - def ReflectSelectable_reflectiveSelectable(using Context) = ReflectSelectable_reflectiveSelectableR.symbol - - @threadUnsafe lazy val SelectableModuleRef = requiredModuleRef("scala.Selectable") - def SelectableModule(using Context) = SelectableModuleRef.symbol - @threadUnsafe lazy val Selectable_reflectiveSelectableFromLangReflectiveCallsR = SelectableModule.requiredMethodRef("reflectiveSelectableFromLangReflectiveCalls") - def Selectable_reflectiveSelectableFromLangReflectiveCalls(using Context) = Selectable_reflectiveSelectableFromLangReflectiveCallsR.symbol - - private var allRefClassesCache: Set[Symbol] = _ - def allRefClasses(using Context): Set[Symbol] = { - if (allRefClassesCache == null) { - val baseNames = List("Object", "Boolean", "Character", "Byte", "Short", - "Int", "Long", "Float", "Double") - val fullNames = baseNames.flatMap { base => - List(s"scala.runtime.${base}Ref", s"scala.runtime.Volatile${base}Ref") - } - allRefClassesCache = fullNames.map(name => requiredClass(name)).toSet - } - allRefClassesCache - } - - /** Definitions related to scala.Enumeration. */ - object scalaEnumeration { - val nmeValue = termName("Value") - val nmeVal = termName("Val") - val hasNext = termName("hasNext") - val next = termName("next") - - @threadUnsafe lazy val EnumerationClass = requiredClass("scala.Enumeration") - @threadUnsafe lazy val Enumeration_Value_NoArg = EnumerationClass.requiredValue(nmeValue) - @threadUnsafe lazy val Enumeration_Value_IntArg = EnumerationClass.requiredMethod(nmeValue, List(defn.IntType)) - @threadUnsafe lazy val Enumeration_Value_StringArg = EnumerationClass.requiredMethod(nmeValue, List(defn.StringType)) - @threadUnsafe lazy val Enumeration_Value_IntStringArg = EnumerationClass.requiredMethod(nmeValue, List(defn.IntType, defn.StringType)) - @threadUnsafe lazy val Enumeration_nextName = EnumerationClass.requiredMethod(termName("nextName")) - - @threadUnsafe lazy val EnumerationValClass = EnumerationClass.requiredClass("Val") - @threadUnsafe lazy val Enumeration_Val_NoArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, Nil) - @threadUnsafe lazy val Enumeration_Val_IntArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.IntType)) - @threadUnsafe lazy val Enumeration_Val_StringArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.StringType)) - @threadUnsafe lazy val Enumeration_Val_IntStringArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.IntType, defn.StringType)) - - def isValueMethod(sym: Symbol)(using Context): Boolean = - sym.name == nmeValue && sym.owner == EnumerationClass - - def isValueMethodNoName(sym: Symbol)(using Context): Boolean = - isValueMethod(sym) && (sym == Enumeration_Value_NoArg || sym == Enumeration_Value_IntArg) - - def isValueMethodName(sym: Symbol)(using Context): Boolean = - isValueMethod(sym) && (sym == Enumeration_Value_StringArg || sym == Enumeration_Value_IntStringArg) - - def isValCtor(sym: Symbol)(using Context): Boolean = - sym.isClassConstructor && sym.owner == 
EnumerationValClass - - def isValCtorNoName(sym: Symbol)(using Context): Boolean = - isValCtor(sym) && (sym == Enumeration_Val_NoArg || sym == Enumeration_Val_IntArg) - - def isValCtorName(sym: Symbol)(using Context): Boolean = - isValCtor(sym) && (sym == Enumeration_Val_StringArg || sym == Enumeration_Val_IntStringArg) - } - - /** Definitions related to the treatment of JUnit bootstrappers. */ - object junit { - @threadUnsafe lazy val TestAnnotType: TypeRef = requiredClassRef("org.junit.Test") - def TestAnnotClass(using Context): ClassSymbol = TestAnnotType.symbol.asClass - - @threadUnsafe lazy val BeforeAnnotType: TypeRef = requiredClassRef("org.junit.Before") - def BeforeAnnotClass(using Context): ClassSymbol = BeforeAnnotType.symbol.asClass - - @threadUnsafe lazy val AfterAnnotType: TypeRef = requiredClassRef("org.junit.After") - def AfterAnnotClass(using Context): ClassSymbol = AfterAnnotType.symbol.asClass - - @threadUnsafe lazy val BeforeClassAnnotType: TypeRef = requiredClassRef("org.junit.BeforeClass") - def BeforeClassAnnotClass(using Context): ClassSymbol = BeforeClassAnnotType.symbol.asClass - - @threadUnsafe lazy val AfterClassAnnotType: TypeRef = requiredClassRef("org.junit.AfterClass") - def AfterClassAnnotClass(using Context): ClassSymbol = AfterClassAnnotType.symbol.asClass - - @threadUnsafe lazy val IgnoreAnnotType: TypeRef = requiredClassRef("org.junit.Ignore") - def IgnoreAnnotClass(using Context): ClassSymbol = IgnoreAnnotType.symbol.asClass - - @threadUnsafe lazy val BootstrapperType: TypeRef = requiredClassRef("org.scalajs.junit.Bootstrapper") - - @threadUnsafe lazy val TestMetadataType: TypeRef = requiredClassRef("org.scalajs.junit.TestMetadata") - - @threadUnsafe lazy val NoSuchMethodExceptionType: TypeRef = requiredClassRef("java.lang.NoSuchMethodException") - - @threadUnsafe lazy val FutureType: TypeRef = requiredClassRef("scala.concurrent.Future") - def FutureClass(using Context): ClassSymbol = FutureType.symbol.asClass - - @threadUnsafe private lazy val FutureModule_successfulR = requiredModule("scala.concurrent.Future").requiredMethodRef("successful") - def FutureModule_successful(using Context): Symbol = FutureModule_successfulR.symbol - - @threadUnsafe private lazy val SuccessModule_applyR = requiredModule("scala.util.Success").requiredMethodRef(nme.apply) - def SuccessModule_apply(using Context): Symbol = SuccessModule_applyR.symbol - } - -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala b/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala deleted file mode 100644 index 518295543610..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala +++ /dev/null @@ -1,428 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import scala.collection.mutable - -import dotty.tools.dotc.core._ -import Contexts._ -import Flags._ -import Types._ -import Symbols._ -import NameOps._ -import Names._ -import StdNames._ - -import dotty.tools.dotc.transform.sjs.JSSymUtils._ - -import dotty.tools.sjs.ir -import dotty.tools.sjs.ir.{Trees => js, Types => jstpe} -import dotty.tools.sjs.ir.Names.{LocalName, LabelName, FieldName, SimpleMethodName, MethodName, ClassName} -import dotty.tools.sjs.ir.OriginalName -import dotty.tools.sjs.ir.OriginalName.NoOriginalName -import dotty.tools.sjs.ir.UTF8String - -import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions - -import JSDefinitions.jsdefn - -/** Encoding of symbol names for JavaScript - * - * Some issues that this encoding solves: - * * Overloading: 
encode the full signature in the JS name - * * Same scope for fields and methods of a class - * * Global access to classes and modules (by their full name) - * - * @author Sébastien Doeraene - */ -object JSEncoding { - - /** Name of the capture param storing the JS super class. - * - * This is used by the dispatchers of exposed JS methods and properties of - * nested JS classes when they need to perform a super call. Other super - * calls (in the actual bodies of the methods, not in the dispatchers) do - * not use this value, since they are implemented as static methods that do - * not have access to it. Instead, they get the JS super class value through - * the magic method inserted by `ExplicitLocalJS`, leveraging `lambdalift` - * to ensure that it is properly captured. - * - * Using this identifier is only allowed if it was reserved in the current - * local name scope using [[reserveLocalName]]. Otherwise, this name can - * clash with another local identifier. - */ - final val JSSuperClassParamName = LocalName("superClass$") - - private val ScalaRuntimeNothingClassName = ClassName("scala.runtime.Nothing$") - private val ScalaRuntimeNullClassName = ClassName("scala.runtime.Null$") - - private val dynamicImportForwarderSimpleName = SimpleMethodName("dynamicImport$") - - // Fresh local name generator ---------------------------------------------- - - class LocalNameGenerator { - import LocalNameGenerator._ - - private val usedLocalNames = mutable.Set.empty[LocalName] - private val localSymbolNames = mutable.Map.empty[Symbol, LocalName] - private val usedLabelNames = mutable.Set.empty[LabelName] - private val labelSymbolNames = mutable.Map.empty[Symbol, LabelName] - private var returnLabelName: Option[LabelName] = None - - def reserveLocalName(name: LocalName): Unit = { - require(usedLocalNames.isEmpty, - s"Trying to reserve the name '$name' but names have already been allocated") - usedLocalNames += name - } - - private def freshNameGeneric[N <: ir.Names.Name](base: N, usedNamesSet: mutable.Set[N])( - withSuffix: (N, String) => N): N = { - - var suffix = 1 - var result = base - while (usedNamesSet(result)) { - suffix += 1 - result = withSuffix(base, "$" + suffix) - } - usedNamesSet += result - result - } - - def freshName(base: LocalName): LocalName = - freshNameGeneric(base, usedLocalNames)(_.withSuffix(_)) - - def freshName(base: String): LocalName = - freshName(LocalName(base)) - - def freshLocalIdent()(implicit pos: ir.Position): js.LocalIdent = - js.LocalIdent(freshName(xLocalName)) - - def freshLocalIdent(base: LocalName)(implicit pos: ir.Position): js.LocalIdent = - js.LocalIdent(freshName(base)) - - def freshLocalIdent(base: String)(implicit pos: ir.Position): js.LocalIdent = - freshLocalIdent(LocalName(base)) - - def freshLocalIdent(base: TermName)(implicit pos: ir.Position): js.LocalIdent = - freshLocalIdent(base.mangledString) - - def localSymbolName(sym: Symbol)(using Context): LocalName = { - localSymbolNames.getOrElseUpdate(sym, { - /* The emitter does not like local variables that start with a '$', - * because it needs to encode them not to clash with emitter-generated - * names. There are two common cases, caused by scalac-generated names: - * - the `$this` parameter of tailrec methods and "extension" methods of - * AnyVals, which scalac knows as `nme.SELF`, and - * - the `$outer` parameter of inner class constructors, which scalac - * knows as `nme.OUTER`. 
- * We choose different base names for those two cases instead, so that - * the avoidance mechanism of the emitter doesn't happen as a common - * case. It can still happen for user-defined variables, but in that case - * the emitter will deal with it. - */ - val base = sym.name match { - case nme.SELF => "this$" // instead of $this - case nme.OUTER => "outer" // instead of $outer - case name => name.mangledString - } - freshName(base) - }) - } - - def freshLabelName(base: LabelName): LabelName = - freshNameGeneric(base, usedLabelNames)(_.withSuffix(_)) - - def freshLabelName(base: String): LabelName = - freshLabelName(LabelName(base)) - - def freshLabelIdent(base: String)(implicit pos: ir.Position): js.LabelIdent = - js.LabelIdent(freshLabelName(base)) - - def labelSymbolName(sym: Symbol)(using Context): LabelName = - labelSymbolNames.getOrElseUpdate(sym, freshLabelName(sym.javaSimpleName)) - - def getEnclosingReturnLabel()(implicit pos: ir.Position): js.LabelIdent = { - if (returnLabelName.isEmpty) - returnLabelName = Some(freshLabelName("_return")) - js.LabelIdent(returnLabelName.get) - } - - /* If this `LocalNameGenerator` has a `returnLabelName` (often added in the - * construction of the `body` argument), wrap the resulting js.Tree to use that label. - */ - def makeLabeledIfRequiresEnclosingReturn(tpe: jstpe.Type)(body: js.Tree)(implicit pos: ir.Position): js.Tree = { - returnLabelName match { - case None => - body - case Some(labelName) => - js.Labeled(js.LabelIdent(labelName), tpe, body) - } - } - } - - private object LocalNameGenerator { - private val xLocalName = LocalName("x") - } - - // Encoding methods ---------------------------------------------------------- - - def encodeLabelSym(sym: Symbol)( - implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LabelIdent = { - require(sym.is(Flags.Label), "encodeLabelSym called with non-label symbol: " + sym) - js.LabelIdent(localNames.labelSymbolName(sym)) - } - - def encodeFieldSym(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.FieldIdent = - js.FieldIdent(FieldName(encodeFieldSymAsString(sym))) - - def encodeFieldSymAsStringLiteral(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.StringLiteral = - js.StringLiteral(encodeFieldSymAsString(sym)) - - private def encodeFieldSymAsString(sym: Symbol)(using Context): String = { - require(sym.owner.isClass && sym.isTerm && !sym.isOneOf(MethodOrModule), - "encodeFieldSym called with non-field symbol: " + sym) - - val name0 = sym.javaSimpleName - if (name0.charAt(name0.length() - 1) != ' ') name0 - else name0.substring(0, name0.length() - 1) - } - - def encodeMethodSym(sym: Symbol, reflProxy: Boolean = false)( - implicit ctx: Context, pos: ir.Position): js.MethodIdent = { - require(sym.is(Flags.Method), "encodeMethodSym called with non-method symbol: " + sym) - - val tpe = sym.info - - val paramTypeRefs0 = tpe.firstParamTypes.map(paramOrResultTypeRef(_)) - - val hasExplicitThisParameter = !sym.is(JavaStatic) && sym.owner.isNonNativeJSClass - val paramTypeRefs = - if (!hasExplicitThisParameter) paramTypeRefs0 - else encodeClassRef(sym.owner) :: paramTypeRefs0 - - val name = sym.name - val simpleName = SimpleMethodName(name.mangledString) - - val methodName = { - if (sym.isClassConstructor) - MethodName.constructor(paramTypeRefs) - else if (reflProxy) - MethodName.reflectiveProxy(simpleName, paramTypeRefs) - else - MethodName(simpleName, paramTypeRefs, paramOrResultTypeRef(patchedResultType(sym))) - } - - js.MethodIdent(methodName) - } - - def 
encodeJSNativeMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { - require(sym.hasAnnotation(jsdefn.JSNativeAnnot), - "encodeJSNativeMemberSym called with non-native symbol: " + sym) - if (sym.is(Method)) - encodeMethodSym(sym) - else - encodeFieldSymAsMethod(sym) - } - - def encodeStaticMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { - require(sym.is(Flags.JavaStaticTerm), - "encodeStaticMemberSym called with non-static symbol: " + sym) - encodeFieldSymAsMethod(sym) - } - - private def encodeFieldSymAsMethod(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { - val name = sym.name - val resultTypeRef = paramOrResultTypeRef(sym.info) - val methodName = MethodName(name.mangledString, Nil, resultTypeRef) - js.MethodIdent(methodName) - } - - def encodeDynamicImportForwarderIdent(params: List[Symbol])(using Context, ir.Position): js.MethodIdent = { - val paramTypeRefs = params.map(sym => paramOrResultTypeRef(sym.info)) - val resultTypeRef = jstpe.ClassRef(ir.Names.ObjectClass) - val methodName = MethodName(dynamicImportForwarderSimpleName, paramTypeRefs, resultTypeRef) - js.MethodIdent(methodName) - } - - /** Computes the type ref for a type, to be used in a method signature. */ - private def paramOrResultTypeRef(tpe: Type)(using Context): jstpe.TypeRef = - toParamOrResultTypeRef(toTypeRef(tpe)) - - def encodeLocalSym(sym: Symbol)( - implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LocalIdent = { - require(!sym.owner.isClass && sym.isTerm && !sym.is(Flags.Method) && !sym.is(Flags.Module), - "encodeLocalSym called with non-local symbol: " + sym) - js.LocalIdent(localNames.localSymbolName(sym)) - } - - def encodeClassType(sym: Symbol)(using Context): jstpe.Type = { - if (sym == defn.ObjectClass) jstpe.AnyType - else if (sym.isJSType) jstpe.AnyType - else { - assert(sym != defn.ArrayClass, - "encodeClassType() cannot be called with ArrayClass") - jstpe.ClassType(encodeClassName(sym)) - } - } - - def encodeClassRef(sym: Symbol)(using Context): jstpe.ClassRef = - jstpe.ClassRef(encodeClassName(sym)) - - def encodeClassNameIdent(sym: Symbol)( - implicit ctx: Context, pos: ir.Position): js.ClassIdent = - js.ClassIdent(encodeClassName(sym)) - - def encodeClassName(sym: Symbol)(using Context): ClassName = { - val sym1 = - if (sym.isAllOf(ModuleClass | JavaDefined)) sym.linkedClass - else sym - - /* Some rewirings: - * - scala.runtime.BoxedUnit to java.lang.Void, as the IR expects. - * BoxedUnit$ is a JVM artifact. - * - scala.Nothing to scala.runtime.Nothing$. - * - scala.Null to scala.runtime.Null$. - */ - if (sym1 == defn.BoxedUnitClass) - ir.Names.BoxedUnitClass - else if (sym1 == defn.NothingClass) - ScalaRuntimeNothingClassName - else if (sym1 == defn.NullClass) - ScalaRuntimeNullClassName - else - ClassName(sym1.javaClassName) - } - - /** Converts a general TypeRef to a TypeRef to be used in a method signature. 
*/ - def toParamOrResultTypeRef(typeRef: jstpe.TypeRef): jstpe.TypeRef = { - typeRef match { - case jstpe.ClassRef(ScalaRuntimeNullClassName) => jstpe.NullRef - case jstpe.ClassRef(ScalaRuntimeNothingClassName) => jstpe.NothingRef - case _ => typeRef - } - } - - def toIRTypeAndTypeRef(tp: Type)(using Context): (jstpe.Type, jstpe.TypeRef) = { - val typeRefInternal = toTypeRefInternal(tp) - (toIRTypeInternal(typeRefInternal), typeRefInternal._1) - } - - def toIRType(tp: Type)(using Context): jstpe.Type = - toIRTypeInternal(toTypeRefInternal(tp)) - - private def toIRTypeInternal(typeRefInternal: (jstpe.TypeRef, Symbol))(using Context): jstpe.Type = { - typeRefInternal._1 match { - case jstpe.PrimRef(irTpe) => - irTpe - - case typeRef: jstpe.ClassRef => - val sym = typeRefInternal._2 - if (sym == defn.ObjectClass || sym.isJSType) - jstpe.AnyType - else if (sym == defn.NothingClass) - jstpe.NothingType - else if (sym == defn.NullClass) - jstpe.NullType - else - jstpe.ClassType(typeRef.className) - - case typeRef: jstpe.ArrayTypeRef => - jstpe.ArrayType(typeRef) - } - } - - def toTypeRef(tp: Type)(using Context): jstpe.TypeRef = - toTypeRefInternal(tp)._1 - - private def toTypeRefInternal(tp: Type)(using Context): (jstpe.TypeRef, Symbol) = { - def primitiveOrClassToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { - assert(sym.isClass, sym) - //assert(sym != defn.ArrayClass || isCompilingArray, sym) - val typeRef = if (sym.isPrimitiveValueClass) { - if (sym == defn.UnitClass) jstpe.VoidRef - else if (sym == defn.BooleanClass) jstpe.BooleanRef - else if (sym == defn.CharClass) jstpe.CharRef - else if (sym == defn.ByteClass) jstpe.ByteRef - else if (sym == defn.ShortClass) jstpe.ShortRef - else if (sym == defn.IntClass) jstpe.IntRef - else if (sym == defn.LongClass) jstpe.LongRef - else if (sym == defn.FloatClass) jstpe.FloatRef - else if (sym == defn.DoubleClass) jstpe.DoubleRef - else throw new Exception(s"unknown primitive value class $sym") - } else { - encodeClassRef(sym) - } - (typeRef, sym) - } - - /** - * When compiling Array.scala, the type parameter T is not erased and shows up in method - * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference. - */ - def nonClassTypeRefToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { - //assert(sym.isType && isCompilingArray, sym) - (jstpe.ClassRef(ir.Names.ObjectClass), defn.ObjectClass) - } - - tp.widenDealias match { - // Array type such as Array[Int] (kept by erasure) - case JavaArrayType(el) => - val elTypeRef = toTypeRefInternal(el) - (jstpe.ArrayTypeRef.of(elTypeRef._1), elTypeRef._2) - - case t: TypeRef => - if (!t.symbol.isClass) nonClassTypeRefToTypeRef(t.symbol) // See comment on nonClassTypeRefToBType - else primitiveOrClassToTypeRef(t.symbol) // Common reference to a type such as scala.Int or java.lang.String - - case Types.ClassInfo(_, sym, _, _, _) => - /* We get here, for example, for genLoadModule, which invokes - * toTypeKind(moduleClassSymbol.info) - */ - primitiveOrClassToTypeRef(sym) - - /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for - * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning. - * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala. - */ - case a @ AnnotatedType(t, _) => - //debuglog(s"typeKind of annotated type $a") - toTypeRefInternal(t) - } - } - - /** Patches the result type of a method symbol to sanitize it. 
- * - * For some reason, dotc thinks that the `info.resultType`of an - * `isConstructor` method (for classes or traits) is the enclosing class - * or trait, but the bodies and usages act as if the result type was `Unit`. - * - * This method returns `UnitType` for constructor methods, and otherwise - * `sym.info.resultType`. - */ - def patchedResultType(sym: Symbol)(using Context): Type = - if (sym.isConstructor) defn.UnitType - else sym.info.resultType - - def originalNameOfLocal(sym: Symbol)( - implicit ctx: Context, localNames: LocalNameGenerator): OriginalName = { - val irName = localNames.localSymbolName(sym) - val originalName = UTF8String(sym.name.unexpandedName.toString) - if (UTF8String.equals(originalName, irName.encoded)) NoOriginalName - else OriginalName(originalName) - } - - def originalNameOfField(sym: Symbol)(using Context): OriginalName = - originalNameOf(sym.name) - - def originalNameOfMethod(sym: Symbol)(using Context): OriginalName = - originalNameOf(sym.name) - - def originalNameOfClass(sym: Symbol)(using Context): OriginalName = - originalNameOf(sym.fullName) - - private def originalNameOf(name: Name): OriginalName = { - val originalName = name.unexpandedName.toString - if (originalName == name.mangledString) NoOriginalName - else OriginalName(originalName) - } -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala deleted file mode 100644 index 82b69e6a16a7..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala +++ /dev/null @@ -1,1025 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import scala.annotation.tailrec -import scala.collection.mutable - -import dotty.tools.dotc.core._ - -import Contexts._ -import Decorators._ -import Denotations._ -import Flags._ -import Names._ -import NameKinds.DefaultGetterName -import NameOps._ -import Phases._ -import Symbols._ -import Types._ -import TypeErasure.ErasedValueType - -import dotty.tools.dotc.util.{SourcePosition, SrcPos} -import dotty.tools.dotc.report - -import dotty.tools.sjs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} -import dotty.tools.sjs.ir.Names.DefaultModuleID -import dotty.tools.sjs.ir.OriginalName.NoOriginalName -import dotty.tools.sjs.ir.Position.NoPosition -import dotty.tools.sjs.ir.Trees.OptimizerHints - -import dotty.tools.dotc.transform.sjs.JSExportUtils._ -import dotty.tools.dotc.transform.sjs.JSSymUtils._ - -import JSEncoding._ - -final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { - import jsCodeGen._ - import positionConversions._ - - /** Info for a non-member export. 
*/ - sealed trait ExportInfo { - val pos: SourcePosition - } - - final case class TopLevelExportInfo(moduleID: String, jsName: String)(val pos: SourcePosition) extends ExportInfo - final case class StaticExportInfo(jsName: String)(val pos: SourcePosition) extends ExportInfo - - private sealed trait ExportKind - - private object ExportKind { - case object Module extends ExportKind - case object JSClass extends ExportKind - case object Constructor extends ExportKind - case object Method extends ExportKind - case object Property extends ExportKind - case object Field extends ExportKind - - def apply(sym: Symbol): ExportKind = { - if (sym.is(Flags.Module) && sym.isStatic) Module - else if (sym.isClass) JSClass - else if (sym.isConstructor) Constructor - else if (!sym.is(Flags.Method)) Field - else if (sym.isJSProperty) Property - else Method - } - } - - private def topLevelExportsOf(sym: Symbol): List[TopLevelExportInfo] = { - def isScalaClass(sym: Symbol): Boolean = - sym.isClass && !sym.isOneOf(Module | Trait) && !sym.isJSType - - if (isScalaClass(sym)) { - // Scala classes are never exported; their constructors are - Nil - } else if (sym.is(Accessor) || sym.is(Module, butNot = ModuleClass)) { - /* - Accessors receive the `@JSExportTopLevel` annotation of their associated field, - * but only the field is really exported. - * - Module values are not exported; their module class takes care of the export. - */ - Nil - } else { - val symForAnnot = - if (sym.isConstructor && isScalaClass(sym.owner)) sym.owner - else sym - - symForAnnot.annotations.collect { - case annot if annot.symbol == jsdefn.JSExportTopLevelAnnot => - val jsName = annot.argumentConstantString(0).get - val moduleID = annot.argumentConstantString(1).getOrElse(DefaultModuleID) - TopLevelExportInfo(moduleID, jsName)(annot.tree.sourcePos) - } - } - } - - private def staticExportsOf(sym: Symbol): List[StaticExportInfo] = { - if (sym.is(Accessor)) { - Nil - } else { - sym.annotations.collect { - case annot if annot.symbol == jsdefn.JSExportStaticAnnot => - val jsName = annot.argumentConstantString(0).getOrElse { - sym.defaultJSName - } - StaticExportInfo(jsName)(annot.tree.sourcePos) - } - } - } - - private def checkSameKind(tups: List[(ExportInfo, Symbol)]): Option[ExportKind] = { - assert(tups.nonEmpty, "must have at least one export") - - val firstSym = tups.head._2 - val overallKind = ExportKind(firstSym) - var bad = false - - for ((info, sym) <- tups.tail) { - val kind = ExportKind(sym) - - if (kind != overallKind) { - bad = true - report.error( - em"export overload conflicts with export of $firstSym: they are of different types (${kind.tryToShow} / ${overallKind.tryToShow})", - info.pos) - } - } - - if (bad) None - else Some(overallKind) - } - - private def checkSingleField(tups: List[(ExportInfo, Symbol)]): Symbol = { - assert(tups.nonEmpty, "must have at least one export") - - val firstSym = tups.head._2 - - for ((info, _) <- tups.tail) { - report.error( - em"export overload conflicts with export of $firstSym: a field may not share its exported name with another export", - info.pos) - } - - firstSym - } - - def genTopLevelExports(classSym: ClassSymbol): List[js.TopLevelExportDef] = { - val exports = for { - sym <- classSym :: classSym.info.decls.toList - info <- topLevelExportsOf(sym) - } yield { - (info, sym) - } - - (for { - (info, tups) <- exports.groupBy(_._1) - kind <- checkSameKind(tups) - } yield { - import ExportKind._ - - implicit val pos = info.pos - - kind match { - case Module => - 
js.TopLevelModuleExportDef(info.moduleID, info.jsName) - - case JSClass => - assert(classSym.isNonNativeJSClass, "found export on non-JS class") - js.TopLevelJSClassExportDef(info.moduleID, info.jsName) - - case Constructor | Method => - val exported = tups.map(_._2) - - val methodDef = withNewLocalNameScope { - genExportMethod(exported, JSName.Literal(info.jsName), static = true) - } - - js.TopLevelMethodExportDef(info.moduleID, methodDef) - - case Property => - throw new AssertionError("found top-level exported property") - - case Field => - val sym = checkSingleField(tups) - js.TopLevelFieldExportDef(info.moduleID, info.jsName, encodeFieldSym(sym)) - } - }).toList - } - - def genStaticExports(classSym: Symbol): List[js.MemberDef] = { - val exports = for { - sym <- classSym.info.decls.toList - info <- staticExportsOf(sym) - } yield { - (info, sym) - } - - (for { - (info, tups) <- exports.groupBy(_._1) - kind <- checkSameKind(tups) - } yield { - def alts = tups.map(_._2) - - implicit val pos = info.pos - - import ExportKind._ - - kind match { - case Method => - genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = false, alts, static = true) - - case Property => - genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = true, alts, static = true) - - case Field => - val sym = checkSingleField(tups) - - // static fields must always be mutable - val flags = js.MemberFlags.empty - .withNamespace(js.MemberNamespace.PublicStatic) - .withMutable(true) - val name = js.StringLiteral(info.jsName) - val irTpe = genExposedFieldIRType(sym) - js.JSFieldDef(flags, name, irTpe) - - case kind => - throw new AssertionError(s"unexpected static export kind: $kind") - } - }).toList - } - - /** Generates exported methods and properties for a class. - * - * @param classSym symbol of the class we export for - */ - def genMemberExports(classSym: ClassSymbol): List[js.MemberDef] = { - val classInfo = classSym.info - val allExports = classInfo.memberDenots(takeAllFilter, { (name, buf) => - if (isExportName(name)) - buf ++= classInfo.member(name).alternatives - }) - - val newlyDeclaredExports = if (classSym.superClass == NoSymbol) { - allExports - } else { - allExports.filterNot { denot => - classSym.superClass.info.member(denot.name).hasAltWith(_.info =:= denot.info) - } - } - - val newlyDeclaredExportNames = newlyDeclaredExports.map(_.name.toTermName).toList.distinct - - newlyDeclaredExportNames.map(genMemberExport(classSym, _)) - } - - private def genMemberExport(classSym: ClassSymbol, name: TermName): js.MemberDef = { - /* This used to be `.member(name)`, but it caused #3538, since we were - * sometimes selecting mixin forwarders, whose type history does not go - * far enough back in time to see varargs. We now explicitly exclude - * mixed-in members in addition to bridge methods (the latter are always - * excluded by `.member(name)`). - */ - val alts = classSym - .findMemberNoShadowingBasedOnFlags(name, classSym.appliedRef, required = Method, excluded = Bridge | MixedIn) - .alternatives - - assert(!alts.isEmpty, - em"""Ended up with no alternatives for ${classSym.fullName}::$name. 
- |Original set was ${alts} with types ${alts.map(_.info)}""") - - val (jsName, isProp) = exportNameInfo(name) - - // Check if we have a conflicting export of the other kind - val conflicting = classSym.info.member(makeExportName(jsName, !isProp)) - - if (conflicting.exists) { - val kind = if (isProp) "property" else "method" - val conflictingMember = conflicting.alternatives.head.symbol.fullName - val errorPos: SrcPos = alts.map(_.symbol).filter(_.owner == classSym) match { - case Nil => classSym - case altsInClass => altsInClass.minBy(_.span.point) - } - report.error(em"Exported $kind $jsName conflicts with $conflictingMember", errorPos) - } - - genMemberExportOrDispatcher(JSName.Literal(jsName), isProp, alts.map(_.symbol), static = false) - } - - def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.MemberDef] = { - dispatchMethodsNames.map(genJSClassDispatcher(classSym, _)) - } - - private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.MemberDef = { - val alts = classSym.info.membersBasedOnFlags(required = Method, excluded = Bridge) - .map(_.symbol) - .filter { sym => - /* scala-js#3939: Object is not a "real" superclass of JS types. - * as such, its methods do not participate in overload resolution. - * An exception is toString, which is handled specially in genExportMethod. - */ - sym.owner != defn.ObjectClass && sym.jsName == name - } - .toList - - assert(!alts.isEmpty, s"Ended up with no alternatives for ${classSym.fullName}::$name.") - - val (propSyms, methodSyms) = alts.partition(_.isJSProperty) - val isProp = propSyms.nonEmpty - - if (isProp && methodSyms.nonEmpty) { - val firstAlt = alts.head - report.error( - em"Conflicting properties and methods for ${classSym.fullName}::$name.", - firstAlt.srcPos) - implicit val pos = firstAlt.span - js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None) - } else { - genMemberExportOrDispatcher(name, isProp, alts, static = false) - } - } - - private def genMemberExportOrDispatcher(jsName: JSName, isProp: Boolean, - alts: List[Symbol], static: Boolean): js.MemberDef = { - withNewLocalNameScope { - if (isProp) - genExportProperty(alts, jsName, static) - else - genExportMethod(alts, jsName, static) - } - } - - private def genExportProperty(alts: List[Symbol], jsName: JSName, static: Boolean): js.JSPropertyDef = { - assert(!alts.isEmpty, s"genExportProperty with empty alternatives for $jsName") - - implicit val pos: Position = alts.head.span - - val namespace = - if (static) js.MemberNamespace.PublicStatic - else js.MemberNamespace.Public - val flags = js.MemberFlags.empty.withNamespace(namespace) - - /* Separate getters and setters. Since we only have getters and setters, we - * simply test the param list size, which is faster than using the full isJSGetter. 
- */ - val (getter, setters) = alts.partition(_.info.paramInfoss.head.isEmpty) - - // We can have at most one getter - if (getter.sizeIs > 1) - reportCannotDisambiguateError(jsName, alts) - - val getterBody = getter.headOption.map { getterSym => - genApplyForSingleExported(new FormalArgsRegistry(0, false), new ExportedSymbol(getterSym, static), static) - } - - val setterArgAndBody = { - if (setters.isEmpty) { - None - } else { - val formalArgsRegistry = new FormalArgsRegistry(1, false) - val (List(arg), None) = formalArgsRegistry.genFormalArgs(): @unchecked - val body = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, - setters.map(new ExportedSymbol(_, static)), jstpe.AnyType, None) - Some((arg, body)) - } - } - - js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody) - } - - private def genExportMethod(alts0: List[Symbol], jsName: JSName, static: Boolean)(using Context): js.JSMethodDef = { - assert(alts0.nonEmpty, "need at least one alternative to generate exporter method") - - implicit val pos: SourcePosition = alts0.head.sourcePos - - val namespace = - if (static) js.MemberNamespace.PublicStatic - else js.MemberNamespace.Public - val flags = js.MemberFlags.empty.withNamespace(namespace) - - // toString() is always exported. We might need to add it here to get correct overloading. - val alts = jsName match { - case JSName.Literal("toString") if alts0.forall(_.info.paramInfoss.exists(_.nonEmpty)) => - defn.Any_toString :: alts0 - case _ => - alts0 - } - - val overloads = alts.map(new ExportedSymbol(_, static)) - - val (formalArgs, restParam, body) = - genOverloadDispatch(jsName, overloads, jstpe.AnyType) - - js.JSMethodDef(flags, genExpr(jsName), formalArgs, restParam, body)( - OptimizerHints.empty, None) - } - - def genOverloadDispatch(jsName: JSName, alts: List[Exported], tpe: jstpe.Type)( - using pos: SourcePosition): (List[js.ParamDef], Option[js.ParamDef], js.Tree) = { - - // Create the formal args registry - val hasVarArg = alts.exists(_.hasRepeatedParam) - val minArgc = alts.map(_.minArgc).min - val maxNonRepeatedArgc = alts.map(_.maxNonRepeatedArgc).max - val needsRestParam = maxNonRepeatedArgc != minArgc || hasVarArg - val formalArgsRegistry = new FormalArgsRegistry(minArgc, needsRestParam) - - // Generate the list of formal parameters - val (formalArgs, restParam) = formalArgsRegistry.genFormalArgs() - - /* Generate the body - * We have a fast-path for methods that are not overloaded. In addition to - * being a fast path, it does a better job than `genExportMethodMultiAlts` - * when the only alternative has default parameters, because it avoids a - * spurious dispatch. - * In scalac, the spurious dispatch was avoided by a more elaborate case - * generation in `genExportMethod`, which was very convoluted and was not - * ported to dotc. 
- */ - val body = - if (alts.tail.isEmpty) alts.head.genBody(formalArgsRegistry) - else genExportMethodMultiAlts(formalArgsRegistry, maxNonRepeatedArgc, alts, tpe, jsName) - - (formalArgs, restParam, body) - } - - private def genExportMethodMultiAlts(formalArgsRegistry: FormalArgsRegistry, - maxNonRepeatedArgc: Int, alts: List[Exported], tpe: jstpe.Type, jsName: JSName)( - implicit pos: SourcePosition): js.Tree = { - - // Generate tuples (argc, method) - val methodArgCounts = for { - alt <- alts - argc <- alt.minArgc to (if (alt.hasRepeatedParam) maxNonRepeatedArgc else alt.maxNonRepeatedArgc) - } yield { - (argc, alt) - } - - // Create a list of (argCount -> methods), sorted by argCount (methods may appear multiple times) - val methodsByArgCount: List[(Int, List[Exported])] = - methodArgCounts.groupMap(_._1)(_._2).toList.sortBy(_._1) // sort for determinism - - val altsWithVarArgs = alts.filter(_.hasRepeatedParam) - - // Generate a case block for each (argCount, methods) tuple - // TODO? We could optimize this a bit by putting together all the `argCount`s that have the same methods - // (Scala.js for scalac does that, but the code is very convoluted and it's not clear that it is worth it). - val cases = for { - (argc, methods) <- methodsByArgCount - if methods != altsWithVarArgs // exclude default case we're generating anyways for varargs - } yield { - // body of case to disambiguates methods with current count - val caseBody = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, methods, tpe, Some(argc)) - List(js.IntLiteral(argc - formalArgsRegistry.minArgc)) -> caseBody - } - - def defaultCase = { - if (altsWithVarArgs.isEmpty) - genThrowTypeError() - else - genOverloadDispatchSameArgc(jsName, formalArgsRegistry, altsWithVarArgs, tpe, None) - } - - val body = { - if (cases.isEmpty) { - defaultCase - } else if (cases.tail.isEmpty && altsWithVarArgs.isEmpty) { - cases.head._2 - } else { - val restArgRef = formalArgsRegistry.genRestArgRef() - js.Match( - js.AsInstanceOf(js.JSSelect(restArgRef, js.StringLiteral("length")), jstpe.IntType), - cases, - defaultCase)( - tpe) - } - } - - body - } - - /** Resolves method calls to [[alts]] while assuming they have the same parameter count. - * - * @param jsName - * The JS name of the method, for error reporting - * @param formalArgsRegistry - * The registry of all the formal arguments - * @param alts - * Alternative methods - * @param tpe - * Result type - * @param maxArgc - * Maximum number of arguments to use for disambiguation - */ - private def genOverloadDispatchSameArgc(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, - alts: List[Exported], tpe: jstpe.Type, maxArgc: Option[Int]): js.Tree = { - genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex = 0, maxArgc) - } - - /** Resolves method calls to [[alts]] while assuming they have the same parameter count. 
- * - * @param jsName - * The JS name of the method, for error reporting - * @param formalArgsRegistry - * The registry of all the formal arguments - * @param alts - * Alternative methods - * @param tpe - * Result type - * @param paramIndex - * Index where to start disambiguation (starts at 0, increases through recursion) - * @param maxArgc - * Maximum number of arguments to use for disambiguation - */ - private def genOverloadDispatchSameArgcRec(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, - alts: List[Exported], tpe: jstpe.Type, paramIndex: Int, maxArgc: Option[Int]): js.Tree = { - - implicit val pos = alts.head.pos - - if (alts.sizeIs == 1) { - alts.head.genBody(formalArgsRegistry) - } else if (maxArgc.exists(_ <= paramIndex) || !alts.exists(_.params.size > paramIndex)) { - // We reach here in three cases: - // 1. The parameter list has been exhausted - // 2. The optional argument count restriction has triggered - // 3. We only have (more than once) repeated parameters left - // Therefore, we should fail - reportCannotDisambiguateError(jsName, alts.map(_.sym)) - js.Undefined() - } else { - val altsByTypeTest = groupByWithoutHashCode(alts) { exported => - typeTestForTpe(exported.exportArgTypeAt(paramIndex)) - } - - if (altsByTypeTest.size == 1) { - // Testing this parameter is not doing any us good - genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex + 1, maxArgc) - } else { - // Sort them so that, e.g., isInstanceOf[String] comes before isInstanceOf[Object] - val sortedAltsByTypeTest = topoSortDistinctsWith(altsByTypeTest) { (lhs, rhs) => - (lhs._1, rhs._1) match { - // NoTypeTest is always last - case (_, NoTypeTest) => true - case (NoTypeTest, _) => false - - case (PrimitiveTypeTest(_, rank1), PrimitiveTypeTest(_, rank2)) => - rank1 <= rank2 - - case (InstanceOfTypeTest(t1), InstanceOfTypeTest(t2)) => - t1 <:< t2 - - case (_: PrimitiveTypeTest, _: InstanceOfTypeTest) => true - case (_: InstanceOfTypeTest, _: PrimitiveTypeTest) => false - } - } - - val defaultCase = genThrowTypeError() - - sortedAltsByTypeTest.foldRight[js.Tree](defaultCase) { (elem, elsep) => - val (typeTest, subAlts) = elem - implicit val pos = subAlts.head.pos - - val paramRef = formalArgsRegistry.genArgRef(paramIndex) - val genSubAlts = genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, - subAlts, tpe, paramIndex + 1, maxArgc) - - def hasDefaultParam = subAlts.exists(_.hasDefaultAt(paramIndex)) - - val optCond = typeTest match { - case PrimitiveTypeTest(tpe, _) => Some(js.IsInstanceOf(paramRef, tpe)) - case InstanceOfTypeTest(tpe) => Some(genIsInstanceOf(paramRef, tpe)) - case NoTypeTest => None - } - - optCond.fold[js.Tree] { - genSubAlts // note: elsep is discarded, obviously - } { cond => - val condOrUndef = if (!hasDefaultParam) cond else { - js.If(cond, js.BooleanLiteral(true), - js.BinaryOp(js.BinaryOp.===, paramRef, js.Undefined()))( - jstpe.BooleanType) - } - js.If(condOrUndef, genSubAlts, elsep)(tpe) - } - } - } - } - } - - private def reportCannotDisambiguateError(jsName: JSName, alts: List[Symbol]): Unit = { - val currentClass = currentClassSym.get - - /* Find a position that is in the current class for decent error reporting. - * If there are more than one, always use the "highest" one (i.e., the - * one coming last in the source text) so that we reliably display the - * same error in all compilers. 
- */ - val validPositions = alts.collect { - case alt if alt.owner == currentClass => alt.sourcePos - } - val pos: SourcePosition = - if (validPositions.isEmpty) currentClass.sourcePos - else validPositions.maxBy(_.point) - - val kind = - if (alts.head.isJSGetter) "getter" - else if (alts.head.isJSSetter) "setter" - else "method" - - val fullKind = - if (currentClass.isJSType) kind - else "exported " + kind - - val displayName = jsName.displayName - val altsTypesInfo = alts.map(_.info.show).sorted.mkString("\n ") - - report.error( - em"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", - pos) - } - - /** Generates a call to the method represented by the given `exported` while using the formalArguments - * and potentially the argument array. - * - * Also inserts default parameters if required. - */ - private def genApplyForSingleExported(formalArgsRegistry: FormalArgsRegistry, - exported: Exported, static: Boolean): js.Tree = { - if (currentClassSym.isJSType && exported.sym.owner != currentClassSym.get) { - assert(!static, s"nonsensical JS super call in static export of ${exported.sym}") - genApplyForSingleExportedJSSuperCall(formalArgsRegistry, exported) - } else { - genApplyForSingleExportedNonJSSuperCall(formalArgsRegistry, exported, static) - } - } - - private def genApplyForSingleExportedJSSuperCall( - formalArgsRegistry: FormalArgsRegistry, exported: Exported): js.Tree = { - implicit val pos = exported.pos - - val sym = exported.sym - assert(!sym.isClassConstructor, - s"Trying to genApplyForSingleExportedJSSuperCall for the constructor ${sym.fullName}") - - val allArgs = formalArgsRegistry.genAllArgsRefsForForwarder() - - val superClass = { - val superClassSym = currentClassSym.asClass.superClass - if (superClassSym.isNestedJSClass) - js.VarRef(js.LocalIdent(JSSuperClassParamName))(jstpe.AnyType) - else - js.LoadJSConstructor(encodeClassName(superClassSym)) - } - - val receiver = js.This()(currentThisType) - val nameTree = genExpr(sym.jsName) - - if (sym.isJSGetter) { - assert(allArgs.isEmpty, - s"getter symbol $sym does not have a getter signature") - js.JSSuperSelect(superClass, receiver, nameTree) - } else if (sym.isJSSetter) { - assert(allArgs.size == 1 && allArgs.head.isInstanceOf[js.Tree], - s"setter symbol $sym does not have a setter signature") - js.Assign(js.JSSuperSelect(superClass, receiver, nameTree), - allArgs.head.asInstanceOf[js.Tree]) - } else { - js.JSSuperMethodCall(superClass, receiver, nameTree, allArgs) - } - } - - private def genApplyForSingleExportedNonJSSuperCall( - formalArgsRegistry: FormalArgsRegistry, exported: Exported, static: Boolean): js.Tree = { - - implicit val pos = exported.pos - - val varDefs = new mutable.ListBuffer[js.VarDef] - - for ((param, i) <- exported.params.zipWithIndex) { - val rhs = genScalaArg(exported, i, formalArgsRegistry, param, static, captures = Nil)( - prevArgsCount => varDefs.take(prevArgsCount).toList.map(_.ref)) - - varDefs += js.VarDef(freshLocalIdent("prep" + i), NoOriginalName, rhs.tpe, mutable = false, rhs) - } - - val builtVarDefs = varDefs.result() - - val jsResult = genResult(exported, builtVarDefs.map(_.ref), static) - - js.Block(builtVarDefs :+ jsResult) - } - - /** Generates a Scala argument from dispatched JavaScript arguments - * (unboxing and default parameter handling). 
- */ - def genScalaArg(exported: Exported, paramIndex: Int, formalArgsRegistry: FormalArgsRegistry, - param: JSParamInfo, static: Boolean, captures: List[js.Tree])( - previousArgsValues: Int => List[js.Tree])( - implicit pos: SourcePosition): js.Tree = { - - if (param.repeated) { - genJSArrayToVarArgs(formalArgsRegistry.genVarargRef(paramIndex)) - } else { - val jsArg = formalArgsRegistry.genArgRef(paramIndex) - - // Unboxed argument (if it is defined) - val unboxedArg = unbox(jsArg, param.info) - - if (exported.hasDefaultAt(paramIndex)) { - // If argument is undefined and there is a default getter, call it - js.If(js.BinaryOp(js.BinaryOp.===, jsArg, js.Undefined()), { - genCallDefaultGetter(exported.sym, paramIndex, static, captures)(previousArgsValues) - }, { - unboxedArg - })(unboxedArg.tpe) - } else { - // Otherwise, it is always the unboxed argument - unboxedArg - } - } - } - - def genCallDefaultGetter(sym: Symbol, paramIndex: Int, - static: Boolean, captures: List[js.Tree])( - previousArgsValues: Int => List[js.Tree])( - implicit pos: SourcePosition): js.Tree = { - - val targetSym = targetSymForDefaultGetter(sym) - val defaultGetterDenot = this.defaultGetterDenot(targetSym, sym, paramIndex) - - assert(defaultGetterDenot.exists, s"need default getter for method ${sym.fullName}") - assert(!defaultGetterDenot.isOverloaded, i"found overloaded default getter $defaultGetterDenot") - val defaultGetter = defaultGetterDenot.symbol - - val targetTree = { - if (sym.isClassConstructor || static) { - if (targetSym.isStatic) { - assert(captures.isEmpty, i"expected empty captures for ${targetSym.fullName} at $pos") - genLoadModule(targetSym) - } else { - assert(captures.sizeIs == 1, "expected exactly one capture") - - // Find the module accessor. We cannot use memberBasedOnFlags because of scala-js/scala-js#4526. - val outer = targetSym.originalOwner - val name = atPhase(typerPhase)(targetSym.name.unexpandedName).sourceModuleName - val modAccessor = outer.info.allMembers.find { denot => - denot.symbol.is(Module) && denot.name.unexpandedName == name - }.getOrElse { - throw new AssertionError(i"could not find module accessor for ${targetSym.fullName} at $pos") - }.symbol - - val receiver = captures.head - if (outer.isJSType) - genApplyJSClassMethod(receiver, modAccessor, Nil) - else - genApplyMethodMaybeStatically(receiver, modAccessor, Nil) - } - } else { - js.This()(currentThisType) - } - } - - // Pass previous arguments to defaultGetter - val defaultGetterArgs = previousArgsValues(defaultGetter.info.paramInfoss.head.size) - - val callGetter = if (targetSym.isJSType) { - if (defaultGetter.owner.isNonNativeJSClass) { - if (defaultGetter.hasAnnotation(jsdefn.JSOptionalAnnot)) - js.Undefined() - else - genApplyJSClassMethod(targetTree, defaultGetter, defaultGetterArgs) - } else if (defaultGetter.owner == targetSym) { - /* We get here if a non-native constructor has a native companion. - * This is reported on a per-class level. 
- */ - assert(sym.isClassConstructor, - s"got non-constructor method $sym with default method in JS native companion") - js.Undefined() - } else { - report.error( - "When overriding a native method with default arguments, " + - "the overriding method must explicitly repeat the default arguments.", - sym.srcPos) - js.Undefined() - } - } else { - genApplyMethod(targetTree, defaultGetter, defaultGetterArgs) - } - - // #15419 If the getter returns void, we must "box" it by returning undefined - if (callGetter.tpe == jstpe.NoType) - js.Block(callGetter, js.Undefined()) - else - callGetter - } - - private def targetSymForDefaultGetter(sym: Symbol): Symbol = - if (sym.isClassConstructor) sym.owner.companionModule.moduleClass - else sym.owner - - private def defaultGetterDenot(targetSym: Symbol, sym: Symbol, paramIndex: Int): Denotation = - targetSym.info.memberBasedOnFlags(DefaultGetterName(sym.name.asTermName, paramIndex), excluded = Bridge) - - private def defaultGetterDenot(sym: Symbol, paramIndex: Int): Denotation = - defaultGetterDenot(targetSymForDefaultGetter(sym), sym, paramIndex) - - /** Generate the final forwarding call to the exported method. */ - private def genResult(exported: Exported, args: List[js.Tree], static: Boolean)( - implicit pos: SourcePosition): js.Tree = { - - val sym = exported.sym - val currentClass = currentClassSym.get - - def receiver = - if (static) genLoadModule(sym.owner) - else js.This()(currentThisType) - - def boxIfNeeded(call: js.Tree): js.Tree = - box(call, atPhase(elimErasedValueTypePhase)(sym.info.resultType)) - - if (currentClass.isNonNativeJSClass) { - assert(sym.owner == currentClass, sym.fullName) - boxIfNeeded(genApplyJSClassMethod(receiver, sym, args)) - } else { - if (sym.isClassConstructor) - js.New(encodeClassName(currentClass), encodeMethodSym(sym), args) - else if (sym.isPrivate) - boxIfNeeded(genApplyMethodStatically(receiver, sym, args)) - else - boxIfNeeded(genApplyMethod(receiver, sym, args)) - } - } - - private def genThrowTypeError(msg: String = "No matching overload")(implicit pos: Position): js.Tree = - js.Throw(js.JSNew(js.JSGlobalRef("TypeError"), js.StringLiteral(msg) :: Nil)) - - abstract class Exported( - val sym: Symbol, - // Parameters participating in overload resolution. 
- val params: scala.collection.immutable.IndexedSeq[JSParamInfo] - ) { - assert(!params.exists(_.capture), "illegal capture params in Exported") - - private val paramsHasDefault = { - if (!atPhase(elimRepeatedPhase)(sym.hasDefaultParams)) { - Vector.empty - } else { - val targetSym = targetSymForDefaultGetter(sym) - params.indices.map(i => defaultGetterDenot(targetSym, sym, i).exists) - } - } - - def hasDefaultAt(paramIndex: Int): Boolean = - paramIndex < paramsHasDefault.size && paramsHasDefault(paramIndex) - - val hasRepeatedParam = params.nonEmpty && params.last.repeated - - val minArgc = { - // Find the first default param or repeated param - params - .indices - .find(i => hasDefaultAt(i) || params(i).repeated) - .getOrElse(params.size) - } - - val maxNonRepeatedArgc = if (hasRepeatedParam) params.size - 1 else params.size - - def pos: SourcePosition = sym.sourcePos - - def exportArgTypeAt(paramIndex: Int): Type = { - if (paramIndex < params.length) { - params(paramIndex).info - } else { - assert(hasRepeatedParam, i"$sym does not have varargs nor enough params for $paramIndex") - params.last.info - } - } - - def typeInfo: String = sym.info.toString - - def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree - } - - private class ExportedSymbol(sym: Symbol, static: Boolean) - extends Exported(sym, sym.jsParamInfos.toIndexedSeq) { - - def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree = - genApplyForSingleExported(formalArgsRegistry, this, static) - } - - // !!! Hash codes of RTTypeTest are meaningless because of InstanceOfTypeTest - private sealed abstract class RTTypeTest - - private case class PrimitiveTypeTest(tpe: jstpe.Type, rank: Int) extends RTTypeTest - - // !!! This class does not have a meaningful hash code - private case class InstanceOfTypeTest(tpe: Type) extends RTTypeTest { - override def equals(that: Any): Boolean = { - that match { - case InstanceOfTypeTest(thatTpe) => tpe =:= thatTpe - case _ => false - } - } - } - - private case object NoTypeTest extends RTTypeTest - - /** Very simple O(n²) topological sort for elements assumed to be distinct. 
*/ - private def topoSortDistinctsWith[A <: AnyRef](coll: List[A])(lteq: (A, A) => Boolean): List[A] = { - @tailrec - def loop(coll: List[A], acc: List[A]): List[A] = { - if (coll.isEmpty) acc - else if (coll.tail.isEmpty) coll.head :: acc - else { - val (lhs, rhs) = coll.span(x => !coll.forall(y => (x eq y) || !lteq(x, y))) - assert(!rhs.isEmpty, s"cycle while ordering $coll") - loop(lhs ::: rhs.tail, rhs.head :: acc) - } - } - - loop(coll, Nil) - } - - private def typeTestForTpe(tpe: Type): RTTypeTest = { - tpe match { - case tpe: ErasedValueType => - InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) - - case _ => - import dotty.tools.sjs.ir.Names - - (toIRType(tpe): @unchecked) match { - case jstpe.AnyType => NoTypeTest - - case jstpe.NoType => PrimitiveTypeTest(jstpe.UndefType, 0) - case jstpe.BooleanType => PrimitiveTypeTest(jstpe.BooleanType, 1) - case jstpe.CharType => PrimitiveTypeTest(jstpe.CharType, 2) - case jstpe.ByteType => PrimitiveTypeTest(jstpe.ByteType, 3) - case jstpe.ShortType => PrimitiveTypeTest(jstpe.ShortType, 4) - case jstpe.IntType => PrimitiveTypeTest(jstpe.IntType, 5) - case jstpe.LongType => PrimitiveTypeTest(jstpe.LongType, 6) - case jstpe.FloatType => PrimitiveTypeTest(jstpe.FloatType, 7) - case jstpe.DoubleType => PrimitiveTypeTest(jstpe.DoubleType, 8) - - case jstpe.ClassType(Names.BoxedUnitClass) => PrimitiveTypeTest(jstpe.UndefType, 0) - case jstpe.ClassType(Names.BoxedStringClass) => PrimitiveTypeTest(jstpe.StringType, 9) - case jstpe.ClassType(_) => InstanceOfTypeTest(tpe) - - case jstpe.ArrayType(_) => InstanceOfTypeTest(tpe) - } - } - } - - // Group-by that does not rely on hashCode(), only equals() - O(n²) - private def groupByWithoutHashCode[A, B](coll: List[A])(f: A => B): List[(B, List[A])] = { - val m = new mutable.ArrayBuffer[(B, List[A])] - m.sizeHint(coll.length) - - for (elem <- coll) { - val key = f(elem) - val index = m.indexWhere(_._1 == key) - if (index < 0) - m += ((key, List(elem))) - else - m(index) = (key, elem :: m(index)._2) - } - - m.toList - } - - class FormalArgsRegistry(val minArgc: Int, needsRestParam: Boolean) { - private val fixedParamNames: scala.collection.immutable.IndexedSeq[jsNames.LocalName] = - (0 until minArgc).toIndexedSeq.map(_ => freshLocalIdent("arg")(NoPosition).name) - - private val restParamName: jsNames.LocalName = - if (needsRestParam) freshLocalIdent("rest")(NoPosition).name - else null - - def genFormalArgs()(implicit pos: Position): (List[js.ParamDef], Option[js.ParamDef]) = { - val fixedParamDefs = fixedParamNames.toList.map { paramName => - js.ParamDef(js.LocalIdent(paramName), NoOriginalName, jstpe.AnyType, mutable = false) - } - - val restParam = { - if (needsRestParam) - Some(js.ParamDef(js.LocalIdent(restParamName), NoOriginalName, jstpe.AnyType, mutable = false)) - else - None - } - - (fixedParamDefs, restParam) - } - - def genArgRef(index: Int)(implicit pos: Position): js.Tree = { - if (index < minArgc) - js.VarRef(js.LocalIdent(fixedParamNames(index)))(jstpe.AnyType) - else - js.JSSelect(genRestArgRef(), js.IntLiteral(index - minArgc)) - } - - def genVarargRef(fixedParamCount: Int)(implicit pos: Position): js.Tree = { - assert(fixedParamCount >= minArgc, s"genVarargRef($fixedParamCount) with minArgc = $minArgc at $pos") - val restParam = genRestArgRef() - if (fixedParamCount == minArgc) - restParam - else - js.JSMethodApply(restParam, js.StringLiteral("slice"), List(js.IntLiteral(fixedParamCount - minArgc))) - } - - def genRestArgRef()(implicit pos: Position): js.Tree = { - assert(needsRestParam, 
s"trying to generate a reference to non-existent rest param at $pos") - js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) - } - - def genAllArgsRefsForForwarder()(implicit pos: Position): List[js.TreeOrJSSpread] = { - val fixedArgRefs = fixedParamNames.toList.map { paramName => - js.VarRef(js.LocalIdent(paramName))(jstpe.AnyType) - } - - if (needsRestParam) { - val restArgRef = js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) - fixedArgRefs :+ js.JSSpread(restArgRef) - } else { - fixedArgRefs - } - } - } -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala b/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala deleted file mode 100644 index 620e76ab4bab..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala +++ /dev/null @@ -1,102 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import java.net.{URI, URISyntaxException} - -import dotty.tools.dotc.core._ -import Contexts._ -import Decorators.em - -import dotty.tools.dotc.report - -import dotty.tools.dotc.util.{SourceFile, SourcePosition} -import dotty.tools.dotc.util.Spans.Span - -import dotty.tools.sjs.ir - -/** Conversion utilities from dotty Positions to IR Positions. */ -class JSPositions()(using Context) { - import JSPositions._ - - private val sourceURIMaps: List[URIMap] = { - ctx.settings.scalajsMapSourceURI.value.flatMap { option => - val uris = option.split("->") - if (uris.length != 1 && uris.length != 2) { - report.error("-scalajs-mapSourceURI needs one or two URIs as argument (separated by '->').") - Nil - } else { - try { - val from = new URI(uris.head) - val to = uris.lift(1).map(str => new URI(str)) - URIMap(from, to) :: Nil - } catch { - case e: URISyntaxException => - report.error(em"${e.getInput} is not a valid URI") - Nil - } - } - } - } - - private def sourceAndSpan2irPos(source: SourceFile, span: Span): ir.Position = { - if (!span.exists) ir.Position.NoPosition - else { - // dotty positions and IR positions are both 0-based - val irSource = span2irPosCache.toIRSource(source) - val point = span.point - val line = source.offsetToLine(point) - val column = source.column(point) - ir.Position(irSource, line, column) - } - } - - /** Implicit conversion from dotty Span to ir.Position. */ - implicit def span2irPos(span: Span): ir.Position = - sourceAndSpan2irPos(ctx.compilationUnit.source, span) - - /** Implicitly materializes an ir.Position from an implicit dotty Span. */ - implicit def implicitSpan2irPos(implicit span: Span): ir.Position = - span2irPos(span) - - /** Implicitly materializes an ir.Position from an implicit dotty SourcePosition. 
*/ - implicit def implicitSourcePos2irPos(implicit sourcePos: SourcePosition): ir.Position = - sourceAndSpan2irPos(sourcePos.source, sourcePos.span) - - private object span2irPosCache { - import dotty.tools.dotc.util._ - - private var lastDotcSource: SourceFile = null - private var lastIRSource: ir.Position.SourceFile = null - - def toIRSource(dotcSource: SourceFile): ir.Position.SourceFile = { - if (dotcSource != lastDotcSource) { - lastIRSource = convert(dotcSource) - lastDotcSource = dotcSource - } - lastIRSource - } - - private def convert(dotcSource: SourceFile): ir.Position.SourceFile = { - dotcSource.file.file match { - case null => - new java.net.URI( - "virtualfile", // Pseudo-Scheme - dotcSource.file.path, // Scheme specific part - null // Fragment - ) - case file => - val srcURI = file.toURI - sourceURIMaps.collectFirst { - case URIMap(from, to) if from.relativize(srcURI) != srcURI => - val relURI = from.relativize(srcURI) - to.fold(relURI)(_.resolve(relURI)) - }.getOrElse(srcURI) - } - } - } -} - -object JSPositions { - final case class URIMap(from: URI, to: Option[URI]) -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala b/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala deleted file mode 100644 index ce83f5e9e83b..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala +++ /dev/null @@ -1,150 +0,0 @@ -package dotty.tools.backend.sjs - -import dotty.tools.dotc.core._ -import Names.TermName -import Types._ -import Contexts._ -import Symbols._ -import Decorators.em - -import dotty.tools.dotc.ast.tpd._ -import dotty.tools.backend.jvm.DottyPrimitives -import dotty.tools.dotc.report -import dotty.tools.dotc.util.ReadOnlyMap - -object JSPrimitives { - - inline val FirstJSPrimitiveCode = 300 - - inline val DYNNEW = FirstJSPrimitiveCode + 1 // Instantiate a new JavaScript object - - inline val ARR_CREATE = DYNNEW + 1 // js.Array.apply (array literal syntax) - - inline val TYPEOF = ARR_CREATE + 1 // typeof x - inline val JS_NATIVE = TYPEOF + 1 // js.native. Marker method. Fails if tried to be emitted. 
- - inline val UNITVAL = JS_NATIVE + 1 // () value, which is undefined - - inline val JS_NEW_TARGET = UNITVAL + 1 // js.new.target - - inline val JS_IMPORT = JS_NEW_TARGET + 1 // js.import.apply(specifier) - inline val JS_IMPORT_META = JS_IMPORT + 1 // js.import.meta - - inline val CONSTRUCTOROF = JS_IMPORT_META + 1 // runtime.constructorOf(clazz) - inline val CREATE_INNER_JS_CLASS = CONSTRUCTOROF + 1 // runtime.createInnerJSClass - inline val CREATE_LOCAL_JS_CLASS = CREATE_INNER_JS_CLASS + 1 // runtime.createLocalJSClass - inline val WITH_CONTEXTUAL_JS_CLASS_VALUE = CREATE_LOCAL_JS_CLASS + 1 // runtime.withContextualJSClassValue - inline val LINKING_INFO = WITH_CONTEXTUAL_JS_CLASS_VALUE + 1 // runtime.linkingInfo - inline val DYNAMIC_IMPORT = LINKING_INFO + 1 // runtime.dynamicImport - - inline val STRICT_EQ = DYNAMIC_IMPORT + 1 // js.special.strictEquals - inline val IN = STRICT_EQ + 1 // js.special.in - inline val INSTANCEOF = IN + 1 // js.special.instanceof - inline val DELETE = INSTANCEOF + 1 // js.special.delete - inline val FORIN = DELETE + 1 // js.special.forin - inline val JS_THROW = FORIN + 1 // js.special.throw - inline val JS_TRY_CATCH = JS_THROW + 1 // js.special.tryCatch - inline val WRAP_AS_THROWABLE = JS_TRY_CATCH + 1 // js.special.wrapAsThrowable - inline val UNWRAP_FROM_THROWABLE = WRAP_AS_THROWABLE + 1 // js.special.unwrapFromThrowable - inline val DEBUGGER = UNWRAP_FROM_THROWABLE + 1 // js.special.debugger - - inline val THROW = DEBUGGER + 1 - - inline val UNION_FROM = THROW + 1 // js.|.from - inline val UNION_FROM_TYPE_CONSTRUCTOR = UNION_FROM + 1 // js.|.fromTypeConstructor - - inline val REFLECT_SELECTABLE_SELECTDYN = UNION_FROM_TYPE_CONSTRUCTOR + 1 // scala.reflect.Selectable.selectDynamic - inline val REFLECT_SELECTABLE_APPLYDYN = REFLECT_SELECTABLE_SELECTDYN + 1 // scala.reflect.Selectable.applyDynamic - - inline val LastJSPrimitiveCode = REFLECT_SELECTABLE_APPLYDYN - - def isJSPrimitive(code: Int): Boolean = - code >= FirstJSPrimitiveCode && code <= LastJSPrimitiveCode - -} - -class JSPrimitives(ictx: DetachedContext) extends DottyPrimitives(ictx) { - import JSPrimitives._ - - private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx) - - override def getPrimitive(sym: Symbol): Int = - jsPrimitives.getOrElse(sym, super.getPrimitive(sym)) - - override def getPrimitive(app: Apply, tpe: Type)(using Context): Int = - jsPrimitives.getOrElse(app.fun.symbol, super.getPrimitive(app, tpe)) - - override def isPrimitive(sym: Symbol): Boolean = - jsPrimitives.contains(sym) || super.isPrimitive(sym) - - override def isPrimitive(fun: Tree): Boolean = - jsPrimitives.contains(fun.symbol(using ictx)) || super.isPrimitive(fun) - - /** Initialize the primitive map */ - private def initJSPrimitives(using Context): ReadOnlyMap[Symbol, Int] = { - - val primitives = MutableSymbolMap[Int]() - - // !!! 
Code duplicate with DottyPrimitives - /** Add a primitive operation to the map */ - def addPrimitive(s: Symbol, code: Int): Unit = { - assert(!(primitives contains s), "Duplicate primitive " + s) - primitives(s) = code - } - - def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { - val alts = cls.info.member(method).alternatives.map(_.symbol) - if (alts.isEmpty) { - report.error(em"Unknown primitive method $cls.$method") - } else { - for (s <- alts) - addPrimitive(s, code) - } - } - - val jsdefn = JSDefinitions.jsdefn - - addPrimitive(jsdefn.JSDynamic_newInstance, DYNNEW) - - addPrimitive(jsdefn.JSArray_apply, ARR_CREATE) - - addPrimitive(jsdefn.JSPackage_typeOf, TYPEOF) - addPrimitive(jsdefn.JSPackage_native, JS_NATIVE) - - addPrimitive(defn.BoxedUnit_UNIT, UNITVAL) - - addPrimitive(jsdefn.JSNew_target, JS_NEW_TARGET) - - addPrimitive(jsdefn.JSImport_apply, JS_IMPORT) - addPrimitive(jsdefn.JSImport_meta, JS_IMPORT_META) - - addPrimitive(jsdefn.Runtime_constructorOf, CONSTRUCTOROF) - addPrimitive(jsdefn.Runtime_createInnerJSClass, CREATE_INNER_JS_CLASS) - addPrimitive(jsdefn.Runtime_createLocalJSClass, CREATE_LOCAL_JS_CLASS) - addPrimitive(jsdefn.Runtime_withContextualJSClassValue, WITH_CONTEXTUAL_JS_CLASS_VALUE) - addPrimitive(jsdefn.Runtime_linkingInfo, LINKING_INFO) - addPrimitive(jsdefn.Runtime_dynamicImport, DYNAMIC_IMPORT) - - addPrimitive(jsdefn.Special_strictEquals, STRICT_EQ) - addPrimitive(jsdefn.Special_in, IN) - addPrimitive(jsdefn.Special_instanceof, INSTANCEOF) - addPrimitive(jsdefn.Special_delete, DELETE) - addPrimitive(jsdefn.Special_forin, FORIN) - addPrimitive(jsdefn.Special_throw, JS_THROW) - addPrimitive(jsdefn.Special_tryCatch, JS_TRY_CATCH) - addPrimitive(jsdefn.Special_wrapAsThrowable, WRAP_AS_THROWABLE) - addPrimitive(jsdefn.Special_unwrapFromThrowable, UNWRAP_FROM_THROWABLE) - addPrimitive(jsdefn.Special_debugger, DEBUGGER) - - addPrimitive(defn.throwMethod, THROW) - - addPrimitive(jsdefn.PseudoUnion_from, UNION_FROM) - addPrimitive(jsdefn.PseudoUnion_fromTypeConstructor, UNION_FROM_TYPE_CONSTRUCTOR) - - addPrimitive(jsdefn.ReflectSelectable_selectDynamic, REFLECT_SELECTABLE_SELECTDYN) - addPrimitive(jsdefn.ReflectSelectable_applyDynamic, REFLECT_SELECTABLE_APPLYDYN) - - primitives - } - -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala b/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala deleted file mode 100644 index af7570a6edca..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala +++ /dev/null @@ -1,38 +0,0 @@ -package dotty.tools.backend.sjs - -class ScopedVar[A](init: A) extends Pure { - import ScopedVar.Assignment - - private[ScopedVar] var value = init - - def this()(implicit ev: Null <:< A) = this(ev(null)) - - def get: A = value - def :=(newValue: A): Assignment[A] = new Assignment(this, newValue) -} - -object ScopedVar { - class Assignment[T](scVar: ScopedVar[T], value: T) { - private[ScopedVar] def push(): AssignmentStackElement[T] = { - val stack = new AssignmentStackElement(scVar, scVar.value) - scVar.value = value - stack - } - } - - private class AssignmentStackElement[T](scVar: ScopedVar[T], oldValue: T) { - private[ScopedVar] def pop(): Unit = { - scVar.value = oldValue - } - } - - implicit def toValue[T](scVar: ScopedVar[T]): T = scVar.get - - def withScopedVars[T](ass: Assignment[_]*)(body: => T): T = { - val stack = ass.map(_.push()) - try body - finally stack.reverse.foreach(_.pop()) - } - - final class VarBox[A](var value: A) -} diff --git 
a/tests/pos-with-compiler-cc/dotc/Bench.scala b/tests/pos-with-compiler-cc/dotc/Bench.scala deleted file mode 100644 index c9c032b0ae7d..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Bench.scala +++ /dev/null @@ -1,64 +0,0 @@ -package dotty.tools -package dotc - -import core.Contexts._ -import reporting.Reporter -import io.AbstractFile - -import scala.annotation.internal.sharable - -/** A main class for running compiler benchmarks. Can instantiate a given - * number of compilers and run each (sequentially) a given number of times - * on the same sources. - */ -object Bench extends Driver: - - @sharable private var numRuns = 1 - - private def ntimes(n: Int)(op: => Reporter): Reporter = - (0 until n).foldLeft(emptyReporter)((_, _) => op) - - @sharable private var times: Array[Int] = _ - - override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = - times = new Array[Int](numRuns) - var reporter: Reporter = emptyReporter - for i <- 0 until numRuns do - val start = System.nanoTime() - reporter = super.doCompile(compiler, files) - times(i) = ((System.nanoTime - start) / 1000000).toInt - println(s"time elapsed: ${times(i)}ms") - if ctx.settings.Xprompt.value then - print("hit to continue >") - System.in.nn.read() - println() - reporter - - def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { - val pos = args indexOf name - if (pos < 0) (default, args) - else (args(pos + 1).toInt, (args take pos) ++ (args drop (pos + 2))) - } - - def reportTimes() = - val best = times.sorted - val measured = numRuns / 3 - val avgBest = best.take(measured).sum / measured - val avgLast = times.reverse.take(measured).sum / measured - println(s"best out of $numRuns runs: ${best(0)}") - println(s"average out of best $measured: $avgBest") - println(s"average out of last $measured: $avgLast") - - override def process(args: Array[String], rootCtx: Context): Reporter = - val (numCompilers, args1) = extractNumArg(args, "#compilers") - val (numRuns, args2) = extractNumArg(args1, "#runs") - this.numRuns = numRuns - var reporter: Reporter = emptyReporter - for i <- 0 until numCompilers do - reporter = super.process(args2, rootCtx) - reportTimes() - reporter - -end Bench - - diff --git a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala deleted file mode 100644 index ad51305d5858..000000000000 --- a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala +++ /dev/null @@ -1,167 +0,0 @@ -package dotty.tools -package dotc - -import core._ -import Contexts._ -import SymDenotations.ClassDenotation -import Symbols._ -import util.{FreshNameCreator, SourceFile, NoSource} -import util.Spans.Span -import ast.{tpd, untpd} -import tpd.{Tree, TreeTraverser} -import ast.Trees.{Import, Ident} -import typer.Nullables -import transform.SymUtils._ -import core.Decorators._ -import config.{SourceVersion, Feature} -import StdNames.nme -import scala.annotation.internal.sharable -import language.experimental.pureFunctions - -class CompilationUnit protected (val source: SourceFile) { - - override def toString: String = source.toString - - var untpdTree: untpd.Tree = untpd.EmptyTree - - var tpdTree: tpd.Tree = tpd.EmptyTree - - /** Is this the compilation unit of a Java file */ - def isJava: Boolean = source.file.name.endsWith(".java") - - /** The source version for this unit, as determined by a language import */ - var sourceVersion: Option[SourceVersion] = None - - /** Pickled TASTY binaries, indexed by 
class. */ - var pickled: Map[ClassSymbol, () -> Array[Byte]] = Map() - - /** The fresh name creator for the current unit. - * FIXME(#7661): This is not fine-grained enough to enable reproducible builds, - * see https://github.com/scala/scala/commit/f50ec3c866263448d803139e119b33afb04ec2bc - */ - val freshNames: FreshNameCreator = new FreshNameCreator.Default - - /** Will be set to `true` if there are inline call that must be inlined after typer. - * The information is used in phase `Inlining` in order to avoid traversing trees that need no transformations. - */ - var needsInlining: Boolean = false - - /** Set to `true` if inliner added anonymous mirrors that need to be completed */ - var needsMirrorSupport: Boolean = false - - /** Will be set to `true` if contains `Quote`. - * The information is used in phase `Staging`/`Splicing`/`PickleQuotes` in order to avoid traversing trees that need no transformations. - */ - var needsStaging: Boolean = false - - /** Will be set to true if the unit contains a captureChecking language import */ - var needsCaptureChecking: Boolean = false - - /** Will be set to true if the unit contains a pureFunctions language import */ - var knowsPureFuns: Boolean = false - - var suspended: Boolean = false - var suspendedAtInliningPhase: Boolean = false - - /** Can this compilation unit be suspended */ - def isSuspendable: Boolean = true - - /** Suspends the compilation unit by throwing a SuspendException - * and recording the suspended compilation unit - */ - def suspend()(using Context): Nothing = - assert(isSuspendable) - if !suspended then - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspended: $this") - suspended = true - ctx.run.nn.suspendedUnits += this - if ctx.phase == Phases.inliningPhase then - suspendedAtInliningPhase = true - throw CompilationUnit.SuspendException() - - private var myAssignmentSpans: Map[Int, List[Span]] | Null = null - - /** A map from (name-) offsets of all local variables in this compilation unit - * that can be tracked for being not null to the list of spans of assignments - * to these variables. - */ - def assignmentSpans(using Context): Map[Int, List[Span]] = - if myAssignmentSpans == null then myAssignmentSpans = Nullables.assignmentSpans - myAssignmentSpans.nn -} - -@sharable object NoCompilationUnit extends CompilationUnit(NoSource) { - - override def isJava: Boolean = false - - override def suspend()(using Context): Nothing = - throw CompilationUnit.SuspendException() - - override def assignmentSpans(using Context): Map[Int, List[Span]] = Map.empty -} - -object CompilationUnit { - - class SuspendException extends Exception - - /** Make a compilation unit for top class `clsd` with the contents of the `unpickled` tree */ - def apply(clsd: ClassDenotation, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = - val file = clsd.symbol.associatedFile.nn - apply(SourceFile(file, Array.empty[Char]), unpickled, forceTrees) - - /** Make a compilation unit, given picked bytes and unpickled tree */ - def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = { - assert(!unpickled.isEmpty, unpickled) - val unit1 = new CompilationUnit(source) - unit1.tpdTree = unpickled - if (forceTrees) { - val force = new Force - force.traverse(unit1.tpdTree) - unit1.needsStaging = force.containsQuote - unit1.needsInlining = force.containsInline - } - unit1 - } - - /** Create a compilation unit corresponding to `source`. 
- * If `mustExist` is true, this will fail if `source` does not exist. - */ - def apply(source: SourceFile, mustExist: Boolean = true)(using Context): CompilationUnit = { - val src = - if (!mustExist) - source - else if (source.file.isDirectory) { - report.error(em"expected file, received directory '${source.file.path}'") - NoSource - } - else if (!source.file.exists) { - report.error(em"source file not found: ${source.file.path}") - NoSource - } - else source - new CompilationUnit(src) - } - - /** Force the tree to be loaded */ - private class Force extends TreeTraverser { - var containsQuote = false - var containsInline = false - var containsCaptureChecking = false - def traverse(tree: Tree)(using Context): Unit = { - if (tree.symbol.isQuote) - containsQuote = true - if tree.symbol.is(Flags.Inline) then - containsInline = true - tree match - case Import(qual, selectors) => - tpd.languageImport(qual) match - case Some(prefix) => - for case untpd.ImportSelector(untpd.Ident(imported), untpd.EmptyTree, _) <- selectors do - Feature.handleGlobalLanguageImport(prefix, imported) - case _ => - case _ => - traverseChildren(tree) - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/Compiler.scala b/tests/pos-with-compiler-cc/dotc/Compiler.scala deleted file mode 100644 index c8c95647b5e4..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Compiler.scala +++ /dev/null @@ -1,171 +0,0 @@ -package dotty.tools -package dotc - -import core._ -import Contexts._ -import typer.{TyperPhase, RefChecks} -import cc.CheckCaptures -import parsing.Parser -import Phases.Phase -import transform._ -import dotty.tools.backend -import backend.jvm.{CollectSuperCalls, GenBCode} -import localopt.StringInterpolatorOpt - -/** The central class of the dotc compiler. The job of a compiler is to create - * runs, which process given `phases` in a given `rootContext`. - */ -class Compiler { - - /** Meta-ordering constraint: - * - * DenotTransformers that change the signature of their denotation's info must go - * after erasure. The reason is that denotations are permanently referred to by - * TermRefs which contain a signature. If the signature of a symbol would change, - * all refs to it would become outdated - they could not be dereferenced in the - * new phase. - * - * After erasure, signature changing denot-transformers are OK because signatures - * are never recomputed later than erasure. 
- */ - def phases: List[List[Phase]] = - frontendPhases ::: picklerPhases ::: transformPhases ::: backendPhases - - /** Phases dealing with the frontend up to trees ready for TASTY pickling */ - protected def frontendPhases: List[List[Phase]] = - List(new Parser) :: // Compiler frontend: scanner, parser - List(new TyperPhase) :: // Compiler frontend: namer, typer - List(new YCheckPositions) :: // YCheck positions - List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks - List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files - List(new PostTyper) :: // Additional checks and cleanups after type checking - List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) - List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks - List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols - Nil - - /** Phases dealing with TASTY tree pickling and unpickling */ - protected def picklerPhases: List[List[Phase]] = - List(new Pickler) :: // Generate TASTY info - List(new Inlining) :: // Inline and execute macros - List(new PostInlining) :: // Add mirror support for inlined code - List(new Staging) :: // Check staging levels and heal staged types - List(new Splicing) :: // Replace level 1 splices with holes - List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures - Nil - - /** Phases dealing with the transformation from pickled trees to backend trees */ - protected def transformPhases: List[List[Phase]] = - List(new InstrumentCoverage) :: // Perform instrumentation for code coverage (if -coverage-out is set) - List(new FirstTransform, // Some transformations to put trees into a canonical form - new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars - new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes - new CookComments, // Cook the comments: expand variables, doc, etc. 
- new CheckStatic, // Check restrictions that apply to @static members - new CheckLoopingImplicits, // Check that implicit defs do not call themselves in an infinite loop - new BetaReduce, // Reduce closure applications - new InlineVals, // Check right hand-sides of an `inline val`s - new ExpandSAMs, // Expand single abstract method closures to anonymous classes - new ElimRepeated, // Rewrite vararg parameters and arguments - new RefChecks) :: // Various checks mostly related to abstract members and overriding - List(new init.Checker) :: // Check initialization of objects - List(new CrossVersionChecks, // Check issues related to deprecated and experimental - new ProtectedAccessors, // Add accessors for protected members - new ExtensionMethods, // Expand methods of value classes with extension methods - new UncacheGivenAliases, // Avoid caching RHS of simple parameterless given aliases - new ElimByName, // Map by-name parameters to functions - new HoistSuperArgs, // Hoist complex arguments of supercalls to enclosing scope - new ForwardDepChecks, // Check that there are no forward references to local vals - new SpecializeApplyMethods, // Adds specialized methods to FunctionN - new TryCatchPatterns, // Compile cases in try/catch - new PatternMatcher) :: // Compile pattern matches - List(new TestRecheck.Pre) :: // Test only: run rechecker, enabled under -Yrecheck-test - List(new TestRecheck) :: // Test only: run rechecker, enabled under -Yrecheck-test - List(new CheckCaptures.Pre) :: // Preparations for check captures phase, enabled under captureChecking - List(new CheckCaptures) :: // Check captures, enabled under captureChecking - List(new ElimOpaque, // Turn opaque into normal aliases - new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) - new ExplicitOuter, // Add accessors to outer classes from nested ones. - new ExplicitSelf, // Make references to non-trivial self types explicit as casts - new StringInterpolatorOpt) :: // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats - List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions - new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` - new InlinePatterns, // Remove placeholders of inlined patterns - new VCInlineMethods, // Inlines calls to value class methods - new SeqLiterals, // Express vararg arguments as arrays - new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods - new Getters, // Replace non-private vals and vars with getter defs (fields are added later) - new SpecializeFunctions, // Specialized Function{0,1,2} by replacing super with specialized super - new SpecializeTuples, // Specializes Tuples by replacing tuple construction and selection trees - new LiftTry, // Put try expressions that might execute on non-empty stacks into their own methods - new CollectNullableFields, // Collect fields that can be nulled out after use in lazy initialization - new ElimOuterSelect, // Expand outer selections - new ResolveSuper, // Implement super accessors - new FunctionXXLForwarders, // Add forwarders for FunctionXXL apply method - new ParamForwarding, // Add forwarders for aliases of superclass parameters - new TupleOptimizations, // Optimize generic operations on tuples - new LetOverApply, // Lift blocks from receivers of applications - new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. 
- List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. - List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types - new PureStats, // Remove pure stats from blocks - new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations - new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference - new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` - new sjs.AddLocalJSFakeNews, // Adds fake new invocations to local JS classes in calls to `createLocalJSClass` - new ElimPolyFunction, // Rewrite PolyFunction subclasses to FunctionN subclasses - new TailRec, // Rewrite tail recursion to loops - new CompleteJavaEnums, // Fill in constructors for Java enums - new Mixin, // Expand trait fields and trait initializers - new LazyVals, // Expand lazy vals - new Memoize, // Add private fields to getters and setters - new NonLocalReturns, // Expand non-local returns - new CapturedVars) :: // Represent vars captured by closures as heap objects - List(new Constructors, // Collect initialization code in primary constructors - // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it - new Instrumentation) :: // Count calls and allocations under -Yinstrument - List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments - // Note: in this mini-phase block scopes are incorrect. No phases that rely on scopes should be here - new ElimStaticThis, // Replace `this` references to static objects by global identifiers - new CountOuterAccesses) :: // Identify outer accessors that can be dropped - List(new DropOuterAccessors, // Drop unused outer accessors - new CheckNoSuperThis, // Check that supercalls don't contain references to `this` - new Flatten, // Lift all inner classes to package scope - new TransformWildcards, // Replace wildcards with default values - new MoveStatics, // Move static methods from companion to the class itself - new ExpandPrivate, // Widen private definitions accessed from nested classes - new RestoreScopes, // Repair scopes rendered invalid by moving definitions in prior phases of the group - new SelectStatic, // get rid of selects that would be compiled into GetStatic - new sjs.JUnitBootstrappers, // Generate JUnit-specific bootstrapper classes for Scala.js (not enabled by default) - new CollectEntryPoints, // Collect all entry points and save them in the context - new CollectSuperCalls, // Find classes that are called with super - new RepeatableAnnotations) :: // Aggregate repeatable annotations - Nil - - /** Generate the output of the compilation */ - protected def backendPhases: List[List[Phase]] = - List(new backend.sjs.GenSJSIR) :: // Generate .sjsir files for Scala.js (not enabled by default) - List(new GenBCode) :: // Generate JVM bytecode - Nil - - var runId: Int = 1 - def nextRunId: Int = { - runId += 1; runId - } - - def reset()(using Context): Unit = { - ctx.base.reset() - val run = ctx.run - if (run != null) run.reset() - } - - def newRun(using Context): Run = { - reset() - val rctx = - if ctx.settings.Xsemanticdb.value then - ctx.addMode(Mode.ReadPositions) - else - ctx - new Run(this, rctx) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/Driver.scala b/tests/pos-with-compiler-cc/dotc/Driver.scala deleted file mode 100644 index b85f1365243b..000000000000 --- 
a/tests/pos-with-compiler-cc/dotc/Driver.scala +++ /dev/null @@ -1,207 +0,0 @@ -package dotty.tools.dotc - -import dotty.tools.FatalError -import config.CompilerCommand -import core.Comments.{ContextDoc, ContextDocstrings} -import core.Contexts._ -import core.{MacroClassLoader, TypeError} -import dotty.tools.dotc.ast.Positioned -import dotty.tools.io.AbstractFile -import reporting._ -import core.Decorators._ -import config.Feature - -import scala.util.control.NonFatal -import fromtasty.{TASTYCompiler, TastyFileUtil} - -/** Run the Dotty compiler. - * - * Extending this class lets you customize many aspect of the compilation - * process, but in most cases you only need to call [[process]] on the - * existing object [[Main]]. - */ -class Driver { - - protected def newCompiler(using Context): Compiler = - if (ctx.settings.fromTasty.value) new TASTYCompiler - else new Compiler - - protected def emptyReporter: Reporter = new StoreReporter(null) - - protected def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = - if files.nonEmpty then - try - val run = compiler.newRun - run.compile(files) - finish(compiler, run) - catch - case ex: FatalError => - report.error(ex.getMessage.nn) // signals that we should fail compilation. - case ex: TypeError => - println(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}") - throw ex - case ex: Throwable => - println(s"$ex while compiling ${files.map(_.path).mkString(", ")}") - throw ex - ctx.reporter - - protected def finish(compiler: Compiler, run: Run)(using Context): Unit = - run.printSummary() - if !ctx.reporter.errorsReported && run.suspendedUnits.nonEmpty then - val suspendedUnits = run.suspendedUnits.toList - if (ctx.settings.XprintSuspension.value) - report.echo(i"compiling suspended $suspendedUnits%, %") - val run1 = compiler.newRun - for unit <- suspendedUnits do unit.suspended = false - run1.compileUnits(suspendedUnits) - finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) - - protected def initCtx: Context = (new ContextBase).initialCtx - - protected def sourcesRequired: Boolean = true - - protected def command: CompilerCommand = ScalacCommand - - /** Setup context with initialized settings from CLI arguments, then check if there are any settings that - * would change the default behaviour of the compiler. - * - * @return If there is no setting like `-help` preventing us from continuing compilation, - * this method returns a list of files to compile and an updated Context. - * If compilation should be interrupted, this method returns None. 
- */ - def setup(args: Array[String], rootCtx: Context): Option[(List[AbstractFile], DetachedContext)] = { - val ictx = rootCtx.fresh - val summary = command.distill(args, ictx.settings)(ictx.settingsState)(using ictx) - ictx.setSettings(summary.sstate) - Feature.checkExperimentalSettings(using ictx) - MacroClassLoader.init(ictx) - Positioned.init(using ictx) - - inContext(ictx) { - if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then - ictx.setProperty(ContextDoc, new ContextDocstrings) - val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) - fileNamesOrNone.map { fileNames => - val files = fileNames.map(ctx.getFile) - (files, fromTastySetup(files).detach) - } - } - } - - /** Setup extra classpath of tasty and jar files */ - protected def fromTastySetup(files: List[AbstractFile])(using Context): Context = - if ctx.settings.fromTasty.value then - val newEntries: List[String] = files - .flatMap { file => - if !file.exists then - report.error(em"File does not exist: ${file.path}") - None - else file.extension match - case "jar" => Some(file.path) - case "tasty" => - TastyFileUtil.getClassPath(file) match - case Some(classpath) => Some(classpath) - case _ => - report.error(em"Could not load classname from: ${file.path}") - None - case _ => - report.error(em"File extension is not `tasty` or `jar`: ${file.path}") - None - } - .distinct - val ctx1 = ctx.fresh - val fullClassPath = - (newEntries :+ ctx.settings.classpath.value).mkString(java.io.File.pathSeparator.nn) - ctx1.setSetting(ctx1.settings.classpath, fullClassPath) - else ctx - - /** Entry point to the compiler that can be conveniently used with Java reflection. - * - * This entry point can easily be used without depending on the `dotty` package, - * you only need to depend on `dotty-interfaces` and call this method using - * reflection. This allows you to write code that will work against multiple - * versions of dotty without recompilation. - * - * The trade-off is that you can only pass a SimpleReporter to this method - * and not a normal Reporter which is more powerful. - * - * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala]] - * - * @param args Arguments to pass to the compiler. - * @param simple Used to log errors, warnings, and info messages. - * The default reporter is used if this is `null`. - * @param callback Used to execute custom code during the compilation - * process. No callbacks will be executed if this is `null`. - * @return - */ - final def process(args: Array[String], simple: interfaces.SimpleReporter | Null, - callback: interfaces.CompilerCallback | Null): interfaces.ReporterResult = { - val reporter = if (simple == null) null else Reporter.fromSimpleReporter(simple) - process(args, reporter, callback) - } - - /** Principal entry point to the compiler. - * - * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] - * in method `runCompiler` - * - * @param args Arguments to pass to the compiler. - * @param reporter Used to log errors, warnings, and info messages. - * The default reporter is used if this is `null`. - * @param callback Used to execute custom code during the compilation - * process. No callbacks will be executed if this is `null`. - * @return The `Reporter` used. Use `Reporter#hasErrors` to check - * if compilation succeeded. 
- */ - final def process(args: Array[String], reporter: Reporter | Null = null, - callback: interfaces.CompilerCallback | Null = null): Reporter = { - val compileCtx = initCtx.fresh - if (reporter != null) - compileCtx.setReporter(reporter) - if (callback != null) - compileCtx.setCompilerCallback(callback) - process(args, compileCtx) - } - - /** Entry point to the compiler with no optional arguments. - * - * This overload is provided for compatibility reasons: the - * `RawCompiler` of sbt expects this method to exist and calls - * it using reflection. Keeping it means that we can change - * the other overloads without worrying about breaking compatibility - * with sbt. - */ - final def process(args: Array[String]): Reporter = - process(args, null: Reporter | Null, null: interfaces.CompilerCallback | Null) - - /** Entry point to the compiler using a custom `Context`. - * - * In most cases, you do not need a custom `Context` and should - * instead use one of the other overloads of `process`. However, - * the other overloads cannot be overridden, instead you - * should override this one which they call internally. - * - * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] - * in method `runCompilerWithContext` - * - * @param args Arguments to pass to the compiler. - * @param rootCtx The root Context to use. - * @return The `Reporter` used. Use `Reporter#hasErrors` to check - * if compilation succeeded. - */ - def process(args: Array[String], rootCtx: Context): Reporter = { - setup(args, rootCtx) match - case Some((files, compileCtx)) => - doCompile(newCompiler(using compileCtx), files)(using compileCtx) - case None => - rootCtx.reporter - } - - def main(args: Array[String]): Unit = { - // Preload scala.util.control.NonFatal. Otherwise, when trying to catch a StackOverflowError, - // we may try to load it but fail with another StackOverflowError and lose the original exception, - // see . - val _ = NonFatal - sys.exit(if (process(args).hasErrors) 1 else 0) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/Main.scala b/tests/pos-with-compiler-cc/dotc/Main.scala deleted file mode 100644 index 3288fded52a2..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Main.scala +++ /dev/null @@ -1,5 +0,0 @@ -package dotty.tools -package dotc - -/** Main class of the `dotc` batch compiler. */ -object Main extends Driver diff --git a/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala b/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala deleted file mode 100644 index ae20d81226c9..000000000000 --- a/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala +++ /dev/null @@ -1,9 +0,0 @@ -package dotty.tools.dotc - -import dotty.tools.FatalError - -class MissingCoreLibraryException(rootPackage: String) extends FatalError( - s"""Could not find package $rootPackage from compiler core libraries. - |Make sure the compiler core libraries are on the classpath. - """.stripMargin -) diff --git a/tests/pos-with-compiler-cc/dotc/Resident.scala b/tests/pos-with-compiler-cc/dotc/Resident.scala deleted file mode 100644 index 9ebeaaaeb1c2..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Resident.scala +++ /dev/null @@ -1,61 +0,0 @@ -package dotty.tools -package dotc - -import core.Contexts._ -import reporting.Reporter -import java.io.EOFException -import scala.annotation.tailrec - -/** A compiler which stays resident between runs. 
This is more of a PoC than - * something that's expected to be used often - * - * Usage: - * - * > scala dotty.tools.dotc.Resident - * - * dotc> "more options and files to compile" - * - * ... - * - * dotc> :reset // reset all options to the ones passed on the command line - * - * ... - * - * dotc> :q // quit - */ -class Resident extends Driver { - - object residentCompiler extends Compiler - - override def sourcesRequired: Boolean = false - - private val quit = ":q" - private val reset = ":reset" - private val prompt = "dotc> " - - private def getLine() = { - Console.print(prompt) - try scala.io.StdIn.readLine() catch { case _: EOFException => quit } - } - - final override def process(args: Array[String], rootCtx: Context): Reporter = { - @tailrec def loop(args: Array[String], prevCtx: Context): Reporter = { - setup(args, prevCtx) match - case Some((files, ctx)) => - inContext(ctx) { - doCompile(residentCompiler, files) - } - var nextCtx: DetachedContext = ctx - var line = getLine() - while (line == reset) { - nextCtx = rootCtx.detach - line = getLine() - } - if line.startsWith(quit) then ctx.reporter - else loop((line split "\\s+").asInstanceOf[Array[String]], nextCtx) - case None => - prevCtx.reporter - } - loop(args, rootCtx) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/Run.scala b/tests/pos-with-compiler-cc/dotc/Run.scala deleted file mode 100644 index 96f8c6a7b06f..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Run.scala +++ /dev/null @@ -1,404 +0,0 @@ -package dotty.tools -package dotc - -import core._ -import Contexts._ -import Periods._ -import Symbols._ -import Scopes._ -import Names.Name -import Denotations.Denotation -import typer.Typer -import typer.ImportInfo.withRootImports -import Decorators._ -import io.AbstractFile -import Phases.unfusedPhases - -import util._ -import reporting.{Suppression, Action, Profile, ActiveProfile, NoProfile} -import reporting.Diagnostic -import reporting.Diagnostic.Warning -import rewrites.Rewrites -import profile.Profiler -import printing.XprintMode -import typer.ImplicitRunInfo -import config.Feature -import StdNames.nme - -import java.io.{BufferedWriter, OutputStreamWriter} -import java.nio.charset.StandardCharsets - -import scala.collection.mutable -import scala.util.control.NonFatal -import scala.io.Codec -import annotation.constructorOnly -import annotation.unchecked.uncheckedCaptures - -/** A compiler run. Exports various methods to compile source files */ -class Run(comp: Compiler, @constructorOnly ictx0: Context) extends ImplicitRunInfo with ConstraintRunInfo { - - val ictx = ictx0.detach - - /** Default timeout to stop looking for further implicit suggestions, in ms. - * This is usually for the first import suggestion; subsequent suggestions - * may get smaller timeouts. @see ImportSuggestions.reduceTimeBudget - */ - private var myImportSuggestionBudget: Int = - Int.MinValue // sentinel value; means whatever is set in command line option - - def importSuggestionBudget = - if myImportSuggestionBudget == Int.MinValue then ictx.settings.XimportSuggestionTimeout.value - else myImportSuggestionBudget - - def importSuggestionBudget_=(x: Int) = - myImportSuggestionBudget = x - - /** If this variable is set to `true`, some core typer operations will - * return immediately. Currently these early abort operations are - * `Typer.typed` and `Implicits.typedImplicit`. 
- */ - @volatile var isCancelled = false - - private var compiling = false - - private var myUnits: List[CompilationUnit] = Nil - private var myUnitsCached: List[CompilationUnit] = Nil - private var myFiles: Set[AbstractFile] = _ - - // `@nowarn` annotations by source file, populated during typer - private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty - // source files whose `@nowarn` annotations are processed - private val mySuppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty - // warnings issued before a source file's `@nowarn` annotations are processed, suspended so that `@nowarn` can filter them - private val mySuspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Warning]] = mutable.LinkedHashMap.empty - - object suppressions: - // When the REPL creates a new run (ReplDriver.compile), parsing is already done in the old context, with the - // previous Run. Parser warnings were suspended in the old run and need to be copied over so they are not lost. - // Same as scala/scala/commit/79ca1408c7. - def initSuspendedMessages(oldRun: Run | Null) = if oldRun != null then - mySuspendedMessages.clear() - mySuspendedMessages ++= oldRun.mySuspendedMessages - - def suppressionsComplete(source: SourceFile) = source == NoSource || mySuppressionsComplete(source) - - def addSuspendedMessage(warning: Warning) = - mySuspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning - - def nowarnAction(dia: Diagnostic): Action.Warning.type | Action.Verbose.type | Action.Silent.type = - mySuppressions.getOrElse(dia.pos.source, Nil).find(_.matches(dia)) match { - case Some(s) => - s.markUsed() - if (s.verbose) Action.Verbose - else Action.Silent - case _ => - Action.Warning - } - - def addSuppression(sup: Suppression): Unit = - val source = sup.annotPos.source - mySuppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup - - def reportSuspendedMessages(source: SourceFile)(using Context): Unit = { - // sort suppressions. they are not added in any particular order because of lazy type completion - for (sups <- mySuppressions.get(source)) - mySuppressions(source) = sups.sortBy(sup => 0 - sup.start) - mySuppressionsComplete += source - mySuspendedMessages.remove(source).foreach(_.foreach(ctx.reporter.issueIfNotSuppressed)) - } - - def runFinished(hasErrors: Boolean): Unit = - // report suspended messages (in case the run finished before typer) - mySuspendedMessages.keysIterator.toList.foreach(reportSuspendedMessages) - // report unused nowarns only if all all phases are done - if !hasErrors && ctx.settings.WunusedHas.nowarn then - for { - source <- mySuppressions.keysIterator.toList - sups <- mySuppressions.remove(source) - sup <- sups.reverse - } if (!sup.used) - report.warning("@nowarn annotation does not suppress any warnings", sup.annotPos) - - /** The compilation units currently being compiled, this may return different - * results over time. - */ - def units: List[CompilationUnit] = myUnits - - private def units_=(us: List[CompilationUnit]): Unit = - myUnits = us - - var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() - - def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = - if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then - val where = - if suspendedUnits.size == 1 then i"in ${suspendedUnits.head}." 
- else i"""among - | - | ${suspendedUnits.toList}%, % - |""" - val enableXprintSuspensionHint = - if ctx.settings.XprintSuspension.value then "" - else "\n\nCompiling with -Xprint-suspension gives more information." - report.error(em"""Cyclic macro dependencies $where - |Compilation stopped since no further progress can be made. - | - |To fix this, place macros in one set of files and their callers in another.$enableXprintSuspensionHint""") - - /** The files currently being compiled (active or suspended). - * This may return different results over time. - * These files do not have to be source files since it's possible to compile - * from TASTY. - */ - def files: Set[AbstractFile] = { - if (myUnits ne myUnitsCached) { - myUnitsCached = myUnits - myFiles = (myUnits ++ suspendedUnits).map(_.source.file).toSet - } - myFiles - } - - /** The source files of all late entered symbols, as a set */ - private var lateFiles = mutable.Set[AbstractFile]() - - /** A cache for static references to packages and classes */ - val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) - - /** Actions that need to be performed at the end of the current compilation run */ - @uncheckedCaptures - private var finalizeActions = mutable.ListBuffer[() => Unit]() - - /** Will be set to true if any of the compiled compilation units contains - * a pureFunctions language import. - */ - var pureFunsImportEncountered = false - - /** Will be set to true if any of the compiled compilation units contains - * a captureChecking language import. - */ - var ccImportEncountered = false - - def compile(files: List[AbstractFile]): Unit = - try - val codec = Codec(runContext.settings.encoding.value) - val sources = files.map(runContext.getSource(_, codec)) - compileSources(sources) - catch - case NonFatal(ex) => - if units.nonEmpty then report.echo(i"exception occurred while compiling $units%, %") - else report.echo(s"exception occurred while compiling ${files.map(_.name).mkString(", ")}") - throw ex - - /** TODO: There's a fundamental design problem here: We assemble phases using `fusePhases` - * when we first build the compiler. But we modify them with -Yskip, -Ystop - * on each run. That modification needs to either transform the tree structure, - * or we need to assemble phases on each run, and take -Yskip, -Ystop into - * account. I think the latter would be preferable. 
- */ - def compileSources(sources: List[SourceFile]): Unit = - if (sources forall (_.exists)) { - units = sources.map(CompilationUnit(_)) - compileUnits() - } - - - def compileUnits(us: List[CompilationUnit]): Unit = { - units = us - compileUnits() - } - - def compileUnits(us: List[CompilationUnit], ctx: Context): Unit = { - units = us - compileUnits()(using ctx) - } - - var profile: Profile = NoProfile - - private def compileUnits()(using Context) = Stats.maybeMonitored { - if (!ctx.mode.is(Mode.Interactive)) // IDEs might have multi-threaded access, accesses are synchronized - ctx.base.checkSingleThreaded() - - compiling = true - - profile = - if ctx.settings.Vprofile.value - || !ctx.settings.VprofileSortedBy.value.isEmpty - || ctx.settings.VprofileDetails.value != 0 - then ActiveProfile(ctx.settings.VprofileDetails.value.max(0).min(1000)) - else NoProfile - - // If testing pickler, make sure to stop after pickling phase: - val stopAfter = - if (ctx.settings.YtestPickler.value) List("pickler") - else ctx.settings.YstopAfter.value - - val pluginPlan = ctx.base.addPluginPhases(ctx.base.phasePlan) - val phases = ctx.base.fusePhases(pluginPlan, - ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) - ctx.base.usePhases(phases) - - def runPhases(using Context) = { - var lastPrintedTree: PrintedTree = NoPrintedTree - val profiler = ctx.profiler - var phasesWereAdjusted = false - - for (phase <- ctx.base.allPhases) - if (phase.isRunnable) - Stats.trackTime(s"$phase ms ") { - val start = System.currentTimeMillis - val profileBefore = profiler.beforePhase(phase) - units = phase.runOn(units) - profiler.afterPhase(phase, profileBefore) - if (ctx.settings.Xprint.value.containsPhase(phase)) - for (unit <- units) - lastPrintedTree = - printTree(lastPrintedTree)(using ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) - report.informTime(s"$phase ", start) - Stats.record(s"total trees at end of $phase", ast.Trees.ntrees) - for (unit <- units) - Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) - ctx.typerState.gc() - } - if !phasesWereAdjusted then - phasesWereAdjusted = true - if !Feature.ccEnabledSomewhere then - ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) - ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) - - profiler.finished() - } - - val runCtx = ctx.fresh - runCtx.setProfiler(Profiler()) - unfusedPhases.foreach(_.initContext(runCtx)) - runPhases(using runCtx) - if (!ctx.reporter.hasErrors) - Rewrites.writeBack() - suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) - while (finalizeActions.nonEmpty) { - val action = finalizeActions.remove(0) - action() - } - compiling = false - } - - /** Enter top-level definitions of classes and objects contained in source file `file`. - * The newly added symbols replace any previously entered symbols. - * If `typeCheck = true`, also run typer on the compilation unit, and set - * `rootTreeOrProvider`. 
- */ - def lateCompile(file: AbstractFile, typeCheck: Boolean)(using Context): Unit = - if (!files.contains(file) && !lateFiles.contains(file)) { - lateFiles += file - - val codec = Codec(ctx.settings.encoding.value) - val unit = CompilationUnit(ctx.getSource(file, codec)) - val unitCtx = runContext.fresh - .setCompilationUnit(unit) - .withRootImports - - def process()(using Context) = - ctx.typer.lateEnterUnit(doTypeCheck => - if typeCheck then - if compiling then finalizeActions += doTypeCheck - else doTypeCheck() - ) - - process()(using unitCtx) - } - - private sealed trait PrintedTree - private /*final*/ case class SomePrintedTree(phase: String, tree: String) extends PrintedTree - private object NoPrintedTree extends PrintedTree - - private def printTree(last: PrintedTree)(using Context): PrintedTree = { - val unit = ctx.compilationUnit - val fusedPhase = ctx.phase.prevMega - val echoHeader = f"[[syntax trees at end of $fusedPhase%25s]] // ${unit.source}" - val tree = if ctx.isAfterTyper then unit.tpdTree else unit.untpdTree - val treeString = fusedPhase.show(tree) - - last match { - case SomePrintedTree(phase, lastTreeString) if lastTreeString == treeString => - report.echo(s"$echoHeader: unchanged since $phase") - last - - case SomePrintedTree(phase, lastTreeString) if ctx.settings.XprintDiff.value || ctx.settings.XprintDiffDel.value => - val diff = DiffUtil.mkColoredCodeDiff(treeString, lastTreeString, ctx.settings.XprintDiffDel.value) - report.echo(s"$echoHeader\n$diff\n") - SomePrintedTree(fusedPhase.phaseName, treeString) - - case _ => - report.echo(s"$echoHeader\n$treeString\n") - SomePrintedTree(fusedPhase.phaseName, treeString) - } - } - - def compileFromStrings(scalaSources: List[String], javaSources: List[String] = Nil): Unit = { - def sourceFile(source: String, isJava: Boolean): SourceFile = { - val uuid = java.util.UUID.randomUUID().toString - val ext = if (isJava) "java" else "scala" - val name = s"compileFromString-$uuid.$ext" - SourceFile.virtual(name, source) - } - val sources = - scalaSources.map(sourceFile(_, isJava = false)) ++ - javaSources.map(sourceFile(_, isJava = true)) - - compileSources(sources) - } - - /** Print summary of warnings and errors encountered */ - def printSummary(): Unit = { - printMaxConstraint() - val r = runContext.reporter - if !r.errorsReported then - profile.printSummary() - r.summarizeUnreportedWarnings() - r.printSummary() - } - - override def reset(): Unit = { - super[ImplicitRunInfo].reset() - super[ConstraintRunInfo].reset() - myCtx = null - myUnits = Nil - myUnitsCached = Nil - } - - /** Produces the following contexts, from outermost to innermost - * - * bootStrap: A context with next available runId and a scope consisting of - * the RootPackage _root_ - * start A context with RootClass as owner and the necessary initializations - * for type checking. 
- * imports For each element of RootImports, an import context - */ - protected def rootContext(using Context): DetachedContext = { - ctx.initialize() - ctx.base.setPhasePlan(comp.phases) - val rootScope = new MutableScope(0) - val bootstrap = ctx.fresh - .setPeriod(Period(comp.nextRunId, FirstPhaseId)) - .setScope(rootScope) - rootScope.enter(ctx.definitions.RootPackage)(using bootstrap) - var start = bootstrap.fresh - .setOwner(defn.RootClass) - .setTyper(new Typer) - .addMode(Mode.ImplicitsEnabled) - .setTyperState(ctx.typerState.fresh(ctx.reporter)) - if ctx.settings.YexplicitNulls.value && !Feature.enabledBySetting(nme.unsafeNulls) then - start = start.addMode(Mode.SafeNulls) - ctx.initialize()(using start) // re-initialize the base context with start - - // `this` must be unchecked for safe initialization because by being passed to setRun during - // initialization, it is not yet considered fully initialized by the initialization checker - start.setRun(this: @unchecked).detach - } - - private var myCtx: DetachedContext | Null = rootContext(using ictx) - - /** The context created for this run */ - given runContext[Dummy_so_its_a_def]: DetachedContext = myCtx.nn - assert(runContext.runId <= Periods.MaxPossibleRunId) -} diff --git a/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala b/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala deleted file mode 100644 index 2e0d9a08f25d..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala +++ /dev/null @@ -1,9 +0,0 @@ -package dotty.tools.dotc - -import config.Properties._ -import config.CompilerCommand - -object ScalacCommand extends CompilerCommand: - override def cmdName: String = "scalac" - override def versionMsg: String = s"Scala compiler $versionString -- $copyrightString" - override def ifErrorsMsg: String = " scalac -help gives more information" diff --git a/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled b/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled deleted file mode 100644 index 6bf7530faf24..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled +++ /dev/null @@ -1,258 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ -import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._ - -// TODO: revise, integrate in a checking phase. 
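For reference, the batch entry points removed above (`Driver.setup`, the `process` overloads, and `object Main extends Driver`) are normally driven as in the sketch below. It relies only on the `Main.process(args: Array[String]): Reporter` overload shown in the deleted sources; the output directory and source file name are placeholder values.

```scala
import dotty.tools.dotc.Main

object CompileOnce:
  def main(args: Array[String]): Unit =
    // `process` parses the arguments, runs the configured phases, and returns
    // the Reporter it used; `hasErrors` tells us whether compilation failed.
    val reporter = Main.process(Array("-d", "out", "Hello.scala"))
    sys.exit(if reporter.hasErrors then 1 else 0)
```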
-object CheckTrees { - - import tpd._ - - def check(p: Boolean, msg: => String = "")(using Context): Unit = assert(p, msg) - - def checkTypeArg(arg: Tree, bounds: TypeBounds)(using Context): Unit = { - check(arg.isValueType) - check(bounds contains arg.tpe) - } - - def escapingRefs(block: Block)(using Context): collection.Set[NamedType] = { - var hoisted: Set[Symbol] = Set() - lazy val locals = ctx.typeAssigner.localSyms(block.stats).toSet - def isLocal(sym: Symbol): Boolean = - (locals contains sym) && !isHoistableClass(sym) - def isHoistableClass(sym: Symbol) = - sym.isClass && { - (hoisted contains sym) || { - hoisted += sym - !classLeaks(sym.asClass) - } - } - def leakingTypes(tp: Type): collection.Set[NamedType] = - tp namedPartsWith (tp => isLocal(tp.symbol)) - def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty - def classLeaks(sym: ClassSymbol): Boolean = - (ctx.owner is Method) || // can't hoist classes out of method bodies - (sym.info.parents exists typeLeaks) || - (sym.decls.toList exists (t => typeLeaks(t.info))) - leakingTypes(block.tpe) - } - - def checkType(tree: Tree)(using Context): Unit = tree match { - case Ident(name) => - case Select(qualifier, name) => - check(qualifier.isValue) - check(qualifier.tpe =:= tree.tpe.normalizedPrefix) - val denot = qualifier.tpe.member(name) - check(denot.exists) - check(denot.hasAltWith(_.symbol == tree.symbol)) - case This(cls) => - case Super(qual, mixin) => - check(qual.isValue) - val cls = qual.tpe.typeSymbol - check(cls.isClass) - case Apply(fn, args) => - def checkArg(arg: Tree, name: Name, formal: Type): Unit = { - arg match { - case NamedArg(argName, _) => - check(argName == name) - case _ => - check(arg.isValue) - } - check(arg.tpe <:< formal) - } - val MethodType(paramNames, paramTypes) = fn.tpe.widen // checked already at construction - args.lazyZip(paramNames).lazyZip(paramTypes) foreach checkArg - case TypeApply(fn, args) => - val pt @ PolyType(_) = fn.tpe.widen // checked already at construction - args.lazyZip(pt.instantiateBounds(args map (_.tpe))) foreach checkTypeArg - case Literal(const: Constant) => - case New(tpt) => - check(tpt.isValueType) - val cls = tpt.tpe.typeSymbol - check(cls.isClass) - check(!(cls is AbstractOrTrait)) - case Pair(left, right) => - check(left.isValue) - check(right.isValue) - case Typed(expr, tpt) => - check(tpt.isValueType) - expr.tpe.widen match { - case tp: MethodType => - val cls = tpt.tpe.typeSymbol - check(cls.isClass) - check((cls is Trait) || - cls.primaryConstructor.info.paramTypess.flatten.isEmpty) - val absMembers = tpt.tpe.abstractTermMembers - check(absMembers.size == 1) - check(tp <:< absMembers.head.info) - case _ => - check(expr.isValueOrPattern) - check(expr.tpe <:< tpt.tpe.translateParameterized(defn.RepeatedParamClass, defn.SeqClass)) - } - case NamedArg(name, arg) => - case Assign(lhs, rhs) => - check(lhs.isValue); check(rhs.isValue) - lhs.tpe match { - case ltpe: TermRef => - check(ltpe.symbol is Mutable) - case _ => - check(false) - } - check(rhs.tpe <:< lhs.tpe.widen) - case tree @ Block(stats, expr) => - check(expr.isValue) - check(escapingRefs(tree).isEmpty) - case If(cond, thenp, elsep) => - check(cond.isValue); check(thenp.isValue); check(elsep.isValue) - check(cond.tpe isRef defn.BooleanClass) - case Closure(env, meth, target) => - meth.tpe.widen match { - case mt @ MethodType(_, paramTypes) => - if (target.isEmpty) { - check(env.length < paramTypes.length) - for ((arg, formal) <- env zip paramTypes) - check(arg.tpe <:< formal) - } - else - // env is stored 
in class, not method - target.tpe match { - case SAMType(targetMeth) => - check(mt <:< targetMeth.info) - } - } - case Match(selector, cases) => - check(selector.isValue) - // are any checks that relate selector and patterns desirable? - case CaseDef(pat, guard, body) => - check(pat.isValueOrPattern); check(guard.isValue); check(body.isValue) - check(guard.tpe.derivesFrom(defn.BooleanClass)) - case Return(expr, from) => - check(expr.isValue); check(from.isTerm) - check(from.tpe.termSymbol.isRealMethod) - case Try(block, handler, finalizer) => - check(block.isTerm) - check(finalizer.isTerm) - check(handler.isTerm) - check(handler.tpe derivesFrom defn.FunctionClass(1)) - check(handler.tpe.baseArgInfos(defn.FunctionClass(1)).head <:< defn.ThrowableType) - case Throw(expr) => - check(expr.isValue) - check(expr.tpe.derivesFrom(defn.ThrowableClass)) - case SeqLiteral(elems) => - val elemtp = tree.tpe.elemType - for (elem <- elems) { - check(elem.isValue) - check(elem.tpe <:< elemtp) - } - case TypeTree(original) => - if (!original.isEmpty) { - check(original.isValueType) - check(original.tpe == tree.tpe) - } - case SingletonTypeTree(ref) => - check(ref.isValue) - check(ref.symbol.isStable) - case SelectFromTypeTree(qualifier, name) => - check(qualifier.isValueType) - check(qualifier.tpe =:= tree.tpe.normalizedPrefix) - val denot = qualifier.tpe.member(name) - check(denot.exists) - check(denot.symbol == tree.symbol) - case AndTypeTree(left, right) => - check(left.isValueType); check(right.isValueType) - case OrTypeTree(left, right) => - check(left.isValueType); check(right.isValueType) - case RefinedTypeTree(tpt, refinements) => - check(tpt.isValueType) - def checkRefinements(forbidden: Set[Symbol], rs: List[Tree]): Unit = rs match { - case r :: rs1 => - val rsym = r.symbol - check(rsym.isTerm || rsym.isAbstractOrAliasType) - if (rsym.isAbstractType) check(tpt.tpe.member(rsym.name).exists) - check(rsym.info forallParts { - case nt: NamedType => !(forbidden contains nt.symbol) - case _ => true - }) - checkRefinements(forbidden - rsym, rs1) - case nil => - } - checkRefinements(ctx.typeAssigner.localSyms(refinements).toSet, refinements) - case AppliedTypeTree(tpt, args) => - check(tpt.isValueType) - val tparams = tpt.tpe.typeParams - check(sameLength(tparams, args)) - args.lazyZip(tparams map (_.info.bounds)) foreach checkTypeArg - case TypeBoundsTree(lo, hi) => - check(lo.isValueType); check(hi.isValueType) - check(lo.tpe <:< hi.tpe) - case Bind(sym, body) => - check(body.isValueOrPattern) - check(!(tree.symbol is Method)) - body match { - case Ident(nme.WILDCARD) => - case _ => check(body.tpe.widen =:= tree.symbol.info) - } - case Alternative(alts) => - for (alt <- alts) check(alt.isValueOrPattern) - case UnApply(fun, implicits, args) => // todo: review - check(fun.isTerm) - for (arg <- args) check(arg.isValueOrPattern) - val funtpe @ MethodType(_, _) = fun.tpe.widen - fun.symbol.name match { // check arg arity - case nme.unapplySeq => - // args need to be wrapped in (...: _*) - check(args.length == 1) - check(args.head.isInstanceOf[SeqLiteral]) - case nme.unapply => - val rtp = funtpe.resultType - if (rtp isRef defn.BooleanClass) - check(args.isEmpty) - else { - check(rtp isRef defn.OptionClass) - val normArgs = rtp.argTypesHi match { - case optionArg :: Nil => - optionArg.argTypesHi match { - case Nil => - optionArg :: Nil - case tupleArgs if defn.isTupleNType(optionArg) => - tupleArgs - } - case _ => - check(false) - Nil - } - check(sameLength(normArgs, args)) - } - } - case ValDef(mods, name, 
tpt, rhs) => - check(!(tree.symbol is Method)) - if (!rhs.isEmpty) { - check(rhs.isValue) - check(rhs.tpe <:< tpt.tpe) - } - case DefDef(mods, name, tparams, vparamss, tpt, rhs) => - check(tree.symbol is Method) - if (!rhs.isEmpty) { - check(rhs.isValue) - check(rhs.tpe <:< tpt.tpe) - } - case TypeDef(mods, name, tpt) => - check(tpt.isInstanceOf[Template] || tpt.tpe.isInstanceOf[TypeBounds]) - case Template(constr, parents, selfType, body) => - case Import(expr, selectors) => - check(expr.isValue) - check(expr.tpe.termSymbol.isStable) - case PackageDef(pid, stats) => - check(pid.isTerm) - check(pid.symbol is Package) - case Annotated(annot, arg) => - check(annot.isInstantiation) - check(annot.symbol.owner.isSubClass(defn.AnnotationClass)) - check(arg.isValueType || arg.isValue) - case EmptyTree => - } -} - diff --git a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala deleted file mode 100644 index 390e58d89245..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala +++ /dev/null @@ -1,1979 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._ -import Symbols._, StdNames._, Trees._, ContextOps._ -import Decorators._, transform.SymUtils._ -import Annotations.Annotation -import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName, WildcardParamName} -import typer.{Namer, Checking} -import util.{Property, SourceFile, SourcePosition, Chars} -import config.Feature.{sourceVersion, migrateTo3, enabled} -import config.SourceVersion._ -import collection.mutable.ListBuffer -import reporting._ -import annotation.constructorOnly -import printing.Formatting.hl -import config.Printers - -import scala.annotation.internal.sharable - -object desugar { - import untpd._ - import DesugarEnums._ - - /** An attachment for companion modules of classes that have a `derives` clause. - * The position value indicates the start position of the template of the - * deriving class. - */ - val DerivingCompanion: Property.Key[SourcePosition] = Property.Key() - - /** An attachment for match expressions generated from a PatDef or GenFrom. - * Value of key == one of IrrefutablePatDef, IrrefutableGenFrom - */ - val CheckIrrefutable: Property.Key[MatchCheck] = Property.StickyKey() - - /** A multi-line infix operation with the infix operator starting a new line. - * Used for explaining potential errors. - */ - val MultiLineInfix: Property.Key[Unit] = Property.StickyKey() - - /** An attachment key to indicate that a ValDef originated from parameter untupling. - */ - val UntupledParam: Property.Key[Unit] = Property.StickyKey() - - /** What static check should be applied to a Match? */ - enum MatchCheck { - case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom - } - - /** Is `name` the name of a method that can be invalidated as a compiler-generated - * case class method if it clashes with a user-defined method? - */ - def isRetractableCaseClassMethodName(name: Name)(using Context): Boolean = name match { - case nme.apply | nme.unapply | nme.unapplySeq | nme.copy => true - case DefaultGetterName(nme.copy, _) => true - case _ => false - } - - /** Is `name` the name of a method that is added unconditionally to case classes? 
*/ - def isDesugaredCaseClassMethodName(name: Name)(using Context): Boolean = - isRetractableCaseClassMethodName(name) || name.isSelectorName - -// ----- DerivedTypeTrees ----------------------------------- - - class SetterParamTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { - def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.info.resultType) - } - - class TypeRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { - def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.typeRef) - } - - class TermRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { - def derivedTree(sym: Symbol)(using Context): tpd.Tree = tpd.ref(sym) - } - - /** A type tree that computes its type from an existing parameter. */ - class DerivedFromParamTree()(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { - - /** Complete the appropriate constructors so that OriginalSymbol attachments are - * pushed to DerivedTypeTrees. - */ - override def ensureCompletions(using Context): Unit = { - def completeConstructor(sym: Symbol) = - sym.infoOrCompleter match { - case completer: Namer#ClassCompleter => - completer.completeConstructor(sym) - case _ => - } - - if (!ctx.owner.is(Package)) - if (ctx.owner.isClass) { - completeConstructor(ctx.owner) - if (ctx.owner.is(ModuleClass)) - completeConstructor(ctx.owner.linkedClass) - } - else ensureCompletions(using ctx.outer) - } - - /** Return info of original symbol, where all references to siblings of the - * original symbol (i.e. sibling and original symbol have the same owner) - * are rewired to same-named parameters or accessors in the scope enclosing - * the current scope. The current scope is the scope owned by the defined symbol - * itself, that's why we have to look one scope further out. If the resulting - * type is an alias type, dealias it. This is necessary because the - * accessor of a type parameter is a private type alias that cannot be accessed - * from subclasses. 
- */ - def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = { - val dctx = ctx.detach - val relocate = new TypeMap(using dctx) { - val originalOwner = sym.owner - def apply(tp: Type) = tp match { - case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) => - val defctx = mapCtx.detach.outersIterator.dropWhile(_.scope eq mapCtx.scope).next() - var local = defctx.denotNamed(tp.name).suchThat(_.isParamOrAccessor).symbol - if (local.exists) (defctx.owner.thisType select local).dealiasKeepAnnots - else { - def msg = - em"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" - ErrorType(msg).assertingErrorsReported(msg) - } - case _ => - mapOver(tp) - } - } - tpd.TypeTree(relocate(sym.info)) - } - } - - /** A type definition copied from `tdef` with a rhs typetree derived from it */ - def derivedTypeParam(tdef: TypeDef)(using Context): TypeDef = - cpy.TypeDef(tdef)( - rhs = DerivedFromParamTree().withSpan(tdef.rhs.span).watching(tdef) - ) - - /** A derived type definition watching `sym` */ - def derivedTypeParamWithVariance(sym: TypeSymbol)(using Context): TypeDef = - val variance = VarianceFlags & sym.flags - TypeDef(sym.name, DerivedFromParamTree().watching(sym)).withFlags(TypeParam | Synthetic | variance) - - /** A value definition copied from `vdef` with a tpt typetree derived from it */ - def derivedTermParam(vdef: ValDef)(using Context): ValDef = - cpy.ValDef(vdef)( - tpt = DerivedFromParamTree().withSpan(vdef.tpt.span).watching(vdef)) - -// ----- Desugar methods ------------------------------------------------- - - /** Setter generation is needed for: - * - non-private class members - * - all trait members - * - all package object members - */ - def isSetterNeeded(valDef: ValDef)(using Context): Boolean = { - val mods = valDef.mods - mods.is(Mutable) - && ctx.owner.isClass - && (!mods.is(Private) || ctx.owner.is(Trait) || ctx.owner.isPackageObject) - } - - /** var x: Int = expr - * ==> - * def x: Int = expr - * def x_=($1: ): Unit = () - * - * Generate setter where needed - */ - def valDef(vdef0: ValDef)(using Context): Tree = - val vdef @ ValDef(_, tpt, rhs) = vdef0 - val valName = normalizeName(vdef, tpt).asTermName - var mods1 = vdef.mods - - def dropInto(tpt: Tree): Tree = tpt match - case Into(tpt1) => - mods1 = vdef.mods.withAddedAnnotation( - TypedSplice( - Annotation(defn.AllowConversionsAnnot).tree.withSpan(tpt.span.startPos))) - tpt1 - case ByNameTypeTree(tpt1) => - cpy.ByNameTypeTree(tpt)(dropInto(tpt1)) - case PostfixOp(tpt1, op) if op.name == tpnme.raw.STAR => - cpy.PostfixOp(tpt)(dropInto(tpt1), op) - case _ => - tpt - - val vdef1 = cpy.ValDef(vdef)(name = valName, tpt = dropInto(tpt)) - .withMods(mods1) - - if isSetterNeeded(vdef) then - val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) - // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) - val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral - val setter = cpy.DefDef(vdef)( - name = valName.setterName, - paramss = (setterParam :: Nil) :: Nil, - tpt = TypeTree(defn.UnitType), - rhs = setterRhs - ).withMods((vdef.mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) - .dropEndMarker() // the end marker should only appear on the getter definition - Thicket(vdef1, setter) - else vdef1 - end valDef - - def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = - for (tpt 
<- tpts) yield { - val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param - val epname = EvidenceParamName.fresh() - ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) - } - - def mapParamss(paramss: List[ParamClause]) - (mapTypeParam: TypeDef => TypeDef) - (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = - paramss.mapConserve { - case TypeDefs(tparams) => tparams.mapConserve(mapTypeParam) - case ValDefs(vparams) => vparams.mapConserve(mapTermParam) - case _ => unreachable() - } - - /** 1. Expand context bounds to evidence params. E.g., - * - * def f[T >: L <: H : B](params) - * ==> - * def f[T >: L <: H](params)(implicit evidence$0: B[T]) - * - * 2. Expand default arguments to default getters. E.g, - * - * def f[T: B](x: Int = 1)(y: String = x + "m") = ... - * ==> - * def f[T](x: Int)(y: String)(implicit evidence$0: B[T]) = ... - * def f$default$1[T] = 1 - * def f$default$2[T](x: Int) = x + "m" - */ - private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = - addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) - - private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = - val DefDef(_, paramss, tpt, rhs) = meth - val evidenceParamBuf = ListBuffer[ValDef]() - - def desugarContextBounds(rhs: Tree): Tree = rhs match - case ContextBounds(tbounds, cxbounds) => - val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit - evidenceParamBuf ++= makeImplicitParameters( - cxbounds, iflag, forPrimaryConstructor = isPrimaryConstructor) - tbounds - case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) - case _ => - rhs - - val paramssNoContextBounds = - mapParamss(paramss) { - tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) - }(identity) - - rhs match - case MacroTree(call) => - cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) - case _ => - addEvidenceParams( - cpy.DefDef(meth)( - name = normalizeName(meth, tpt).asTermName, - paramss = paramssNoContextBounds), - evidenceParamBuf.toList) - end elimContextBounds - - def addDefaultGetters(meth: DefDef)(using Context): Tree = - - /** The longest prefix of parameter lists in paramss whose total number of - * ValDefs does not exceed `n` - */ - def takeUpTo(paramss: List[ParamClause], n: Int): List[ParamClause] = paramss match - case ValDefs(vparams) :: paramss1 => - val len = vparams.length - if len <= n then vparams :: takeUpTo(paramss1, n - len) else Nil - case TypeDefs(tparams) :: paramss1 => - tparams :: takeUpTo(paramss1, n) - case _ => - Nil - - def dropContextBounds(tparam: TypeDef): TypeDef = - def dropInRhs(rhs: Tree): Tree = rhs match - case ContextBounds(tbounds, _) => - tbounds - case rhs @ LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, dropInRhs(body)) - case _ => - rhs - cpy.TypeDef(tparam)(rhs = dropInRhs(tparam.rhs)) - - def paramssNoRHS = mapParamss(meth.paramss)(identity) { - vparam => - if vparam.rhs.isEmpty then vparam - else cpy.ValDef(vparam)(rhs = EmptyTree).withMods(vparam.mods | HasDefault) - } - - def getterParamss(n: Int): List[ParamClause] = - mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) - } { - vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) - } - - def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match - case ValDefs(vparam :: 
vparams) :: paramss1 => - def defaultGetter: DefDef = - DefDef( - name = DefaultGetterName(meth.name, n), - paramss = getterParamss(n), - tpt = TypeTree(), - rhs = vparam.rhs - ) - .withMods(Modifiers( - meth.mods.flags & (AccessFlags | Synthetic) | (vparam.mods.flags & Inline), - meth.mods.privateWithin)) - val rest = defaultGetters(vparams :: paramss1, n + 1) - if vparam.rhs.isEmpty then rest else defaultGetter :: rest - case _ :: paramss1 => // skip empty parameter lists and type parameters - defaultGetters(paramss1, n) - case Nil => - Nil - - val defGetters = defaultGetters(meth.paramss, 0) - if defGetters.isEmpty then meth - else Thicket(cpy.DefDef(meth)(paramss = paramssNoRHS) :: defGetters) - end addDefaultGetters - - /** Add an explicit ascription to the `expectedTpt` to every tail splice. - * - * - `'{ x }` -> `'{ x }` - * - `'{ $x }` -> `'{ $x: T }` - * - `'{ if (...) $x else $y }` -> `'{ if (...) ($x: T) else ($y: T) }` - * - * Note that the splice `$t: T` will be typed as `${t: Expr[T]}` - */ - def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = { - def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match { - // Add the expected type as an ascription - case _: untpd.Splice => - untpd.Typed(tree, expectedTpt).withSpan(tree.span) - case Typed(expr: untpd.Splice, tpt) => - cpy.Typed(tree)(expr, untpd.makeAndType(tpt, expectedTpt).withSpan(tpt.span)) - - // Propagate down the expected type to the leafs of the expression - case Block(stats, expr) => - cpy.Block(tree)(stats, adaptToExpectedTpt(expr)) - case If(cond, thenp, elsep) => - cpy.If(tree)(cond, adaptToExpectedTpt(thenp), adaptToExpectedTpt(elsep)) - case untpd.Parens(expr) => - cpy.Parens(tree)(adaptToExpectedTpt(expr)) - case Match(selector, cases) => - val newCases = cases.map(cdef => cpy.CaseDef(cdef)(body = adaptToExpectedTpt(cdef.body))) - cpy.Match(tree)(selector, newCases) - case untpd.ParsedTry(expr, handler, finalizer) => - cpy.ParsedTry(tree)(adaptToExpectedTpt(expr), adaptToExpectedTpt(handler), finalizer) - - // Tree does not need to be ascribed - case _ => - tree - } - adaptToExpectedTpt(tree) - } - - /** Add all evidence parameters in `params` as implicit parameters to `meth`. - * If the parameters of `meth` end in an implicit parameter list or using clause, - * evidence parameters are added in front of that list. Otherwise they are added - * as a separate parameter clause. 
- */ - private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = - params match - case Nil => - meth - case evidenceParams => - val paramss1 = meth.paramss.reverse match - case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => - ((evidenceParams ++ vparams) :: rparamss).reverse - case _ => - meth.paramss :+ evidenceParams - cpy.DefDef(meth)(paramss = paramss1) - - /** The implicit evidence parameters of `meth`, as generated by `desugar.defDef` */ - private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = - meth.paramss.reverse match { - case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.name.is(EvidenceParamName)) - case _ => - Nil - } - - @sharable private val synthetic = Modifiers(Synthetic) - - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { - var mods = tparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) - tparam.withMods(mods & EmptyFlags | Param) - } - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { - var mods = vparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) - val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) - } - - def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = - paramss.foldLeft(fn) { (fn, params) => params match - case TypeDefs(params) => - TypeApply(fn, params.map(refOfDef)) - case (vparam: ValDef) :: _ if vparam.mods.is(Given) => - Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) - case _ => - Apply(fn, params.map(refOfDef)) - } - - /** The expansion of a class definition. See inline comments for what is involved */ - def classDef(cdef: TypeDef)(using Context): Tree = { - val impl @ Template(constr0, _, self, _) = cdef.rhs: @unchecked - val className = normalizeName(cdef, impl).asTypeName - val parents = impl.parents - val mods = cdef.mods - val companionMods = mods - .withFlags((mods.flags & (AccessFlags | Final)).toCommonFlags) - .withMods(Nil) - .withAnnotations(Nil) - - var defaultGetters: List[Tree] = Nil - - def decompose(ddef: Tree): DefDef = ddef match { - case meth: DefDef => meth - case Thicket((meth: DefDef) :: defaults) => - defaultGetters = defaults - meth - } - - val constr1 = decompose(defDef(impl.constr, isPrimaryConstructor = true)) - - // The original type and value parameters in the constructor already have the flags - // needed to be type members (i.e. param, and possibly also private and local unless - // prefixed by type or val). `tparams` and `vparamss` are the type parameters that - // go in `constr`, the constructor after desugaring. - - /** Does `tree' look like a reference to AnyVal? 
Temporary test before we have inline classes */ - def isAnyVal(tree: Tree): Boolean = tree match { - case Ident(tpnme.AnyVal) => true - case Select(qual, tpnme.AnyVal) => isScala(qual) - case _ => false - } - def isScala(tree: Tree): Boolean = tree match { - case Ident(nme.scala) => true - case Select(Ident(nme.ROOTPKG), nme.scala) => true - case _ => false - } - - def namePos = cdef.sourcePos.withSpan(cdef.nameSpan) - - val isObject = mods.is(Module) - val isCaseClass = mods.is(Case) && !isObject - val isCaseObject = mods.is(Case) && isObject - val isEnum = mods.isEnumClass && !mods.is(Module) - def isEnumCase = mods.isEnumCase - def isNonEnumCase = !isEnumCase && (isCaseClass || isCaseObject) - val isValueClass = parents.nonEmpty && isAnyVal(parents.head) - // This is not watertight, but `extends AnyVal` will be replaced by `inline` later. - - val originalTparams = constr1.leadingTypeParams - val originalVparamss = asTermOnly(constr1.trailingParamss) - lazy val derivedEnumParams = enumClass.typeParams.map(derivedTypeParamWithVariance) - val impliedTparams = - if (isEnumCase) { - val tparamReferenced = typeParamIsReferenced( - enumClass.typeParams, originalTparams, originalVparamss, parents) - if (originalTparams.isEmpty && (parents.isEmpty || tparamReferenced)) - derivedEnumParams.map(tdef => tdef.withFlags(tdef.mods.flags | PrivateLocal)) - else originalTparams - } - else originalTparams - - if mods.is(Trait) then - for vparams <- originalVparamss; vparam <- vparams do - if isByNameType(vparam.tpt) then - report.error(em"implementation restriction: traits cannot have by name parameters", vparam.srcPos) - - // Annotations on class _type_ parameters are set on the derived parameters - // but not on the constructor parameters. The reverse is true for - // annotations on class _value_ parameters. - val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) - val constrVparamss = - if (originalVparamss.isEmpty) { // ensure parameter list is non-empty - if (isCaseClass) - report.error(CaseClassMissingParamList(cdef), namePos) - ListOfNil - } - else if (isCaseClass && originalVparamss.head.exists(_.mods.isOneOf(GivenOrImplicit))) { - report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) - ListOfNil - } - else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) - val derivedTparams = - constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => - derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) - val derivedVparamss = - constrVparamss.nestedMap(vparam => - derivedTermParam(vparam).withAnnotations(Nil)) - - val constr = cpy.DefDef(constr1)(paramss = joinParams(constrTparams, constrVparamss)) - - val (normalizedBody, enumCases, enumCompanionRef) = { - // Add constructor type parameters and evidence implicit parameters - // to auxiliary constructors; set defaultGetters as a side effect. 
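Concretely, the context-bound and default-getter rewrites documented in the `defDef`/`addDefaultGetters` comments above amount to transformations of the following shape. This is a schematic sketch: `lookup` is an invented method, and the `evidence$`/`$default$` names simply follow the naming scheme quoted in those comments.

```scala
object Before:
  def lookup[K: Ordering](key: K, limit: Int = 10): Int = ???

object After: // roughly what elimContextBounds and addDefaultGetters produce
  def lookup[K](key: K, limit: Int)(implicit evidence$1: Ordering[K]): Int = ???
  def lookup$default$2[K]: Int = 10
```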
- def expandConstructor(tree: Tree) = tree match { - case ddef: DefDef if ddef.name.isConstructorName => - decompose( - defDef( - addEvidenceParams( - cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) - case stat => - stat - } - // The Identifiers defined by a case - def caseIds(tree: Tree): List[Ident] = tree match { - case tree: MemberDef => Ident(tree.name.toTermName) :: Nil - case PatDef(_, ids: List[Ident] @ unchecked, _, _) => ids - } - - val stats0 = impl.body.map(expandConstructor) - val stats = - if (ctx.owner eq defn.ScalaPackageClass) && defn.hasProblematicGetClass(className) then - stats0.filterConserve { - case ddef: DefDef => - ddef.name ne nme.getClass_ - case _ => - true - } - else - stats0 - - if (isEnum) { - val (enumCases, enumStats) = stats.partition(DesugarEnums.isEnumCase) - if (enumCases.isEmpty) - report.error(EnumerationsShouldNotBeEmpty(cdef), namePos) - else - enumCases.last.pushAttachment(DesugarEnums.DefinesEnumLookupMethods, ()) - val enumCompanionRef = TermRefTree() - val enumImport = - Import(enumCompanionRef, enumCases.flatMap(caseIds).map( - enumCase => - ImportSelector(enumCase.withSpan(enumCase.span.startPos)) - ) - ) - (enumImport :: enumStats, enumCases, enumCompanionRef) - } - else (stats, Nil, EmptyTree) - } - - def anyRef = ref(defn.AnyRefAlias.typeRef) - - val arity = constrVparamss.head.length - - val classTycon: Tree = TypeRefTree() // watching is set at end of method - - def appliedTypeTree(tycon: Tree, args: List[Tree]) = - (if (args.isEmpty) tycon else AppliedTypeTree(tycon, args)) - .withSpan(cdef.span.startPos) - - def isHK(tparam: Tree): Boolean = tparam match { - case TypeDef(_, LambdaTypeTree(tparams, body)) => true - case TypeDef(_, rhs: DerivedTypeTree) => isHK(rhs.watched) - case _ => false - } - - def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { - val targs = for (tparam <- tparams) yield { - val targ = refOfDef(tparam) - def fullyApplied(tparam: Tree): Tree = tparam match { - case TypeDef(_, LambdaTypeTree(tparams, body)) => - AppliedTypeTree(targ, tparams.map(_ => WildcardTypeBoundsTree())) - case TypeDef(_, rhs: DerivedTypeTree) => - fullyApplied(rhs.watched) - case _ => - targ - } - if (widenHK) fullyApplied(tparam) else targ - } - appliedTypeTree(tycon, targs) - } - - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { - case PostfixOp(_, Ident(tpnme.raw.STAR)) => true - case _ => false - } - - // a reference to the class type bound by `cdef`, with type parameters coming from the constructor - val classTypeRef = appliedRef(classTycon) - - // a reference to `enumClass`, with type parameters coming from the case constructor - lazy val enumClassTypeRef = - if (enumClass.typeParams.isEmpty) - enumClassRef - else if (originalTparams.isEmpty) - appliedRef(enumClassRef) - else { - report.error(TypedCaseDoesNotExplicitlyExtendTypedEnum(enumClass, cdef) - , cdef.srcPos.startPos) - appliedTypeTree(enumClassRef, constrTparams map (_ => anyRef)) - } - - // new C[Ts](paramss) - lazy val creatorExpr = - val vparamss = constrVparamss match - case (vparam :: _) :: _ if vparam.mods.is(Implicit) => // add a leading () to match class parameters - Nil :: constrVparamss - case _ => - if constrVparamss.nonEmpty && constrVparamss.forall { - case vparam :: _ => vparam.mods.is(Given) - case _ => false - } - then constrVparamss :+ Nil // add a trailing () to match class parameters 
- else constrVparamss - val nu = vparamss.foldLeft(makeNew(classTypeRef)) { (nu, vparams) => - val app = Apply(nu, vparams.map(refOfDef)) - vparams match { - case vparam :: _ if vparam.mods.is(Given) => app.setApplyKind(ApplyKind.Using) - case _ => app - } - } - ensureApplied(nu) - - val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags - - // Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) - // def _1: T1 = this.p1 - // ... - // def _N: TN = this.pN (unless already given as valdef or parameterless defdef) - // def copy(p1: T1 = p1..., pN: TN = pN)(moreParams) = - // new C[...](p1, ..., pN)(moreParams) - val (caseClassMeths, enumScaffolding) = { - def syntheticProperty(name: TermName, tpt: Tree, rhs: Tree) = - DefDef(name, Nil, tpt, rhs).withMods(synthetic) - - def productElemMeths = - val caseParams = derivedVparamss.head.toArray - val selectorNamesInBody = normalizedBody.collect { - case vdef: ValDef if vdef.name.isSelectorName => - vdef.name - case ddef: DefDef if ddef.name.isSelectorName && ddef.paramss.isEmpty => - ddef.name - } - for i <- List.range(0, arity) - selName = nme.selectorName(i) - if (selName ne caseParams(i).name) && !selectorNamesInBody.contains(selName) - yield syntheticProperty(selName, caseParams(i).tpt, - Select(This(EmptyTypeIdent), caseParams(i).name)) - - def enumCaseMeths = - if isEnumCase then - val (ordinal, scaffolding) = nextOrdinal(className, CaseKind.Class, definesEnumLookupMethods(cdef)) - (ordinalMethLit(ordinal) :: Nil, scaffolding) - else (Nil, Nil) - def copyMeths = { - val hasRepeatedParam = constrVparamss.nestedExists { - case ValDef(_, tpt, _) => isRepeated(tpt) - } - if (mods.is(Abstract) || hasRepeatedParam) Nil // cannot have default arguments for repeated parameters, hence copy method is not issued - else { - val copyFirstParams = derivedVparamss.head.map(vparam => - cpy.ValDef(vparam)(rhs = refOfDef(vparam))) - val copyRestParamss = derivedVparamss.tail.nestedMap(vparam => - cpy.ValDef(vparam)(rhs = EmptyTree)) - DefDef( - nme.copy, - joinParams(derivedTparams, copyFirstParams :: copyRestParamss), - TypeTree(), - creatorExpr - ).withMods(Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags, constr1.mods.privateWithin)) :: Nil - } - } - - if isCaseClass then - val (enumMeths, enumScaffolding) = enumCaseMeths - (copyMeths ::: enumMeths ::: productElemMeths, enumScaffolding) - else (Nil, Nil) - } - - var parents1: List[untpd.Tree] = parents // !cc! 
need explicit type to make capture checking pass - if (isEnumCase && parents.isEmpty) - parents1 = enumClassTypeRef :: Nil - if (isNonEnumCase) - parents1 = parents1 :+ scalaDot(str.Product.toTypeName) :+ scalaDot(nme.Serializable.toTypeName) - if (isEnum) - parents1 = parents1 :+ ref(defn.EnumClass) - - // derived type classes of non-module classes go to their companions - val (clsDerived, companionDerived) = - if (mods.is(Module)) (impl.derived, Nil) else (Nil, impl.derived) - - // The thicket which is the desugared version of the companion object - // synthetic object C extends parentTpt derives class-derived { defs } - def companionDefs(parentTpt: Tree, defs: List[Tree]) = { - val mdefs = moduleDef( - ModuleDef( - className.toTermName, Template(emptyConstructor, parentTpt :: Nil, companionDerived, EmptyValDef, defs)) - .withMods(companionMods | Synthetic)) - .withSpan(cdef.span).toList - if (companionDerived.nonEmpty) - for (case modClsDef @ TypeDef(_, _) <- mdefs) - modClsDef.putAttachment(DerivingCompanion, impl.srcPos.startPos) - mdefs - } - - val companionMembers = defaultGetters ::: enumCases - - // The companion object definitions, if a companion is needed, Nil otherwise. - // companion definitions include: - // 1. If class is a case class case class C[Ts](p1: T1, ..., pN: TN)(moreParams): - // def apply[Ts](p1: T1, ..., pN: TN)(moreParams) = new C[Ts](p1, ..., pN)(moreParams) (unless C is abstract) - // def unapply[Ts]($1: C[Ts]) = $1 // if not repeated - // def unapplySeq[Ts]($1: C[Ts]) = $1 // if repeated - // 2. The default getters of the constructor - // The parent of the companion object of a non-parameterized case class - // (T11, ..., T1N) => ... => (TM1, ..., TMN) => C - // For all other classes, the parent is AnyRef. - val companions = - if (isCaseClass) { - val applyMeths = - if (mods.is(Abstract)) Nil - else { - val appMods = - Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags).withPrivateWithin(constr1.mods.privateWithin) - val appParamss = - derivedVparamss.nestedZipWithConserve(constrVparamss)((ap, cp) => - ap.withMods(ap.mods | (cp.mods.flags & HasDefault))) - DefDef(nme.apply, joinParams(derivedTparams, appParamss), TypeTree(), creatorExpr) - .withMods(appMods) :: Nil - } - val unapplyMeth = { - val hasRepeatedParam = constrVparamss.head.exists { - case ValDef(_, tpt, _) => isRepeated(tpt) - } - val methName = if (hasRepeatedParam) nme.unapplySeq else nme.unapply - val unapplyParam = makeSyntheticParameter(tpt = classTypeRef) - val unapplyRHS = if (arity == 0) Literal(Constant(true)) else Ident(unapplyParam.name) - val unapplyResTp = if (arity == 0) Literal(Constant(true)) else TypeTree() - DefDef( - methName, - joinParams(derivedTparams, (unapplyParam :: Nil) :: Nil), - unapplyResTp, - unapplyRHS - ).withMods(synthetic) - } - val toStringMeth = - DefDef(nme.toString_, Nil, TypeTree(), Literal(Constant(className.toString))).withMods(Modifiers(Override | Synthetic)) - - companionDefs(anyRef, applyMeths ::: unapplyMeth :: toStringMeth :: companionMembers) - } - else if (companionMembers.nonEmpty || companionDerived.nonEmpty || isEnum) - companionDefs(anyRef, companionMembers) - else if (isValueClass) - companionDefs(anyRef, Nil) - else Nil - - enumCompanionRef match { - case ref: TermRefTree => // have the enum import watch the companion object - val (modVal: ValDef) :: _ = companions: @unchecked - ref.watching(modVal) - case _ => - } - - // For an implicit class C[Ts](p11: T11, ..., p1N: T1N) ... 
(pM1: TM1, .., pMN: TMN), the method - // synthetic implicit C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, ..., pMN: TMN): C[Ts] = - // new C[Ts](p11, ..., p1N) ... (pM1, ..., pMN) = - val implicitWrappers = - if (!mods.isOneOf(GivenOrImplicit)) - Nil - else if (ctx.owner.is(Package)) { - report.error(TopLevelImplicitClass(cdef), cdef.srcPos) - Nil - } - else if (mods.is(Trait)) { - report.error(TypesAndTraitsCantBeImplicit(), cdef.srcPos) - Nil - } - else if (isCaseClass) { - report.error(ImplicitCaseClass(cdef), cdef.srcPos) - Nil - } - else if (arity != 1 && !mods.is(Given)) { - report.error(ImplicitClassPrimaryConstructorArity(), cdef.srcPos) - Nil - } - else { - val defParamss = constrVparamss match { - case Nil :: paramss => - paramss // drop leading () that got inserted by class - // TODO: drop this once we do not silently insert empty class parameters anymore - case paramss => paramss - } - // implicit wrapper is typechecked in same scope as constructor, so - // we can reuse the constructor parameters; no derived params are needed. - DefDef( - className.toTermName, joinParams(constrTparams, defParamss), - classTypeRef, creatorExpr) - .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | Final) - .withSpan(cdef.span) :: Nil - } - - val self1 = { - val selfType = if (self.tpt.isEmpty) classTypeRef else self.tpt - if (self.isEmpty) self - else cpy.ValDef(self)(tpt = selfType).withMods(self.mods | SelfName) - } - - val cdef1 = addEnumFlags { - val tparamAccessors = { - val impliedTparamsIt = impliedTparams.iterator - derivedTparams.map(_.withMods(impliedTparamsIt.next().mods)) - } - val caseAccessor = if (isCaseClass) CaseAccessor else EmptyFlags - val vparamAccessors = { - val originalVparamsIt = originalVparamss.iterator.flatten - derivedVparamss match { - case first :: rest => - first.map(_.withMods(originalVparamsIt.next().mods | caseAccessor)) ++ - rest.flatten.map(_.withMods(originalVparamsIt.next().mods)) - case _ => - Nil - } - } - if mods.isAllOf(Given | Inline | Transparent) then - report.error("inline given instances cannot be trasparent", cdef) - val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods - cpy.TypeDef(cdef: TypeDef)( - name = className, - rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, - tparamAccessors ::: vparamAccessors ::: normalizedBody ::: caseClassMeths) - ).withMods(classMods) - } - - // install the watch on classTycon - classTycon match { - case tycon: DerivedTypeTree => tycon.watching(cdef1) - case _ => - } - - flatTree(cdef1 :: companions ::: implicitWrappers ::: enumScaffolding) - }.showing(i"desugared: $cdef --> $result", Printers.desugar) - - /** Expand - * - * package object name { body } - * - * to: - * - * package name { - * object `package` { body } - * } - */ - def packageModuleDef(mdef: ModuleDef)(using Context): Tree = - val impl = mdef.impl - val mods = mdef.mods - val moduleName = normalizeName(mdef, impl).asTermName - if mods.is(Package) then - checkPackageName(mdef) - PackageDef(Ident(moduleName), - cpy.ModuleDef(mdef)(nme.PACKAGE, impl).withMods(mods &~ Package) :: Nil) - else - mdef - - /** Expand - * - * object name extends parents { self => body } - * - * to: - * - * val name: name$ = New(name$) - * final class name$ extends parents { self: name.type => body } - */ - def moduleDef(mdef: ModuleDef)(using Context): Tree = { - val impl = mdef.impl - val mods = mdef.mods - val moduleName = normalizeName(mdef, impl).asTermName - def isEnumCase = mods.isEnumCase 
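The implicit-wrapper expansion sketched a little earlier (`synthetic implicit C[Ts](...): C[Ts] = new C[Ts](...)`) corresponds to the familiar implicit-class encoding. A minimal sketch, assuming an invented `Squared` wrapper; the synthetic conversion shares the class name, following the scheme quoted above.

```scala
import scala.language.implicitConversions

object Before:
  implicit class Squared(private val x: Int):
    def squared: Int = x * x

object After: // roughly the class plus synthetic conversion produced by the desugaring
  class Squared(private val x: Int):
    def squared: Int = x * x
  implicit def Squared(x: Int): Squared = new Squared(x)
```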
- Checking.checkWellFormedModule(mdef) - - if (mods.is(Package)) - packageModuleDef(mdef) - else if (isEnumCase) { - typeParamIsReferenced(enumClass.typeParams, Nil, Nil, impl.parents) - // used to check there are no illegal references to enum's type parameters in parents - expandEnumModule(moduleName, impl, mods, definesEnumLookupMethods(mdef), mdef.span) - } - else { - val clsName = moduleName.moduleClassName - val clsRef = Ident(clsName) - val modul = ValDef(moduleName, clsRef, New(clsRef, Nil)) - .withMods(mods.toTermFlags & RetainedModuleValFlags | ModuleValCreationFlags) - .withSpan(mdef.span.startPos) - val ValDef(selfName, selfTpt, _) = impl.self - val selfMods = impl.self.mods - if (!selfTpt.isEmpty) report.error(ObjectMayNotHaveSelfType(mdef), impl.self.srcPos) - val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(moduleName)), impl.self.rhs) - .withMods(selfMods) - .withSpan(impl.self.span.orElse(impl.span.startPos)) - val clsTmpl = cpy.Template(impl)(self = clsSelf, body = impl.body) - val cls = TypeDef(clsName, clsTmpl) - .withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags) - .withEndMarker(copyFrom = mdef) // copy over the end marker position to the module class def - Thicket(modul, classDef(cls).withSpan(mdef.span)) - } - } - - def extMethod(mdef: DefDef, extParamss: List[ParamClause])(using Context): DefDef = - cpy.DefDef(mdef)( - name = normalizeName(mdef, mdef.tpt).asTermName, - paramss = - if mdef.name.isRightAssocOperatorName then - val (typaramss, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters - - paramss match - case params :: paramss1 => // `params` must have a single parameter and without `given` flag - - def badRightAssoc(problem: String) = - report.error(em"right-associative extension method $problem", mdef.srcPos) - extParamss ++ mdef.paramss - - params match - case ValDefs(vparam :: Nil) => - if !vparam.mods.is(Given) then - // we merge the extension parameters with the method parameters, - // swapping the operator arguments: - // e.g. - // extension [A](using B)(c: C)(using D) - // def %:[E](f: F)(g: G)(using H): Res = ??? - // will be encoded as - // def %:[A](using B)[E](f: F)(c: C)(using D)(g: G)(using H): Res = ??? - val (leadingUsing, otherExtParamss) = extParamss.span(isUsingOrTypeParamClause) - leadingUsing ::: typaramss ::: params :: otherExtParamss ::: paramss1 - else - badRightAssoc("cannot start with using clause") - case _ => - badRightAssoc("must start with a single parameter") - case _ => - // no value parameters, so not an infix operator. 
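// Illustrative sketch (hypothetical user code, not from this file): the parameter
// reordering above for a right-associative extension method. Given
//
//   extension [A](xs: List[A])
//     def +:: (x: A): List[A] = x :: xs
//
// the name ends in `:`, so the operator argument and the extension receiver are swapped
// in the encoding, yielding roughly
//
//   def +:: [A](x: A)(xs: List[A]): List[A] = x :: xs
//
// which lets `1 +:: List(2, 3)` elaborate to `+::(1)(List(2, 3))`.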
- extParamss ++ mdef.paramss - else - extParamss ++ mdef.paramss - ).withMods(mdef.mods | ExtensionMethod) - - /** Transform extension construct to list of extension methods */ - def extMethods(ext: ExtMethods)(using Context): Tree = flatTree { - ext.methods map { - case exp: Export => exp - case mdef: DefDef => defDef(extMethod(mdef, ext.paramss)) - } - } - /** Transforms - * - * type t >: Low <: Hi - * to - * - * @patternType type $T >: Low <: Hi - * - * if the type has a pattern variable name - */ - def quotedPatternTypeDef(tree: TypeDef)(using Context): TypeDef = { - assert(ctx.mode.is(Mode.QuotedPattern)) - if tree.name.isVarPattern && !tree.isBackquoted then - val patternTypeAnnot = New(ref(defn.QuotedRuntimePatterns_patternTypeAnnot.typeRef)).withSpan(tree.span) - val mods = tree.mods.withAddedAnnotation(patternTypeAnnot) - tree.withMods(mods) - else if tree.name.startsWith("$") && !tree.isBackquoted then - report.error( - """Quoted pattern variable names starting with $ are not supported anymore. - |Use lower cases type pattern name instead. - |""".stripMargin, - tree.srcPos) - tree - else tree - } - - def checkPackageName(mdef: ModuleDef | PackageDef)(using Context): Unit = - - def check(name: Name, errSpan: Span): Unit = name match - case name: SimpleName if !errSpan.isSynthetic && name.exists(Chars.willBeEncoded) => - report.warning(em"The package name `$name` will be encoded on the classpath, and can lead to undefined behaviour.", mdef.source.atSpan(errSpan)) - case _ => - - def loop(part: RefTree): Unit = part match - case part @ Ident(name) => check(name, part.span) - case part @ Select(qual: RefTree, name) => - check(name, part.nameSpan) - loop(qual) - case _ => - - mdef match - case pdef: PackageDef => loop(pdef.pid) - case mdef: ModuleDef if mdef.mods.is(Package) => check(mdef.name, mdef.nameSpan) - case _ => - end checkPackageName - - /** The normalized name of `mdef`. This means - * 1. Check that the name does not redefine a Scala core class. - * If it does redefine, issue an error and return a mangled name instead - * of the original one. - * 2. If the name is missing (this can be the case for instance definitions), - * invent one instead. - */ - def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { - var name = mdef.name - if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) - def errPos = mdef.source.atSpan(mdef.nameSpan) - if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { - val kind = if (name.isTypeName) "class" else "object" - report.error(IllegalRedefinitionOfStandardKind(kind, name), errPos) - name = name.errorName - } - name - } - - /** Invent a name for an anonympus given of type or template `impl`. 
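// Illustrative examples (hypothetical user code, not from this file) of the invented names
// for anonymous givens. The alias given
//
//   given Ordering[Int] = Ordering.Int
//
// receives the invented name `given_Ordering_Int`, and
//
//   given Conversion[String, Int] = _.length
//
// becomes `given_Conversion_String_Int`, following the `given_` + type-name scheme
// implemented by the name extractor below.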
*/ - def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = - val str = impl match - case impl: Template => - if impl.parents.isEmpty then - report.error(AnonymousInstanceCannotBeEmpty(impl), impl.srcPos) - nme.ERROR.toString - else - impl.parents.map(inventTypeName(_)).mkString("given_", "_", "") - case impl: Tree => - "given_" ++ inventTypeName(impl) - str.toTermName.asSimpleName - - private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { - private def extractArgs(args: List[Tree])(using Context): String = - args.map(argNameExtractor.apply("", _)).mkString("_") - override def apply(x: String, tree: Tree)(using Context): String = - if (x.isEmpty) - tree match { - case Select(pre, nme.CONSTRUCTOR) => foldOver(x, pre) - case tree: RefTree => - if tree.name.isTypeName then tree.name.toString - else s"${tree.name}_type" - case tree: TypeDef => tree.name.toString - case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => - s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" - case InfixOp(left, op, right) => - if followArgs then s"${op.name}_${extractArgs(List(left, right))}" - else op.name.toString - case tree: LambdaTypeTree => - apply(x, tree.body) - case tree: Tuple => - extractArgs(tree.trees) - case tree: Function if tree.args.nonEmpty => - if followArgs then s"${extractArgs(tree.args)}_to_${apply("", tree.body)}" - else "Function" - case _ => foldOver(x, tree) - } - else x - } - private val typeNameExtractor = NameExtractor(followArgs = true) - private val argNameExtractor = NameExtractor(followArgs = false) - - private def inventTypeName(tree: Tree)(using Context): String = typeNameExtractor("", tree) - - /**This will check if this def tree is marked to define enum lookup methods, - * this is not recommended to call more than once per tree - */ - private def definesEnumLookupMethods(ddef: DefTree): Boolean = - ddef.removeAttachment(DefinesEnumLookupMethods).isDefined - - /** val p1, ..., pN: T = E - * ==> - * makePatDef[[val p1: T1 = E]]; ...; makePatDef[[val pN: TN = E]] - * - * case e1, ..., eN - * ==> - * expandSimpleEnumCase([case e1]); ...; expandSimpleEnumCase([case eN]) - */ - def patDef(pdef: PatDef)(using Context): Tree = flatTree { - val PatDef(mods, pats, tpt, rhs) = pdef - if mods.isEnumCase then - def expand(id: Ident, definesLookups: Boolean) = - expandSimpleEnumCase(id.name.asTermName, mods, definesLookups, - Span(id.span.start, id.span.end, id.span.start)) - - val ids = pats.asInstanceOf[List[Ident]] - if definesEnumLookupMethods(pdef) then - ids.init.map(expand(_, false)) ::: expand(ids.last, true) :: Nil - else - ids.map(expand(_, false)) - else { - val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) - pats1 map (makePatDef(pdef, mods, _, rhs)) - } - } - - /** The selector of a match, which depends of the given `checkMode`. 
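// Illustrative sketch (hypothetical user code, not from this file): the multi-pattern
// expansion performed by `patDef` above. The definition
//
//   val a, b: Int = 0
//
// expands into two independent definitions
//
//   val a: Int = 0
//   val b: Int = 0
//
// and, analogously, an enum statement `case North, South` is expanded case by case via
// `expandSimpleEnumCase`.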
- * @param sel the original selector - * @return if `checkMode` is - * - None : sel @unchecked - * - Exhaustive : sel - * - IrrefutablePatDef, - * IrrefutableGenFrom: sel with attachment `CheckIrrefutable -> checkMode` - */ - def makeSelector(sel: Tree, checkMode: MatchCheck)(using Context): Tree = - checkMode match - case MatchCheck.None => - Annotated(sel, New(ref(defn.UncheckedAnnot.typeRef))) - - case MatchCheck.Exhaustive => - sel - - case MatchCheck.IrrefutablePatDef | MatchCheck.IrrefutableGenFrom => - // TODO: use `pushAttachment` and investigate duplicate attachment - sel.withAttachment(CheckIrrefutable, checkMode) - sel - end match - - /** If `pat` is a variable pattern, - * - * val/var/lazy val p = e - * - * Otherwise, in case there is exactly one variable x_1 in pattern - * val/var/lazy val p = e ==> val/var/lazy val x_1 = (e: @unchecked) match (case p => (x_1)) - * - * in case there are zero or more than one variables in pattern - * val/var/lazy p = e ==> private[this] synthetic [lazy] val t$ = (e: @unchecked) match (case p => (x_1, ..., x_N)) - * val/var/def x_1 = t$._1 - * ... - * val/var/def x_N = t$._N - * If the original pattern variable carries a type annotation, so does the corresponding - * ValDef or DefDef. - */ - def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(using Context): Tree = pat match { - case IdPattern(id, tpt) => - val id1 = - if id.name == nme.WILDCARD - then cpy.Ident(id)(WildcardParamName.fresh()) - else id - derivedValDef(original, id1, tpt, rhs, mods) - case _ => - - def filterWildcardGivenBinding(givenPat: Bind): Boolean = - givenPat.name != nme.WILDCARD - - def errorOnGivenBinding(bind: Bind)(using Context): Boolean = - report.error( - em"""${hl("given")} patterns are not allowed in a ${hl("val")} definition, - |please bind to an identifier and use an alias given.""", bind) - false - - def isTuplePattern(arity: Int): Boolean = pat match { - case Tuple(pats) if pats.size == arity => - pats.forall(isVarPattern) - case _ => false - } - val isMatchingTuple: Tree => Boolean = { - case Tuple(es) => isTuplePattern(es.length) - case _ => false - } - - // We can only optimize `val pat = if (...) 
e1 else e2` if: - // - `e1` and `e2` are both tuples of arity N - // - `pat` is a tuple of N variables or wildcard patterns like `(x1, x2, ..., xN)` - val tupleOptimizable = forallResults(rhs, isMatchingTuple) - - val inAliasGenerator = original match - case _: GenAlias => true - case _ => false - - val vars = - if (tupleOptimizable) // include `_` - pat match - case Tuple(pats) => pats.map { case id: Ident => id -> TypeTree() } - else - getVariables( - tree = pat, - shouldAddGiven = - if inAliasGenerator then - filterWildcardGivenBinding - else - errorOnGivenBinding - ) // no `_` - - val ids = for ((named, _) <- vars) yield Ident(named.name) - val matchExpr = - if (tupleOptimizable) rhs - else - val caseDef = CaseDef(pat, EmptyTree, makeTuple(ids)) - Match(makeSelector(rhs, MatchCheck.IrrefutablePatDef), caseDef :: Nil) - vars match { - case Nil if !mods.is(Lazy) => - matchExpr - case (named, tpt) :: Nil => - derivedValDef(original, named, tpt, matchExpr, mods) - case _ => - val tmpName = UniqueName.fresh() - val patMods = - mods & Lazy | Synthetic | (if (ctx.owner.isClass) PrivateLocal else EmptyFlags) - val firstDef = - ValDef(tmpName, TypeTree(), matchExpr) - .withSpan(pat.span.union(rhs.span)).withMods(patMods) - val useSelectors = vars.length <= 22 - def selector(n: Int) = - if useSelectors then Select(Ident(tmpName), nme.selectorName(n)) - else Apply(Select(Ident(tmpName), nme.apply), Literal(Constant(n)) :: Nil) - val restDefs = - for (((named, tpt), n) <- vars.zipWithIndex if named.name != nme.WILDCARD) - yield - if mods.is(Lazy) then - DefDef(named.name.asTermName, Nil, tpt, selector(n)) - .withMods(mods &~ Lazy) - .withSpan(named.span) - else - valDef( - ValDef(named.name.asTermName, tpt, selector(n)) - .withMods(mods) - .withSpan(named.span) - ) - flatTree(firstDef :: restDefs) - } - } - - /** Expand variable identifier x to x @ _ */ - def patternVar(tree: Tree)(using Context): Bind = { - val Ident(name) = unsplice(tree): @unchecked - Bind(name, Ident(nme.WILDCARD)).withSpan(tree.span) - } - - /** The type of tests that check whether a MemberDef is OK for some flag. - * The test succeeds if the partial function is defined and returns true. - */ - type MemberDefTest = PartialFunction[MemberDef, Boolean] - - val legalOpaque: MemberDefTest = { - case TypeDef(_, rhs) => - def rhsOK(tree: Tree): Boolean = tree match { - case bounds: TypeBoundsTree => !bounds.alias.isEmpty - case _: Template | _: MatchTypeTree => false - case LambdaTypeTree(_, body) => rhsOK(body) - case _ => true - } - rhsOK(rhs) - } - - def checkOpaqueAlias(tree: MemberDef)(using Context): MemberDef = - def check(rhs: Tree): MemberDef = rhs match - case bounds: TypeBoundsTree if bounds.alias.isEmpty => - report.error(em"opaque type must have a right-hand side", tree.srcPos) - tree.withMods(tree.mods.withoutFlags(Opaque)) - case LambdaTypeTree(_, body) => check(body) - case _ => tree - if !tree.mods.is(Opaque) then tree - else tree match - case TypeDef(_, rhs) => check(rhs) - case _ => tree - - /** Check that modifiers are legal for the definition `tree`. - * Right now, we only check for `opaque`. TODO: Move other modifier checks here. 
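// Illustrative sketch (hypothetical user code, not from this file): the pattern-definition
// expansion implemented by `makePatDef` above. A non-optimizable right-hand side such as
//
//   val (x, y) = compute()
//
// expands roughly to
//
//   val $tmp = compute() match { case (x, y) => (x, y) }  // selector carries the irrefutability check
//   val x = $tmp._1
//   val y = $tmp._2
//
// (with `$tmp` standing in for a fresh synthetic name), whereas `val (x, y) = (1, 2)` hits
// the tuple optimization and is rewritten without the intermediate match.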
- */ - def checkModifiers(tree: Tree)(using Context): Tree = tree match { - case tree: MemberDef => - var tested: MemberDef = tree - def checkApplicable(flag: Flag, test: MemberDefTest): MemberDef = - if (tested.mods.is(flag) && !test.applyOrElse(tree, (md: MemberDef) => false)) { - report.error(ModifierNotAllowedForDefinition(flag), tree.srcPos) - tested.withMods(tested.mods.withoutFlags(flag)) - } else tested - tested = checkOpaqueAlias(tested) - tested = checkApplicable(Opaque, legalOpaque) - tested - case _ => - tree - } - - def defTree(tree: Tree)(using Context): Tree = - checkModifiers(tree) match { - case tree: ValDef => valDef(tree) - case tree: TypeDef => - if (tree.isClassDef) classDef(tree) - else if (ctx.mode.is(Mode.QuotedPattern)) quotedPatternTypeDef(tree) - else tree - case tree: DefDef => - if (tree.name.isConstructorName) tree // was already handled by enclosing classDef - else defDef(tree) - case tree: ModuleDef => moduleDef(tree) - case tree: PatDef => patDef(tree) - } - - /** { stats; } - * ==> - * { stats; () } - */ - def block(tree: Block)(using Context): Block = tree.expr match { - case EmptyTree => - cpy.Block(tree)(tree.stats, - unitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) - case _ => - tree - } - - /** Translate infix operation expression - * - * l op r ==> l.op(r) if op is left-associative - * ==> r.op(l) if op is right-associative - */ - def binop(left: Tree, op: Ident, right: Tree)(using Context): Apply = { - def assignToNamedArg(arg: Tree) = arg match { - case Assign(Ident(name), rhs) => cpy.NamedArg(arg)(name, rhs) - case _ => arg - } - def makeOp(fn: Tree, arg: Tree, selectPos: Span) = - val sel = Select(fn, op.name).withSpan(selectPos) - if (left.sourcePos.endLine < op.sourcePos.startLine) - sel.pushAttachment(MultiLineInfix, ()) - arg match - case Parens(arg) => - Apply(sel, assignToNamedArg(arg) :: Nil) - case Tuple(args) if args.exists(_.isInstanceOf[Assign]) => - Apply(sel, args.mapConserve(assignToNamedArg)) - case Tuple(args) => - Apply(sel, arg :: Nil).setApplyKind(ApplyKind.InfixTuple) - case _ => - Apply(sel, arg :: Nil) - - if op.name.isRightAssocOperatorName then - makeOp(right, left, Span(op.span.start, right.span.end)) - else - makeOp(left, right, Span(left.span.start, op.span.end, op.span.start)) - } - - /** Translate throws type `A throws E1 | ... | En` to - * $throws[... $throws[A, E1] ... , En]. 
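// Illustrative sketch (hypothetical user code, not from this file): the infix rewrites
// performed by `binop` above.
//
//   xs ++ ys    // left-associative  ==> xs.++(ys)
//   x :: xs     // right-associative ==> xs.::(x)
//
// Named arguments in a parenthesized right operand are preserved, e.g. a hypothetical
// `point moveBy (dx = 1, dy = 2)` becomes `point.moveBy(dx = 1, dy = 2)`.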
- */ - def throws(tpt: Tree, op: Ident, excepts: Tree)(using Context): AppliedTypeTree = excepts match - case Parens(excepts1) => - throws(tpt, op, excepts1) - case InfixOp(l, bar @ Ident(tpnme.raw.BAR), r) => - throws(throws(tpt, op, l), bar, r) - case e => - AppliedTypeTree( - TypeTree(defn.throwsAlias.typeRef).withSpan(op.span), tpt :: excepts :: Nil) - - /** Translate tuple expressions of arity <= 22 - * - * () ==> () - * (t) ==> t - * (t1, ..., tN) ==> TupleN(t1, ..., tN) - */ - def smallTuple(tree: Tuple)(using Context): Tree = { - val ts = tree.trees - val arity = ts.length - assert(arity <= Definitions.MaxTupleArity) - def tupleTypeRef = defn.TupleType(arity).nn - if (arity == 0) - if (ctx.mode is Mode.Type) TypeTree(defn.UnitType) else unitLiteral - else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts) - else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts) - } - - private def isTopLevelDef(stat: Tree)(using Context): Boolean = stat match - case _: ValDef | _: PatDef | _: DefDef | _: Export | _: ExtMethods => true - case stat: ModuleDef => - stat.mods.isOneOf(GivenOrImplicit) - case stat: TypeDef => - !stat.isClassDef || stat.mods.isOneOf(GivenOrImplicit) - case _ => - false - - /** Assuming `src` contains top-level definition, returns the name that should - * be using for the package object that will wrap them. - */ - def packageObjectName(src: SourceFile): TermName = - val fileName = src.file.name - val sourceName = fileName.take(fileName.lastIndexOf('.')) - (sourceName ++ str.TOPLEVEL_SUFFIX).toTermName - - /** Group all definitions that can't be at the toplevel in - * an object named `$package` where `` is the name of the source file. - * Definitions that can't be at the toplevel are: - * - * - all pattern, value and method definitions - * - non-class type definitions - * - implicit classes and objects - * - "companion objects" of wrapped type definitions - * (i.e. objects having the same name as a wrapped type) - */ - def packageDef(pdef: PackageDef)(using Context): PackageDef = { - checkPackageName(pdef) - val wrappedTypeNames = pdef.stats.collectCC { - case stat: TypeDef if isTopLevelDef(stat) => stat.name - } - def inPackageObject(stat: Tree) = - isTopLevelDef(stat) || { - stat match - case stat: ModuleDef => - wrappedTypeNames.contains(stat.name.stripModuleClassSuffix.toTypeName) - case _ => - false - } - val (nestedStats, topStats) = pdef.stats.partition(inPackageObject) - if (nestedStats.isEmpty) pdef - else { - val name = packageObjectName(ctx.source) - val grouped = - ModuleDef(name, Template(emptyConstructor, Nil, Nil, EmptyValDef, nestedStats)) - .withMods(Modifiers(Synthetic)) - cpy.PackageDef(pdef)(pdef.pid, topStats :+ grouped) - } - } - - /** Make closure corresponding to function. - * params => body - * ==> - * def $anonfun(params) = body - * Closure($anonfun) - */ - def makeClosure(params: List[ValDef], body: Tree, tpt: Tree | Null = null, isContextual: Boolean, span: Span)(using Context): Block = - Block( - DefDef(nme.ANON_FUN, params :: Nil, if (tpt == null) TypeTree() else tpt, body) - .withSpan(span) - .withMods(synthetic | Artifact), - Closure(Nil, Ident(nme.ANON_FUN), if (isContextual) ContextualEmptyTree else EmptyTree)) - - /** If `nparams` == 1, expand partial function - * - * { cases } - * ==> - * x$1 => (x$1 @unchecked?) match { cases } - * - * If `nparams` != 1, expand instead to - * - * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked?) 
match { cases } - */ - def makeCaseLambda(cases: List[CaseDef], checkMode: MatchCheck, nparams: Int = 1)(using Context): Function = { - val params = (1 to nparams).toList.map(makeSyntheticParameter(_)) - val selector = makeTuple(params.map(p => Ident(p.name))) - Function(params, Match(makeSelector(selector, checkMode), cases)) - } - - /** Map n-ary function `(x1: T1, ..., xn: Tn) => body` where n != 1 to unary function as follows: - * - * (x$1: (T1, ..., Tn)) => { - * def x1: T1 = x$1._1 - * ... - * def xn: Tn = x$1._n - * body - * } - * - * or if `isGenericTuple` - * - * (x$1: (T1, ... Tn) => { - * def x1: T1 = x$1.apply(0) - * ... - * def xn: Tn = x$1.apply(n-1) - * body - * } - * - * If some of the Ti's are absent, omit the : (T1, ..., Tn) type ascription - * in the selector. - */ - def makeTupledFunction(params: List[ValDef], body: Tree, isGenericTuple: Boolean)(using Context): Tree = { - val param = makeSyntheticParameter( - tpt = - if params.exists(_.tpt.isEmpty) then TypeTree() - else Tuple(params.map(_.tpt))) - def selector(n: Int) = - if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), Literal(Constant(n))) - else Select(refOfDef(param), nme.selectorName(n)) - val vdefs = - params.zipWithIndex.map { - case (param, idx) => - ValDef(param.name, param.tpt, selector(idx)) - .withSpan(param.span) - .withAttachment(UntupledParam, ()) - .withFlags(Synthetic) - } - Function(param :: Nil, Block(vdefs, body)) - } - - /** Convert a tuple pattern with given `elems` to a sequence of `ValDefs`, - * skipping elements that are not convertible. - */ - def patternsToParams(elems: List[Tree])(using Context): List[ValDef] = - def toParam(elem: Tree, tpt: Tree): Tree = - elem match - case Annotated(elem1, _) => toParam(elem1, tpt) - case Typed(elem1, tpt1) => toParam(elem1, tpt1) - case Ident(id: TermName) => ValDef(id, tpt, EmptyTree).withFlags(Param) - case _ => EmptyTree - elems.map(param => toParam(param, TypeTree()).withSpan(param.span)).collect { - case vd: ValDef => vd - } - - def makeContextualFunction(formals: List[Tree], body: Tree, isErased: Boolean)(using Context): Function = { - val mods = if (isErased) Given | Erased else Given - val params = makeImplicitParameters(formals, mods) - FunctionWithMods(params, body, Modifiers(mods)) - } - - private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { - val vdef = ValDef(named.name.asTermName, tpt, rhs) - .withMods(mods) - .withSpan(original.span.withPoint(named.span.start)) - val mayNeedSetter = valDef(vdef) - mayNeedSetter - } - - private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit src: SourceFile) = - DefDef(named.name.asTermName, Nil, tpt, rhs) - .withMods(mods) - .withSpan(original.span.withPoint(named.span.start)) - - /** Main desugaring method */ - def apply(tree: Tree, pt: Type = NoType)(using Context): Tree = { - - /** Create tree for for-comprehension `` or - * `` where mapName and flatMapName are chosen - * corresponding to whether this is a for-do or a for-yield. - * The creation performs the following rewrite rules: - * - * 1. - * - * for (P <- G) E ==> G.foreach (P => E) - * - * Here and in the following (P => E) is interpreted as the function (P => E) - * if P is a variable pattern and as the partial function { case P => E } otherwise. - * - * 2. - * - * for (P <- G) yield E ==> G.map (P => E) - * - * 3. - * - * for (P_1 <- G_1; P_2 <- G_2; ...) ... - * ==> - * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) 
- * - * 4. - * - * for (P <- G; E; ...) ... - * => - * for (P <- G.filter (P => E); ...) ... - * - * 5. For any N: - * - * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) - * ==> - * for (TupleN(P_1, P_2, ... P_N) <- - * for (x_1 @ P_1 <- G) yield { - * val x_2 @ P_2 = E_2 - * ... - * val x_N & P_N = E_N - * TupleN(x_1, ..., x_N) - * } ...) - * - * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated - * and the variable constituting P_i is used instead of x_i - * - * @param mapName The name to be used for maps (either map or foreach) - * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) - * @param enums The enumerators in the for expression - * @param body The body of the for expression - */ - def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Tree], body: Tree): Tree = trace(i"make for ${ForYield(enums, body)}", show = true) { - - /** Let `pat` be `gen`'s pattern. Make a function value `pat => body`. - * If `pat` is a var pattern `id: T` then this gives `(id: T) => body`. - * Otherwise this gives `{ case pat => body }`, where `pat` is checked to be - * irrefutable if `gen`'s checkMode is GenCheckMode.Check. - */ - def makeLambda(gen: GenFrom, body: Tree): Tree = gen.pat match { - case IdPattern(named, tpt) if gen.checkMode != GenCheckMode.FilterAlways => - Function(derivedValDef(gen.pat, named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body) - case _ => - val matchCheckMode = - if (gen.checkMode == GenCheckMode.Check || gen.checkMode == GenCheckMode.CheckAndFilter) MatchCheck.IrrefutableGenFrom - else MatchCheck.None - makeCaseLambda(CaseDef(gen.pat, EmptyTree, body) :: Nil, matchCheckMode) - } - - /** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap - * it in a Bind with a fresh name. Return the transformed pattern, and the identifier - * that refers to the bound variable for the pattern. Wildcard Binds are - * also replaced by Binds with fresh names. - */ - def makeIdPat(pat: Tree): (Tree, Ident) = pat match { - case bind @ Bind(name, pat1) => - if name == nme.WILDCARD then - val name = UniqueName.fresh() - (cpy.Bind(pat)(name, pat1).withMods(bind.mods), Ident(name)) - else (pat, Ident(name)) - case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => (id, id) - case Typed(id: Ident, _) if isVarPattern(id) && id.name != nme.WILDCARD => (pat, id) - case _ => - val name = UniqueName.fresh() - (Bind(name, pat), Ident(name)) - } - - /** Make a pattern filter: - * rhs.withFilter { case pat => true case _ => false } - * - * On handling irrefutable patterns: - * The idea is to wait until the pattern matcher sees a call - * - * xs withFilter { cases } - * - * where cases can be proven to be refutable i.e. cases would be - * equivalent to { case _ => true } - * - * In that case, compile to - * - * xs withFilter alwaysTrue - * - * where `alwaysTrue` is a predefined function value: - * - * val alwaysTrue: Any => Boolean = true - * - * In the libraries operations can take advantage of alwaysTrue to shortcircuit the - * withFilter call. 
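// Illustrative sketch (hypothetical user code, not from this file): the rewrite rules above
// applied to a small for-expression.
//
//   for x <- List(1, 2); y <- List(10, 20) if x < y yield x + y
//
// becomes roughly
//
//   List(1, 2).flatMap(x => List(10, 20).withFilter(y => x < y).map(y => x + y))
//
// using rule 3 for the nested generator, rule 4 for the guard, and rule 2 for `yield`.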
- * - * def withFilter(f: Elem => Boolean) = - * if (f eq alwaysTrue) this // or rather identity filter monadic applied to this - * else real withFilter - */ - def makePatFilter(rhs: Tree, pat: Tree): Tree = { - val cases = List( - CaseDef(pat, EmptyTree, Literal(Constant(true))), - CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))) - Apply(Select(rhs, nme.withFilter), makeCaseLambda(cases, MatchCheck.None)) - } - - /** Is pattern `pat` irrefutable when matched against `rhs`? - * We only can do a simple syntactic check here; a more refined check - * is done later in the pattern matcher (see discussion in @makePatFilter). - */ - def isIrrefutable(pat: Tree, rhs: Tree): Boolean = { - def matchesTuple(pats: List[Tree], rhs: Tree): Boolean = rhs match { - case Tuple(trees) => (pats corresponds trees)(isIrrefutable) - case Parens(rhs1) => matchesTuple(pats, rhs1) - case Block(_, rhs1) => matchesTuple(pats, rhs1) - case If(_, thenp, elsep) => matchesTuple(pats, thenp) && matchesTuple(pats, elsep) - case Match(_, cases) => cases forall (matchesTuple(pats, _)) - case CaseDef(_, _, rhs1) => matchesTuple(pats, rhs1) - case Throw(_) => true - case _ => false - } - pat match { - case Bind(_, pat1) => isIrrefutable(pat1, rhs) - case Parens(pat1) => isIrrefutable(pat1, rhs) - case Tuple(pats) => matchesTuple(pats, rhs) - case _ => isVarPattern(pat) - } - } - - /** Is `pat` of the form `x`, `x T`, or `given T`? when used as the lhs of a generator, - * these are all considered irrefutable. - */ - def isVarBinding(pat: Tree): Boolean = pat match - case pat @ Bind(_, pat1) if pat.mods.is(Given) => isVarBinding(pat1) - case IdPattern(_) => true - case _ => false - - def needsNoFilter(gen: GenFrom): Boolean = gen.checkMode match - case GenCheckMode.FilterAlways => false // pattern was prefixed by `case` - case GenCheckMode.FilterNow | GenCheckMode.CheckAndFilter => isVarBinding(gen.pat) || isIrrefutable(gen.pat, gen.expr) - case GenCheckMode.Check => true - case GenCheckMode.Ignore => true - - /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when - * matched against `rhs`. 
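// Illustrative sketch (hypothetical user code, not from this file): the effect of the
// checks above on two generators.
//
//   for case Some(x) <- options yield x   // `case` forces FilterAlways: the generator is wrapped as
//                                         // options.withFilter { case Some(x) => true; case _ => false }
//   for x <- xs yield x + 1               // variable pattern: irrefutable, no filter is inserted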
- */ - def rhsSelect(gen: GenFrom, name: TermName) = { - val rhs = if (needsNoFilter(gen)) gen.expr else makePatFilter(gen.expr, gen.pat) - Select(rhs, name) - } - - enums match { - case (gen: GenFrom) :: Nil => - Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) - case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => - val cont = makeFor(mapName, flatMapName, rest, body) - Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) - case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => - val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) - val pats = valeqs map { case GenAlias(pat, _) => pat } - val rhss = valeqs map { case GenAlias(_, rhs) => rhs } - val (defpat0, id0) = makeIdPat(gen.pat) - val (defpats, ids) = (pats map makeIdPat).unzip - val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => - val mods = defpat match - case defTree: DefTree => defTree.mods - case _ => Modifiers() - makePatDef(valeq, mods, defpat, rhs) - } - val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) - val allpats = gen.pat :: pats - val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) - makeFor(mapName, flatMapName, vfrom1 :: rest1, body) - case (gen: GenFrom) :: test :: rest => - val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) - val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) - makeFor(mapName, flatMapName, genFrom :: rest, body) - case _ => - EmptyTree //may happen for erroneous input - } - } - - def makePolyFunction(targs: List[Tree], body: Tree, pt: Type): Tree = body match { - case Parens(body1) => - makePolyFunction(targs, body1, pt) - case Block(Nil, body1) => - makePolyFunction(targs, body1, pt) - case Function(vargs, res) => - assert(targs.nonEmpty) - // TODO: Figure out if we need a `PolyFunctionWithMods` instead. - val mods = body match { - case body: FunctionWithMods => body.mods - case _ => untpd.EmptyModifiers - } - val polyFunctionTpt = ref(defn.PolyFunctionType) - val applyTParams = targs.asInstanceOf[List[TypeDef]] - if (ctx.mode.is(Mode.Type)) { - // Desugar [T_1, ..., T_M] -> (P_1, ..., P_N) => R - // Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } - - val applyVParams = vargs.zipWithIndex.map { - case (p: ValDef, _) => p.withAddedFlags(mods.flags) - case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags) - } - RefinedTypeTree(polyFunctionTpt, List( - DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) - )) - } - else { - // Desugar [T_1, ..., T_M] -> (x_1: P_1, ..., x_N: P_N) => body - // with pt [S_1, ..., S_M] -> (O_1, ..., O_N) => R - // Into new scala.PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N): R2 = body } - // where R2 is R, with all references to S_1..S_M replaced with T1..T_M. 
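// Illustrative sketch (hypothetical user code, not from this file): the two poly function
// desugarings described above.
//
//   type Wrap = [T] => T => List[T]        // type position, becomes roughly
//                                          //   PolyFunction { def apply[T](x$1: T): List[T] }
//   val wrap = [T] => (x: T) => List(x)    // term position, becomes roughly
//                                          //   new PolyFunction { def apply[T](x: T): List[T] = List(x) }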
- - def typeTree(tp: Type) = tp match - case RefinedType(parent, nme.apply, PolyType(_, mt)) if parent.typeSymbol eq defn.PolyFunctionClass => - var bail = false - def mapper(tp: Type, topLevel: Boolean = false): Tree = tp match - case tp: TypeRef => ref(tp) - case tp: TypeParamRef => Ident(applyTParams(tp.paramNum).name) - case AppliedType(tycon, args) => AppliedTypeTree(mapper(tycon), args.map(mapper(_))) - case _ => if topLevel then TypeTree() else { bail = true; genericEmptyTree } - val mapped = mapper(mt.resultType, topLevel = true) - if bail then TypeTree() else mapped - case _ => TypeTree() - - val applyVParams = vargs.asInstanceOf[List[ValDef]] - .map(varg => varg.withAddedFlags(mods.flags | Param)) - New(Template(emptyConstructor, List(polyFunctionTpt), Nil, EmptyValDef, - List(DefDef(nme.apply, applyTParams :: applyVParams :: Nil, typeTree(pt), res)) - )) - } - case _ => - // may happen for erroneous input. An error will already have been reported. - assert(ctx.reporter.errorsReported) - EmptyTree - } - - // begin desugar - - // Special case for `Parens` desugaring: unlike all the desugarings below, - // its output is not a new tree but an existing one whose position should - // be preserved, so we shouldn't call `withPos` on it. - tree match { - case Parens(t) => - return t - case _ => - } - - val desugared = tree match { - case PolyFunction(targs, body) => - makePolyFunction(targs, body, pt) orElse tree - case SymbolLit(str) => - Apply( - ref(defn.ScalaSymbolClass.companionModule.termRef), - Literal(Constant(str)) :: Nil) - case InterpolatedString(id, segments) => - val strs = segments map { - case ts: Thicket => ts.trees.head - case t => t - } - val elems = segments flatMap { - case ts: Thicket => ts.trees.tail - case t => Nil - } map { (t: Tree) => t match - // !cc! explicitly typed parameter (t: Tree) is needed since otherwise - // we get an error similar to #16268. (The explicit type constrains the type of `segments` - // which is otherwise List[{*} tree]) - case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." 
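// Illustrative sketch (hypothetical user code, not from this file): for
//
//   s"x = $x, next = ${x + 1}"
//
// the literal parts and the spliced expressions are separated as above, and the whole
// interpolation desugars roughly to
//
//   StringContext("x = ", ", next = ", "").s(x, x + 1)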
- case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala - case t => t - } - // This is a deliberate departure from scalac, where StringContext is not rooted (See #4732) - Apply(Select(Apply(scalaDot(nme.StringContext), strs), id).withSpan(tree.span), elems) - case PostfixOp(t, op) => - if (ctx.mode is Mode.Type) && !isBackquoted(op) && op.name == tpnme.raw.STAR then - if ctx.isJava then - AppliedTypeTree(ref(defn.RepeatedParamType), t) - else - Annotated( - AppliedTypeTree(ref(defn.SeqType), t), - New(ref(defn.RepeatedAnnot.typeRef), Nil :: Nil)) - else - assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode) - Select(t, op.name) - case PrefixOp(op, t) => - val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme - Select(t, nspace.UNARY_PREFIX ++ op.name) - case ForDo(enums, body) => - makeFor(nme.foreach, nme.foreach, enums, body) orElse tree - case ForYield(enums, body) => - makeFor(nme.map, nme.flatMap, enums, body) orElse tree - case PatDef(mods, pats, tpt, rhs) => - val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) - flatTree(pats1 map (makePatDef(tree, mods, _, rhs))) - case ext: ExtMethods => - Block(List(ext), Literal(Constant(())).withSpan(ext.span)) - case CapturingTypeTree(refs, parent) => - // convert `{refs} T` to `T @retains refs` - // `{refs}-> T` to `-> (T @retainsByName refs)` - def annotate(annotName: TypeName, tp: Tree) = - Annotated(tp, New(scalaAnnotationDot(annotName), List(refs))) - parent match - case ByNameTypeTree(restpt) => - cpy.ByNameTypeTree(parent)(annotate(tpnme.retainsByName, restpt)) - case _ => - annotate(tpnme.retains, parent) - } - desugared.withSpan(tree.span) - } - - /** Turn a fucntion value `handlerFun` into a catch case for a try. - * If `handlerFun` is a partial function, translate to - * - * case ex => - * val ev$1 = handlerFun - * if ev$1.isDefinedAt(ex) then ev$1.apply(ex) else throw ex - * - * Otherwise translate to - * - * case ex => handlerFun.apply(ex) - */ - def makeTryCase(handlerFun: tpd.Tree)(using Context): CaseDef = - val handler = TypedSplice(handlerFun) - val excId = Ident(nme.DEFAULT_EXCEPTION_NAME) - val rhs = - if handlerFun.tpe.widen.isRef(defn.PartialFunctionClass) then - val tmpName = UniqueName.fresh() - val tmpId = Ident(tmpName) - val init = ValDef(tmpName, TypeTree(), handler) - val test = If( - Apply(Select(tmpId, nme.isDefinedAt), excId), - Apply(Select(tmpId, nme.apply), excId), - Throw(excId)) - Block(init :: Nil, test) - else - Apply(Select(handler, nme.apply), excId) - CaseDef(excId, EmptyTree, rhs) - - /** Create a class definition with the same info as the refined type given by `parent` - * and `refinements`. - * - * parent { refinements } - * ==> - * trait extends core { this: self => refinements } - * - * Here, `core` is the (possibly parameterized) class part of `parent`. - * If `parent` is the same as `core`, self is empty. Otherwise `self` is `parent`. - * - * Example: Given - * - * class C - * type T1 = C { type T <: A } - * - * the refined type - * - * T1 { type T <: B } - * - * is expanded to - * - * trait extends C { this: T1 => type T <: A } - * - * The result of this method is used for validity checking, is thrown away afterwards. 
- * @param parent The type of `parent` - */ - def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(using Context): TypeDef = { - def stripToCore(tp: Type): List[Type] = tp match { - case tp: AppliedType => tp :: Nil - case tp: TypeRef if tp.symbol.isClass => tp :: Nil // monomorphic class type - case tp: TypeProxy => stripToCore(tp.underlying) - case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) - case _ => defn.AnyType :: Nil - } - val parentCores = stripToCore(parent.tpe) - val untpdParent = TypedSplice(parent) - val (classParents, self) = - if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) - else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) - val impl = Template(emptyConstructor, classParents, Nil, self, refinements) - TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) - } - - /** Returns list of all pattern variables, possibly with their types, - * without duplicates - */ - private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { - val buf = ListBuffer[VarInfo]() - def seenName(name: Name) = buf exists (_._1.name == name) - def add(named: NameTree, t: Tree): Unit = - if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) - def collect(tree: Tree): Unit = tree match { - case tree @ Bind(nme.WILDCARD, tree1) => - if tree.mods.is(Given) then - val Typed(_, tpt) = tree1: @unchecked - if shouldAddGiven(tree) then - add(tree, tpt) - collect(tree1) - case tree @ Bind(_, Typed(tree1, tpt)) => - if !(tree.mods.is(Given) && !shouldAddGiven(tree)) then - add(tree, tpt) - collect(tree1) - case tree @ Bind(_, tree1) => - add(tree, TypeTree()) - collect(tree1) - case Typed(id: Ident, t) if isVarPattern(id) && id.name != nme.WILDCARD && !isWildcardStarArg(tree) => - add(id, t) - case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => - add(id, TypeTree()) - case Apply(_, args) => - args foreach collect - case Typed(expr, _) => - collect(expr) - case NamedArg(_, arg) => - collect(arg) - case SeqLiteral(elems, _) => - elems foreach collect - case Alternative(trees) => - for (tree <- trees; (vble, _) <- getVariables(tree, shouldAddGiven)) - report.error(IllegalVariableInPatternAlternative(vble.symbol.name), vble.srcPos) - case Annotated(arg, _) => - collect(arg) - case InterpolatedString(_, segments) => - segments foreach collect - case InfixOp(left, _, right) => - collect(left) - collect(right) - case PrefixOp(_, od) => - collect(od) - case Parens(tree) => - collect(tree) - case Tuple(trees) => - trees foreach collect - case Thicket(trees) => - trees foreach collect - case Block(Nil, expr) => - collect(expr) - case Quote(expr) => - new UntypedTreeTraverser { - def traverse(tree: untpd.Tree)(using Context): Unit = tree match { - case Splice(expr) => collect(expr) - case _ => traverseChildren(tree) - } - }.traverse(expr) - case CapturingTypeTree(refs, parent) => - collect(parent) - case _ => - } - collect(tree) - buf.toList - } -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala b/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala deleted file mode 100644 index a1c3c0ed0775..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala +++ /dev/null @@ -1,310 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ -import Symbols._, StdNames._, Trees._ -import Decorators._ -import util.{Property, 
SourceFile} -import typer.ErrorReporting._ -import transform.SyntheticMembers.ExtendsSingletonMirror - -import scala.annotation.internal.sharable - -/** Helper methods to desugar enums */ -object DesugarEnums { - import untpd._ - - enum CaseKind: - case Simple, Object, Class - - final case class EnumConstraints(minKind: CaseKind, maxKind: CaseKind, enumCases: List[(Int, RefTree)]): - require(minKind.ordinal <= maxKind.ordinal && !(cached && enumCases.isEmpty)) - def requiresCreator = minKind == CaseKind.Simple - def isEnumeration = maxKind.ordinal < CaseKind.Class.ordinal - def cached = minKind.ordinal < CaseKind.Class.ordinal - end EnumConstraints - - /** Attachment containing the number of enum cases, the smallest kind that was seen so far, - * and a list of all the value cases with their ordinals. - */ - val EnumCaseCount: Property.Key[(Int, CaseKind, CaseKind, List[(Int, TermName)])] = Property.Key() - - /** Attachment signalling that when this definition is desugared, it should add any additional - * lookup methods for enums. - */ - val DefinesEnumLookupMethods: Property.Key[Unit] = Property.Key() - - /** The enumeration class that belongs to an enum case. This works no matter - * whether the case is still in the enum class or it has been transferred to the - * companion object. - */ - def enumClass(using Context): Symbol = { - val cls = ctx.owner - if (cls.is(Module)) cls.linkedClass else cls - } - - def enumCompanion(using Context): Symbol = { - val cls = ctx.owner - if (cls.is(Module)) cls.sourceModule else cls.linkedClass.sourceModule - } - - /** Is `tree` an (untyped) enum case? */ - def isEnumCase(tree: Tree)(using Context): Boolean = tree match { - case tree: MemberDef => tree.mods.isEnumCase - case PatDef(mods, _, _, _) => mods.isEnumCase - case _ => false - } - - /** A reference to the enum class `E`, possibly followed by type arguments. - * Each covariant type parameter is approximated by its lower bound. - * Each contravariant type parameter is approximated by its upper bound. - * It is an error if a type parameter is non-variant, or if its approximation - * refers to pther type parameters. 
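// Illustrative sketch (hypothetical user code, not from this file): for
//
//   enum Opt[+T]:
//     case Sm(value: T)
//     case Nn
//
// the parameterless case `Nn` extends the interpolated parent `Opt[Nothing]`: the covariant
// parameter `T` is approximated by its lower bound. A contravariant parameter would be
// approximated by its upper bound instead.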
- */ - def interpolatedEnumParent(span: Span)(using Context): Tree = { - val tparams = enumClass.typeParams - def isGround(tp: Type) = tp.subst(tparams, tparams.map(_ => NoType)) eq tp - val targs = tparams map { tparam => - if (tparam.is(Covariant) && isGround(tparam.info.bounds.lo)) - tparam.info.bounds.lo - else if (tparam.is(Contravariant) && isGround(tparam.info.bounds.hi)) - tparam.info.bounds.hi - else { - def problem = - if (!tparam.isOneOf(VarianceFlags)) "is invariant" - else "has bounds that depend on a type parameter in the same parameter list" - errorType(em"""cannot determine type argument for enum parent $enumClass, - |type parameter $tparam $problem""", ctx.source.atSpan(span)) - } - } - TypeTree(enumClass.typeRef.appliedTo(targs)).withSpan(span) - } - - /** A type tree referring to `enumClass` */ - def enumClassRef(using Context): Tree = - if (enumClass.exists) TypeTree(enumClass.typeRef) else TypeTree() - - /** Add implied flags to an enum class or an enum case */ - def addEnumFlags(cdef: TypeDef)(using Context): TypeDef = - if (cdef.mods.isEnumClass) cdef.withMods(cdef.mods.withAddedFlags(Abstract | Sealed, cdef.span)) - else if (isEnumCase(cdef)) cdef.withMods(cdef.mods.withAddedFlags(Final, cdef.span)) - else cdef - - private def valuesDot(name: PreName)(implicit src: SourceFile) = - Select(Ident(nme.DOLLAR_VALUES), name.toTermName) - - private def ArrayLiteral(values: List[Tree], tpt: Tree)(using Context): Tree = - val clazzOf = TypeApply(ref(defn.Predef_classOf.termRef), tpt :: Nil) - val ctag = Apply(TypeApply(ref(defn.ClassTagModule_apply.termRef), tpt :: Nil), clazzOf :: Nil) - val apply = Select(ref(defn.ArrayModule.termRef), nme.apply) - Apply(Apply(TypeApply(apply, tpt :: Nil), values), ctag :: Nil) - - /** The following lists of definitions for an enum type E and known value cases e_0, ..., e_n: - * - * private val $values = Array[E](this.e_0,...,this.e_n)(ClassTag[E](classOf[E])) - * def values = $values.clone - * def valueOf($name: String) = $name match { - * case "e_0" => this.e_0 - * ... 
- * case "e_n" => this.e_n - * case _ => throw new IllegalArgumentException("case not found: " + $name) - * } - */ - private def enumScaffolding(enumValues: List[RefTree])(using Context): List[Tree] = { - val rawEnumClassRef = rawRef(enumClass.typeRef) - extension (tpe: NamedType) def ofRawEnum = AppliedTypeTree(ref(tpe), rawEnumClassRef) - - val privateValuesDef = - ValDef(nme.DOLLAR_VALUES, TypeTree(), ArrayLiteral(enumValues, rawEnumClassRef)) - .withFlags(Private | Synthetic) - - val valuesDef = - DefDef(nme.values, Nil, defn.ArrayType.ofRawEnum, valuesDot(nme.clone_)) - .withFlags(Synthetic) - - val valuesOfBody: Tree = - val defaultCase = - val msg = Apply(Select(Literal(Constant("enum case not found: ")), nme.PLUS), Ident(nme.nameDollar)) - CaseDef(Ident(nme.WILDCARD), EmptyTree, - Throw(New(TypeTree(defn.IllegalArgumentExceptionType), List(msg :: Nil)))) - val stringCases = enumValues.map(enumValue => - CaseDef(Literal(Constant(enumValue.name.toString)), EmptyTree, enumValue) - ) ::: defaultCase :: Nil - Match(Ident(nme.nameDollar), stringCases) - val valueOfDef = DefDef(nme.valueOf, List(param(nme.nameDollar, defn.StringType) :: Nil), - TypeTree(), valuesOfBody) - .withFlags(Synthetic) - - privateValuesDef :: - valuesDef :: - valueOfDef :: Nil - } - - private def enumLookupMethods(constraints: EnumConstraints)(using Context): List[Tree] = - def scaffolding: List[Tree] = - if constraints.isEnumeration then enumScaffolding(constraints.enumCases.map(_._2)) else Nil - def valueCtor: List[Tree] = if constraints.requiresCreator then enumValueCreator :: Nil else Nil - def fromOrdinal: Tree = - def throwArg(ordinal: Tree) = - Throw(New(TypeTree(defn.NoSuchElementExceptionType), List(Select(ordinal, nme.toString_) :: Nil))) - if !constraints.cached then - fromOrdinalMeth(throwArg) - else - def default(ordinal: Tree) = - CaseDef(Ident(nme.WILDCARD), EmptyTree, throwArg(ordinal)) - if constraints.isEnumeration then - fromOrdinalMeth(ordinal => - Try(Apply(valuesDot(nme.apply), ordinal), default(ordinal) :: Nil, EmptyTree)) - else - fromOrdinalMeth(ordinal => - Match(ordinal, - constraints.enumCases.map((i, enumValue) => CaseDef(Literal(Constant(i)), EmptyTree, enumValue)) - :+ default(ordinal))) - - if !enumClass.exists then - // in the case of a double definition of an enum that only defines class cases (see tests/neg/i4470c.scala) - // it seems `enumClass` might be `NoSymbol`; in this case we provide no scaffolding. - Nil - else - scaffolding ::: valueCtor ::: fromOrdinal :: Nil - end enumLookupMethods - - /** A creation method for a value of enum type `E`, which is defined as follows: - * - * private def $new(_$ordinal: Int, $name: String) = new E with scala.runtime.EnumValue { - * def ordinal = _$ordinal // if `E` does not derive from `java.lang.Enum` - * } - */ - private def enumValueCreator(using Context) = { - val creator = New(Template( - constr = emptyConstructor, - parents = enumClassRef :: scalaRuntimeDot(tpnme.EnumValue) :: Nil, - derived = Nil, - self = EmptyValDef, - body = Nil - ).withAttachment(ExtendsSingletonMirror, ())) - DefDef(nme.DOLLAR_NEW, - List(List(param(nme.ordinalDollar_, defn.IntType), param(nme.nameDollar, defn.StringType))), - TypeTree(), creator).withFlags(Private | Synthetic) - } - - /** Is a type parameter in `enumTypeParams` referenced from an enum class case that has - * given type parameters `caseTypeParams`, value parameters `vparamss` and parents `parents`? - * Issues an error if that is the case but the reference is illegal. 
- * The reference could be illegal for two reasons: - * - explicit type parameters are given - * - it's a value case, i.e. no value parameters are given - */ - def typeParamIsReferenced( - enumTypeParams: List[TypeSymbol], - caseTypeParams: List[TypeDef], - vparamss: List[List[ValDef]], - parents: List[Tree])(using Context): Boolean = { - - object searchRef extends UntypedTreeAccumulator[Boolean] { - var tparamNames = enumTypeParams.map(_.name).toSet[Name] - def underBinders(binders: List[MemberDef], op: => Boolean): Boolean = { - val saved = tparamNames - tparamNames = tparamNames -- binders.map(_.name) - try op - finally tparamNames = saved - } - def apply(x: Boolean, tree: Tree)(using Context): Boolean = x || { - tree match { - case Ident(name) => - val matches = tparamNames.contains(name) - if (matches && (caseTypeParams.nonEmpty || vparamss.isEmpty)) - report.error(em"illegal reference to type parameter $name from enum case", tree.srcPos) - matches - case LambdaTypeTree(lambdaParams, body) => - underBinders(lambdaParams, foldOver(x, tree)) - case RefinedTypeTree(parent, refinements) => - val refinementDefs = refinements collect { case r: MemberDef => r } - underBinders(refinementDefs, foldOver(x, tree)) - case _ => foldOver(x, tree) - } - } - def apply(tree: Tree)(using Context): Boolean = - underBinders(caseTypeParams, apply(false, tree)) - } - - def typeHasRef(tpt: Tree) = searchRef(tpt) - def valDefHasRef(vd: ValDef) = typeHasRef(vd.tpt) - def parentHasRef(parent: Tree): Boolean = parent match { - case Apply(fn, _) => parentHasRef(fn) - case TypeApply(_, targs) => targs.exists(typeHasRef) - case Select(nu, nme.CONSTRUCTOR) => parentHasRef(nu) - case New(tpt) => typeHasRef(tpt) - case parent => parent.isType && typeHasRef(parent) - } - - vparamss.nestedExists(valDefHasRef) || parents.exists(parentHasRef) - } - - /** A pair consisting of - * - the next enum tag - * - scaffolding containing the necessary definitions for singleton enum cases - * unless that scaffolding was already generated by a previous call to `nextEnumKind`. 
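// Illustrative sketch (hypothetical user code, not from this file): for
//
//   enum Color:
//     case Red, Green, Blue
//
// the simple cases receive ordinals 0, 1 and 2 in source order, and because all cases are
// simple the `$values` / `values` / `valueOf` / `fromOrdinal` scaffolding sketched earlier
// is generated once, when the last case is desugared.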
- */ - def nextOrdinal(name: Name, kind: CaseKind, definesLookups: Boolean)(using Context): (Int, List[Tree]) = { - val (ordinal, seenMinKind, seenMaxKind, seenCases) = - ctx.tree.removeAttachment(EnumCaseCount).getOrElse((0, CaseKind.Class, CaseKind.Simple, Nil)) - val minKind = if kind.ordinal < seenMinKind.ordinal then kind else seenMinKind - val maxKind = if kind.ordinal > seenMaxKind.ordinal then kind else seenMaxKind - val cases = name match - case name: TermName => (ordinal, name) :: seenCases - case _ => seenCases - if definesLookups then - val thisRef = This(EmptyTypeIdent) - val cachedValues = cases.reverse.map((i, name) => (i, Select(thisRef, name))) - (ordinal, enumLookupMethods(EnumConstraints(minKind, maxKind, cachedValues))) - else - ctx.tree.pushAttachment(EnumCaseCount, (ordinal + 1, minKind, maxKind, cases)) - (ordinal, Nil) - } - - def param(name: TermName, typ: Type)(using Context): ValDef = param(name, TypeTree(typ)) - def param(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(Param) - - def ordinalMeth(body: Tree)(using Context): DefDef = - DefDef(nme.ordinal, Nil, TypeTree(defn.IntType), body).withAddedFlags(Synthetic) - - def ordinalMethLit(ord: Int)(using Context): DefDef = - ordinalMeth(Literal(Constant(ord))) - - def fromOrdinalMeth(body: Tree => Tree)(using Context): DefDef = - DefDef(nme.fromOrdinal, (param(nme.ordinal, defn.IntType) :: Nil) :: Nil, - rawRef(enumClass.typeRef), body(Ident(nme.ordinal))).withFlags(Synthetic) - - /** Expand a module definition representing a parameterless enum case */ - def expandEnumModule(name: TermName, impl: Template, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = { - assert(impl.body.isEmpty) - if (!enumClass.exists) EmptyTree - else if (impl.parents.isEmpty) - expandSimpleEnumCase(name, mods, definesLookups, span) - else { - val (tag, scaffolding) = nextOrdinal(name, CaseKind.Object, definesLookups) - val impl1 = cpy.Template(impl)(parents = impl.parents :+ scalaRuntimeDot(tpnme.EnumValue), body = Nil) - .withAttachment(ExtendsSingletonMirror, ()) - val vdef = ValDef(name, TypeTree(), New(impl1)).withMods(mods.withAddedFlags(EnumValue, span)) - flatTree(vdef :: scaffolding).withSpan(span) - } - } - - /** Expand a simple enum case */ - def expandSimpleEnumCase(name: TermName, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = - if (!enumClass.exists) EmptyTree - else if (enumClass.typeParams.nonEmpty) { - val parent = interpolatedEnumParent(span) - val impl = Template(emptyConstructor, parent :: Nil, Nil, EmptyValDef, Nil) - expandEnumModule(name, impl, mods, definesLookups, span) - } - else { - val (tag, scaffolding) = nextOrdinal(name, CaseKind.Simple, definesLookups) - val creator = Apply(Ident(nme.DOLLAR_NEW), List(Literal(Constant(tag)), Literal(Constant(name.toString)))) - val vdef = ValDef(name, enumClassRef, creator).withMods(mods.withAddedFlags(EnumValue, span)) - flatTree(vdef :: scaffolding).withSpan(span) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala b/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala deleted file mode 100644 index c0cf2c0d1b81..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala +++ /dev/null @@ -1,449 +0,0 @@ -package dotty.tools.dotc -package ast - -import core._ -import Symbols._, Types._, Contexts._, Decorators._, util.Spans._, Flags._, Constants._ -import StdNames.{nme, tpnme} -import ast.Trees._ -import Names.Name -import Comments.Comment 
-import NameKinds.DefaultGetterName -import Annotations.Annotation - -object MainProxies { - - /** Generate proxy classes for @main functions and @myMain functions where myMain <:< MainAnnotation */ - def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - mainAnnotationProxies(stats) ++ mainProxies(stats) - } - - /** Generate proxy classes for @main functions. - * A function like - * - * @main def f(x: S, ys: T*) = ... - * - * would be translated to something like - * - * import CommandLineParser._ - * class f { - * @static def main(args: Array[String]): Unit = - * try - * f( - * parseArgument[S](args, 0), - * parseRemainingArguments[T](args, 1): _* - * ) - * catch case err: ParseError => showError(err) - * } - */ - private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - import tpd._ - def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { - case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => - stat.symbol :: Nil - case stat @ TypeDef(name, impl: Template) if stat.symbol.is(Module) => - mainMethods(impl.body) - case _ => - Nil - } - mainMethods(stats).flatMap(mainProxy) - } - - import untpd._ - private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = { - val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span - def pos = mainFun.sourcePos - val argsRef = Ident(nme.args) - - def addArgs(call: untpd.Tree, mt: MethodType, idx: Int): untpd.Tree = - if (mt.isImplicitMethod) { - report.error(em"@main method cannot have implicit parameters", pos) - call - } - else { - val args = mt.paramInfos.zipWithIndex map { - (formal, n) => - val (parserSym, formalElem) = - if (formal.isRepeatedParam) (defn.CLP_parseRemainingArguments, formal.argTypes.head) - else (defn.CLP_parseArgument, formal) - val arg = Apply( - TypeApply(ref(parserSym.termRef), TypeTree(formalElem) :: Nil), - argsRef :: Literal(Constant(idx + n)) :: Nil) - if (formal.isRepeatedParam) repeated(arg) else arg - } - val call1 = Apply(call, args) - mt.resType match { - case restpe: MethodType => - if (mt.paramInfos.lastOption.getOrElse(NoType).isRepeatedParam) - report.error(em"varargs parameter of @main method must come last", pos) - addArgs(call1, restpe, idx + args.length) - case _ => - call1 - } - } - - var result: List[TypeDef] = Nil - if (!mainFun.owner.isStaticOwner) - report.error(em"@main method is not statically accessible", pos) - else { - var call = ref(mainFun.termRef) - mainFun.info match { - case _: ExprType => - case mt: MethodType => - call = addArgs(call, mt, 0) - case _: PolyType => - report.error(em"@main method cannot have type parameters", pos) - case _ => - report.error(em"@main can only annotate a method", pos) - } - val errVar = Ident(nme.error) - val handler = CaseDef( - Typed(errVar, TypeTree(defn.CLP_ParseError.typeRef)), - EmptyTree, - Apply(ref(defn.CLP_showError.termRef), errVar :: Nil)) - val body = Try(call, handler :: Nil, EmptyTree) - val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) - .withFlags(Param) - /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. - * The annotations will be retype-checked in another scope that may not have the same imports. 
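// Illustrative sketch (hypothetical user code, not from this file): a concrete instance of
// the @main translation shown above. For
//
//   @main def add(x: Int, y: Int): Unit = println(x + y)
//
// the generated proxy is roughly
//
//   final class add:
//     @static def main(args: Array[String]): Unit =
//       try add(parseArgument[Int](args, 0), parseArgument[Int](args, 1))
//       catch case err: ParseError => showError(err)
//
// so that running the class `add` with arguments "1 2" ends up calling `add(1, 2)`.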
- */ - def insertTypeSplices = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match - case tree: tpd.Ident @unchecked => TypedSplice(tree) - case tree => super.transform(tree) - } - val annots = mainFun.annotations - .filterNot(_.matches(defn.MainAnnot)) - .map(annot => insertTypeSplices.transform(annot.tree)) - val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) - .withFlags(JavaStatic | Synthetic) - .withAnnotations(annots) - val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) - val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) - .withFlags(Final | Invisible) - - if (!ctx.reporter.hasErrors) - result = mainCls.withSpan(mainAnnotSpan.toSynthetic) :: Nil - } - result - } - - private type DefaultValueSymbols = Map[Int, Symbol] - private type ParameterAnnotationss = Seq[Seq[Annotation]] - - /** - * Generate proxy classes for main functions. - * A function like - * - * /** - * * Lorem ipsum dolor sit amet - * * consectetur adipiscing elit. - * * - * * @param x my param x - * * @param ys all my params y - * */ - * @myMain(80) def f( - * @myMain.Alias("myX") x: S, - * y: S, - * ys: T* - * ) = ... - * - * would be translated to something like - * - * final class f { - * static def main(args: Array[String]): Unit = { - * val annotation = new myMain(80) - * val info = new Info( - * name = "f", - * documentation = "Lorem ipsum dolor sit amet consectetur adipiscing elit.", - * parameters = Seq( - * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))), - * new scala.annotation.MainAnnotation.Parameter("y", "S", true, false, "", Seq()), - * new scala.annotation.MainAnnotation.Parameter("ys", "T", false, true, "all my params y", Seq()) - * ) - * ), - * val command = annotation.command(info, args) - * if command.isDefined then - * val cmd = command.get - * val args0: () => S = annotation.argGetter[S](info.parameters(0), cmd(0), None) - * val args1: () => S = annotation.argGetter[S](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) - * val args2: () => Seq[T] = annotation.varargGetter[T](info.parameters(2), cmd.drop(2)) - * annotation.run(() => f(args0(), args1(), args2()*)) - * } - * } - */ - private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - import tpd._ - - /** - * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this - * point of the compilation, they must be explicitly passed by [[mainProxy]]. - */ - def defaultValueSymbols(scope: Tree, funSymbol: Symbol): DefaultValueSymbols = - scope match { - case TypeDef(_, template: Template) => - template.body.flatMap((_: Tree) match { - case dd: DefDef if dd.name.is(DefaultGetterName) && dd.name.firstPart == funSymbol.name => - val DefaultGetterName.NumberedInfo(index) = dd.name.info: @unchecked - List(index -> dd.symbol) - case _ => Nil - }).toMap - case _ => Map.empty - } - - /** Computes the list of main methods present in the code. 
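// Illustrative sketch (hypothetical user code, not from this file): how default arguments
// are recovered. For
//
//   @myMain def run(times: Int = 1): Unit = ()
//
// the compiler has already emitted a default getter named along the lines of `run$default$1`
// in the enclosing scope; `defaultValueSymbols` maps parameter index 0 to that getter's
// symbol so the proxy can later pass `Some(() => run$default$1())` to `argGetter`.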
*/ - def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap { - case stat: DefDef => - val sym = stat.symbol - sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match { - case Nil => - Nil - case _ :: Nil => - val paramAnnotations = stat.paramss.flatMap(_.map( - valdef => valdef.symbol.annotations.filter(_.matches(defn.MainAnnotationParameterAnnotation)) - )) - (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil - case mainAnnot :: others => - report.error(em"method cannot have multiple main annotations", mainAnnot.tree) - Nil - } - case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => - mainMethods(stat, impl.body) - case _ => - Nil - } - - // Assuming that the top-level object was already generated, all main methods will have a scope - mainMethods(EmptyTree, stats).flatMap(mainAnnotationProxy) - } - - private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = { - val mainAnnot = mainFun.getAnnotation(defn.MainAnnotationClass).get - def pos = mainFun.sourcePos - - val documentation = new Documentation(docComment) - - /** () => value */ - def unitToValue(value: Tree): Tree = - val defDef = DefDef(nme.ANON_FUN, List(Nil), TypeTree(), value) - Block(defDef, Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) - - /** Generate a list of trees containing the ParamInfo instantiations. - * - * A ParamInfo has the following shape - * ``` - * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))) - * ``` - */ - def parameterInfos(mt: MethodType): List[Tree] = - extension (tree: Tree) def withProperty(sym: Symbol, args: List[Tree]) = - Apply(Select(tree, sym.name), args) - - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val param = paramName.toString - val paramType0 = if formal.isRepeatedParam then formal.argTypes.head.dealias else formal.dealias - val paramType = paramType0.dealias - val paramTypeOwner = paramType.typeSymbol.owner - val paramTypeStr = - if paramTypeOwner == defn.EmptyPackageClass then paramType.show - else paramTypeOwner.showFullName + "." + paramType.show - val hasDefault = defaultValueSymbols.contains(idx) - val isRepeated = formal.isRepeatedParam - val paramDoc = documentation.argDocs.getOrElse(param, "") - val paramAnnots = - val annotationTrees = paramAnnotations(idx).map(instantiateAnnotation).toList - Apply(ref(defn.SeqModule.termRef), annotationTrees) - - val constructorArgs = List(param, paramTypeStr, hasDefault, isRepeated, paramDoc) - .map(value => Literal(Constant(value))) - - New(TypeTree(defn.MainAnnotationParameter.typeRef), List(constructorArgs :+ paramAnnots)) - - end parameterInfos - - /** - * Creates a list of references and definitions of arguments. - * The goal is to create the - * `val args0: () => S = annotation.argGetter[S](0, cmd(0), None)` - * part of the code. 
- */ - def argValDefs(mt: MethodType): List[ValDef] = - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val argName = nme.args ++ idx.toString - val isRepeated = formal.isRepeatedParam - val formalType = if isRepeated then formal.argTypes.head else formal - val getterName = if isRepeated then nme.varargGetter else nme.argGetter - val defaultValueGetterOpt = defaultValueSymbols.get(idx) match - case None => ref(defn.NoneModule.termRef) - case Some(dvSym) => - val value = unitToValue(ref(dvSym.termRef)) - Apply(ref(defn.SomeClass.companionModule.termRef), value) - val argGetter0 = TypeApply(Select(Ident(nme.annotation), getterName), TypeTree(formalType) :: Nil) - val index = Literal(Constant(idx)) - val paramInfo = Apply(Select(Ident(nme.info), nme.parameters), index) - val argGetter = - if isRepeated then Apply(argGetter0, List(paramInfo, Apply(Select(Ident(nme.cmd), nme.drop), List(index)))) - else Apply(argGetter0, List(paramInfo, Apply(Ident(nme.cmd), List(index)), defaultValueGetterOpt)) - ValDef(argName, TypeTree(), argGetter) - end argValDefs - - - /** Create a list of argument references that will be passed as argument to the main method. - * `args0`, ...`argn*` - */ - def argRefs(mt: MethodType): List[Tree] = - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val argRef = Apply(Ident(nme.args ++ idx.toString), Nil) - if formal.isRepeatedParam then repeated(argRef) else argRef - end argRefs - - - /** Turns an annotation (e.g. `@main(40)`) into an instance of the class (e.g. `new scala.main(40)`). */ - def instantiateAnnotation(annot: Annotation): Tree = - val argss = { - def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match { - case Apply(t, args: List[tpd.Tree]) => recurse(t, extractArgs(args) :: acc) - case _ => acc - } - - def extractArgs(args: List[tpd.Tree]): List[Tree] = - args.flatMap { - case Typed(SeqLiteral(varargs, _), _) => varargs.map(arg => TypedSplice(arg)) - case arg: Select if arg.name.is(DefaultGetterName) => Nil // Ignore default values, they will be added later by the compiler - case arg => List(TypedSplice(arg)) - } - - recurse(annot.tree, Nil) - } - - New(TypeTree(annot.symbol.typeRef), argss) - end instantiateAnnotation - - def generateMainClass(mainCall: Tree, args: List[Tree], parameterInfos: List[Tree]): TypeDef = - val cmdInfo = - val nameTree = Literal(Constant(mainFun.showName)) - val docTree = Literal(Constant(documentation.mainDoc)) - val paramInfos = Apply(ref(defn.SeqModule.termRef), parameterInfos) - New(TypeTree(defn.MainAnnotationInfo.typeRef), List(List(nameTree, docTree, paramInfos))) - - val annotVal = ValDef( - nme.annotation, - TypeTree(), - instantiateAnnotation(mainAnnot) - ) - val infoVal = ValDef( - nme.info, - TypeTree(), - cmdInfo - ) - val command = ValDef( - nme.command, - TypeTree(), - Apply( - Select(Ident(nme.annotation), nme.command), - List(Ident(nme.info), Ident(nme.args)) - ) - ) - val argsVal = ValDef( - nme.cmd, - TypeTree(), - Select(Ident(nme.command), nme.get) - ) - val run = Apply(Select(Ident(nme.annotation), nme.run), mainCall) - val body0 = If( - Select(Ident(nme.command), nme.isDefined), - Block(argsVal :: args, run), - EmptyTree - ) - val body = Block(List(annotVal, infoVal, command), body0) // TODO add `if (cmd.nonEmpty)` - - val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) - .withFlags(Param) - /** Replace typed `Ident`s that have been typed with a TypeSplice with the 
reference to the symbol. - * The annotations will be retype-checked in another scope that may not have the same imports. - */ - def insertTypeSplices = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match - case tree: tpd.Ident @unchecked => TypedSplice(tree) - case tree => super.transform(tree) - } - val annots = mainFun.annotations - .filterNot(_.matches(defn.MainAnnotationClass)) - .map(annot => insertTypeSplices.transform(annot.tree)) - val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) - .withFlags(JavaStatic) - .withAnnotations(annots) - val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) - val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) - .withFlags(Final | Invisible) - mainCls.withSpan(mainAnnot.tree.span.toSynthetic) - end generateMainClass - - if (!mainFun.owner.isStaticOwner) - report.error(em"main method is not statically accessible", pos) - None - else mainFun.info match { - case _: ExprType => - Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) - case mt: MethodType => - if (mt.isImplicitMethod) - report.error(em"main method cannot have implicit parameters", pos) - None - else mt.resType match - case restpe: MethodType => - report.error(em"main method cannot be curried", pos) - None - case _ => - Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) - case _: PolyType => - report.error(em"main method cannot have type parameters", pos) - None - case _ => - report.error(em"main can only annotate a method", pos) - None - } - } - - /** A class responsible for extracting the docstrings of a method. */ - private class Documentation(docComment: Option[Comment]): - import util.CommentParsing._ - - /** The main part of the documentation. */ - lazy val mainDoc: String = _mainDoc - /** The parameters identified by @param. Maps from parameter name to its documentation. 
*/ - lazy val argDocs: Map[String, String] = _argDocs - - private var _mainDoc: String = "" - private var _argDocs: Map[String, String] = Map() - - docComment match { - case Some(comment) => if comment.isDocComment then parseDocComment(comment.raw) else _mainDoc = comment.raw - case None => - } - - private def cleanComment(raw: String): String = - var lines: Seq[String] = raw.trim.nn.split('\n').nn.toSeq - lines = lines.map(l => l.substring(skipLineLead(l, -1), l.length).nn.trim.nn) - var s = lines.foldLeft("") { - case ("", s2) => s2 - case (s1, "") if s1.last == '\n' => s1 // Multiple newlines are kept as single newlines - case (s1, "") => s1 + '\n' - case (s1, s2) if s1.last == '\n' => s1 + s2 - case (s1, s2) => s1 + ' ' + s2 - } - s.replaceAll(raw"\[\[", "").nn.replaceAll(raw"\]\]", "").nn.trim.nn - - private def parseDocComment(raw: String): Unit = - // Positions of the sections (@) in the docstring - val tidx: List[(Int, Int)] = tagIndex(raw) - - // Parse main comment - var mainComment: String = raw.substring(skipLineLead(raw, 0), startTag(raw, tidx)).nn - _mainDoc = cleanComment(mainComment) - - // Parse arguments comments - val argsCommentsSpans: Map[String, (Int, Int)] = paramDocs(raw, "@param", tidx) - val argsCommentsTextSpans = argsCommentsSpans.view.mapValues(extractSectionText(raw, _)) - val argsCommentsTexts = argsCommentsTextSpans.mapValues({ case (beg, end) => raw.substring(beg, end).nn }) - _argDocs = argsCommentsTexts.mapValues(cleanComment(_)).toMap - end Documentation -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala b/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala deleted file mode 100644 index 054ffe66f323..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala +++ /dev/null @@ -1,129 +0,0 @@ -package dotty.tools.dotc -package ast - -import core.Contexts._ -import core.Decorators._ -import util.Spans._ -import Trees.{MemberDef, DefTree, WithLazyField} -import dotty.tools.dotc.core.Types.AnnotatedType -import dotty.tools.dotc.core.Types.ImportType -import dotty.tools.dotc.core.Types.Type - -/** Utility functions to go from typed to untyped ASTs */ -// TODO: Handle trees with mixed source files -object NavigateAST { - - /** The untyped tree corresponding to typed tree `tree` in the compilation - * unit specified by `ctx` - */ - def toUntyped(tree: tpd.Tree)(using Context): untpd.Tree = - untypedPath(tree, exactMatch = true) match { - case (utree: untpd.Tree) :: _ => - utree - case _ => - val loosePath = untypedPath(tree, exactMatch = false) - throw new - Error(i"""no untyped tree for $tree, pos = ${tree.sourcePos} - |best matching path =\n$loosePath%\n====\n% - |path positions = ${loosePath.map(_.sourcePos)}""") - } - - /** The reverse path of untyped trees starting with a tree that closest matches - * `tree` and ending in the untyped tree at the root of the compilation unit - * specified by `ctx`. - * @param exactMatch If `true`, the path must start with a node that exactly - * matches `tree`, or `Nil` is returned. - * If `false` the path might start with a node enclosing - * the logical position of `tree`. - * Note: A complication concerns member definitions. ValDefs and DefDefs - * have after desugaring a position that spans just the name of the symbol being - * defined and nothing else. So we look instead for an untyped tree approximating the - * envelope of the definition, and declare success if we find another DefTree. 
- */ - def untypedPath(tree: tpd.Tree, exactMatch: Boolean = false)(using Context): List[Positioned] = - tree match { - case tree: MemberDef[?] => - untypedPath(tree.span) match { - case path @ (last: DefTree[?]) :: _ => path - case path if !exactMatch => path - case _ => Nil - } - case _ => - untypedPath(tree.span) match { - case (path @ last :: _) if last.span == tree.span || !exactMatch => path - case _ => Nil - } - } - - /** The reverse part of the untyped root of the compilation unit of `ctx` to - * the given `span`. - */ - def untypedPath(span: Span)(using Context): List[Positioned] = - pathTo(span, List(ctx.compilationUnit.untpdTree)) - - - /** The reverse path from any node in `from` to the node that closest encloses `span`, - * or `Nil` if no such path exists. If a non-empty path is returned it starts with - * the node closest enclosing `span` and ends with one of the nodes in `from`. - * - * @param skipZeroExtent If true, skip over zero-extent nodes in the search. These nodes - * do not correspond to code the user wrote since their start and - * end point are the same, so this is useful when trying to reconcile - * nodes with source code. - */ - def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { - def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = { - var bestFit: List[Positioned] = path - while (it.hasNext) { - val path1 = it.next() match { - case p: Positioned => singlePath(p, path) - case m: untpd.Modifiers => childPath(m.productIterator, path) - case xs: List[?] => childPath(xs.iterator, path) - case _ => path - } - if ((path1 ne path) && - ((bestFit eq path) || - bestFit.head.span != path1.head.span && - bestFit.head.span.contains(path1.head.span))) - bestFit = path1 - } - bestFit - } - /* - * Annotations trees are located in the Type - */ - def unpackAnnotations(t: Type, path: List[Positioned]): List[Positioned] = - t match { - case ann: AnnotatedType => - unpackAnnotations(ann.parent, childPath(ann.annot.tree.productIterator, path)) - case imp: ImportType => - childPath(imp.expr.productIterator, path) - case other => - path - } - def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] = - if (p.span.exists && !(skipZeroExtent && p.span.isZeroExtent) && p.span.contains(span)) { - // FIXME: We shouldn't be manually forcing trees here, we should replace - // our usage of `productIterator` by something in `Positioned` that takes - // care of low-level details like this for us. - p match { - case p: WithLazyField[?] => - p.forceIfLazy - case _ => - } - val iterator = p match - case defdef: DefTree[?] 
=> - p.productIterator ++ defdef.mods.productIterator - case _ => - p.productIterator - childPath(iterator, p :: path) - } - else { - p match { - case t: untpd.TypeTree => unpackAnnotations(t.typeOpt, path) - case _ => path - } - } - childPath(from.iterator, Nil) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala deleted file mode 100644 index 7b558c65e425..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala +++ /dev/null @@ -1,246 +0,0 @@ -package dotty.tools -package dotc -package ast - -import util.Spans._ -import util.{SourceFile, SourcePosition, SrcPos} -import core.Contexts._ -import core.Decorators._ -import core.NameOps._ -import core.Flags.{JavaDefined, ExtensionMethod} -import core.StdNames.nme -import ast.Trees.mods -import annotation.constructorOnly -import annotation.internal.sharable - -/** A base class for things that have positions (currently: modifiers and trees) - */ -abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, Pure { - import Positioned.{ids, nextId, debugId} - - private var mySpan: Span = _ - - private var mySource: SourceFile = src - - /** A unique identifier in case -Yshow-tree-ids, or -Ydebug-tree-with-id - * is set, -1 otherwise. - */ - def uniqueId: Int = - if ids != null && ids.nn.containsKey(this) then ids.nn.get(this).nn else -1 - - private def allocateId() = - if ids != null then - val ownId = nextId - nextId += 1 - ids.nn.put(this: @unchecked, ownId) - if ownId == debugId then - println(s"Debug tree (id=$debugId) creation \n${this: @unchecked}\n") - Thread.dumpStack() - - allocateId() - - /** The span part of the item's position */ - def span: Span = mySpan - - def span_=(span: Span): Unit = - mySpan = span - - span = envelope(src) - - def source: SourceFile = mySource - - def sourcePos(using Context): SourcePosition = source.atSpan(span) - - /** This positioned item, widened to `SrcPos`. Used to make clear we only need the - * position, typically for error reporting. - */ - final def srcPos: SrcPos = this - - /** A positioned item like this one with given `span`. - * If the positioned item is source-derived, a clone is returned. - * If the positioned item is synthetic, the position is updated - * destructively and the item itself is returned. - */ - def withSpan(span: Span): this.type = - if (span == mySpan) this - else { - val newpd: this.type = - if !mySpan.exists then - if span.exists then envelope(source, span.startPos) // fill in children spans - this - else - cloneIn(source) - newpd.span = span - newpd - } - - /** The union of startSpan and the spans of all positioned children that - * have the same source as this node, except that Inlined nodes only - * consider their `call` child. - * - * Side effect: Any descendants without spans have but with the same source as this - * node have their span set to the end position of the envelope of all children to - * the left, or, if that one does not exist, to the start position of the envelope - * of all children to the right. 
- */ - def envelope(src: SourceFile, startSpan: Span = NoSpan): Span = (this: @unchecked) match { - case Trees.Inlined(call, _, _) => - call.span - case _ => - def include(span: Span, x: Any): Span = x match { - case p: Positioned => - if (p.source != src) span - else if (p.span.exists) span.union(p.span) - else if (span.exists) { - if (span.end != MaxOffset) - p.span = p.envelope(src, span.endPos) - span - } - else // No span available to assign yet, signal this by returning a span with MaxOffset end - Span(MaxOffset, MaxOffset) - case m: untpd.Modifiers => - include(include(span, m.mods), m.annotations) - case y :: ys => - include(include(span, y), ys) - case _ => span - } - val limit = productArity - def includeChildren(span: Span, n: Int): Span = - if (n < limit) includeChildren(include(span, productElement(n): @unchecked), n + 1) - else span - val span1 = includeChildren(startSpan, 0) - val span2 = - if (!span1.exists || span1.end != MaxOffset) - span1 - else if (span1.start == MaxOffset) - // No positioned child was found - NoSpan - else - ///println(s"revisit $uniqueId with $span1") - // We have some children left whose span could not be assigned. - // Go through it again with the known start position. - includeChildren(span1.startPos, 0) - span2.toSynthetic - } - - /** Clone this node but assign it a fresh id which marks it as a node in `file`. */ - def cloneIn(src: SourceFile): this.type = { - val newpd: this.type = clone.asInstanceOf[this.type] - newpd.allocateId() - newpd.mySource = src - newpd - } - - def contains(that: Positioned): Boolean = { - def isParent(x: Any): Boolean = x match { - case x: Positioned => - x.contains(that) - case m: untpd.Modifiers => - m.mods.exists(isParent) || m.annotations.exists(isParent) - case xs: List[?] => - xs.exists(isParent) - case _ => - false - } - (this eq that) || - (this.span contains that.span) && { - var n = productArity - var found = false - while (!found && n > 0) { - n -= 1 - found = isParent(productElement(n)) - } - found - } - } - - /** Check that all positioned items in this tree satisfy the following conditions: - * - Parent spans contain child spans - * - If item is a non-empty tree, it has a position - */ - def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { - import untpd._ - var lastPositioned: Positioned | Null = null - var lastSpan = NoSpan - def check(p: Any): Unit = p match { - case p: Positioned => - assert(span contains p.span, - i"""position error, parent span does not contain child span - |parent = $this # $uniqueId, - |parent span = $span, - |child = $p # ${p.uniqueId}, - |child span = ${p.span}""".stripMargin) - p match { - case tree: Tree if !tree.isEmpty => - assert(tree.span.exists, - s"position error: position not set for $tree # ${tree.uniqueId}") - case _ => - } - if nonOverlapping then - this match { - case _: XMLBlock => - // FIXME: Trees generated by the XML parser do not satisfy `checkPos` - case _: WildcardFunction - if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => - // ignore transition from last wildcard parameter to body - case _ => - assert(!lastSpan.exists || !p.span.exists || lastSpan.end <= p.span.start, - i"""position error, child positions overlap or in wrong order - |parent = $this - |1st child = $lastPositioned - |1st child span = $lastSpan - |2nd child = $p - |2nd child span = ${p.span}""".stripMargin) - } - lastPositioned = p - lastSpan = p.span - p.checkPos(nonOverlapping) - case m: untpd.Modifiers => - m.annotations.foreach(check) - m.mods.foreach(check) 
- case xs: List[?] => - xs.foreach(check) - case _ => - } - this match { - case tree: DefDef if tree.name == nme.CONSTRUCTOR && tree.mods.is(JavaDefined) => - // Special treatment for constructors coming from Java: - // Leave out leading type params, they are copied with wrong positions from parent class - check(tree.mods) - check(tree.trailingParamss) - case tree: DefDef if tree.mods.is(ExtensionMethod) => - tree.paramss match - case vparams1 :: vparams2 :: rest if tree.name.isRightAssocOperatorName => - // omit check for right-associatiove extension methods; their parameters were swapped - case _ => - check(tree.paramss) - check(tree.tpt) - check(tree.rhs) - case _ => - val end = productArity - var n = 0 - while (n < end) { - check(productElement(n)) - n += 1 - } - } - } - catch { - case ex: AssertionError => - println(i"error while checking $this") - throw ex - } -} - -object Positioned { - @sharable private var debugId = Int.MinValue - @sharable private var ids: java.util.WeakHashMap[Positioned, Int] | Null = null - @sharable private var nextId: Int = 0 - - def init(using Context): Unit = - debugId = ctx.settings.YdebugTreeWithId.value - if ids == null && ctx.settings.YshowTreeIds.value - || debugId != ctx.settings.YdebugTreeWithId.default - then - ids = java.util.WeakHashMap() -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala deleted file mode 100644 index b650a0088de4..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala +++ /dev/null @@ -1,1070 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import Flags._, Trees._, Types._, Contexts._ -import Names._, StdNames._, NameOps._, Symbols._ -import typer.ConstFold -import reporting.trace -import dotty.tools.dotc.transform.SymUtils._ -import Decorators._ -import Constants.Constant -import scala.collection.mutable - -import scala.annotation.tailrec - -trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => - - def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree - - def isDeclarationOrTypeDef(tree: Tree): Boolean = unsplice(tree) match { - case DefDef(_, _, _, EmptyTree) - | ValDef(_, _, EmptyTree) - | TypeDef(_, _) => true - case _ => false - } - - def isOpAssign(tree: Tree): Boolean = unsplice(tree) match { - case Apply(fn, _ :: _) => - unsplice(fn) match { - case Select(_, name) if name.isOpAssignmentName => true - case _ => false - } - case _ => false - } - - class MatchingArgs(params: List[Symbol], args: List[Tree])(using Context) { - def foreach(f: (Symbol, Tree) => Unit): Boolean = { - def recur(params: List[Symbol], args: List[Tree]): Boolean = params match { - case Nil => args.isEmpty - case param :: params1 => - if (param.info.isRepeatedParam) { - for (arg <- args) f(param, arg) - true - } - else args match { - case Nil => false - case arg :: args1 => - f(param, args.head) - recur(params1, args1) - } - } - recur(params, args) - } - def zipped: List[(Symbol, Tree)] = map((_, _)) - def map[R](f: (Symbol, Tree) => R): List[R] = { - val b = List.newBuilder[R] - foreach(b += f(_, _)) - b.result() - } - } - - /** The method part of an application node, possibly enclosed in a block - * with only valdefs as statements. the reason for also considering blocks - * is that named arguments can transform a call into a block, e.g. 
- * (b = foo, a = bar) - * is transformed to - * { val x$1 = foo - * val x$2 = bar - * (x$2, x$1) - * } - */ - def methPart(tree: Tree): Tree = stripApply(tree) match { - case TypeApply(fn, _) => methPart(fn) - case AppliedTypeTree(fn, _) => methPart(fn) // !!! should not be needed - case Block(stats, expr) => methPart(expr) - case mp => mp - } - - /** If this is an application, its function part, stripping all - * Apply nodes (but leaving TypeApply nodes in). Otherwise the tree itself. - */ - def stripApply(tree: Tree): Tree = unsplice(tree) match { - case Apply(fn, _) => stripApply(fn) - case _ => tree - } - - /** If this is a block, its expression part */ - def stripBlock(tree: Tree): Tree = unsplice(tree) match { - case Block(_, expr) => stripBlock(expr) - case Inlined(_, _, expr) => stripBlock(expr) - case _ => tree - } - - def stripInlined(tree: Tree): Tree = unsplice(tree) match { - case Inlined(_, _, expr) => stripInlined(expr) - case _ => tree - } - - def stripAnnotated(tree: Tree): Tree = tree match { - case Annotated(arg, _) => arg - case _ => tree - } - - /** The number of arguments in an application */ - def numArgs(tree: Tree): Int = unsplice(tree) match { - case Apply(fn, args) => numArgs(fn) + args.length - case TypeApply(fn, _) => numArgs(fn) - case Block(_, expr) => numArgs(expr) - case _ => 0 - } - - /** All term arguments of an application in a single flattened list */ - def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { - case Apply(fn, args) => allArguments(fn) ::: args - case TypeApply(fn, _) => allArguments(fn) - case Block(_, expr) => allArguments(expr) - case _ => Nil - } - - /** Is tree explicitly parameterized with type arguments? */ - def hasExplicitTypeArgs(tree: Tree): Boolean = tree match - case TypeApply(tycon, args) => - args.exists(arg => !arg.span.isZeroExtent && !tycon.span.contains(arg.span)) - case _ => false - - /** Is tree a path? */ - def isPath(tree: Tree): Boolean = unsplice(tree) match { - case Ident(_) | This(_) | Super(_, _) => true - case Select(qual, _) => isPath(qual) - case _ => false - } - - /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the - * same object? - */ - def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match { - case Ident(nme.CONSTRUCTOR) | Select(This(_), nme.CONSTRUCTOR) => true - case _ => false - } - - /** Is tree a super constructor call? - */ - def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { - case Select(Super(_, _), nme.CONSTRUCTOR) => true - case _ => false - } - - def isSuperSelection(tree: Tree): Boolean = unsplice(tree) match { - case Select(Super(_, _), _) => true - case _ => false - } - - def isSelfOrSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { - case Ident(nme.CONSTRUCTOR) - | Select(This(_), nme.CONSTRUCTOR) - | Select(Super(_, _), nme.CONSTRUCTOR) => true - case _ => false - } - - /** Is tree a backquoted identifier or definition */ - def isBackquoted(tree: Tree): Boolean = tree.hasAttachment(Backquoted) - - /** Is tree a variable pattern? */ - def isVarPattern(pat: Tree): Boolean = unsplice(pat) match { - case x: Ident => x.name.isVarPattern && !isBackquoted(x) - case _ => false - } - - /** The first constructor definition in `stats` */ - def firstConstructor(stats: List[Tree]): Tree = stats match { - case (meth: DefDef) :: _ if meth.name.isConstructorName => meth - case stat :: stats => firstConstructor(stats) - case nil => EmptyTree - } - - /** Is tpt a vararg type of the form T* or => T*? 
*/ - def isRepeatedParamType(tpt: Tree)(using Context): Boolean = stripByNameType(tpt) match { - case tpt: TypeTree => tpt.typeOpt.isRepeatedParam - case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS), _) => true - case _ => false - } - - /** Is this argument node of the form *, or is it a reference to - * such an argument ? The latter case can happen when an argument is lifted. - */ - def isWildcardStarArg(tree: Tree)(using Context): Boolean = unbind(tree) match { - case Typed(Ident(nme.WILDCARD_STAR), _) => true - case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true - case Typed(_, tpt: TypeTree) => tpt.typeOpt.isRepeatedParam - case NamedArg(_, arg) => isWildcardStarArg(arg) - case arg => arg.typeOpt.widen.isRepeatedParam - } - - /** Is tree a type tree of the form `=> T` or (under pureFunctions) `{refs}-> T`? */ - def isByNameType(tree: Tree)(using Context): Boolean = - stripByNameType(tree) ne tree - - /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ - def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match - case ByNameTypeTree(t1) => t1 - case untpd.CapturingTypeTree(_, parent) => - val parent1 = stripByNameType(parent) - if parent1 eq parent then tree else parent1 - case _ => tree - - /** All type and value parameter symbols of this DefDef */ - def allParamSyms(ddef: DefDef)(using Context): List[Symbol] = - ddef.paramss.flatten.map(_.symbol) - - /** Does this argument list end with an argument of the form : _* ? */ - def isWildcardStarArgList(trees: List[Tree])(using Context): Boolean = - trees.nonEmpty && isWildcardStarArg(trees.last) - - /** Is the argument a wildcard argument of the form `_` or `x @ _`? - */ - def isWildcardArg(tree: Tree): Boolean = unbind(tree) match { - case Ident(nme.WILDCARD) => true - case _ => false - } - - /** Does this list contain a named argument tree? */ - def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg - val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] - - /** Is this pattern node a catch-all (wildcard or variable) pattern? */ - def isDefaultCase(cdef: CaseDef): Boolean = cdef match { - case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) - case _ => false - } - - /** Does this CaseDef catch Throwable? */ - def catchesThrowable(cdef: CaseDef)(using Context): Boolean = - catchesAllOf(cdef, defn.ThrowableType) - - /** Does this CaseDef catch everything of a certain Type? */ - def catchesAllOf(cdef: CaseDef, threshold: Type)(using Context): Boolean = - isDefaultCase(cdef) || - cdef.guard.isEmpty && { - unbind(cdef.pat) match { - case Typed(Ident(nme.WILDCARD), tpt) => threshold <:< tpt.typeOpt - case _ => false - } - } - - /** Is this case guarded? */ - def isGuardedCase(cdef: CaseDef): Boolean = cdef.guard ne EmptyTree - - /** Is this parameter list a using clause? 
*/ - def isUsingClause(params: ParamClause)(using Context): Boolean = params match - case ValDefs(vparam :: _) => - val sym = vparam.symbol - if sym.exists then sym.is(Given) else vparam.mods.is(Given) - case _ => - false - - def isUsingOrTypeParamClause(params: ParamClause)(using Context): Boolean = params match - case TypeDefs(_) => true - case _ => isUsingClause(params) - - def isTypeParamClause(params: ParamClause)(using Context): Boolean = params match - case TypeDefs(_) => true - case _ => false - - private val languageSubCategories = Set(nme.experimental, nme.deprecated) - - /** If `path` looks like a language import, `Some(name)` where name - * is `experimental` if that sub-module is imported, and the empty - * term name otherwise. - */ - def languageImport(path: Tree): Option[TermName] = path match - case Select(p1, name: TermName) if languageSubCategories.contains(name) => - languageImport(p1) match - case Some(EmptyTermName) => Some(name) - case _ => None - case p1: RefTree if p1.name == nme.language => - p1.qualifier match - case EmptyTree => Some(EmptyTermName) - case p2: RefTree if p2.name == nme.scala => - p2.qualifier match - case EmptyTree => Some(EmptyTermName) - case Ident(nme.ROOTPKG) => Some(EmptyTermName) - case _ => None - case _ => None - case _ => None - - /** The underlying pattern ignoring any bindings */ - def unbind(x: Tree): Tree = unsplice(x) match { - case Bind(_, y) => unbind(y) - case y => y - } - - /** The largest subset of {NoInits, PureInterface} that a - * trait or class with these parents can have as flags. - */ - def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match { - case Nil => NoInitsInterface - case Apply(_, _ :: _) :: _ => EmptyFlags - case _ :: parents1 => parentsKind(parents1) - } - - /** Checks whether predicate `p` is true for all result parts of this expression, - * where we zoom into Ifs, Matches, and Blocks. - */ - def forallResults(tree: Tree, p: Tree => Boolean): Boolean = tree match { - case If(_, thenp, elsep) => forallResults(thenp, p) && forallResults(elsep, p) - case Match(_, cases) => cases forall (c => forallResults(c.body, p)) - case Block(_, expr) => forallResults(expr, p) - case _ => p(tree) - } -} - -trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => - import untpd._ - - /** The underlying tree when stripping any TypedSplice or Parens nodes */ - override def unsplice(tree: Tree): Tree = tree match { - case TypedSplice(tree1) => tree1 - case Parens(tree1) => unsplice(tree1) - case _ => tree - } - - def functionWithUnknownParamType(tree: Tree): Option[Tree] = tree match { - case Function(args, _) => - if (args.exists { - case ValDef(_, tpt, _) => tpt.isEmpty - case _ => false - }) Some(tree) - else None - case Match(EmptyTree, _) => - Some(tree) - case Block(Nil, expr) => - functionWithUnknownParamType(expr) - case _ => - None - } - - def isFunctionWithUnknownParamType(tree: Tree): Boolean = - functionWithUnknownParamType(tree).isDefined - - def isFunction(tree: Tree): Boolean = tree match - case Function(_, _) | Match(EmptyTree, _) => true - case Block(Nil, expr) => isFunction(expr) - case _ => false - - /** Is `tree` an context function or closure, possibly nested in a block? 
*/ - def isContextualClosure(tree: Tree)(using Context): Boolean = unsplice(tree) match { - case tree: FunctionWithMods => tree.mods.is(Given) - case Function((param: untpd.ValDef) :: _, _) => param.mods.is(Given) - case Closure(_, meth, _) => true - case Block(Nil, expr) => isContextualClosure(expr) - case Block(DefDef(nme.ANON_FUN, params :: _, _, _) :: Nil, cl: Closure) => - if params.isEmpty then - cl.tpt.eq(untpd.ContextualEmptyTree) || defn.isContextFunctionType(cl.tpt.typeOpt) - else - isUsingClause(params) - case _ => false - } - - /** The largest subset of {NoInits, PureInterface} that a - * trait or class enclosing this statement can have as flags. - */ - private def defKind(tree: Tree)(using Context): FlagSet = unsplice(tree) match { - case EmptyTree | _: Import => NoInitsInterface - case tree: TypeDef => if (tree.isClassDef) NoInits else NoInitsInterface - case tree: DefDef => - if tree.unforcedRhs == EmptyTree - && tree.paramss.forall { - case ValDefs(vparams) => vparams.forall(_.rhs.isEmpty) - case _ => true - } - then - NoInitsInterface - else if tree.mods.is(Given) && tree.paramss.isEmpty then - EmptyFlags // might become a lazy val: TODO: check whether we need to suppress NoInits once we have new lazy val impl - else - NoInits - case tree: ValDef => if (tree.unforcedRhs == EmptyTree) NoInitsInterface else EmptyFlags - case _ => EmptyFlags - } - - /** The largest subset of {NoInits, PureInterface} that a - * trait or class with this body can have as flags. - */ - def bodyKind(body: List[Tree])(using Context): FlagSet = - body.foldLeft(NoInitsInterface)((fs, stat) => fs & defKind(stat)) - - /** Info of a variable in a pattern: The named tree and its type */ - type VarInfo = (NameTree, Tree) - - /** An extractor for trees of the form `id` or `id: T` */ - object IdPattern { - def unapply(tree: Tree)(using Context): Option[VarInfo] = tree match { - case id: Ident if id.name != nme.WILDCARD => Some(id, TypeTree()) - case Typed(id: Ident, tpt) => Some((id, tpt)) - case _ => None - } - } - - /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. - * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` - * are ignored by the extractor. - */ - object ImpureByNameTypeTree: - - def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = - untpd.CapturingTypeTree( - untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) - - def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match - case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) - if id.span == bntp.span.startPos => Some(bntp) - case _ => None - end ImpureByNameTypeTree -} - -trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => - import TreeInfo._ - import tpd._ - - /** The purity level of this statement. - * @return Pure if statement has no side effects - * Idempotent if running the statement a second time has no side effects - * Impure otherwise - */ - def statPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { - case EmptyTree - | TypeDef(_, _) - | Import(_, _) - | DefDef(_, _, _, _) => - Pure - case vdef @ ValDef(_, _, _) => - if (vdef.symbol.flags is Mutable) Impure else exprPurity(vdef.rhs) `min` Pure - case _ => - Impure - // TODO: It seem like this should be exprPurity(tree) - // But if we do that the repl/vars test break. Need to figure out why that's the case. - } - - /** The purity level of this expression. 
See docs for PurityLevel for what that means - * - * Note that purity and idempotency are treated differently. - * References to modules and lazy vals are impure (side-effecting) both because - * side-effecting code may be executed and because the first reference - * takes a different code path than all to follow; but they are idempotent - * because running the expression a second time gives the cached result. - */ - def exprPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { - case EmptyTree - | This(_) - | Super(_, _) - | Literal(_) => - PurePath - case Ident(_) => - refPurity(tree) - case Select(qual, _) => - if (tree.symbol.is(Erased)) Pure - else refPurity(tree) `min` exprPurity(qual) - case New(_) | Closure(_, _, _) => - Pure - case TypeApply(fn, _) => - if (fn.symbol.is(Erased) || fn.symbol == defn.QuotedTypeModule_of || fn.symbol == defn.Predef_classOf) Pure else exprPurity(fn) - case Apply(fn, args) => - if isPureApply(tree, fn) then - minOf(exprPurity(fn), args.map(exprPurity)) `min` Pure - else if fn.symbol.is(Erased) then - Pure - else if fn.symbol.isStableMember /* && fn.symbol.is(Lazy) */ then - minOf(exprPurity(fn), args.map(exprPurity)) `min` Idempotent - else - Impure - case Typed(expr, _) => - exprPurity(expr) - case Block(stats, expr) => - minOf(exprPurity(expr), stats.map(statPurity)) - case Inlined(_, bindings, expr) => - minOf(exprPurity(expr), bindings.map(statPurity)) - case NamedArg(_, expr) => - exprPurity(expr) - case _ => - Impure - } - - private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = ls.foldLeft(l0)(_ `min` _) - - def isPurePath(tree: Tree)(using Context): Boolean = tree.tpe match { - case tpe: ConstantType => exprPurity(tree) >= Pure - case _ => exprPurity(tree) == PurePath - } - - def isPureExpr(tree: Tree)(using Context): Boolean = - exprPurity(tree) >= Pure - - def isIdempotentPath(tree: Tree)(using Context): Boolean = tree.tpe match { - case tpe: ConstantType => exprPurity(tree) >= Idempotent - case _ => exprPurity(tree) >= IdempotentPath - } - - def isIdempotentExpr(tree: Tree)(using Context): Boolean = - exprPurity(tree) >= Idempotent - - def isPureBinding(tree: Tree)(using Context): Boolean = statPurity(tree) >= Pure - - /** Is the application `tree` with function part `fn` known to be pure? - * Function value and arguments can still be impure. - */ - def isPureApply(tree: Tree, fn: Tree)(using Context): Boolean = - def isKnownPureOp(sym: Symbol) = - sym.owner.isPrimitiveValueClass - || sym.owner == defn.StringClass - || defn.pureMethods.contains(sym) - tree.tpe.isInstanceOf[ConstantType] && tree.symbol != NoSymbol && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. - || fn.symbol.isStableMember && !fn.symbol.is(Lazy) // constructors of no-inits classes are stable - - /** The purity level of this reference. - * @return - * PurePath if reference is (nonlazy and stable) - * or to a parameterized function - * or its type is a constant type - * IdempotentPath if reference is lazy and stable - * Impure otherwise - * @DarkDimius: need to make sure that lazy accessor methods have Lazy and Stable - * flags set. 
- */ - def refPurity(tree: Tree)(using Context): PurityLevel = { - val sym = tree.symbol - if (!tree.hasType) Impure - else if !tree.tpe.widen.isParameterless then PurePath - else if sym.is(Erased) then PurePath - else if tree.tpe.isInstanceOf[ConstantType] then PurePath - else if (!sym.isStableMember) Impure - else if (sym.is(Module)) - if (sym.moduleClass.isNoInitsRealClass) PurePath else IdempotentPath - else if (sym.is(Lazy)) IdempotentPath - else if sym.isAllOf(InlineParam) then Impure - else PurePath - } - - def isPureRef(tree: Tree)(using Context): Boolean = - refPurity(tree) == PurePath - def isIdempotentRef(tree: Tree)(using Context): Boolean = - refPurity(tree) >= IdempotentPath - - /** (1) If `tree` is a constant expression, its value as a Literal, - * or `tree` itself otherwise. - * - * Note: Demanding idempotency instead of purity in literalize is strictly speaking too loose. - * Example - * - * object O { final val x = 42; println("43") } - * O.x - * - * Strictly speaking we can't replace `O.x` with `42`. But this would make - * most expressions non-constant. Maybe we can change the spec to accept this - * kind of eliding behavior. Or else enforce true purity in the compiler. - * The choice will be affected by what we will do with `inline` and with - * Singleton type bounds (see SIP 23). Presumably - * - * object O1 { val x: Singleton = 42; println("43") } - * object O2 { inline val x = 42; println("43") } - * - * should behave differently. - * - * O1.x should have the same effect as { println("43"); 42 } - * - * whereas - * - * O2.x = 42 - * - * Revisit this issue once we have standardized on `inline`. Then we can demand - * purity of the prefix unless the selection goes to a inline val. - * - * Note: This method should be applied to all term tree nodes that are not literals, - * that can be idempotent, and that can have constant types. So far, only nodes - * of the following classes qualify: - * - * Ident - * Select - * TypeApply - * - * (2) A primitive unary operator expression `pre.op` where `op` is one of `+`, `-`, `~`, `!` - * that has a constant type `ConstantType(v)` but that is not a constant expression - * (i.e. `pre` has side-effects) is translated to - * - * { pre; v } - * - * (3) An expression `pre.getClass[..]()` that has a constant type `ConstantType(v)` but where - * `pre` has side-effects is translated to: - * - * { pre; v } - * - * This avoids the situation where we have a Select node that does not have a symbol. - */ - def constToLiteral(tree: Tree)(using Context): Tree = { - assert(!tree.isType) - val tree1 = ConstFold(tree) - tree1.tpe.widenTermRefExpr.dealias.normalized match { - case ConstantType(Constant(_: Type)) if tree.isInstanceOf[Block] => - // We can't rewrite `{ class A; classOf[A] }` to `classOf[A]`, so we leave - // blocks returning a class literal alone, even if they're idempotent. 
- tree1 - case ConstantType(value) => - def dropOp(t: Tree): Tree = t match - case Select(pre, _) if t.tpe.isInstanceOf[ConstantType] => - // it's a primitive unary operator - pre - case Apply(TypeApply(Select(pre, nme.getClass_), _), Nil) => - pre - case _ => - tree1 - - val countsAsPure = - if dropOp(tree1).symbol.isInlineVal - then isIdempotentExpr(tree1) - else isPureExpr(tree1) - - if countsAsPure then Literal(value).withSpan(tree.span) - else - val pre = dropOp(tree1) - if pre eq tree1 then tree1 - else - // it's a primitive unary operator or getClass call; - // Simplify `pre.op` to `{ pre; v }` where `v` is the value of `pre.op` - Block(pre :: Nil, Literal(value)).withSpan(tree.span) - case _ => tree1 - } - } - - def isExtMethodApply(tree: Tree)(using Context): Boolean = methPart(tree) match - case Inlined(call, _, _) => isExtMethodApply(call) - case tree @ Select(qual, nme.apply) => tree.symbol.is(ExtensionMethod) || isExtMethodApply(qual) - case tree => tree.symbol.is(ExtensionMethod) - - /** Is symbol potentially a getter of a mutable variable? - */ - def mayBeVarGetter(sym: Symbol)(using Context): Boolean = { - def maybeGetterType(tpe: Type): Boolean = tpe match { - case _: ExprType => true - case tpe: MethodType => tpe.isImplicitMethod - case tpe: PolyType => maybeGetterType(tpe.resultType) - case _ => false - } - sym.owner.isClass && !sym.isStableMember && maybeGetterType(sym.info) - } - - /** Is tree a reference to a mutable variable, or to a potential getter - * that has a setter in the same class? - */ - def isVariableOrGetter(tree: Tree)(using Context): Boolean = { - def sym = tree.symbol - def isVar = sym.is(Mutable) - def isGetter = - mayBeVarGetter(sym) && sym.owner.info.member(sym.name.asTermName.setterName).exists - - unsplice(tree) match { - case Ident(_) => isVar - case Select(_, _) => isVar || isGetter - case Apply(_, _) => - methPart(tree) match { - case Select(qual, nme.apply) => qual.tpe.member(nme.update).exists - case _ => false - } - case _ => false - } - } - - /** Is tree a `this` node which belongs to `enclClass`? 
*/ - def isSelf(tree: Tree, enclClass: Symbol)(using Context): Boolean = unsplice(tree) match { - case This(_) => tree.symbol == enclClass - case _ => false - } - - /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */ - def stripCast(tree: Tree)(using Context): Tree = { - def isCast(sel: Tree) = sel.symbol.isTypeCast - unsplice(tree) match { - case TypeApply(sel @ Select(inner, _), _) if isCast(sel) => - stripCast(inner) - case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCast(sel) => - stripCast(inner) - case t => - t - } - } - - /** The type arguments of a possibly curried call */ - def typeArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case TypeApply(fn, args) => loop(fn, args :: argss) - case Apply(fn, args) => loop(fn, argss) - case _ => argss - loop(tree, Nil) - - /** The term arguments of a possibly curried call */ - def termArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case Apply(fn, args) => loop(fn, args :: argss) - case TypeApply(fn, args) => loop(fn, argss) - case _ => argss - loop(tree, Nil) - - /** The type and term arguments of a possibly curried call, in the order they are given */ - def allArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case tree: GenericApply => loop(tree.fun, tree.args :: argss) - case _ => argss - loop(tree, Nil) - - /** The function part of a possibly curried call. Unlike `methPart` this one does - * not decompose blocks - */ - def funPart(tree: Tree): Tree = tree match - case tree: GenericApply => funPart(tree.fun) - case tree => tree - - /** Decompose a template body into parameters and other statements */ - def decomposeTemplateBody(body: List[Tree])(using Context): (List[Tree], List[Tree]) = - body.partition { - case stat: TypeDef => stat.symbol is Flags.Param - case stat: ValOrDefDef => - stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter - case _ => false - } - - /** An extractor for closures, either contained in a block or standalone. - */ - object closure { - def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match { - case Block(_, expr) => unapply(expr) - case Closure(env, meth, tpt) => Some(env, meth, tpt) - case Typed(expr, _) => unapply(expr) - case _ => None - } - } - - /** An extractor for def of a closure contained the block of the closure. */ - object closureDef { - def unapply(tree: Tree)(using Context): Option[DefDef] = tree match { - case Block((meth : DefDef) :: Nil, closure: Closure) if meth.symbol == closure.meth.symbol => - Some(meth) - case Block(Nil, expr) => - unapply(expr) - case _ => - None - } - } - - /** If tree is a closure, its body, otherwise tree itself */ - def closureBody(tree: Tree)(using Context): Tree = tree match { - case closureDef(meth) => meth.rhs - case _ => tree - } - - /** The variables defined by a pattern, in reverse order of their appearance. */ - def patVars(tree: Tree)(using Context): List[Symbol] = { - val acc = new TreeAccumulator[List[Symbol]] { - def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { - case Bind(_, body) => apply(tree.symbol :: syms, body) - case Annotated(tree, id @ Ident(tpnme.BOUNDTYPE_ANNOT)) => apply(id.symbol :: syms, tree) - case _ => foldOver(syms, tree) - } - } - acc(Nil, tree) - } - - /** Is this pattern node a catch-all or type-test pattern? 
*/ - def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match { - case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => - isSimpleThrowable(tpt.tpe) - case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => - isSimpleThrowable(tpt.tpe) - case _ => - isDefaultCase(cdef) - } - - private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp match { - case tp @ TypeRef(pre, _) => - (pre == NoPrefix || pre.typeSymbol.isStatic) && - (tp.symbol derivesFrom defn.ThrowableClass) && !tp.symbol.is(Trait) - case _ => - false - } - - /** The symbols defined locally in a statement list */ - def localSyms(stats: List[Tree])(using Context): List[Symbol] = - val locals = new mutable.ListBuffer[Symbol] - for stat <- stats do - if stat.isDef && stat.symbol.exists then locals += stat.symbol - locals.toList - - /** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */ - def definedSym(tree: Tree)(using Context): Symbol = - if (tree.isDef) tree.symbol else NoSymbol - - /** Going from child to parent, the path of tree nodes that starts - * with a definition of symbol `sym` and ends with `root`, or Nil - * if no such path exists. - * Pre: `sym` must have a position. - */ - def defPath(sym: Symbol, root: Tree)(using Context): List[Tree] = trace.onDebug(s"defpath($sym with position ${sym.span}, ${root.show})") { - require(sym.span.exists, sym) - object accum extends TreeAccumulator[List[Tree]] { - def apply(x: List[Tree], tree: Tree)(using Context): List[Tree] = - if (tree.span.contains(sym.span)) - if (definedSym(tree) == sym) tree :: x - else { - val x1 = foldOver(x, tree) - if (x1 ne x) tree :: x1 else x1 - } - else x - } - accum(Nil, root) - } - - /** The top level classes in this tree, including only those module classes that - * are not a linked class of some other class in the result. - */ - def topLevelClasses(tree: Tree)(using Context): List[ClassSymbol] = tree match { - case PackageDef(_, stats) => stats.flatMap(topLevelClasses) - case tdef: TypeDef if tdef.symbol.isClass => tdef.symbol.asClass :: Nil - case _ => Nil - } - - /** The tree containing only the top-level classes and objects matching either `cls` or its companion object */ - def sliceTopLevel(tree: Tree, cls: ClassSymbol)(using Context): List[Tree] = tree match { - case PackageDef(pid, stats) => - val slicedStats = stats.flatMap(sliceTopLevel(_, cls)) - val isEffectivelyEmpty = slicedStats.forall(_.isInstanceOf[Import]) - if isEffectivelyEmpty then Nil - else cpy.PackageDef(tree)(pid, slicedStats) :: Nil - case tdef: TypeDef => - val sym = tdef.symbol - assert(sym.isClass) - if (cls == sym || cls == sym.linkedClass) tdef :: Nil - else Nil - case vdef: ValDef => - val sym = vdef.symbol - assert(sym.is(Module)) - if (cls == sym.companionClass || cls == sym.moduleClass) vdef :: Nil - else Nil - case tree => - tree :: Nil - } - - /** The statement sequence that contains a definition of `sym`, or Nil - * if none was found. - * For a tree to be found, The symbol must have a position and its definition - * tree must be reachable from come tree stored in an enclosing context. 
- */ - def definingStats(sym: Symbol)(using Context): List[Tree] = - if (!sym.span.exists || (ctx eq NoContext) || (ctx.compilationUnit eq NoCompilationUnit)) Nil - else defPath(sym, ctx.compilationUnit.tpdTree) match { - case defn :: encl :: _ => - def verify(stats: List[Tree]) = - if (stats exists (definedSym(_) == sym)) stats else Nil - encl match { - case Block(stats, _) => verify(stats) - case encl: Template => verify(encl.body) - case PackageDef(_, stats) => verify(stats) - case _ => Nil - } - case nil => - Nil - } - - /** If `tree` is an instance of `TupleN[...](e1, ..., eN)`, the arguments `e1, ..., eN` - * otherwise the empty list. - */ - def tupleArgs(tree: Tree)(using Context): List[Tree] = tree match { - case Block(Nil, expr) => tupleArgs(expr) - case Inlined(_, Nil, expr) => tupleArgs(expr) - case Apply(fn: NameTree, args) - if fn.name == nme.apply && - fn.symbol.owner.is(Module) && - defn.isTupleClass(fn.symbol.owner.companionClass) => args - case _ => Nil - } - - /** The qualifier part of a Select or Ident. - * For an Ident, this is the `This` of the current class. - */ - def qualifier(tree: Tree)(using Context): Tree = tree match { - case Select(qual, _) => qual - case tree: Ident => desugarIdentPrefix(tree) - case _ => This(ctx.owner.enclosingClass.asClass) - } - - /** Is this a (potentially applied) selection of a member of a structural type - * that is not a member of an underlying class or trait? - */ - def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = { - def isStructuralTermSelect(tree: Select) = - def hasRefinement(qualtpe: Type): Boolean = qualtpe.dealias match - case RefinedType(parent, rname, rinfo) => - rname == tree.name || hasRefinement(parent) - case tp: TypeProxy => - hasRefinement(tp.superType) - case tp: AndType => - hasRefinement(tp.tp1) || hasRefinement(tp.tp2) - case tp: OrType => - hasRefinement(tp.tp1) || hasRefinement(tp.tp2) - case _ => - false - !tree.symbol.exists - && tree.isTerm - && { - val qualType = tree.qualifier.tpe - hasRefinement(qualType) && !qualType.derivesFrom(defn.PolyFunctionClass) - } - def loop(tree: Tree): Boolean = tree match - case TypeApply(fun, _) => - loop(fun) - case Apply(fun, _) => - loop(fun) - case tree: Select => - isStructuralTermSelect(tree) - case _ => - false - loop(tree) - } - - /** Return a pair consisting of (supercall, rest) - * - * - supercall: the superclass call, excluding trait constr calls - * - * The supercall is always the first statement (if it exists) - */ - final def splitAtSuper(constrStats: List[Tree])(implicit ctx: Context): (List[Tree], List[Tree]) = - constrStats.toList match { - case (sc: Apply) :: rest if sc.symbol.isConstructor => (sc :: Nil, rest) - case (block @ Block(_, sc: Apply)) :: rest if sc.symbol.isConstructor => (block :: Nil, rest) - case stats => (Nil, stats) - } - - /** Structural tree comparison (since == on trees is reference equality). 
- * For the moment, only Ident, Select, Literal, Apply and TypeApply are supported - */ - extension (t1: Tree) { - def === (t2: Tree)(using Context): Boolean = (t1, t2) match { - case (t1: Ident, t2: Ident) => - t1.symbol == t2.symbol - case (t1 @ Select(q1, _), t2 @ Select(q2, _)) => - t1.symbol == t2.symbol && q1 === q2 - case (Literal(c1), Literal(c2)) => - c1 == c2 - case (Apply(f1, as1), Apply(f2, as2)) => - f1 === f2 && as1.corresponds(as2)(_ === _) - case (TypeApply(f1, ts1), TypeApply(f2, ts2)) => - f1 === f2 && ts1.tpes.corresponds(ts2.tpes)(_ =:= _) - case _ => - false - } - def hash(using Context): Int = - t1.getClass.hashCode * 37 + { - t1 match { - case t1: Ident => t1.symbol.hashCode - case t1 @ Select(q1, _) => t1.symbol.hashCode * 41 + q1.hash - case Literal(c1) => c1.hashCode - case Apply(f1, as1) => as1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.hash) - case TypeApply(f1, ts1) => ts1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.tpe.hash) - case _ => t1.hashCode - } - } - } - - def assertAllPositioned(tree: Tree)(using Context): Unit = - tree.foreachSubTree { - case t: WithoutTypeOrPos[_] => - case t => assert(t.span.exists, i"$t") - } - - /** Extractors for quotes */ - object Quoted { - /** Extracts the content of a quoted tree. - * The result can be the contents of a term or type quote, which - * will return a term or type tree respectively. - */ - def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = - if tree.symbol == defn.QuotedRuntime_exprQuote then - // quoted.runtime.Expr.quote[T]() - Some(tree.args.head) - else if tree.symbol == defn.QuotedTypeModule_of then - // quoted.Type.of[](quotes) - val TypeApply(_, body :: _) = tree.fun: @unchecked - Some(body) - else None - } - - /** Extractors for splices */ - object Spliced { - /** Extracts the content of a spliced expression tree. - * The result can be the contents of a term splice, which - * will return a term tree. - */ - def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = - if tree.symbol.isExprSplice then Some(tree.args.head) else None - } - - /** Extractors for type splices */ - object SplicedType { - /** Extracts the content of a spliced type tree. - * The result can be the contents of a type splice, which - * will return a type tree. - */ - def unapply(tree: tpd.Select)(using Context): Option[tpd.Tree] = - if tree.symbol.isTypeSplice then Some(tree.qualifier) else None - } - - /** Extractor for not-null assertions. - * A not-null assertion for reference `x` has the form `x.$asInstanceOf$[x.type & T]`. 
- */ - object AssertNotNull : - def apply(tree: tpd.Tree, tpnn: Type)(using Context): tpd.Tree = - tree.select(defn.Any_typeCast).appliedToType(AndType(tree.tpe, tpnn)) - - def unapply(tree: tpd.TypeApply)(using Context): Option[tpd.Tree] = tree match - case TypeApply(Select(qual: RefTree, nme.asInstanceOfPM), arg :: Nil) => - arg.tpe match - case AndType(ref, nn1) if qual.tpe eq ref => - qual.tpe.widen match - case OrNull(nn2) if nn1 eq nn2 => - Some(qual) - case _ => None - case _ => None - case _ => None - end AssertNotNull - - object ConstantValue { - def unapply(tree: Tree)(using Context): Option[Any] = - tree match - case Typed(expr, _) => unapply(expr) - case Inlined(_, Nil, expr) => unapply(expr) - case Block(Nil, expr) => unapply(expr) - case _ => - tree.tpe.widenTermRefExpr.normalized match - case ConstantType(Constant(x)) => Some(x) - case _ => None - } -} - -object TreeInfo { - /** A purity level is represented as a bitset (expressed as an Int) */ - class PurityLevel(val x: Int) extends AnyVal { - /** `this` contains the bits of `that` */ - def >= (that: PurityLevel): Boolean = (x & that.x) == that.x - - /** The intersection of the bits of `this` and `that` */ - def min(that: PurityLevel): PurityLevel = new PurityLevel(x & that.x) - } - - /** An expression is a stable path. Requires that expression is at least idempotent */ - val Path: PurityLevel = new PurityLevel(4) - - /** The expression has no side effects */ - val Pure: PurityLevel = new PurityLevel(3) - - /** Running the expression a second time has no side effects. Implied by `Pure`. */ - val Idempotent: PurityLevel = new PurityLevel(1) - - val Impure: PurityLevel = new PurityLevel(0) - - /** A stable path that is evaluated without side effects */ - val PurePath: PurityLevel = new PurityLevel(Pure.x | Path.x) - - /** A stable path that is also idempotent */ - val IdempotentPath: PurityLevel = new PurityLevel(Idempotent.x | Path.x) -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala deleted file mode 100644 index caf8d68442f6..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala +++ /dev/null @@ -1,82 +0,0 @@ -package dotty.tools.dotc -package ast - -import Trees._ -import core.Contexts._ -import core.ContextOps.enter -import core.Flags._ -import core.Symbols._ -import core.TypeError - -/** A TreeMap that maintains the necessary infrastructure to support - * contextual implicit searches (type-scope implicits are supported anyway). - * - * This incudes implicits defined in scope as well as imported implicits. 
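// Editor's sketch (not part of the patch): TreeInfo's purity levels above form a small
// bit lattice (Path = 4, Pure = 3, Idempotent = 1, Impure = 0) where `>=` is bit
// containment and `min` is bit intersection. A self-contained illustration with the same
// encoding; `Level` and the demo below are made-up names, not compiler API.
object PurityLevelSketch:
  final case class Level(x: Int):
    def >=(that: Level): Boolean = (x & that.x) == that.x // `this` contains `that`'s bits
    def min(that: Level): Level  = Level(x & that.x)      // keep only the shared bits

  val Impure         = Level(0)
  val Idempotent     = Level(1)
  val Pure           = Level(3)                      // includes the Idempotent bit
  val Path           = Level(4)
  val PurePath       = Level(Pure.x | Path.x)        // 7
  val IdempotentPath = Level(Idempotent.x | Path.x)  // 5

  @main def purityDemo(): Unit =
    assert(Pure >= Idempotent)                       // pure implies idempotent
    assert(PurePath >= Pure && PurePath >= Path)     // a pure path is both pure and a path
    assert(PurePath.min(Idempotent) == Idempotent)   // intersection keeps only the shared bit
    assert(!(Idempotent >= Pure))                    // idempotent alone is not pure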
- */ -class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { - import tpd._ - - def transformSelf(vd: ValDef)(using Context): ValDef = - cpy.ValDef(vd)(tpt = transform(vd.tpt)) - - private def nestedScopeCtx(defs: List[Tree])(using Context): Context = { - val nestedCtx = ctx.fresh.setNewScope - defs foreach { - case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) - case _ => - } - nestedCtx - } - - private def patternScopeCtx(pattern: Tree)(using Context): Context = { - val nestedCtx = ctx.fresh.setNewScope - new TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = { - tree match { - case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => - nestedCtx.enter(d.symbol) - case _ => - } - traverseChildren(tree) - } - }.traverse(pattern) - nestedCtx - } - - override def transform(tree: Tree)(using Context): Tree = { - try tree match { - case Block(stats, expr) => - super.transform(tree)(using nestedScopeCtx(stats)) - case tree: DefDef => - inContext(localCtx(tree)) { - cpy.DefDef(tree)( - tree.name, - transformParamss(tree.paramss), - transform(tree.tpt), - transform(tree.rhs)(using nestedScopeCtx(tree.paramss.flatten))) - } - case impl @ Template(constr, parents, self, _) => - cpy.Template(tree)( - transformSub(constr), - transform(parents)(using ctx.superCallContext), - Nil, - transformSelf(self), - transformStats(impl.body, tree.symbol)) - case tree: CaseDef => - val patCtx = patternScopeCtx(tree.pat)(using ctx) - cpy.CaseDef(tree)( - transform(tree.pat), - transform(tree.guard)(using patCtx), - transform(tree.body)(using patCtx) - ) - case _ => - super.transform(tree) - } - catch { - case ex: TypeError => - report.error(ex, tree.srcPos) - tree - } - } -} - diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala deleted file mode 100644 index 3b250118f9b3..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala +++ /dev/null @@ -1,232 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import Types._, Contexts._, Flags._ -import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant -import Decorators._ -import dotty.tools.dotc.transform.SymUtils._ -import language.experimental.pureFunctions - -/** A map that applies three functions and a substitution together to a tree and - * makes sure they are coordinated so that the result is well-typed. The functions are - * @param typeMap A function from Type to Type that gets applied to the - * type of every tree node and to all locally defined symbols, - * followed by the substitution [substFrom := substTo]. - * @param treeMap A transformer that translates all encountered subtrees in - * prefix traversal orders - * @param oldOwners Previous owners. If a top-level local symbol in the mapped tree - * has one of these as an owner, the owner is replaced by the corresponding - * symbol in `newOwners`. - * @param newOwners New owners, replacing previous owners. - * @param substFrom The symbols that need to be substituted. - * @param substTo The substitution targets. - * - * The reason the substitution is broken out from the rest of the type map is - * that all symbols have to be substituted at the same time. If we do not do this, - * we risk data races on named types. Example: Say we have `outer#1.inner#2` and we - * have two substitutions S1 = [outer#1 := outer#3], S2 = [inner#2 := inner#4] where - * hashtags precede symbol ids. If we do S1 first, we get outer#2.inner#3. 
If we then - * do S2 we get outer#2.inner#4. But that means that the named type outer#2.inner - * gets two different denotations in the same period. Hence, if -Yno-double-bindings is - * set, we would get a data race assertion error. - */ -class TreeTypeMap( - val typeMap: Type -> Type = IdentityTypeMap, - val treeMap: tpd.Tree -> tpd.Tree = identity[tpd.Tree](_), // !cc! need explicit instantiation of default argument - val oldOwners: List[Symbol] = Nil, - val newOwners: List[Symbol] = Nil, - val substFrom: List[Symbol] = Nil, - val substTo: List[Symbol] = Nil, - cpy: tpd.TreeCopier = tpd.cpy)(using DetachedContext) extends tpd.TreeMap(cpy) { - import tpd._ - - def copy( - typeMap: Type -> Type, - treeMap: tpd.Tree -> tpd.Tree, - oldOwners: List[Symbol], - newOwners: List[Symbol], - substFrom: List[Symbol], - substTo: List[Symbol])(using Context): TreeTypeMap = - new TreeTypeMap(typeMap, treeMap, oldOwners, newOwners, substFrom, substTo) - - /** If `sym` is one of `oldOwners`, replace by corresponding symbol in `newOwners` */ - def mapOwner(sym: Symbol): Symbol = sym.subst(oldOwners, newOwners) - - /** Replace occurrences of `This(oldOwner)` in some prefix of a type - * by the corresponding `This(newOwner)`. - */ - private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap { - private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match { - case Nil => tp - case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType)) - case _ :: from1 => mapPrefix(from1, to.tail, tp) - } - def apply(tp: Type): Type = tp match { - case tp: NamedType => tp.derivedSelect(mapPrefix(oldOwners, newOwners, tp.prefix)) - case _ => mapOver(tp) - } - } - - def mapType(tp: Type): Type = - mapOwnerThis(typeMap(tp).substSym(substFrom, substTo)) - - private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit = - if (prevStats.isEmpty) assert(newStats.isEmpty) - else { - prevStats.head match { - case pdef: MemberDef => - val prevSym = pdef.symbol - val newSym = newStats.head.symbol - val newCls = newSym.owner.asClass - if (prevSym != newSym) newCls.replace(prevSym, newSym) - case _ => - } - updateDecls(prevStats.tail, newStats.tail) - } - - def transformInlined(tree: tpd.Inlined)(using Context): tpd.Tree = - val Inlined(call, bindings, expanded) = tree - val (tmap1, bindings1) = transformDefs(bindings) - val expanded1 = tmap1.transform(expanded) - cpy.Inlined(tree)(call, bindings1, expanded1) - - override def transform(tree: tpd.Tree)(using Context): tpd.Tree = treeMap(tree) match { - case impl @ Template(constr, parents, self, _) => - val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) - cpy.Template(impl)( - constr = tmap.transformSub(constr), - parents = parents.mapconserve(transform), - self = tmap.transformSub(self), - body = impl.body mapconserve - (tmap.transform(_)(using ctx.withOwner(mapOwner(impl.symbol.owner)))) - ).withType(tmap.mapType(impl.tpe)) - case tree1 => - tree1.withType(mapType(tree1.tpe)) match { - case id: Ident if tpd.needsSelect(id.tpe) => - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) - case ddef @ DefDef(name, paramss, tpt, _) => - val (tmap1, paramss1) = transformAllParamss(paramss) - val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) - res.symbol.setParamssFromDefs(paramss1) - res.symbol.transformAnnotations { - case ann: BodyAnnotation => ann.derivedAnnotation(transform(ann.tree)) - case ann => ann - } - res - case tdef @ 
LambdaTypeTree(tparams, body) => - val (tmap1, tparams1) = transformDefs(tparams) - cpy.LambdaTypeTree(tdef)(tparams1, tmap1.transform(body)) - case blk @ Block(stats, expr) => - val (tmap1, stats1) = transformDefs(stats) - val expr1 = tmap1.transform(expr) - cpy.Block(blk)(stats1, expr1) - case inlined: Inlined => - transformInlined(inlined) - case cdef @ CaseDef(pat, guard, rhs) => - val tmap = withMappedSyms(patVars(pat)) - val pat1 = tmap.transform(pat) - val guard1 = tmap.transform(guard) - val rhs1 = tmap.transform(rhs) - cpy.CaseDef(cdef)(pat1, guard1, rhs1) - case labeled @ Labeled(bind, expr) => - val tmap = withMappedSyms(bind.symbol :: Nil) - val bind1 = tmap.transformSub(bind) - val expr1 = tmap.transform(expr) - cpy.Labeled(labeled)(bind1, expr1) - case tree @ Hole(_, _, args, content, tpt) => - val args1 = args.mapConserve(transform) - val content1 = transform(content) - val tpt1 = transform(tpt) - cpy.Hole(tree)(args = args1, content = content1, tpt = tpt1) - case lit @ Literal(Constant(tpe: Type)) => - cpy.Literal(lit)(Constant(mapType(tpe))) - case tree1 => - super.transform(tree1) - } - } - - override def transformStats(trees: List[tpd.Tree], exprOwner: Symbol)(using Context): List[Tree] = - transformDefs(trees)._2 - - def transformDefs[TT <: tpd.Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { - val tmap = withMappedSyms(tpd.localSyms(trees)) - (tmap, tmap.transformSub(trees)) - } - - private def transformAllParamss(paramss: List[ParamClause]): (TreeTypeMap, List[ParamClause]) = paramss match - case params :: paramss1 => - val (tmap1, params1: ParamClause) = ((params: @unchecked) match - case ValDefs(vparams) => transformDefs(vparams) - case TypeDefs(tparams) => transformDefs(tparams) - ): @unchecked - val (tmap2, paramss2) = tmap1.transformAllParamss(paramss1) - (tmap2, params1 :: paramss2) - case nil => - (this, paramss) - - def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree] - - def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree)) - - /** The current tree map composed with a substitution [from -> to] */ - def withSubstitution(from: List[Symbol], to: List[Symbol]): TreeTypeMap = - if (from eq to) this - else { - // assert that substitution stays idempotent, assuming its parts are - // TODO: It might be better to cater for the asserted-away conditions, by - // setting up a proper substitution abstraction with a compose operator that - // guarantees idempotence. But this might be too inefficient in some cases. - // We'll cross that bridge when we need to. - assert(!from.exists(substTo contains _)) - assert(!to.exists(substFrom contains _)) - assert(!from.exists(newOwners contains _)) - assert(!to.exists(oldOwners contains _)) - copy( - typeMap, - treeMap, - from ++ oldOwners, - to ++ newOwners, - from ++ substFrom, - to ++ substTo) - } - - /** Apply `typeMap` and `ownerMap` to given symbols `syms` - * and return a treemap that contains the substitution - * between original and mapped symbols. - */ - def withMappedSyms(syms: List[Symbol]): TreeTypeMap = - withMappedSyms(syms, mapSymbols(syms, this)) - - /** The tree map with the substitution between originals `syms` - * and mapped symbols `mapped`. Also goes into mapped classes - * and substitutes their declarations. 
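// Editor's sketch (not part of the patch): the TreeTypeMap doc comment above insists that
// `substFrom`/`substTo` are applied simultaneously rather than as a chain of single
// substitutions. A self-contained toy illustration of the difference, using plain strings
// in place of symbols; all names here are made up.
object SimultaneousSubstSketch:
  // Simultaneous: every occurrence is looked up once against the whole mapping.
  def substAll(xs: List[String], from: List[String], to: List[String]): List[String] =
    val mapping = from.zip(to).toMap
    xs.map(x => mapping.getOrElse(x, x))

  // Sequential: apply [f := t] one pair at a time; a later pair can rewrite what an
  // earlier pair just introduced, which is the hazard the comment above describes.
  def substSequentially(xs: List[String], from: List[String], to: List[String]): List[String] =
    from.zip(to).foldLeft(xs) { case (acc, (f, t)) => acc.map(x => if x == f then t else x) }

  @main def substDemo(): Unit =
    val occurrences = List("a", "b")
    val from        = List("a", "b")
    val to          = List("b", "c")
    assert(substAll(occurrences, from, to) == List("b", "c"))          // applied at once
    assert(substSequentially(occurrences, from, to) == List("c", "c")) // the "b" introduced first is rewritten again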
- */ - def withMappedSyms(syms: List[Symbol], mapped: List[Symbol]): TreeTypeMap = - if syms eq mapped then this - else - val substMap = withSubstitution(syms, mapped) - lazy val origCls = mapped.zip(syms).filter(_._1.isClass).toMap - mapped.filter(_.isClass).foldLeft(substMap) { (tmap, cls) => - val origDcls = cls.info.decls.toList.filterNot(_.is(TypeParam)) - val tmap0 = tmap.withSubstitution(origCls(cls).typeParams, cls.typeParams) - val mappedDcls = mapSymbols(origDcls, tmap0, mapAlways = true) - val tmap1 = tmap.withMappedSyms( - origCls(cls).typeParams ::: origDcls, - cls.typeParams ::: mappedDcls) - origDcls.lazyZip(mappedDcls).foreach(cls.asClass.replace) - tmap1 - } - - override def toString = - def showSyms(syms: List[Symbol]) = - syms.map(sym => s"$sym#${sym.id}").mkString(", ") - s"""TreeTypeMap( - |typeMap = $typeMap - |treeMap = $treeMap - |oldOwners = ${showSyms(oldOwners)} - |newOwners = ${showSyms(newOwners)} - |substFrom = ${showSyms(substFrom)} - |substTo = ${showSyms(substTo)}""".stripMargin -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala deleted file mode 100644 index 0b1842603316..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala +++ /dev/null @@ -1,1787 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import Types._, Names._, NameOps._, Flags._, util.Spans._, Contexts._, Constants._ -import typer.{ ConstFold, ProtoTypes } -import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._ -import collection.mutable.ListBuffer -import printing.Printer -import printing.Texts.Text -import util.{Stats, Attachment, Property, SourceFile, NoSource, SrcPos, SourcePosition} -import config.Config -import config.Printers.overload -import annotation.internal.sharable -import annotation.unchecked.uncheckedVariance -import annotation.constructorOnly -import compiletime.uninitialized -import Decorators._ -import annotation.retains -import language.experimental.pureFunctions - -object Trees { - - type Untyped = Type | Null - - /** The total number of created tree nodes, maintained if Stats.enabled */ - @sharable var ntrees: Int = 0 - - /** Property key for trees with documentation strings attached */ - val DocComment: Property.StickyKey[Comments.Comment] = Property.StickyKey() - - /** Property key for backquoted identifiers and definitions */ - val Backquoted: Property.StickyKey[Unit] = Property.StickyKey() - - /** Trees take a parameter indicating what the type of their `tpe` field - * is. Two choices: `Type` or `Untyped`. - * Untyped trees have type `Tree[Untyped]`. - * - * Tree typing uses a copy-on-write implementation: - * - * - You can never observe a `tpe` which is `null` (throws an exception) - * - So when creating a typed tree with `withType` we can re-use - * the existing tree transparently, assigning its `tpe` field. - * - It is impossible to embed untyped trees in typed ones. - * - Typed trees can be embedded in untyped ones provided they are rooted - * in a TypedSplice node. - * - Type checking an untyped tree should remove all embedded `TypedSplice` - * nodes. 
- */ - abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { - - if (Stats.enabled) ntrees += 1 - - /** The type constructor at the root of the tree */ - type ThisTree[T <: Untyped] <: Tree[T] - - protected var myTpe: T @uncheckedVariance = uninitialized - - /** Destructively set the type of the tree. This should be called only when it is known that - * it is safe under sharing to do so. One use-case is in the withType method below - * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, - * where we overwrite with a simplified version of the type itself. - */ - private[dotc] def overwriteType(tpe: T @uncheckedVariance): Unit = - myTpe = tpe - - /** The type of the tree. In case of an untyped tree, - * an UnAssignedTypeException is thrown. (Overridden by empty trees) - */ - final def tpe: T = - if myTpe == null then throw UnAssignedTypeException(this) - myTpe.uncheckedNN - - /** Copy `tpe` attribute from tree `from` into this tree, independently - * whether it is null or not. - final def copyAttr[U <: Untyped](from: Tree[U]): ThisTree[T] = { - val t1 = this.withSpan(from.span) - val t2 = - if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) - else t1 - t2.asInstanceOf[ThisTree[T]] - } - */ - - /** Return a typed tree that's isomorphic to this tree, but has given - * type. (Overridden by empty trees) - */ - def withType(tpe: Type)(using Context): ThisTree[Type] = { - if (tpe.isInstanceOf[ErrorType]) - assert(!Config.checkUnreportedErrors || - ctx.reporter.errorsReported || - ctx.settings.YshowPrintErrors.value - // under -Yshow-print-errors, errors might arise during printing, but they do not count as reported - ) - else if (Config.checkTreesConsistent) - checkChildrenTyped(productIterator) - withTypeUnchecked(tpe) - } - - /** Check that typed trees don't refer to untyped ones, except if - * - the parent tree is an import, or - * - the child tree is an identifier, or - * - errors were reported - */ - private def checkChildrenTyped(it: Iterator[Any])(using Context): Unit = - if (!this.isInstanceOf[Import[?]]) - while (it.hasNext) - it.next() match { - case x: Ident[?] => // untyped idents are used in a number of places in typed trees - case x: Tree[?] => - assert(x.hasType || ctx.reporter.errorsReported, - s"$this has untyped child $x") - case xs: List[?] => checkChildrenTyped(xs.iterator) - case _ => - } - - def withTypeUnchecked(tpe: Type): ThisTree[Type] = { - val tree = - (if (myTpe == null || - (myTpe.asInstanceOf[AnyRef] eq tpe.asInstanceOf[AnyRef])) this - else cloneIn(source)).asInstanceOf[Tree[Type]] - tree overwriteType tpe - tree.asInstanceOf[ThisTree[Type]] - } - - /** Does the tree have its type field set? Note: this operation is not - * referentially transparent, because it can observe the withType - * modifications. Should be used only in special circumstances (we - * need it for printing trees with optional type info). - */ - final def hasType: Boolean = myTpe != null - - final def typeOpt: Type = myTpe match - case tp: Type => tp - case null => NoType - - /** The denotation referred to by this tree. - * Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other - * kinds of trees - */ - def denot(using Context): Denotation = NoDenotation - - /** Shorthand for `denot.symbol`. */ - final def symbol(using Context): Symbol = denot.symbol - - /** Does this tree represent a type? 
*/ - def isType: Boolean = false - - /** Does this tree represent a term? */ - def isTerm: Boolean = false - - /** Is this a legal part of a pattern which is not at the same time a term? */ - def isPattern: Boolean = false - - /** Does this tree define a new symbol that is not defined elsewhere? */ - def isDef: Boolean = false - - /** Is this tree either the empty tree or the empty ValDef or an empty type ident? */ - def isEmpty: Boolean = false - - /** Convert tree to a list. Gives a singleton list, except - * for thickets which return their element trees. - */ - def toList: List[Tree[T]] = this :: Nil - - /** if this tree is the empty tree, the alternative, else this tree */ - inline def orElse[U >: T <: Untyped](inline that: Tree[U]): Tree[U] = - if (this eq genericEmptyTree) that else this - - /** The number of nodes in this tree */ - def treeSize: Int = { - var s = 1 - def addSize(elem: Any): Unit = elem match { - case t: Tree[?] => s += t.treeSize - case ts: List[?] => ts foreach addSize - case _ => - } - productIterator foreach addSize - s - } - - /** If this is a thicket, perform `op` on each of its trees - * otherwise, perform `op` ion tree itself. - */ - def foreachInThicket(op: Tree[T] => Unit): Unit = op(this) - - override def toText(printer: Printer): Text = printer.toText(this) - - def sameTree(that: Tree[?]): Boolean = { - def isSame(x: Any, y: Any): Boolean = - x.asInstanceOf[AnyRef].eq(y.asInstanceOf[AnyRef]) || { - x match { - case x: Tree[?] => - y match { - case y: Tree[?] => x.sameTree(y) - case _ => false - } - case x: List[?] => - y match { - case y: List[?] => x.corresponds(y)(isSame) - case _ => false - } - case _ => - false - } - } - this.getClass == that.getClass && { - val it1 = this.productIterator - val it2 = that.productIterator - it1.corresponds(it2)(isSame) - } - } - - override def hashCode(): Int = System.identityHashCode(this) - override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - } - - class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException { - override def getMessage: String = s"type of $tree is not assigned" - } - - type LazyTree[+T <: Untyped] = Tree[T] | Lazy[Tree[T]] - type LazyTreeList[+T <: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] - - // ------ Categories of trees ----------------------------------- - - /** Instances of this class are trees for which isType is definitely true. - * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) - */ - trait TypTree[+T <: Untyped] extends Tree[T] { - type ThisTree[+T <: Untyped] <: TypTree[T] - override def isType: Boolean = true - } - - /** Instances of this class are trees for which isTerm is definitely true. - * Note that some trees have isTerm = true without being TermTrees (e.g. Ident, Annotated) - */ - trait TermTree[+T <: Untyped] extends Tree[T] { - type ThisTree[+T <: Untyped] <: TermTree[T] - override def isTerm: Boolean = true - } - - /** Instances of this class are trees which are not terms but are legal - * parts of patterns. 
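// Editor's sketch (not part of the patch): `withType`/`withTypeUnchecked` above implement
// copy-on-write typing: a still-untyped node is typed in place, while re-typing with a
// different type clones the node first. A self-contained illustration of that discipline;
// `Node` and its string "types" are stand-ins, not compiler API.
object CopyOnWriteTypeSketch:
  final class Node(val name: String):
    private var myTpe: String = null // not yet typed

    def hasType: Boolean = myTpe != null

    def tpe: String =
      if myTpe == null then throw new IllegalStateException(s"type of $name is not assigned")
      myTpe

    /** Reuse `this` while untyped or when the type is unchanged; otherwise return a clone. */
    def withType(t: String): Node =
      if myTpe == null || (myTpe eq t) then { myTpe = t; this }
      else
        val copy = new Node(name)
        copy.myTpe = t
        copy

  @main def copyOnWriteDemo(): Unit =
    val n  = new Node("n")
    val n1 = n.withType("Int")     // first typing mutates in place: same node
    val n2 = n1.withType("String") // different type: a fresh node; n1 keeps "Int"
    assert((n1 eq n) && n1.tpe == "Int" && n2.tpe == "String" && !(n2 eq n1))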
- */ - trait PatternTree[+T <: Untyped] extends Tree[T] { - type ThisTree[+T <: Untyped] <: PatternTree[T] - override def isPattern: Boolean = true - } - - /** Tree's denotation can be derived from its type */ - abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[+T <: Untyped] <: DenotingTree[T] - override def denot(using Context): Denotation = typeOpt.stripped match - case tpe: NamedType => tpe.denot - case tpe: ThisType => tpe.cls.denot - case _ => NoDenotation - } - - /** Tree's denot/isType/isTerm properties come from a subtree - * identified by `forwardTo`. - */ - abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[+T <: Untyped] <: ProxyTree[T] - def forwardTo: Tree[T] - override def denot(using Context): Denotation = forwardTo.denot - override def isTerm: Boolean = forwardTo.isTerm - override def isType: Boolean = forwardTo.isType - } - - /** Tree has a name */ - abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[+T <: Untyped] <: NameTree[T] - def name: Name - } - - /** Tree refers by name to a denotation */ - abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { - type ThisTree[+T <: Untyped] <: RefTree[T] - def qualifier: Tree[T] - override def isType: Boolean = name.isTypeName - override def isTerm: Boolean = name.isTermName - } - - /** Tree defines a new symbol */ - trait DefTree[+T <: Untyped] extends DenotingTree[T] { - type ThisTree[+T <: Untyped] <: DefTree[T] - - private var myMods: untpd.Modifiers | Null = uninitialized - - private[dotc] def rawMods: untpd.Modifiers = - if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN - - def withAnnotations(annots: List[untpd.Tree]): ThisTree[Untyped] = withMods(rawMods.withAnnotations(annots)) - - def withMods(mods: untpd.Modifiers): ThisTree[Untyped] = { - val tree = if (myMods == null || (myMods == mods)) this else cloneIn(source) - tree.setMods(mods) - tree.asInstanceOf[ThisTree[Untyped]] - } - - def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags)) - def withAddedFlags(flags: FlagSet): ThisTree[Untyped] = withMods(rawMods | flags) - - /** Destructively update modifiers. To be used with care. */ - def setMods(mods: untpd.Modifiers): Unit = myMods = mods - - override def isDef: Boolean = true - def namedType: NamedType = tpe.asInstanceOf[NamedType] - } - - extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods - - sealed trait WithEndMarker[+T <: Untyped]: - self: PackageDef[T] | NamedDefTree[T] => - - import WithEndMarker.* - - final def endSpan(using Context): Span = - if hasEndMarker then - val realName = srcName.stripModuleClassSuffix.lastPart - span.withStart(span.end - realName.length) - else - NoSpan - - /** The name in source code that represents this construct, - * and is the name that the user must write to create a valid - * end marker. - * e.g. a constructor definition is terminated in the source - * code by `end this`, so it's `srcName` should return `this`. 
- */ - protected def srcName(using Context): Name - - final def withEndMarker(): self.type = - self.withAttachment(HasEndMarker, ()) - - final def withEndMarker(copyFrom: WithEndMarker[?]): self.type = - if copyFrom.hasEndMarker then - this.withEndMarker() - else - this - - final def dropEndMarker(): self.type = - self.removeAttachment(HasEndMarker) - this - - protected def hasEndMarker: Boolean = self.hasAttachment(HasEndMarker) - - object WithEndMarker: - /** Property key that signals the tree was terminated - * with an `end` marker in the source code - */ - private val HasEndMarker: Property.StickyKey[Unit] = Property.StickyKey() - - end WithEndMarker - - abstract class NamedDefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends NameTree[T] with DefTree[T] with WithEndMarker[T] { - type ThisTree[+T <: Untyped] <: NamedDefTree[T] - - protected def srcName(using Context): Name = - if name == nme.CONSTRUCTOR then nme.this_ - else if symbol.isPackageObject then symbol.owner.name - else name - - /** The position of the name defined by this definition. - * This is a point position if the definition is synthetic, or a range position - * if the definition comes from source. - * It might also be that the definition does not have a position (for instance when synthesized by - * a calling chain from `viewExists`), in that case the return position is NoSpan. - * Overridden in Bind - */ - def nameSpan(using Context): Span = - if (span.exists) { - val point = span.point - if (rawMods.is(Synthetic) || span.isSynthetic || name.toTermName == nme.ERROR) Span(point) - else { - val realName = srcName.stripModuleClassSuffix.lastPart - Span(point, point + realName.length, point) - } - } - else span - - /** The source position of the name defined by this definition. - * This is a point position if the definition is synthetic, or a range position - * if the definition comes from source. - */ - def namePos(using Context): SourcePosition = source.atSpan(nameSpan) - } - - /** Tree defines a new symbol and carries modifiers. - * The position of a MemberDef contains only the defined identifier or pattern. - * The envelope of a MemberDef contains the whole definition and has its point - * on the opening keyword (or the next token after that if keyword is missing). 
- */ - abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { - type ThisTree[+T <: Untyped] <: MemberDef[T] - - def rawComment: Option[Comment] = getAttachment(DocComment) - - def setComment(comment: Option[Comment]): this.type = { - comment.map(putAttachment(DocComment, _)) - this - } - - def name: Name - } - - /** A ValDef or DefDef tree */ - abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { - type ThisTree[+T <: Untyped] <: ValOrDefDef[T] - def name: TermName - def tpt: Tree[T] - def unforcedRhs: LazyTree[T] = unforced - def rhs(using Context): Tree[T] = forceIfLazy - } - - trait ValOrTypeDef[+T <: Untyped] extends MemberDef[T]: - type ThisTree[+T <: Untyped] <: ValOrTypeDef[T] - - type ParamClause[T <: Untyped] = List[ValDef[T]] | List[TypeDef[T]] - - // ----------- Tree case classes ------------------------------------ - - /** name */ - case class Ident[+T <: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) - extends RefTree[T] { - type ThisTree[+T <: Untyped] = Ident[T] - def qualifier: Tree[T] = genericEmptyTree - - def isBackquoted: Boolean = hasAttachment(Backquoted) - } - - class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: -> String)(implicit @constructorOnly src: SourceFile) - extends Ident[T](name) { - def explanation = expl - override def toString: String = s"SearchFailureIdent($explanation)" - } - - /** qualifier.name, or qualifier#name, if qualifier is a type */ - case class Select[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) - extends RefTree[T] { - type ThisTree[+T <: Untyped] = Select[T] - - override def denot(using Context): Denotation = typeOpt match - case ConstantType(_) if ConstFold.foldedUnops.contains(name) => - // Recover the denotation of a constant-folded selection - qualifier.typeOpt.member(name).atSignature(Signature.NotAMethod, name) - case _ => - super.denot - - def nameSpan(using Context): Span = - if span.exists then - val point = span.point - if name.toTermName == nme.ERROR then - Span(point) - else if qualifier.span.start > span.start then // right associative - val realName = name.stripModuleClassSuffix.lastPart - Span(span.start, span.start + realName.length, point) - else - Span(point, span.end, point) - else span - } - - class SelectWithSig[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) - extends Select[T](qualifier, name) { - override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" - } - - /** qual.this */ - case class This[+T <: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = This[T] - // Denotation of a This tree is always the underlying class; needs correction for modules. 
- override def denot(using Context): Denotation = - typeOpt match { - case tpe @ TermRef(pre, _) if tpe.symbol.is(Module) => - tpe.symbol.moduleClass.denot.asSeenFrom(pre) - case _ => - super.denot - } - } - - /** C.super[mix], where qual = C.this */ - case class Super[+T <: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = Super[T] - def forwardTo: Tree[T] = qual - } - - abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] <: GenericApply[T] - val fun: Tree[T] - val args: List[Tree[T]] - def forwardTo: Tree[T] = fun - } - - object GenericApply: - def unapply[T <: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match - case tree: GenericApply[T] => Some((tree.fun, tree.args)) - case _ => None - - /** The kind of application */ - enum ApplyKind: - case Regular // r.f(x) - case Using // r.f(using x) - case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply - - /** fun(args) */ - case class Apply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends GenericApply[T] { - type ThisTree[+T <: Untyped] = Apply[T] - - def setApplyKind(kind: ApplyKind) = - putAttachment(untpd.KindOfApply, kind) - this - - /** The kind of this application. Works reliably only for untyped trees; typed trees - * are under no obligation to update it correctly. - */ - def applyKind: ApplyKind = - attachmentOrElse(untpd.KindOfApply, ApplyKind.Regular) - } - - /** fun[args] */ - case class TypeApply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends GenericApply[T] { - type ThisTree[+T <: Untyped] = TypeApply[T] - } - - /** const */ - case class Literal[+T <: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) - extends Tree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = Literal[T] - } - - /** new tpt, but no constructor call */ - case class New[+T <: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = New[T] - } - - /** expr : tpt */ - case class Typed[+T <: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = Typed[T] - def forwardTo: Tree[T] = expr - } - - /** name = arg, in a parameter list */ - case class NamedArg[+T <: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = NamedArg[T] - } - - /** name = arg, outside a parameter list */ - case class Assign[+T <: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Assign[T] - } - - /** { stats; expr } */ - case class Block[+T <: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = Block[T] - override def isType: Boolean = expr.isType - override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary - } - - /** if cond then thenp else elsep */ - case class If[+T <: 
Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = If[T] - def isInline = false - } - class InlineIf[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) - extends If(cond, thenp, elsep) { - override def isInline = true - override def toString = s"InlineIf($cond, $thenp, $elsep)" - } - - /** A closure with an environment and a reference to a method. - * @param env The captured parameters of the closure - * @param meth A ref tree that refers to the method of the closure. - * The first (env.length) parameters of that method are filled - * with env values. - * @param tpt Either EmptyTree or a TypeTree. If tpt is EmptyTree the type - * of the closure is a function type, otherwise it is the type - * given in `tpt`, which must be a SAM type. - */ - case class Closure[+T <: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Closure[T] - } - - /** selector match { cases } */ - case class Match[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Match[T] - def isInline = false - } - class InlineMatch[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends Match(selector, cases) { - override def isInline = true - override def toString = s"InlineMatch($selector, $cases)" - } - - /** case pat if guard => body */ - case class CaseDef[+T <: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = CaseDef[T] - } - - /** label[tpt]: { expr } */ - case class Labeled[+T <: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) - extends NameTree[T] { - type ThisTree[+T <: Untyped] = Labeled[T] - def name: Name = bind.name - } - - /** return expr - * where `from` refers to the method or label from which the return takes place - * After program transformations this is not necessarily the enclosing method, because - * closures can intervene. - */ - case class Return[+T <: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Return[T] - } - - /** while (cond) { body } */ - case class WhileDo[+T <: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = WhileDo[T] - } - - /** try block catch cases finally finalizer */ - case class Try[+T <: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Try[T] - } - - /** Seq(elems) - * @param tpt The element type of the sequence. 
- */ - case class SeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = SeqLiteral[T] - } - - /** Array(elems) */ - class JavaSeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends SeqLiteral(elems, elemtpt) { - override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" - } - - /** A tree representing inlined code. - * - * @param call Info about the original call that was inlined - * Until PostTyper, this is the full call, afterwards only - * a reference to the toplevel class from which the call was inlined. - * @param bindings Bindings for proxies to be used in the inlined code - * @param expansion The inlined tree, minus bindings. - * - * The full inlined code is equivalent to - * - * { bindings; expansion } - * - * The reason to keep `bindings` separate is because they are typed in a - * different context: `bindings` represent the arguments to the inlined - * call, whereas `expansion` represents the body of the inlined function. - */ - case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = Inlined[T] - override def isTerm = expansion.isTerm - override def isType = expansion.isType - } - - /** A type tree that represents an existing or inferred type */ - case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = TypeTree[T] - override def isEmpty: Boolean = !hasType - override def toString: String = - s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" - } - - /** A type tree whose type is inferred. These trees appear in two contexts - * - as an argument of a TypeApply. In that case its type is always a TypeVar - * - as a (result-)type of an inferred ValDef or DefDef. - * Every TypeVar is created as the type of one InferredTypeTree. - */ - class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] - - /** ref.type */ - case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = SingletonTypeTree[T] - } - - /** tpt { refinements } */ - case class RefinedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = RefinedTypeTree[T] - def forwardTo: Tree[T] = tpt - } - - /** tpt[args] */ - case class AppliedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = AppliedTypeTree[T] - def forwardTo: Tree[T] = tpt - } - - /** [typeparams] -> tpt - * - * Note: the type of such a tree is not necessarily a `HKTypeLambda`, it can - * also be a `TypeBounds` where the upper bound is an `HKTypeLambda`, and the - * lower bound is either a reference to `Nothing` or an `HKTypeLambda`, - * this happens because these trees are typed by `HKTypeLambda#fromParams` which - * makes sure to move bounds outside of the type lambda itself to simplify their - * handling in the compiler. 
- * - * You may ask: why not normalize the trees too? That way, - * - * LambdaTypeTree(X, TypeBoundsTree(A, B)) - * - * would become, - * - * TypeBoundsTree(LambdaTypeTree(X, A), LambdaTypeTree(X, B)) - * - * which would maintain consistency between a tree and its type. The problem - * with this definition is that the same tree `X` appears twice, therefore - * we'd have to create two symbols for it which makes it harder to relate the - * source code written by the user with the trees used by the compiler (for - * example, to make "find all references" work in the IDE). - */ - case class LambdaTypeTree[+T <: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = LambdaTypeTree[T] - } - - case class TermLambdaTypeTree[+T <: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = TermLambdaTypeTree[T] - } - - /** [bound] selector match { cases } */ - case class MatchTypeTree[+T <: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = MatchTypeTree[T] - } - - /** => T */ - case class ByNameTypeTree[+T <: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = ByNameTypeTree[T] - } - - /** >: lo <: hi - * >: lo <: hi = alias for RHS of bounded opaque type - */ - case class TypeBoundsTree[+T <: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = TypeBoundsTree[T] - } - - /** name @ body */ - case class Bind[+T <: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends NamedDefTree[T] with PatternTree[T] { - type ThisTree[+T <: Untyped] = Bind[T] - override def isType: Boolean = name.isTypeName - override def isTerm: Boolean = name.isTermName - - override def nameSpan(using Context): Span = - if span.exists then Span(span.start, span.start + name.toString.length) else span - } - - /** tree_1 | ... | tree_n */ - case class Alternative[+T <: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends PatternTree[T] { - type ThisTree[+T <: Untyped] = Alternative[T] - } - - /** The typed translation of `extractor(patterns)` in a pattern. The translation has the following - * components: - * - * @param fun is `extractor.unapply` (or, for backwards compatibility, `extractor.unapplySeq`) - * possibly with type parameters - * @param implicits Any implicit parameters passed to the unapply after the selector - * @param patterns The argument patterns in the pattern match. 
- * - * It is typed with same type as first `fun` argument - * Given a match selector `sel` a pattern UnApply(fun, implicits, patterns) is roughly translated as follows - * - * val result = fun(sel)(implicits) - * if (result.isDefined) "match patterns against result" - */ - case class UnApply[+T <: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with PatternTree[T] { - type ThisTree[+T <: Untyped] = UnApply[T] - def forwardTo = fun - } - - /** mods val name: tpt = rhs */ - case class ValDef[+T <: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) - extends ValOrDefDef[T], ValOrTypeDef[T] { - type ThisTree[+T <: Untyped] = ValDef[T] - assert(isEmpty || (tpt ne genericEmptyTree)) - def unforced: LazyTree[T] = preRhs - protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x - } - - /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ - case class DefDef[+T <: Untyped] private[ast] (name: TermName, - paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) - extends ValOrDefDef[T] { - type ThisTree[+T <: Untyped] = DefDef[T] - assert(tpt ne genericEmptyTree) - def unforced: LazyTree[T] = preRhs - protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x - - def leadingTypeParams(using Context): List[TypeDef[T]] = paramss match - case (tparams @ (tparam: TypeDef[_]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] - case _ => Nil - - def trailingParamss(using Context): List[ParamClause[T]] = paramss match - case ((tparam: TypeDef[_]) :: _) :: paramss1 => paramss1 - case _ => paramss - - def termParamss(using Context): List[List[ValDef[T]]] = - (if ctx.erasedTypes then paramss else untpd.termParamssIn(paramss)) - .asInstanceOf[List[List[ValDef[T]]]] - } - - /** mods class name template or - * mods trait name template or - * mods type name = rhs or - * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or - * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods - */ - case class TypeDef[+T <: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) - extends MemberDef[T], ValOrTypeDef[T] { - type ThisTree[+T <: Untyped] = TypeDef[T] - - /** Is this a definition of a class? */ - def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] - - def isBackquoted: Boolean = hasAttachment(Backquoted) - } - - /** extends parents { self => body } - * @param parentsOrDerived A list of parents followed by a list of derived classes, - * if this is of class untpd.DerivingTemplate. - * Typed templates only have parents. 
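// Editor's sketch (not part of the patch): the UnApply doc comment above describes the rough
// translation of an extractor pattern: run `fun(sel)(implicits)`, then match the sub-patterns
// against the result when it is defined. A self-contained, source-level illustration of that
// shape; the `Halves` extractor and all names below are made up.
object UnapplySketch:
  object Halves:
    // Plays the role of `fun` in the translation: defined only for even numbers.
    def unapply(n: Int): Option[(Int, Int)] =
      if n % 2 == 0 then Some((n / 2, n / 2)) else None

  def describe(sel: Int): String =
    sel match // the pattern `Halves(a, b)` becomes an UnApply node once typed
      case Halves(a, b) => s"$sel splits into $a and $b"
      case _            => s"$sel is odd"

  def describeDesugared(sel: Int): String =
    // Roughly what the typed pattern stands for.
    val result = Halves.unapply(sel)
    if result.isDefined then
      val (a, b) = result.get
      s"$sel splits into $a and $b"
    else s"$sel is odd"

  @main def unapplyDemo(): Unit =
    assert(describe(10) == describeDesugared(10))
    assert(describe(7)  == describeDesugared(7))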
- */ - case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) - extends DefTree[T] with WithLazyField[List[Tree[T]]] { - type ThisTree[+T <: Untyped] = Template[T] - def unforcedBody: LazyTreeList[T] = unforced - def unforced: LazyTreeList[T] = preBody - protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x - def body(using Context): List[Tree[T]] = forceIfLazy - - def parents: List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate - def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate - } - - - abstract class ImportOrExport[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] { - type ThisTree[+T <: Untyped] <: ImportOrExport[T] - val expr: Tree[T] - val selectors: List[untpd.ImportSelector] - } - - /** import expr.selectors - * where a selector is either an untyped `Ident`, `name` or - * an untyped thicket consisting of `name` and `rename`. - */ - case class Import[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) - extends ImportOrExport[T] { - type ThisTree[+T <: Untyped] = Import[T] - } - - /** export expr.selectors - * where a selector is either an untyped `Ident`, `name` or - * an untyped thicket consisting of `name` and `rename`. - */ - case class Export[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) - extends ImportOrExport[T] { - type ThisTree[+T <: Untyped] = Export[T] - } - - /** package pid { stats } */ - case class PackageDef[+T <: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with WithEndMarker[T] { - type ThisTree[+T <: Untyped] = PackageDef[T] - def forwardTo: RefTree[T] = pid - protected def srcName(using Context): Name = pid.name - } - - /** arg @annot */ - case class Annotated[+T <: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] { - type ThisTree[+T <: Untyped] = Annotated[T] - def forwardTo: Tree[T] = arg - } - - trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T] { - override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] - override def span: Span = NoSpan - override def span_=(span: Span): Unit = {} - } - - /** Temporary class that results from translation of ModuleDefs - * (and possibly other statements). - * The contained trees will be integrated when transformed with - * a `transform(List[Tree])` call. 
- */ - case class Thicket[+T <: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends Tree[T] with WithoutTypeOrPos[T] { - myTpe = NoType.asInstanceOf[T] - type ThisTree[+T <: Untyped] = Thicket[T] - - def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = { - val newTrees = trees.mapConserve(op) - if (trees eq newTrees) - this - else - Thicket[U](newTrees)(source).asInstanceOf[this.type] - } - - override def foreachInThicket(op: Tree[T] => Unit): Unit = - trees foreach (_.foreachInThicket(op)) - - override def isEmpty: Boolean = trees.isEmpty - override def toList: List[Tree[T]] = flatten(trees) - override def toString: String = if (isEmpty) "EmptyTree" else "Thicket(" + trees.mkString(", ") + ")" - override def span: Span = - def combine(s: Span, ts: List[Tree[T]]): Span = ts match - case t :: ts1 => combine(s.union(t.span), ts1) - case nil => s - combine(NoSpan, trees) - - override def withSpan(span: Span): this.type = - mapElems(_.withSpan(span)).asInstanceOf[this.type] - } - - class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource) { - // assert(uniqueId != 1492) - override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") - } - - class EmptyValDef[T <: Untyped] extends ValDef[T]( - nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { - myTpe = NoType.asInstanceOf[T] - setMods(untpd.Modifiers(PrivateLocal)) - override def isEmpty: Boolean = true - override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyValDef") - } - - @sharable val theEmptyTree = new EmptyTree[Type]() - @sharable val theEmptyValDef = new EmptyValDef[Type]() - - def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] - def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] - - /** Tree that replaces a level 1 splices in pickled (level 0) quotes. - * It is only used when picking quotes (will never be in a TASTy file). - * - * @param isTermHole If this hole is a term, otherwise it is a type hole. - * @param idx The index of the hole in it's enclosing level 0 quote. - * @param args The arguments of the splice to compute its content - * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. - * @param tpt Type of the hole - */ - case class Hole[+T <: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[+T <: Untyped] <: Hole[T] - override def isTerm: Boolean = isTermHole - override def isType: Boolean = !isTermHole - } - - def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { - def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = - remaining match { - case Thicket(elems) :: remaining1 => - var buf1 = buf - if (buf1 == null) { - buf1 = new ListBuffer[Tree[T]] - var scanned = trees - while (scanned `ne` remaining) { - buf1 += scanned.head - scanned = scanned.tail - } - } - recur(recur(buf1, elems), remaining1) - case tree :: remaining1 => - if (buf != null) buf += tree - recur(buf, remaining1) - case nil => - buf - } - val buf = recur(null, trees) - if (buf != null) buf.toList else trees - } - - // ----- Lazy trees and tree sequences - - /** A tree that can have a lazy field - * The field is represented by some private `var` which is - * accessed by `unforced` and `force`. 
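// Editor's sketch (not part of the patch): `flatten` above splices Thicket elements into the
// surrounding statement list and only allocates a buffer once the first Thicket is found, so
// the common thicket-free case returns the original list unchanged. A self-contained
// illustration of the same idea; `Node`, `Leaf` and `Group` are toy stand-ins.
object FlattenSketch:
  import scala.collection.mutable.ListBuffer

  sealed trait Node
  final case class Leaf(n: Int)             extends Node
  final case class Group(elems: List[Node]) extends Node // stands in for Thicket

  def flatten(nodes: List[Node]): List[Node] =
    var buf: ListBuffer[Node] = null // allocated only once the first Group is seen
    var scanned = nodes
    while scanned.nonEmpty do
      scanned.head match
        case Group(elems) =>
          if buf == null then
            buf = ListBuffer.empty[Node]
            var prefix = nodes // copy the prefix scanned before the first Group
            while prefix ne scanned do
              buf += prefix.head
              prefix = prefix.tail
          buf ++= flatten(elems) // splice (possibly nested) group contents
        case node =>
          if buf != null then buf += node
      scanned = scanned.tail
    if buf == null then nodes else buf.toList

  @main def flattenDemo(): Unit =
    val plain = List(Leaf(1), Leaf(2))
    assert(flatten(plain) eq plain) // no Group: the very same list comes back
    assert(flatten(List(Leaf(1), Group(List(Leaf(2), Leaf(3))), Leaf(4)))
             == List(Leaf(1), Leaf(2), Leaf(3), Leaf(4)))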
Forcing the field will - * set the `var` to the underlying value. - */ - trait WithLazyField[+T <: AnyRef] { - def unforced: T | Lazy[T] - protected def force(x: T @uncheckedVariance): Unit - def forceIfLazy(using Context): T = unforced match { - case lzy: Lazy[T @unchecked] => - val x = lzy.complete - force(x) - x - case x: T @ unchecked => x - } - } - - /** A base trait for lazy tree fields. - * These can be instantiated with Lazy instances which - * can delay tree construction until the field is first demanded. - */ - trait Lazy[+T <: AnyRef] { - def complete(using Context): T - } - - // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. - - abstract class Instance[T <: Untyped] { inst => - - type Tree = Trees.Tree[T] - type TypTree = Trees.TypTree[T] - type TermTree = Trees.TermTree[T] - type PatternTree = Trees.PatternTree[T] - type DenotingTree = Trees.DenotingTree[T] - type ProxyTree = Trees.ProxyTree[T] - type NameTree = Trees.NameTree[T] - type RefTree = Trees.RefTree[T] - type DefTree = Trees.DefTree[T] - type NamedDefTree = Trees.NamedDefTree[T] - type MemberDef = Trees.MemberDef[T] - type ValOrDefDef = Trees.ValOrDefDef[T] - type ValOrTypeDef = Trees.ValOrTypeDef[T] - type LazyTree = Trees.LazyTree[T] - type LazyTreeList = Trees.LazyTreeList[T] - type ParamClause = Trees.ParamClause[T] - - type Ident = Trees.Ident[T] - type SearchFailureIdent = Trees.SearchFailureIdent[T] - type Select = Trees.Select[T] - type SelectWithSig = Trees.SelectWithSig[T] - type This = Trees.This[T] - type Super = Trees.Super[T] - type Apply = Trees.Apply[T] - type TypeApply = Trees.TypeApply[T] - type GenericApply = Trees.GenericApply[T] - type Literal = Trees.Literal[T] - type New = Trees.New[T] - type Typed = Trees.Typed[T] - type NamedArg = Trees.NamedArg[T] - type Assign = Trees.Assign[T] - type Block = Trees.Block[T] - type If = Trees.If[T] - type InlineIf = Trees.InlineIf[T] - type Closure = Trees.Closure[T] - type Match = Trees.Match[T] - type InlineMatch = Trees.InlineMatch[T] - type CaseDef = Trees.CaseDef[T] - type Labeled = Trees.Labeled[T] - type Return = Trees.Return[T] - type WhileDo = Trees.WhileDo[T] - type Try = Trees.Try[T] - type SeqLiteral = Trees.SeqLiteral[T] - type JavaSeqLiteral = Trees.JavaSeqLiteral[T] - type Inlined = Trees.Inlined[T] - type TypeTree = Trees.TypeTree[T] - type InferredTypeTree = Trees.InferredTypeTree[T] - type SingletonTypeTree = Trees.SingletonTypeTree[T] - type RefinedTypeTree = Trees.RefinedTypeTree[T] - type AppliedTypeTree = Trees.AppliedTypeTree[T] - type LambdaTypeTree = Trees.LambdaTypeTree[T] - type TermLambdaTypeTree = Trees.TermLambdaTypeTree[T] - type MatchTypeTree = Trees.MatchTypeTree[T] - type ByNameTypeTree = Trees.ByNameTypeTree[T] - type TypeBoundsTree = Trees.TypeBoundsTree[T] - type Bind = Trees.Bind[T] - type Alternative = Trees.Alternative[T] - type UnApply = Trees.UnApply[T] - type ValDef = Trees.ValDef[T] - type DefDef = Trees.DefDef[T] - type TypeDef = Trees.TypeDef[T] - type Template = Trees.Template[T] - type Import = Trees.Import[T] - type Export = Trees.Export[T] - type ImportOrExport = Trees.ImportOrExport[T] - type PackageDef = Trees.PackageDef[T] - type Annotated = Trees.Annotated[T] - type Thicket = Trees.Thicket[T] - - type Hole = Trees.Hole[T] - - @sharable val EmptyTree: Thicket = genericEmptyTree - @sharable val EmptyValDef: ValDef = genericEmptyValDef - @sharable val ContextualEmptyTree: Thicket = new EmptyTree() // an empty tree marking a contextual closure - - // ----- Auxiliary creation methods 
------------------ - - def Thicket(): Thicket = EmptyTree - def Thicket(x1: Tree, x2: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: Nil) - def Thicket(x1: Tree, x2: Tree, x3: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: x3 :: Nil) - def Thicket(xs: List[Tree])(implicit src: SourceFile) = new Thicket(xs) - - def flatTree(xs: List[Tree])(implicit src: SourceFile): Tree = flatten(xs) match { - case x :: Nil => x - case ys => Thicket(ys) - } - - // ----- Helper classes for copying, transforming, accumulating ----------------- - - val cpy: TreeCopier - - /** A class for copying trees. The copy methods avoid creating a new tree - * If all arguments stay the same. - * - * Note: Some of the copy methods take a context. - * These are exactly those methods that are overridden in TypedTreeCopier - * so that they selectively retype themselves. Retyping needs a context. - */ - abstract class TreeCopier { - protected def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] - protected def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] - - /** Soucre of the copied tree */ - protected def sourceFile(tree: Tree): SourceFile = tree.source - - protected def finalize(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] = - Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") - postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) - - protected def finalize(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] = - Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") - postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) - - def Ident(tree: Tree)(name: Name)(using Context): Ident = tree match { - case tree: Ident if name == tree.name => tree - case _ => finalize(tree, untpd.Ident(name)(sourceFile(tree))) - } - def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = tree match { - case tree: SelectWithSig => - if ((qualifier eq tree.qualifier) && (name == tree.name)) tree - else finalize(tree, SelectWithSig(qualifier, name, tree.sig)(sourceFile(tree))) - case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree - case _ => finalize(tree, untpd.Select(qualifier, name)(sourceFile(tree))) - } - /** Copy Ident or Select trees */ - def Ref(tree: RefTree)(name: Name)(using Context): RefTree = tree match { - case Ident(_) => Ident(tree)(name) - case Select(qual, _) => Select(tree)(qual, name) - } - def This(tree: Tree)(qual: untpd.Ident)(using Context): This = tree match { - case tree: This if (qual eq tree.qual) => tree - case _ => finalize(tree, untpd.This(qual)(sourceFile(tree))) - } - def Super(tree: Tree)(qual: Tree, mix: untpd.Ident)(using Context): Super = tree match { - case tree: Super if (qual eq tree.qual) && (mix eq tree.mix) => tree - case _ => finalize(tree, untpd.Super(qual, mix)(sourceFile(tree))) - } - def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = tree match { - case tree: Apply if (fun eq tree.fun) && (args eq tree.args) => tree - case _ => finalize(tree, untpd.Apply(fun, args)(sourceFile(tree))) - //.ensuring(res => res.uniqueId != 2213, s"source = $tree, ${tree.uniqueId}, ${tree.span}") - } - def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = tree match { - case tree: TypeApply if (fun eq tree.fun) && (args eq tree.args) => tree - case _ => finalize(tree, untpd.TypeApply(fun, args)(sourceFile(tree))) - } - def Literal(tree: Tree)(const: 
Constant)(using Context): Literal = tree match { - case tree: Literal if const == tree.const => tree - case _ => finalize(tree, untpd.Literal(const)(sourceFile(tree))) - } - def New(tree: Tree)(tpt: Tree)(using Context): New = tree match { - case tree: New if (tpt eq tree.tpt) => tree - case _ => finalize(tree, untpd.New(tpt)(sourceFile(tree))) - } - def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = tree match { - case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree - case tree => finalize(tree, untpd.Typed(expr, tpt)(sourceFile(tree))) - } - def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = tree match { - case tree: NamedArg if (name == tree.name) && (arg eq tree.arg) => tree - case _ => finalize(tree, untpd.NamedArg(name, arg)(sourceFile(tree))) - } - def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = tree match { - case tree: Assign if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree - case _ => finalize(tree, untpd.Assign(lhs, rhs)(sourceFile(tree))) - } - def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = tree match { - case tree: Block if (stats eq tree.stats) && (expr eq tree.expr) => tree - case _ => finalize(tree, untpd.Block(stats, expr)(sourceFile(tree))) - } - def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = tree match { - case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree - case tree: InlineIf => finalize(tree, untpd.InlineIf(cond, thenp, elsep)(sourceFile(tree))) - case _ => finalize(tree, untpd.If(cond, thenp, elsep)(sourceFile(tree))) - } - def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = tree match { - case tree: Closure if (env eq tree.env) && (meth eq tree.meth) && (tpt eq tree.tpt) => tree - case _ => finalize(tree, untpd.Closure(env, meth, tpt)(sourceFile(tree))) - } - def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = tree match { - case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree - case tree: InlineMatch => finalize(tree, untpd.InlineMatch(selector, cases)(sourceFile(tree))) - case _ => finalize(tree, untpd.Match(selector, cases)(sourceFile(tree))) - } - def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = tree match { - case tree: CaseDef if (pat eq tree.pat) && (guard eq tree.guard) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.CaseDef(pat, guard, body)(sourceFile(tree))) - } - def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = tree match { - case tree: Labeled if (bind eq tree.bind) && (expr eq tree.expr) => tree - case _ => finalize(tree, untpd.Labeled(bind, expr)(sourceFile(tree))) - } - def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = tree match { - case tree: Return if (expr eq tree.expr) && (from eq tree.from) => tree - case _ => finalize(tree, untpd.Return(expr, from)(sourceFile(tree))) - } - def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = tree match { - case tree: WhileDo if (cond eq tree.cond) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.WhileDo(cond, body)(sourceFile(tree))) - } - def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = tree match { - case tree: Try if (expr eq tree.expr) && (cases eq tree.cases) && (finalizer eq tree.finalizer) => tree - case _ => finalize(tree, untpd.Try(expr, cases, 
finalizer)(sourceFile(tree))) - } - def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = tree match { - case tree: JavaSeqLiteral => - if ((elems eq tree.elems) && (elemtpt eq tree.elemtpt)) tree - else finalize(tree, untpd.JavaSeqLiteral(elems, elemtpt)) - case tree: SeqLiteral if (elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree - case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt)(sourceFile(tree))) - } - def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = tree match { - case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree - case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) - } - def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { - case tree: SingletonTypeTree if (ref eq tree.ref) => tree - case _ => finalize(tree, untpd.SingletonTypeTree(ref)(sourceFile(tree))) - } - def RefinedTypeTree(tree: Tree)(tpt: Tree, refinements: List[Tree])(using Context): RefinedTypeTree = tree match { - case tree: RefinedTypeTree if (tpt eq tree.tpt) && (refinements eq tree.refinements) => tree - case _ => finalize(tree, untpd.RefinedTypeTree(tpt, refinements)(sourceFile(tree))) - } - def AppliedTypeTree(tree: Tree)(tpt: Tree, args: List[Tree])(using Context): AppliedTypeTree = tree match { - case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree - case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args)(sourceFile(tree))) - } - def LambdaTypeTree(tree: Tree)(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = tree match { - case tree: LambdaTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.LambdaTypeTree(tparams, body)(sourceFile(tree))) - } - def TermLambdaTypeTree(tree: Tree)(params: List[ValDef], body: Tree)(using Context): TermLambdaTypeTree = tree match { - case tree: TermLambdaTypeTree if (params eq tree.params) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.TermLambdaTypeTree(params, body)(sourceFile(tree))) - } - def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = tree match { - case tree: MatchTypeTree if (bound eq tree.bound) && (selector eq tree.selector) && (cases eq tree.cases) => tree - case _ => finalize(tree, untpd.MatchTypeTree(bound, selector, cases)(sourceFile(tree))) - } - def ByNameTypeTree(tree: Tree)(result: Tree)(using Context): ByNameTypeTree = tree match { - case tree: ByNameTypeTree if (result eq tree.result) => tree - case _ => finalize(tree, untpd.ByNameTypeTree(result)(sourceFile(tree))) - } - def TypeBoundsTree(tree: Tree)(lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree match { - case tree: TypeBoundsTree if (lo eq tree.lo) && (hi eq tree.hi) && (alias eq tree.alias) => tree - case _ => finalize(tree, untpd.TypeBoundsTree(lo, hi, alias)(sourceFile(tree))) - } - def Bind(tree: Tree)(name: Name, body: Tree)(using Context): Bind = tree match { - case tree: Bind if (name eq tree.name) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.Bind(name, body)(sourceFile(tree))) - } - def Alternative(tree: Tree)(trees: List[Tree])(using Context): Alternative = tree match { - case tree: Alternative if (trees eq tree.trees) => tree - case _ => finalize(tree, untpd.Alternative(trees)(sourceFile(tree))) - } - def UnApply(tree: Tree)(fun: Tree, 
implicits: List[Tree], patterns: List[Tree])(using Context): UnApply = tree match { - case tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree - case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns)(sourceFile(tree))) - } - def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree)(using Context): ValDef = tree match { - case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree - case _ => finalize(tree, untpd.ValDef(name, tpt, rhs)(sourceFile(tree))) - } - def DefDef(tree: Tree)(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(using Context): DefDef = tree match { - case tree: DefDef if (name == tree.name) && (paramss eq tree.paramss) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree - case _ => finalize(tree, untpd.DefDef(name, paramss, tpt, rhs)(sourceFile(tree))) - } - def TypeDef(tree: Tree)(name: TypeName, rhs: Tree)(using Context): TypeDef = tree match { - case tree: TypeDef if (name == tree.name) && (rhs eq tree.rhs) => tree - case _ => finalize(tree, untpd.TypeDef(name, rhs)(sourceFile(tree))) - } - def Template(tree: Tree)(constr: DefDef, parents: List[Tree], derived: List[untpd.Tree], self: ValDef, body: LazyTreeList)(using Context): Template = tree match { - case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (derived eq tree.derived) && (self eq tree.self) && (body eq tree.unforcedBody) => tree - case tree => finalize(tree, untpd.Template(constr, parents, derived, self, body)(sourceFile(tree))) - } - def Import(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = tree match { - case tree: Import if (expr eq tree.expr) && (selectors eq tree.selectors) => tree - case _ => finalize(tree, untpd.Import(expr, selectors)(sourceFile(tree))) - } - def Export(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = tree match { - case tree: Export if (expr eq tree.expr) && (selectors eq tree.selectors) => tree - case _ => finalize(tree, untpd.Export(expr, selectors)(sourceFile(tree))) - } - def PackageDef(tree: Tree)(pid: RefTree, stats: List[Tree])(using Context): PackageDef = tree match { - case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree - case _ => finalize(tree, untpd.PackageDef(pid, stats)(sourceFile(tree))) - } - def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = tree match { - case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree - case _ => finalize(tree, untpd.Annotated(arg, annot)(sourceFile(tree))) - } - def Thicket(tree: Tree)(trees: List[Tree])(using Context): Thicket = tree match { - case tree: Thicket if (trees eq tree.trees) => tree - case _ => finalize(tree, untpd.Thicket(trees)(sourceFile(tree))) - } - def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = tree match { - case tree: Hole if isTerm == tree.isTerm && idx == tree.idx && args.eq(tree.args) && content.eq(tree.content) && content.eq(tree.content) => tree - case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content, tpt)(sourceFile(tree))) - } - - // Copier methods with default arguments; these demand that the original tree - // is of the same class as the copy. We only include trees with more than 2 elements here. 
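[Editor's illustrative sketch, not part of the removed sources.] The sharing discipline used by the copy methods above, and by the default-argument copiers defined just below, can be shown in miniature. `MiniTree`, `MiniIf`, `MiniLit` and `miniCpy` are invented names; the real copiers work on the compiler's `Tree` types through `cpy`.

    // Minimal stand-in for the copier pattern: reuse the original node when
    // every child is reference-identical, otherwise allocate a fresh one.
    sealed abstract class MiniTree
    final case class MiniIf(cond: MiniTree, thenp: MiniTree, elsep: MiniTree) extends MiniTree
    final case class MiniLit(value: Int) extends MiniTree

    object miniCpy:
      def If(tree: MiniTree)(cond: MiniTree, thenp: MiniTree, elsep: MiniTree): MiniIf = tree match
        case t: MiniIf if (cond eq t.cond) && (thenp eq t.thenp) && (elsep eq t.elsep) => t
        case _ => MiniIf(cond, thenp, elsep)
      // Default-argument variant: callers name only the children they replace.
      def If(tree: MiniIf)(cond: MiniTree = tree.cond, thenp: MiniTree = tree.thenp,
                           elsep: MiniTree = tree.elsep): MiniIf =
        If(tree: MiniTree)(cond, thenp, elsep)

    @main def copierDemo(): Unit =
      val t  = MiniIf(MiniLit(1), MiniLit(2), MiniLit(3))
      val t1 = miniCpy.If(t)(elsep = MiniLit(4))  // fresh node; cond and thenp are shared
      val t2 = miniCpy.If(t)(thenp = t.thenp)     // nothing changed, the same instance comes back
      assert((t1 ne t) && (t2 eq t))

Only the second overload declares default arguments, which is why a caller can update a single field by name while everything else stays shared, mirroring the copiers that follow.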
- def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = - If(tree: Tree)(cond, thenp, elsep) - def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = - Closure(tree: Tree)(env, meth, tpt) - def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(using Context): CaseDef = - CaseDef(tree: Tree)(pat, guard, body) - def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try = - Try(tree: Tree)(expr, cases, finalizer) - def UnApply(tree: UnApply)(fun: Tree = tree.fun, implicits: List[Tree] = tree.implicits, patterns: List[Tree] = tree.patterns)(using Context): UnApply = - UnApply(tree: Tree)(fun, implicits, patterns) - def ValDef(tree: ValDef)(name: TermName = tree.name, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): ValDef = - ValDef(tree: Tree)(name, tpt, rhs) - def DefDef(tree: DefDef)(name: TermName = tree.name, paramss: List[ParamClause] = tree.paramss, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): DefDef = - DefDef(tree: Tree)(name, paramss, tpt, rhs) - def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs)(using Context): TypeDef = - TypeDef(tree: Tree)(name, rhs) - def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody)(using Context): Template = - Template(tree: Tree)(constr, parents, derived, self, body) - def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content, tpt: Tree = tree.tpt)(using Context): Hole = - Hole(tree: Tree)(isTerm, idx, args, content, tpt) - - } - - /** Hook to indicate that a transform of some subtree should be skipped */ - protected def skipTransform(tree: Tree)(using Context): Boolean = false - - /** For untyped trees, this is just the identity. - * For typed trees, a context derived form `ctx` that records `call` as the - * innermost enclosing call for which the inlined version is currently - * processed. - */ - protected def inlineContext(call: tpd.Tree)(using Context): Context = ctx - - /** The context to use when mapping or accumulating over a tree */ - def localCtx(tree: Tree)(using Context): Context - - /** The context to use when transforming a tree. - * It ensures that the source is correct, and that the local context is used if - * that's necessary for transforming the whole tree. 
- * TODO: ensure transform is always called with the correct context as argument - * @see https://github.com/lampepfl/dotty/pull/13880#discussion_r836395977 - */ - def transformCtx(tree: Tree)(using Context): Context = - val sourced = - if tree.source.exists && tree.source != ctx.source - then ctx.withSource(tree.source) - else ctx - tree match - case t: (MemberDef | PackageDef | LambdaTypeTree | TermLambdaTypeTree) => - localCtx(t)(using sourced) - case _ => - sourced - - abstract class TreeMap(val cpy: TreeCopier = inst.cpy) { self: TreeMap @retains(caps.cap) => - def transform(tree: Tree)(using Context): Tree = { - inContext(transformCtx(tree)) { - Stats.record(s"TreeMap.transform/$getClass") - if (skipTransform(tree)) tree - else tree match { - case Ident(name) => - tree - case Select(qualifier, name) => - cpy.Select(tree)(transform(qualifier), name) - case This(qual) => - tree - case Super(qual, mix) => - cpy.Super(tree)(transform(qual), mix) - case Apply(fun, args) => - cpy.Apply(tree)(transform(fun), transform(args)) - case TypeApply(fun, args) => - cpy.TypeApply(tree)(transform(fun), transform(args)) - case Literal(const) => - tree - case New(tpt) => - cpy.New(tree)(transform(tpt)) - case Typed(expr, tpt) => - cpy.Typed(tree)(transform(expr), transform(tpt)) - case NamedArg(name, arg) => - cpy.NamedArg(tree)(name, transform(arg)) - case Assign(lhs, rhs) => - cpy.Assign(tree)(transform(lhs), transform(rhs)) - case blk: Block => - transformBlock(blk) - case If(cond, thenp, elsep) => - cpy.If(tree)(transform(cond), transform(thenp), transform(elsep)) - case Closure(env, meth, tpt) => - cpy.Closure(tree)(transform(env), transform(meth), transform(tpt)) - case Match(selector, cases) => - cpy.Match(tree)(transform(selector), transformSub(cases)) - case CaseDef(pat, guard, body) => - cpy.CaseDef(tree)(transform(pat), transform(guard), transform(body)) - case Labeled(bind, expr) => - cpy.Labeled(tree)(transformSub(bind), transform(expr)) - case Return(expr, from) => - cpy.Return(tree)(transform(expr), transformSub(from)) - case WhileDo(cond, body) => - cpy.WhileDo(tree)(transform(cond), transform(body)) - case Try(block, cases, finalizer) => - cpy.Try(tree)(transform(block), transformSub(cases), transform(finalizer)) - case SeqLiteral(elems, elemtpt) => - cpy.SeqLiteral(tree)(transform(elems), transform(elemtpt)) - case Inlined(call, bindings, expansion) => - cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion)(using inlineContext(call))) - case TypeTree() => - tree - case SingletonTypeTree(ref) => - cpy.SingletonTypeTree(tree)(transform(ref)) - case RefinedTypeTree(tpt, refinements) => - cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements)) - case AppliedTypeTree(tpt, args) => - cpy.AppliedTypeTree(tree)(transform(tpt), transform(args)) - case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(tree)(transformSub(tparams), transform(body)) - case TermLambdaTypeTree(params, body) => - cpy.TermLambdaTypeTree(tree)(transformSub(params), transform(body)) - case MatchTypeTree(bound, selector, cases) => - cpy.MatchTypeTree(tree)(transform(bound), transform(selector), transformSub(cases)) - case ByNameTypeTree(result) => - cpy.ByNameTypeTree(tree)(transform(result)) - case TypeBoundsTree(lo, hi, alias) => - cpy.TypeBoundsTree(tree)(transform(lo), transform(hi), transform(alias)) - case Bind(name, body) => - cpy.Bind(tree)(name, transform(body)) - case Alternative(trees) => - cpy.Alternative(tree)(transform(trees)) - case UnApply(fun, implicits, patterns) => - 
cpy.UnApply(tree)(transform(fun), transform(implicits), transform(patterns)) - case EmptyValDef => - tree - case tree @ ValDef(name, tpt, _) => - val tpt1 = transform(tpt) - val rhs1 = transform(tree.rhs) - cpy.ValDef(tree)(name, tpt1, rhs1) - case tree @ DefDef(name, paramss, tpt, _) => - cpy.DefDef(tree)(name, transformParamss(paramss), transform(tpt), transform(tree.rhs)) - case tree @ TypeDef(name, rhs) => - cpy.TypeDef(tree)(name, transform(rhs)) - case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => - cpy.Template(tree)(transformSub(constr), transform(tree.parents), Nil, transformSub(self), transformStats(tree.body, tree.symbol)) - case Import(expr, selectors) => - cpy.Import(tree)(transform(expr), selectors) - case Export(expr, selectors) => - cpy.Export(tree)(transform(expr), selectors) - case PackageDef(pid, stats) => - cpy.PackageDef(tree)(transformSub(pid), transformStats(stats, ctx.owner)) - case Annotated(arg, annot) => - cpy.Annotated(tree)(transform(arg), transform(annot)) - case Thicket(trees) => - val trees1 = transform(trees) - if (trees1 eq trees) tree else Thicket(trees1) - case tree @ Hole(_, _, args, content, tpt) => - cpy.Hole(tree)(args = transform(args), content = transform(content), tpt = transform(tpt)) - case _ => - transformMoreCases(tree) - } - } - } - - def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = - transform(trees) - def transformBlock(blk: Block)(using Context): Block = - cpy.Block(blk)(transformStats(blk.stats, ctx.owner), transform(blk.expr)) - def transform(trees: List[Tree])(using Context): List[Tree] = - flatten(trees mapConserve (transform(_))) - def transformSub[Tr <: Tree](tree: Tr)(using Context): Tr = - transform(tree).asInstanceOf[Tr] - def transformSub[Tr <: Tree](trees: List[Tr])(using Context): List[Tr] = - transform(trees).asInstanceOf[List[Tr]] - def transformParams(params: ParamClause)(using Context): ParamClause = - transform(params).asInstanceOf[ParamClause] - def transformParamss(paramss: List[ParamClause])(using Context): List[ParamClause] = - paramss.mapConserve(transformParams) - - protected def transformMoreCases(tree: Tree)(using Context): Tree = { - assert(ctx.reporter.errorsReported) - tree - } - } - - abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.cap) => - // Ties the knot of the traversal: call `foldOver(x, tree))` to dive in the `tree` node. 
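[Editor's illustrative sketch, not part of the removed sources.] The "tie the knot" protocol described in the comment above can be mirrored on a toy AST; `Node`, `Id`, `Call`, `NodeAccumulator` and `CollectIds` are invented names, and only the `apply`/`foldOver` division of labour corresponds to the accumulator here.

    sealed abstract class Node
    final case class Id(s: String) extends Node
    final case class Call(fn: Node, args: List[Node]) extends Node

    abstract class NodeAccumulator[X]:
      def apply(x: X, n: Node): X                       // supplied by the concrete accumulator
      def apply(x: X, ns: List[Node]): X = ns.foldLeft(x)((acc, n) => apply(acc, n))
      def foldOver(x: X, n: Node): X = n match          // generic descent into the children
        case Id(_)          => x
        case Call(fn, args) => apply(apply(x, fn), args)

    // The subclass does its local step, then calls foldOver to dive into the node.
    object CollectIds extends NodeAccumulator[List[String]]:
      def apply(x: List[String], n: Node): List[String] = n match
        case Id(s) => s :: x
        case _     => foldOver(x, n)

    @main def accumulatorDemo(): Unit =
      val tree = Call(Id("f"), List(Id("a"), Call(Id("g"), List(Id("b")))))
      assert(CollectIds(Nil, tree).reverse == List("f", "a", "g", "b"))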
- def apply(x: X, tree: Tree)(using Context): X - - def apply(x: X, trees: List[Tree])(using Context): X = - def fold(x: X, trees: List[Tree]): X = trees match - case tree :: rest => fold(apply(x, tree), rest) - case Nil => x - fold(x, trees) - - def foldOver(x: X, tree: Tree)(using Context): X = - if (tree.source != ctx.source && tree.source.exists) - foldOver(x, tree)(using ctx.withSource(tree.source)) - else { - Stats.record(s"TreeAccumulator.foldOver/$getClass") - tree match { - case Ident(name) => - x - case Select(qualifier, name) => - this(x, qualifier) - case This(qual) => - x - case Super(qual, mix) => - this(x, qual) - case Apply(fun, args) => - this(this(x, fun), args) - case TypeApply(fun, args) => - this(this(x, fun), args) - case Literal(const) => - x - case New(tpt) => - this(x, tpt) - case Typed(expr, tpt) => - this(this(x, expr), tpt) - case NamedArg(name, arg) => - this(x, arg) - case Assign(lhs, rhs) => - this(this(x, lhs), rhs) - case Block(stats, expr) => - this(this(x, stats), expr) - case If(cond, thenp, elsep) => - this(this(this(x, cond), thenp), elsep) - case Closure(env, meth, tpt) => - this(this(this(x, env), meth), tpt) - case Match(selector, cases) => - this(this(x, selector), cases) - case CaseDef(pat, guard, body) => - this(this(this(x, pat), guard), body) - case Labeled(bind, expr) => - this(this(x, bind), expr) - case Return(expr, from) => - this(this(x, expr), from) - case WhileDo(cond, body) => - this(this(x, cond), body) - case Try(block, handler, finalizer) => - this(this(this(x, block), handler), finalizer) - case SeqLiteral(elems, elemtpt) => - this(this(x, elems), elemtpt) - case Inlined(call, bindings, expansion) => - this(this(x, bindings), expansion)(using inlineContext(call)) - case TypeTree() => - x - case SingletonTypeTree(ref) => - this(x, ref) - case RefinedTypeTree(tpt, refinements) => - this(this(x, tpt), refinements) - case AppliedTypeTree(tpt, args) => - this(this(x, tpt), args) - case LambdaTypeTree(tparams, body) => - inContext(localCtx(tree)) { - this(this(x, tparams), body) - } - case TermLambdaTypeTree(params, body) => - inContext(localCtx(tree)) { - this(this(x, params), body) - } - case MatchTypeTree(bound, selector, cases) => - this(this(this(x, bound), selector), cases) - case ByNameTypeTree(result) => - this(x, result) - case TypeBoundsTree(lo, hi, alias) => - this(this(this(x, lo), hi), alias) - case Bind(name, body) => - this(x, body) - case Alternative(trees) => - this(x, trees) - case UnApply(fun, implicits, patterns) => - this(this(this(x, fun), implicits), patterns) - case tree @ ValDef(_, tpt, _) => - inContext(localCtx(tree)) { - this(this(x, tpt), tree.rhs) - } - case tree @ DefDef(_, paramss, tpt, _) => - inContext(localCtx(tree)) { - this(this(paramss.foldLeft(x)(apply), tpt), tree.rhs) - } - case TypeDef(_, rhs) => - inContext(localCtx(tree)) { - this(x, rhs) - } - case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => - this(this(this(this(x, constr), parents), self), tree.body) - case Import(expr, _) => - this(x, expr) - case Export(expr, _) => - this(x, expr) - case PackageDef(pid, stats) => - this(this(x, pid), stats)(using localCtx(tree)) - case Annotated(arg, annot) => - this(this(x, arg), annot) - case Thicket(ts) => - this(x, ts) - case Hole(_, _, args, content, tpt) => - this(this(this(x, args), content), tpt) - case _ => - foldMoreCases(x, tree) - } - } - - def foldMoreCases(x: X, tree: Tree)(using Context): X = { - assert(ctx.reporter.hasUnreportedErrors - || ctx.reporter.errorsReported 
- || ctx.mode.is(Mode.Interactive), tree) - // In interactive mode, errors might come from previous runs. - // In case of errors it may be that typed trees point to untyped ones. - // The IDE can still traverse inside such trees, either in the run where errors - // are reported, or in subsequent ones. - x - } - } - - abstract class TreeTraverser extends TreeAccumulator[Unit] { - def traverse(tree: Tree)(using Context): Unit - def traverse(trees: List[Tree])(using Context) = apply((), trees) - def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) - protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) - } - - /** Fold `f` over all tree nodes, in depth-first, prefix order */ - class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { - def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) - } - - /** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit - * subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`. - */ - class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { - def apply(x: X, tree: Tree)(using Context): X = { - val x1 = f(x, tree) - if (x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef]) x1 - else foldOver(x1, tree) - } - } - - def rename(tree: NameTree, newName: Name)(using Context): tree.ThisTree[T] = { - tree match { - case tree: Ident => cpy.Ident(tree)(newName) - case tree: Select => cpy.Select(tree)(tree.qualifier, newName) - case tree: Bind => cpy.Bind(tree)(newName, tree.body) - case tree: ValDef => cpy.ValDef(tree)(name = newName.asTermName) - case tree: DefDef => cpy.DefDef(tree)(name = newName.asTermName) - case tree: TypeDef => cpy.TypeDef(tree)(name = newName.asTypeName) - } - }.asInstanceOf[tree.ThisTree[T]] - - object TypeDefs: - def unapply(xs: List[Tree]): Option[List[TypeDef]] = xs match - case (x: TypeDef) :: _ => Some(xs.asInstanceOf[List[TypeDef]]) - case _ => None - - object ValDefs: - def unapply(xs: List[Tree]): Option[List[ValDef]] = xs match - case Nil => Some(Nil) - case (x: ValDef) :: _ => Some(xs.asInstanceOf[List[ValDef]]) - case _ => None - - def termParamssIn(paramss: List[ParamClause]): List[List[ValDef]] = paramss match - case ValDefs(vparams) :: paramss1 => - val paramss2 = termParamssIn(paramss1) - if paramss2 eq paramss1 then paramss.asInstanceOf[List[List[ValDef]]] - else vparams :: paramss2 - case _ :: paramss1 => - termParamssIn(paramss1) - case nil => - Nil - - /** If `tparams` is non-empty, add it to the left `paramss`, merging - * it with a leading type parameter list of `paramss`, if one exists. - */ - def joinParams(tparams: List[TypeDef], paramss: List[ParamClause]): List[ParamClause] = - if tparams.isEmpty then paramss - else paramss match - case TypeDefs(tparams1) :: paramss1 => (tparams ++ tparams1) :: paramss1 - case _ => tparams :: paramss - - def isTermOnly(paramss: List[ParamClause]): Boolean = paramss match - case Nil => true - case params :: paramss1 => - params match - case (param: untpd.TypeDef) :: _ => false - case _ => isTermOnly(paramss1) - - def asTermOnly(paramss: List[ParamClause]): List[List[ValDef]] = - assert(isTermOnly(paramss)) - paramss.asInstanceOf[List[List[ValDef]]] - - /** Delegate to FunProto or FunProtoTyped depending on whether the prefix is `untpd` or `tpd`. 
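[Editor's illustrative sketch, not part of the removed sources.] `DeepFolder` and `ShallowFolder` above differ only in the pruning rule: the shallow variant stops descending as soon as `f` changes the accumulated value for a node. A stand-alone sketch with invented names (`N`, `Leaf`, `Branch`, `deepFold`, `shallowFold`):

    sealed abstract class N
    final case class Leaf(n: Int) extends N
    final case class Branch(kids: List[N]) extends N

    // Deep: apply f to every node, depth-first, prefix order.
    def deepFold[X](x: X, t: N)(f: (X, N) => X): X =
      val x1 = f(x, t)
      t match
        case Leaf(_)      => x1
        case Branch(kids) => kids.foldLeft(x1)((acc, k) => deepFold(acc, k)(f))

    // Shallow: if f already changed the result on this node, skip its subtrees.
    def shallowFold[X](x: X, t: N)(f: (X, N) => X): X =
      val x1 = f(x, t)
      if x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef] then x1
      else t match
        case Leaf(_)      => x1
        case Branch(kids) => kids.foldLeft(x1)((acc, k) => shallowFold(acc, k)(f))

    @main def folderDemo(): Unit =
      val t = Branch(List(Leaf(1), Branch(List(Leaf(2)))))
      assert(deepFold(0, t)((count, _) => count + 1) == 4)   // every node is visited
      val found = shallowFold(List.empty[N], t) {
        case (acc, b: Branch) => b :: acc                    // result changes here, subtree pruned
        case (acc, _)         => acc
      }
      assert(found == List(t))                               // the inner Branch is never reached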
*/ - protected def FunProto(args: List[Tree], resType: Type)(using Context): ProtoTypes.FunProto - - /** Construct the application `$receiver.$method[$targs]($args)` using overloading resolution - * to find a matching overload of `$method` if necessary. - * This is useful when overloading resolution needs to be performed in a phase after typer. - * Note that this will not perform any kind of implicit search. - * - * @param expectedType An expected type of the application used to guide overloading resolution - */ - def applyOverloaded( - receiver: tpd.Tree, method: TermName, args: List[Tree], targs: List[Type], - expectedType: Type)(using parentCtx: Context): tpd.Tree = { - given ctx: Context = parentCtx.retractMode(Mode.ImplicitsEnabled) - import dotty.tools.dotc.ast.tpd.TreeOps - - val typer = ctx.typer - val proto = FunProto(args, expectedType) - val denot = receiver.tpe.member(method) - if !denot.exists then - overload.println(i"members = ${receiver.tpe.decls}") - report.error(em"no member $receiver . $method", receiver.srcPos) - val selected = - if (denot.isOverloaded) { - def typeParamCount(tp: Type) = tp.widen match { - case tp: PolyType => tp.paramInfos.length - case _ => 0 - } - val allAlts = denot.alternatives - .map(denot => TermRef(receiver.tpe, denot.symbol)) - .filter(tr => typeParamCount(tr) == targs.length) - .filter { _.widen match { - case MethodTpe(_, _, x: MethodType) => !x.isImplicitMethod - case _ => true - }} - val alternatives = ctx.typer.resolveOverloaded(allAlts, proto) - assert(alternatives.size == 1, - i"${if (alternatives.isEmpty) "no" else "multiple"} overloads available for " + - i"$method on ${receiver.tpe.widenDealiasKeepAnnots} with targs: $targs%, %; args: $args%, %; expectedType: $expectedType." + - i"all alternatives: ${allAlts.map(_.symbol.showDcl).mkString(", ")}\n" + - i"matching alternatives: ${alternatives.map(_.symbol.showDcl).mkString(", ")}.") // this is parsed from bytecode tree. 
there's nothing user can do about it - alternatives.head - } - else TermRef(receiver.tpe, denot.symbol) - val fun = receiver.select(selected).appliedToTypes(targs) - - val apply = untpd.Apply(fun, args) - typer.ApplyTo(apply, fun, selected, proto, expectedType) - } - - - def resolveConstructor(atp: Type, args: List[Tree])(using Context): tpd.Tree = { - val targs = atp.argTypes - withoutMode(Mode.PatternOrTypeBits) { - applyOverloaded(tpd.New(atp.typeConstructor), nme.CONSTRUCTOR, args, targs, atp) - } - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala deleted file mode 100644 index f778824a18d3..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala +++ /dev/null @@ -1,1546 +0,0 @@ -package dotty.tools -package dotc -package ast - -import dotty.tools.dotc.transform.{ExplicitOuter, Erasure} -import typer.ProtoTypes -import transform.SymUtils._ -import transform.TypeUtils._ -import core._ -import Scopes.newScope -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._, NameOps._ -import Symbols._, StdNames._, Annotations._, Trees._, Symbols._ -import Decorators._, DenotTransformers._ -import collection.{immutable, mutable} -import util.{Property, SourceFile} -import NameKinds.{TempResultName, OuterSelectName} -import typer.ConstFold - -import scala.annotation.tailrec -import scala.collection.mutable.ListBuffer -import language.experimental.pureFunctions - -/** Some creators for typed trees */ -object tpd extends Trees.Instance[Type] with TypedTreeInfo { - - private def ta(using Context) = ctx.typeAssigner - - def Ident(tp: NamedType)(using Context): Ident = - ta.assignType(untpd.Ident(tp.name), tp) - - def Select(qualifier: Tree, name: Name)(using Context): Select = - ta.assignType(untpd.Select(qualifier, name), qualifier) - - def Select(qualifier: Tree, tp: NamedType)(using Context): Select = - untpd.Select(qualifier, tp.name).withType(tp) - - def This(cls: ClassSymbol)(using Context): This = - untpd.This(untpd.Ident(cls.name)).withType(cls.thisType) - - def Super(qual: Tree, mix: untpd.Ident, mixinClass: Symbol)(using Context): Super = - ta.assignType(untpd.Super(qual, mix), qual, mixinClass) - - def Super(qual: Tree, mixName: TypeName, mixinClass: Symbol = NoSymbol)(using Context): Super = - Super(qual, if (mixName.isEmpty) untpd.EmptyTypeIdent else untpd.Ident(mixName), mixinClass) - - def Apply(fn: Tree, args: List[Tree])(using Context): Apply = fn match - case Block(Nil, expr) => - Apply(expr, args) - case _: RefTree | _: GenericApply | _: Inlined | _: Hole => - ta.assignType(untpd.Apply(fn, args), fn, args) - - def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match - case Block(Nil, expr) => - TypeApply(expr, args) - case _: RefTree | _: GenericApply => - ta.assignType(untpd.TypeApply(fn, args), fn, args) - - def Literal(const: Constant)(using Context): Literal = - ta.assignType(untpd.Literal(const)) - - def unitLiteral(using Context): Literal = - Literal(Constant(())) - - def nullLiteral(using Context): Literal = - Literal(Constant(null)) - - def New(tpt: Tree)(using Context): New = - ta.assignType(untpd.New(tpt), tpt) - - def New(tp: Type)(using Context): New = New(TypeTree(tp)) - - def Typed(expr: Tree, tpt: Tree)(using Context): Typed = - ta.assignType(untpd.Typed(expr, tpt), tpt) - - def NamedArg(name: Name, arg: Tree)(using Context): NamedArg = - ta.assignType(untpd.NamedArg(name, arg), arg) - - def Assign(lhs: Tree, rhs: Tree)(using Context): Assign = - 
ta.assignType(untpd.Assign(lhs, rhs)) - - def Block(stats: List[Tree], expr: Tree)(using Context): Block = - ta.assignType(untpd.Block(stats, expr), stats, expr) - - /** Join `stats` in front of `expr` creating a new block if necessary */ - def seq(stats: List[Tree], expr: Tree)(using Context): Tree = - if (stats.isEmpty) expr - else expr match { - case Block(_, _: Closure) => - Block(stats, expr) // leave closures in their own block - case Block(estats, eexpr) => - cpy.Block(expr)(stats ::: estats, eexpr).withType(ta.avoidingType(eexpr, stats)) - case _ => - Block(stats, expr) - } - - def If(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = - ta.assignType(untpd.If(cond, thenp, elsep), thenp, elsep) - - def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = - ta.assignType(untpd.InlineIf(cond, thenp, elsep), thenp, elsep) - - def Closure(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = - ta.assignType(untpd.Closure(env, meth, tpt), meth, tpt) - - /** A function def - * - * vparams => expr - * - * gets expanded to - * - * { def $anonfun(vparams) = expr; Closure($anonfun) } - * - * where the closure's type is the target type of the expression (FunctionN, unless - * otherwise specified). - */ - def Closure(meth: TermSymbol, rhsFn: List[List[Tree]] => Tree, targs: List[Tree] = Nil, targetType: Type = NoType)(using Context): Block = { - val targetTpt = if (targetType.exists) TypeTree(targetType) else EmptyTree - val call = - if (targs.isEmpty) Ident(TermRef(NoPrefix, meth)) - else TypeApply(Ident(TermRef(NoPrefix, meth)), targs) - Block( - DefDef(meth, rhsFn) :: Nil, - Closure(Nil, call, targetTpt)) - } - - /** A closure whose anonymous function has the given method type */ - def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(using Context): Block = { - val meth = newAnonFun(ctx.owner, tpe) - Closure(meth, tss => rhsFn(tss.head).changeOwner(ctx.owner, meth)) - } - - def CaseDef(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = - ta.assignType(untpd.CaseDef(pat, guard, body), pat, body) - - def Match(selector: Tree, cases: List[CaseDef])(using Context): Match = - ta.assignType(untpd.Match(selector, cases), selector, cases) - - def InlineMatch(selector: Tree, cases: List[CaseDef])(using Context): Match = - ta.assignType(untpd.InlineMatch(selector, cases), selector, cases) - - def Labeled(bind: Bind, expr: Tree)(using Context): Labeled = - ta.assignType(untpd.Labeled(bind, expr)) - - def Labeled(sym: TermSymbol, expr: Tree)(using Context): Labeled = - Labeled(Bind(sym, EmptyTree), expr) - - def Return(expr: Tree, from: Tree)(using Context): Return = - ta.assignType(untpd.Return(expr, from)) - - def Return(expr: Tree, from: Symbol)(using Context): Return = - Return(expr, Ident(from.termRef)) - - def WhileDo(cond: Tree, body: Tree)(using Context): WhileDo = - ta.assignType(untpd.WhileDo(cond, body)) - - def Try(block: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = - ta.assignType(untpd.Try(block, cases, finalizer), block, cases) - - def SeqLiteral(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = - ta.assignType(untpd.SeqLiteral(elems, elemtpt), elems, elemtpt) - - def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(using Context): JavaSeqLiteral = - ta.assignType(untpd.JavaSeqLiteral(elems, elemtpt), elems, elemtpt).asInstanceOf[JavaSeqLiteral] - - def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = - ta.assignType(untpd.Inlined(call, bindings, expansion), 
bindings, expansion) - - def TypeTree(tp: Type, inferred: Boolean = false)(using Context): TypeTree = - (if inferred then untpd.InferredTypeTree() else untpd.TypeTree()).withType(tp) - - def SingletonTypeTree(ref: Tree)(using Context): SingletonTypeTree = - ta.assignType(untpd.SingletonTypeTree(ref), ref) - - def RefinedTypeTree(parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(using Context): Tree = - ta.assignType(untpd.RefinedTypeTree(parent, refinements), parent, refinements, refineCls) - - def AppliedTypeTree(tycon: Tree, args: List[Tree])(using Context): AppliedTypeTree = - ta.assignType(untpd.AppliedTypeTree(tycon, args), tycon, args) - - def ByNameTypeTree(result: Tree)(using Context): ByNameTypeTree = - ta.assignType(untpd.ByNameTypeTree(result), result) - - def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = - ta.assignType(untpd.LambdaTypeTree(tparams, body), tparams, body) - - def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = - ta.assignType(untpd.MatchTypeTree(bound, selector, cases), bound, selector, cases) - - def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(using Context): TypeBoundsTree = - ta.assignType(untpd.TypeBoundsTree(lo, hi, alias), lo, hi, alias) - - def Bind(sym: Symbol, body: Tree)(using Context): Bind = - ta.assignType(untpd.Bind(sym.name, body), sym) - - /** A pattern corresponding to `sym: tpe` */ - def BindTyped(sym: TermSymbol, tpe: Type)(using Context): Bind = - Bind(sym, Typed(Underscore(tpe), TypeTree(tpe))) - - def Alternative(trees: List[Tree])(using Context): Alternative = - ta.assignType(untpd.Alternative(trees), trees) - - def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(using Context): UnApply = { - assert(fun.isInstanceOf[RefTree] || fun.isInstanceOf[GenericApply]) - ta.assignType(untpd.UnApply(fun, implicits, patterns), proto) - } - - def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree, inferred: Boolean = false)(using Context): ValDef = - ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info, inferred), rhs), sym) - - def SyntheticValDef(name: TermName, rhs: Tree, flags: FlagSet = EmptyFlags)(using Context): ValDef = - ValDef(newSymbol(ctx.owner, name, Synthetic | flags, rhs.tpe.widen, coord = rhs.span), rhs) - - def DefDef(sym: TermSymbol, paramss: List[List[Symbol]], - resultType: Type, rhs: Tree)(using Context): DefDef = - sym.setParamss(paramss) - ta.assignType( - untpd.DefDef( - sym.name, - paramss.map { - case TypeSymbols(params) => params.map(param => TypeDef(param).withSpan(param.span)) - case TermSymbols(params) => params.map(param => ValDef(param).withSpan(param.span)) - case _ => unreachable() - }, - TypeTree(resultType), - rhs), - sym) - - def DefDef(sym: TermSymbol, rhs: Tree = EmptyTree)(using Context): DefDef = - ta.assignType(DefDef(sym, Function.const(rhs) _), sym) - - /** A DefDef with given method symbol `sym`. - * @rhsFn A function from parameter references - * to the method's right-hand side. - * Parameter symbols are taken from the `rawParamss` field of `sym`, or - * are freshly generated if `rawParamss` is empty. 
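[Editor's illustrative sketch, not part of the removed sources.] The `recur` function defined just below peels one parameter clause off the method type per `PolyType`/`MethodType` layer and returns the final result type together with all clauses. Stripped of symbol handling, that recursion has roughly this shape (`MiniType` and friends are invented for the illustration):

    sealed abstract class MiniType
    final case class MiniPoly(tparams: List[String], res: MiniType) extends MiniType
    final case class MiniMethod(vparams: List[String], res: MiniType) extends MiniType
    final case class MiniResult(show: String) extends MiniType

    // Peel off one clause per Poly/Method layer; return (result type, all clauses).
    def clausesOf(tp: MiniType): (MiniType, List[List[String]]) = tp match
      case MiniPoly(ps, res) =>
        val (r, rest) = clausesOf(res)
        (r, ps :: rest)
      case MiniMethod(ps, res) =>
        val (r, rest) = clausesOf(res)
        (r, ps :: rest)
      case r: MiniResult =>
        (r, Nil)

    @main def clausesDemo(): Unit =
      val tp = MiniPoly(List("A"), MiniMethod(List("x"), MiniMethod(List("y"), MiniResult("A"))))
      assert(clausesOf(tp) == (MiniResult("A"), List(List("A"), List("x"), List("y"))))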
- */ - def DefDef(sym: TermSymbol, rhsFn: List[List[Tree]] => Tree)(using Context): DefDef = - - // Map method type `tp` with remaining parameters stored in rawParamss to - // final result type and all (given or synthesized) parameters - def recur(tp: Type, remaining: List[List[Symbol]]): (Type, List[List[Symbol]]) = tp match - case tp: PolyType => - val (tparams: List[TypeSymbol], remaining1) = remaining match - case tparams :: remaining1 => - assert(tparams.hasSameLengthAs(tp.paramNames) && tparams.head.isType) - (tparams.asInstanceOf[List[TypeSymbol]], remaining1) - case nil => - (newTypeParams(sym, tp.paramNames, EmptyFlags, tp.instantiateParamInfos(_)), Nil) - val (rtp, paramss) = recur(tp.instantiate(tparams.map(_.typeRef)), remaining1) - (rtp, tparams :: paramss) - case tp: MethodType => - val isParamDependent = tp.isParamDependent - val previousParamRefs: ListBuffer[TermRef] = - // It is ok to assign `null` here. - // If `isParamDependent == false`, the value of `previousParamRefs` is not used. - if isParamDependent then mutable.ListBuffer[TermRef]() else (null: ListBuffer[TermRef] | Null).uncheckedNN - - def valueParam(name: TermName, origInfo: Type): TermSymbol = - val maybeImplicit = - if tp.isContextualMethod then Given - else if tp.isImplicitMethod then Implicit - else EmptyFlags - val maybeErased = if tp.isErasedMethod then Erased else EmptyFlags - - def makeSym(info: Type) = newSymbol(sym, name, TermParam | maybeImplicit | maybeErased, info, coord = sym.coord) - - if isParamDependent then - val sym = makeSym(origInfo.substParams(tp, previousParamRefs.toList)) - previousParamRefs += sym.termRef - sym - else makeSym(origInfo) - end valueParam - - val (vparams: List[TermSymbol], remaining1) = - if tp.paramNames.isEmpty then (Nil, remaining) - else remaining match - case vparams :: remaining1 => - assert(vparams.hasSameLengthAs(tp.paramNames) && vparams.head.isTerm) - (vparams.asInstanceOf[List[TermSymbol]], remaining1) - case nil => - (tp.paramNames.lazyZip(tp.paramInfos).map(valueParam), Nil) - val (rtp, paramss) = recur(tp.instantiate(vparams.map(_.termRef)), remaining1) - (rtp, vparams :: paramss) - case _ => - assert(remaining.isEmpty) - (tp.widenExpr, Nil) - end recur - - val (rtp, paramss) = recur(sym.info, sym.rawParamss) - DefDef(sym, paramss, rtp, rhsFn(paramss.nestedMap(ref))) - end DefDef - - def TypeDef(sym: TypeSymbol)(using Context): TypeDef = - ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym) - - def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = { - val firstParent :: otherParents = cls.info.parents: @unchecked - val superRef = - if (cls.is(Trait)) TypeTree(firstParent) - else { - def isApplicable(ctpe: Type): Boolean = ctpe match { - case ctpe: PolyType => - isApplicable(ctpe.instantiate(firstParent.argTypes)) - case ctpe: MethodType => - (superArgs corresponds ctpe.paramInfos)(_.tpe <:< _) - case _ => - false - } - val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info)) - New(firstParent, constr.symbol.asTerm, superArgs) - } - ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) - } - - def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = { - val selfType = - if (cls.classInfo.selfInfo ne NoType) ValDef(newSelfSym(cls)) - else EmptyValDef - def isOwnTypeParam(stat: Tree) = - stat.symbol.is(TypeParam) && stat.symbol.owner == cls - val 
bodyTypeParams = body filter isOwnTypeParam map (_.symbol) - val newTypeParams = - for (tparam <- cls.typeParams if !(bodyTypeParams contains tparam)) - yield TypeDef(tparam) - val findLocalDummy = FindLocalDummyAccumulator(cls) - val localDummy = body.foldLeft(NoSymbol: Symbol)(findLocalDummy.apply) - .orElse(newLocalDummy(cls)) - val impl = untpd.Template(constr, parents, Nil, selfType, newTypeParams ++ body) - .withType(localDummy.termRef) - ta.assignType(untpd.TypeDef(cls.name, impl), cls) - } - - /** An anonymous class - * - * new parents { forwarders } - * - * where `forwarders` contains forwarders for all functions in `fns`. - * @param parents a non-empty list of class types - * @param fns a non-empty of functions for which forwarders should be defined in the class. - * The class has the same owner as the first function in `fns`. - * Its position is the union of all functions in `fns`. - */ - def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(using Context): Block = { - AnonClass(fns.head.owner, parents, fns.map(_.span).reduceLeft(_ union _)) { cls => - def forwarder(fn: TermSymbol, name: TermName) = { - val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm - for overridden <- fwdMeth.allOverriddenSymbols do - if overridden.is(Extension) then fwdMeth.setFlag(Extension) - if !overridden.is(Deferred) then fwdMeth.setFlag(Override) - DefDef(fwdMeth, ref(fn).appliedToArgss(_)) - } - fns.lazyZip(methNames).map(forwarder) - } - } - - /** An anonymous class - * - * new parents { body } - * - * with the specified owner and position. - */ - def AnonClass(owner: Symbol, parents: List[Type], coord: Coord)(body: ClassSymbol => List[Tree])(using Context): Block = - val parents1 = - if (parents.head.classSymbol.is(Trait)) { - val head = parents.head.parents.head - if (head.isRef(defn.AnyClass)) defn.AnyRefType :: parents else head :: parents - } - else parents - val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, coord = coord) - val constr = newConstructor(cls, Synthetic, Nil, Nil).entered - val cdef = ClassDef(cls, DefDef(constr), body(cls)) - Block(cdef :: Nil, New(cls.typeRef, Nil)) - - def Import(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = - ta.assignType(untpd.Import(expr, selectors), newImportSymbol(ctx.owner, expr)) - - def Export(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = - ta.assignType(untpd.Export(expr, selectors)) - - def PackageDef(pid: RefTree, stats: List[Tree])(using Context): PackageDef = - ta.assignType(untpd.PackageDef(pid, stats), pid) - - def Annotated(arg: Tree, annot: Tree)(using Context): Annotated = - ta.assignType(untpd.Annotated(arg, annot), arg, annot) - - def Throw(expr: Tree)(using Context): Tree = - ref(defn.throwMethod).appliedTo(expr) - - def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = - ta.assignType(untpd.Hole(isTermHole, idx, args, content, tpt), tpt) - - // ------ Making references ------------------------------------------------------ - - def prefixIsElidable(tp: NamedType)(using Context): Boolean = { - val typeIsElidable = tp.prefix match { - case pre: ThisType => - tp.isType || - pre.cls.isStaticOwner || - tp.symbol.isParamOrAccessor && !pre.cls.is(Trait) && ctx.owner.enclosingClass == pre.cls - // was ctx.owner.enclosingClass.derivesFrom(pre.cls) which was not tight enough - // and was spuriously triggered in case inner class would inherit 
from outer one - // eg anonymous TypeMap inside TypeMap.andThen - case pre: TermRef => - pre.symbol.is(Module) && pre.symbol.isStatic - case pre => - pre `eq` NoPrefix - } - typeIsElidable || - tp.symbol.is(JavaStatic) || - tp.symbol.hasAnnotation(defn.ScalaStaticAnnot) - } - - def needsSelect(tp: Type)(using Context): Boolean = tp match { - case tp: TermRef => !prefixIsElidable(tp) - case _ => false - } - - /** A tree representing the same reference as the given type */ - def ref(tp: NamedType, needLoad: Boolean = true)(using Context): Tree = - if (tp.isType) TypeTree(tp) - else if (prefixIsElidable(tp)) Ident(tp) - else if (tp.symbol.is(Module) && ctx.owner.isContainedIn(tp.symbol.moduleClass)) - followOuterLinks(This(tp.symbol.moduleClass.asClass)) - else if (tp.symbol hasAnnotation defn.ScalaStaticAnnot) - Ident(tp) - else - val pre = tp.prefix - if (pre.isSingleton) followOuterLinks(singleton(pre.dealias, needLoad)).select(tp) - else - val res = Select(TypeTree(pre), tp) - if needLoad && !res.symbol.isStatic then - throw TypeError(em"cannot establish a reference to $res") - res - - def ref(sym: Symbol)(using Context): Tree = - ref(NamedType(sym.owner.thisType, sym.name, sym.denot)) - - private def followOuterLinks(t: Tree)(using Context) = t match { - case t: This if ctx.erasedTypes && !(t.symbol == ctx.owner.enclosingClass || t.symbol.isStaticOwner) => - // after erasure outer paths should be respected - ExplicitOuter.OuterOps(ctx.detach).path(toCls = t.tpe.classSymbol) - case t => - t - } - - def singleton(tp: Type, needLoad: Boolean = true)(using Context): Tree = tp.dealias match { - case tp: TermRef => ref(tp, needLoad) - case tp: ThisType => This(tp.cls) - case tp: SkolemType => singleton(tp.narrow, needLoad) - case SuperType(qual, _) => singleton(qual, needLoad) - case ConstantType(value) => Literal(value) - } - - /** A path that corresponds to the given type `tp`. Error if `tp` is not a refinement - * of an addressable singleton type. - */ - def pathFor(tp: Type)(using Context): Tree = { - def recur(tp: Type): Tree = tp match { - case tp: NamedType => - tp.info match { - case TypeAlias(alias) => recur(alias) - case _: TypeBounds => EmptyTree - case _ => singleton(tp) - } - case tp: TypeProxy => recur(tp.superType) - case _ => EmptyTree - } - recur(tp).orElse { - report.error(em"$tp is not an addressable singleton type") - TypeTree(tp) - } - } - - /** A tree representing a `newXYZArray` operation of the right - * kind for the given element type in `elemTpe`. No type arguments or - * `length` arguments are given. - */ - def newArray(elemTpe: Type, returnTpe: Type, span: Span, dims: JavaSeqLiteral)(using Context): Tree = { - val elemClass = elemTpe.classSymbol - def newArr = - ref(defn.DottyArraysModule).select(defn.newArrayMethod).withSpan(span) - - if (!ctx.erasedTypes) { - assert(!TypeErasure.isGeneric(elemTpe), elemTpe) //needs to be done during typer. 
See Applications.convertNewGenericArray - newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) - } - else // after erasure - newArr.appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) - } - - /** The wrapped array method name for an array of type elemtp */ - def wrapArrayMethodName(elemtp: Type)(using Context): TermName = { - val elemCls = elemtp.classSymbol - if (elemCls.isPrimitiveValueClass) nme.wrapXArray(elemCls.name) - else if (elemCls.derivesFrom(defn.ObjectClass) && !elemCls.isNotRuntimeClass) nme.wrapRefArray - else nme.genericWrapArray - } - - /** A tree representing a `wrapXYZArray(tree)` operation of the right - * kind for the given element type in `elemTpe`. - */ - def wrapArray(tree: Tree, elemtp: Type)(using Context): Tree = - val wrapper = ref(defn.getWrapVarargsArrayModule) - .select(wrapArrayMethodName(elemtp)) - .appliedToTypes(if (elemtp.isPrimitiveValueType) Nil else elemtp :: Nil) - val actualElem = wrapper.tpe.widen.firstParamTypes.head - wrapper.appliedTo(tree.ensureConforms(actualElem)) - - // ------ Creating typed equivalents of trees that exist only in untyped form ------- - - /** new C(args), calling the primary constructor of C */ - def New(tp: Type, args: List[Tree])(using Context): Apply = - New(tp, tp.dealias.typeSymbol.primaryConstructor.asTerm, args) - - /** new C(args), calling given constructor `constr` of C */ - def New(tp: Type, constr: TermSymbol, args: List[Tree])(using Context): Apply = { - val targs = tp.argTypes - val tycon = tp.typeConstructor - New(tycon) - .select(TermRef(tycon, constr)) - .appliedToTypes(targs) - .appliedToTermArgs(args) - } - - /** An object def - * - * object obs extends parents { decls } - * - * gets expanded to - * - * val obj = new obj$ - * class obj$ extends parents { this: obj.type => decls } - * - * (The following no longer applies: - * What's interesting here is that the block is well typed - * (because class obj$ is hoistable), but the type of the `obj` val is - * not expressible. What needs to happen in general when - * inferring the type of a val from its RHS, is: if the type contains - * a class that has the val itself as owner, then that class - * is remapped to have the val's owner as owner. Remapping could be - * done by cloning the class with the new owner and substituting - * everywhere in the tree. We know that remapping is safe - * because the only way a local class can appear in the RHS of a val is - * by being hoisted outside of a block, and the necessary checks are - * done at this point already. - * - * On the other hand, for method result type inference, if the type of - * the RHS of a method contains a class owned by the method, this would be - * an error.) 
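[Editor's illustrative note, not part of the removed sources.] Spelled out for a concrete, invented object, the expansion described in this comment looks schematically as follows; only the first line is user-level syntax, the rest sketches the compiler-internal result in comments (parents, if any, are carried over to the module class).

    object Cfg { val port: Int = 8080 }
    // is turned by ModuleDef into, schematically:
    //   val Cfg = new Cfg$
    //   class Cfg$ { this: Cfg.type => val port: Int = 8080 }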
- */ - def ModuleDef(sym: TermSymbol, body: List[Tree])(using Context): tpd.Thicket = { - val modcls = sym.moduleClass.asClass - val constrSym = modcls.primaryConstructor orElse newDefaultConstructor(modcls).entered - val constr = DefDef(constrSym.asTerm, EmptyTree) - val clsdef = ClassDef(modcls, constr, body) - val valdef = ValDef(sym, New(modcls.typeRef).select(constrSym).appliedToNone) - Thicket(valdef, clsdef) - } - - /** A `_` with given type */ - def Underscore(tp: Type)(using Context): Ident = untpd.Ident(nme.WILDCARD).withType(tp) - - def defaultValue(tpe: Type)(using Context): Tree = { - val tpw = tpe.widen - - if (tpw isRef defn.IntClass) Literal(Constant(0)) - else if (tpw isRef defn.LongClass) Literal(Constant(0L)) - else if (tpw isRef defn.BooleanClass) Literal(Constant(false)) - else if (tpw isRef defn.CharClass) Literal(Constant('\u0000')) - else if (tpw isRef defn.FloatClass) Literal(Constant(0f)) - else if (tpw isRef defn.DoubleClass) Literal(Constant(0d)) - else if (tpw isRef defn.ByteClass) Literal(Constant(0.toByte)) - else if (tpw isRef defn.ShortClass) Literal(Constant(0.toShort)) - else nullLiteral.select(defn.Any_asInstanceOf).appliedToType(tpe) - } - - private class FindLocalDummyAccumulator(cls: ClassSymbol)(using Context) extends TreeAccumulator[Symbol] { - def apply(sym: Symbol, tree: Tree)(using Context) = - if (sym.exists) sym - else if (tree.isDef) { - val owner = tree.symbol.owner - if (owner.isLocalDummy && owner.owner == cls) owner - else if (owner == cls) foldOver(sym, tree) - else sym - } - else foldOver(sym, tree) - } - - /** The owner to be used in a local context when traversing a tree */ - def localOwner(tree: Tree)(using Context): Symbol = - val sym = tree.symbol - (if sym.is(PackageVal) then sym.moduleClass else sym).orElse(ctx.owner) - - /** The local context to use when traversing trees */ - def localCtx(tree: Tree)(using Context): Context = ctx.withOwner(localOwner(tree)) - - override val cpy: TypedTreeCopier = // Type ascription needed to pick up any new members in TreeCopier (currently there are none) - TypedTreeCopier() - - val cpyBetweenPhases: TimeTravellingTreeCopier = TimeTravellingTreeCopier() - - class TypedTreeCopier extends TreeCopier { - def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[Type] = - copied.withTypeUnchecked(tree.tpe) - def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[Type] = - copied.withTypeUnchecked(tree.tpe) - - protected val untpdCpy = untpd.cpy - - override def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = { - val tree1 = untpdCpy.Select(tree)(qualifier, name) - tree match { - case tree: Select if qualifier.tpe eq tree.qualifier.tpe => - tree1.withTypeUnchecked(tree.tpe) - case _ => - val tree2: Select = tree.tpe match { - case tpe: NamedType => - val qualType = qualifier.tpe.widenIfUnstable - if qualType.isExactlyNothing then tree1.withTypeUnchecked(tree.tpe) - else tree1.withType(tpe.derivedSelect(qualType)) - case _ => tree1.withTypeUnchecked(tree.tpe) - } - ConstFold.Select(tree2) - } - } - - override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = { - val tree1 = untpdCpy.Apply(tree)(fun, args) - tree match { - case tree: Apply - if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, fun, args) - } - } - - override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = { - val tree1 = untpdCpy.TypeApply(tree)(fun, 
args) - tree match { - case tree: TypeApply - if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, fun, args) - } - } - - override def Literal(tree: Tree)(const: Constant)(using Context): Literal = - ta.assignType(untpdCpy.Literal(tree)(const)) - - override def New(tree: Tree)(tpt: Tree)(using Context): New = - ta.assignType(untpdCpy.New(tree)(tpt), tpt) - - override def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = - ta.assignType(untpdCpy.Typed(tree)(expr, tpt), tpt) - - override def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = - ta.assignType(untpdCpy.NamedArg(tree)(name, arg), arg) - - override def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = - ta.assignType(untpdCpy.Assign(tree)(lhs, rhs)) - - override def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = { - val tree1 = untpdCpy.Block(tree)(stats, expr) - tree match { - case tree: Block if (expr.tpe eq tree.expr.tpe) && (expr.tpe eq tree.tpe) => - // The last guard is a conservative check: if `tree.tpe` is different from `expr.tpe`, then - // it was computed from widening `expr.tpe`, and tree transforms might cause `expr.tpe.widen` - // to change even if `expr.tpe` itself didn't change, e.g: - // { val s = ...; s } - // If the type of `s` changed, then the type of the block might have changed, even though `expr.tpe` - // will still be `TermRef(NoPrefix, s)` - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, stats, expr) - } - } - - override def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = { - val tree1 = untpdCpy.If(tree)(cond, thenp, elsep) - tree match { - case tree: If if (thenp.tpe eq tree.thenp.tpe) && (elsep.tpe eq tree.elsep.tpe) && - ((tree.tpe eq thenp.tpe) || (tree.tpe eq elsep.tpe)) => - // The last guard is a conservative check similar to the one done in `Block` above, - // if `tree.tpe` is not identical to the type of one of its branch, it might have been - // computed from the widened type of the branches, so the same reasoning than - // in `Block` applies. 
- tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, thenp, elsep) - } - } - - override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = { - val tree1 = untpdCpy.Closure(tree)(env, meth, tpt) - tree match { - case tree: Closure if sameTypes(env, tree.env) && (meth.tpe eq tree.meth.tpe) && (tpt.tpe eq tree.tpt.tpe) => - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, meth, tpt) - } - } - - override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = { - val tree1 = untpdCpy.Match(tree)(selector, cases) - tree match { - case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, selector, cases) - } - } - - override def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = { - val tree1 = untpdCpy.CaseDef(tree)(pat, guard, body) - tree match { - case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, pat, body) - } - } - - override def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = - ta.assignType(untpdCpy.Labeled(tree)(bind, expr)) - - override def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = - ta.assignType(untpdCpy.Return(tree)(expr, from)) - - override def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = - ta.assignType(untpdCpy.WhileDo(tree)(cond, body)) - - override def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = { - val tree1 = untpdCpy.Try(tree)(expr, cases, finalizer) - tree match { - case tree: Try if (expr.tpe eq tree.expr.tpe) && sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, expr, cases) - } - } - - override def Inlined(tree: Tree)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = { - val tree1 = untpdCpy.Inlined(tree)(call, bindings, expansion) - tree match { - case tree: Inlined if sameTypes(bindings, tree.bindings) && (expansion.tpe eq tree.expansion.tpe) => - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, bindings, expansion) - } - } - - override def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = { - val tree1 = untpdCpy.SeqLiteral(tree)(elems, elemtpt) - tree match { - case tree: SeqLiteral - if sameTypes(elems, tree.elems) && (elemtpt.tpe eq tree.elemtpt.tpe) => - tree1.withTypeUnchecked(tree.tpe) - case _ => - ta.assignType(tree1, elems, elemtpt) - } - } - - override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = { - val tree1 = untpdCpy.Annotated(tree)(arg, annot) - tree match { - case tree: Annotated if (arg.tpe eq tree.arg.tpe) && (annot eq tree.annot) => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, arg, annot) - } - } - - override def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = - If(tree: Tree)(cond, thenp, elsep) - override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = - Closure(tree: Tree)(env, meth, tpt) - override def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(using Context): CaseDef = - CaseDef(tree: Tree)(pat, guard, body) - override def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, 
finalizer: Tree = tree.finalizer)(using Context): Try = - Try(tree: Tree)(expr, cases, finalizer) - } - - class TimeTravellingTreeCopier extends TypedTreeCopier { - override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = - tree match - case tree: Apply - if (tree.fun eq fun) && (tree.args eq args) - && tree.tpe.isInstanceOf[ConstantType] - && isPureExpr(tree) => tree - case _ => - ta.assignType(untpdCpy.Apply(tree)(fun, args), fun, args) - // Note: Reassigning the original type if `fun` and `args` have the same types as before - // does not work here in general: The computed type depends on the widened function type, not - // the function type itself. A tree transform may keep the function type the - // same but its widened type might change. - // However, we keep constant types of pure expressions. This uses the underlying assumptions - // that pure functions yielding a constant will not change in later phases. - - override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = - ta.assignType(untpdCpy.TypeApply(tree)(fun, args), fun, args) - // Same remark as for Apply - - override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = - ta.assignType(untpdCpy.Closure(tree)(env, meth, tpt), meth, tpt) - - override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = - Closure(tree: Tree)(env, meth, tpt) - } - - override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError - - implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal { - - def isValue(using Context): Boolean = - tree.isTerm && tree.tpe.widen.isValueType - - def isValueOrPattern(using Context): Boolean = - tree.isValue || tree.isPattern - - def isValueType: Boolean = - tree.isType && tree.tpe.isValueType - - def isInstantiation: Boolean = tree match { - case Apply(Select(New(_), nme.CONSTRUCTOR), _) => true - case _ => false - } - - def shallowFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = - ShallowFolder(op).apply(z, tree) - - def deepFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = - DeepFolder(op).apply(z, tree) - - def find[T](pred: (tpd.Tree) => Boolean)(using Context): Option[tpd.Tree] = - shallowFold[Option[tpd.Tree]](None)((accum, tree) => if (pred(tree)) Some(tree) else accum) - - def subst(from: List[Symbol], to: List[Symbol])(using Context): ThisTree = - TreeTypeMap(substFrom = from, substTo = to).apply(tree) - - /** Change owner from `from` to `to`. If `from` is a weak owner, also change its - * owner to `to`, and continue until a non-weak owner is reached. 
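 *
 *  A minimal usage sketch (illustrative; `rhs`, `fieldSym` and `accessorSym` are
 *  placeholder names, not from this file):
 *
 *  {{{
 *  // definitions inside `rhs` formerly owned by `fieldSym` become owned by `accessorSym`
 *  val movedRhs = rhs.changeOwner(from = fieldSym, to = accessorSym)
 *  }}}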
- */ - def changeOwner(from: Symbol, to: Symbol)(using Context): ThisTree = { - @tailrec def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree = - if (from.isWeakOwner && !from.owner.isClass) - loop(from.owner, from :: froms, to :: tos) - else - //println(i"change owner ${from :: froms}%, % ==> $tos of $tree") - TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree) - if (from == to) tree else loop(from, Nil, to :: Nil) - } - - /** - * Set the owner of every definition in this tree which is not itself contained in this - * tree to be `newowner` - */ - def changeNonLocalOwners(newOwner: Symbol)(using Context): Tree = { - val ownerAcc = new TreeAccumulator[immutable.Set[Symbol]] { - def apply(ss: immutable.Set[Symbol], tree: Tree)(using Context) = tree match { - case tree: DefTree => - val sym = tree.symbol - if sym.exists && !sym.owner.is(Package) then ss + sym.owner else ss - case _ => - foldOver(ss, tree) - } - } - val owners = ownerAcc(immutable.Set.empty[Symbol], tree).toList - val newOwners = List.fill(owners.size)(newOwner) - TreeTypeMap(oldOwners = owners, newOwners = newOwners).apply(tree) - } - - /** After phase `trans`, set the owner of every definition in this tree that was formerly - * owner by `from` to `to`. - */ - def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree = - if (ctx.phase == trans.next) { - val traverser = new TreeTraverser { - def traverse(tree: Tree)(using Context) = tree match { - case tree: DefTree => - val sym = tree.symbol - val prevDenot = atPhase(trans)(sym.denot) - if (prevDenot.effectiveOwner == from.skipWeakOwner) { - val d = sym.copySymDenotation(owner = to) - d.installAfter(trans) - d.transformAfter(trans, d => if (d.owner eq from) d.copySymDenotation(owner = to) else d) - } - if (sym.isWeakOwner) traverseChildren(tree) - case _ => - traverseChildren(tree) - } - } - traverser.traverse(tree) - tree - } - else atPhase(trans.next)(changeOwnerAfter(from, to, trans)) - - /** A select node with the given selector name and a computed type */ - def select(name: Name)(using Context): Select = - Select(tree, name) - - /** A select node with the given selector name such that the designated - * member satisfies predicate `p`. Useful for disambiguating overloaded members. - */ - def select(name: Name, p: Symbol => Boolean)(using Context): Select = - select(tree.tpe.member(name).suchThat(p).symbol) - - /** A select node with the given type */ - def select(tp: NamedType)(using Context): Select = - untpd.Select(tree, tp.name).withType(tp) - - /** A select node that selects the given symbol. Note: Need to make sure this - * is in fact the symbol you would get when you select with the symbol's name, - * otherwise a data race may occur which would be flagged by -Yno-double-bindings. - */ - def select(sym: Symbol)(using Context): Select = { - val tp = - if (sym.isType) { - assert(!sym.is(TypeParam)) - TypeRef(tree.tpe, sym.asType) - } - else - TermRef(tree.tpe, sym.name.asTermName, sym.denot.asSeenFrom(tree.tpe)) - untpd.Select(tree, sym.name).withType(tp) - } - - /** A select node with the given selector name and signature and a computed type */ - def selectWithSig(name: Name, sig: Signature, target: Name)(using Context): Tree = - untpd.SelectWithSig(tree, name, sig).withType(tree.tpe.select(name.asTermName, sig, target)) - - /** A select node with selector name and signature taken from `sym`. 
- * Note: Use this method instead of select(sym) if the referenced symbol - * might be overridden in the type of the qualifier prefix. See note - * on select(sym: Symbol). - */ - def selectWithSig(sym: Symbol)(using Context): Tree = - selectWithSig(sym.name, sym.signature, sym.targetName) - - /** A unary apply node with given argument: `tree(arg)` */ - def appliedTo(arg: Tree)(using Context): Apply = - appliedToTermArgs(arg :: Nil) - - /** An apply node with given arguments: `tree(arg, args0, ..., argsN)` */ - def appliedTo(arg: Tree, args: Tree*)(using Context): Apply = - appliedToTermArgs(arg :: args.toList) - - /** An apply node with given argument list `tree(args(0), ..., args(args.length - 1))` */ - def appliedToTermArgs(args: List[Tree])(using Context): Apply = - Apply(tree, args) - - /** An applied node that accepts only varargs as arguments */ - def appliedToVarargs(args: List[Tree], tpt: Tree)(using Context): Apply = - appliedTo(repeated(args, tpt)) - - /** An apply or type apply node with given argument list */ - def appliedToArgs(args: List[Tree])(using Context): GenericApply = args match - case arg :: args1 if arg.isType => TypeApply(tree, args) - case _ => Apply(tree, args) - - /** The current tree applied to given argument lists: - * `tree (argss(0)) ... (argss(argss.length -1))` - */ - def appliedToArgss(argss: List[List[Tree]])(using Context): Tree = - argss.foldLeft(tree: Tree)(_.appliedToArgs(_)) - - /** The current tree applied to (): `tree()` */ - def appliedToNone(using Context): Apply = Apply(tree, Nil) - - /** The current tree applied to given type argument: `tree[targ]` */ - def appliedToType(targ: Type)(using Context): Tree = - appliedToTypes(targ :: Nil) - - /** The current tree applied to given type arguments: `tree[targ0, ..., targN]` */ - def appliedToTypes(targs: List[Type])(using Context): Tree = - appliedToTypeTrees(targs map (TypeTree(_))) - - /** The current tree applied to given type argument: `tree[targ]` */ - def appliedToTypeTree(targ: Tree)(using Context): Tree = - appliedToTypeTrees(targ :: Nil) - - /** The current tree applied to given type argument list: `tree[targs(0), ..., targs(targs.length - 1)]` */ - def appliedToTypeTrees(targs: List[Tree])(using Context): Tree = - if targs.isEmpty then tree else TypeApply(tree, targs) - - /** Apply to `()` unless tree's widened type is parameterless */ - def ensureApplied(using Context): Tree = - if (tree.tpe.widen.isParameterless) tree else tree.appliedToNone - - /** `tree == that` */ - def equal(that: Tree)(using Context): Tree = - if (that.tpe.widen.isRef(defn.NothingClass)) - Literal(Constant(false)) - else - applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) - - /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */ - def isInstance(tp: Type)(using Context): Tree = tp.dealias match { - case ConstantType(c) if c.tag == StringTag => - singleton(tp).equal(tree) - case tp: SingletonType => - if tp.widen.derivesFrom(defn.ObjectClass) then - tree.ensureConforms(defn.ObjectType).select(defn.Object_eq).appliedTo(singleton(tp)) - else - singleton(tp).equal(tree) - case _ => - tree.select(defn.Any_isInstanceOf).appliedToType(tp) - } - - /** tree.asInstanceOf[`tp`] */ - def asInstance(tp: Type)(using Context): Tree = { - assert(tp.isValueType, i"bad cast: $tree.asInstanceOf[$tp]") - tree.select(defn.Any_asInstanceOf).appliedToType(tp) - } - - /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ - def cast(tp: Type)(using Context): Tree = 
cast(TypeTree(tp)) - - /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ - def cast(tpt: TypeTree)(using Context): Tree = - assert(tpt.tpe.isValueType, i"bad cast: $tree.asInstanceOf[$tpt]") - tree.select(if (ctx.erasedTypes) defn.Any_asInstanceOf else defn.Any_typeCast) - .appliedToTypeTree(tpt) - - /** cast `tree` to `tp` (or its box/unbox/cast equivalent when after - * erasure and value and non-value types are mixed), - * unless tree's type already conforms to `tp`. - */ - def ensureConforms(tp: Type)(using Context): Tree = - if (tree.tpe <:< tp) tree - else if (!ctx.erasedTypes) cast(tp) - else Erasure.Boxing.adaptToType(tree, tp) - - /** `tree ne null` (might need a cast to be type correct) */ - def testNotNull(using Context): Tree = { - // If the receiver is of type `Nothing` or `Null`, add an ascription or cast - // so that the selection succeeds. - // e.g. `null.ne(null)` doesn't type, but `(null: AnyRef).ne(null)` does. - val receiver = - if tree.tpe.isBottomType then - if ctx.explicitNulls then tree.cast(defn.AnyRefType) - else Typed(tree, TypeTree(defn.AnyRefType)) - else tree.ensureConforms(defn.ObjectType) - // also need to cast the null literal to AnyRef in explicit nulls - val nullLit = if ctx.explicitNulls then nullLiteral.cast(defn.AnyRefType) else nullLiteral - receiver.select(defn.Object_ne).appliedTo(nullLit).withSpan(tree.span) - } - - /** If inititializer tree is `_`, the default value of its type, - * otherwise the tree itself. - */ - def wildcardToDefault(using Context): Tree = - if (isWildcardArg(tree)) defaultValue(tree.tpe) else tree - - /** `this && that`, for boolean trees `this`, `that` */ - def and(that: Tree)(using Context): Tree = - tree.select(defn.Boolean_&&).appliedTo(that) - - /** `this || that`, for boolean trees `this`, `that` */ - def or(that: Tree)(using Context): Tree = - tree.select(defn.Boolean_||).appliedTo(that) - - /** The translation of `tree = rhs`. - * This is either the tree as an assignment, or a setter call. - */ - def becomes(rhs: Tree)(using Context): Tree = { - val sym = tree.symbol - if (sym.is(Method)) { - val setter = sym.setter.orElse { - assert(sym.name.isSetterName && sym.info.firstParamTypes.nonEmpty, sym) - sym - } - val qual = tree match { - case id: Ident => desugarIdentPrefix(id) - case Select(qual, _) => qual - } - qual.select(setter).appliedTo(rhs) - } - else Assign(tree, rhs) - } - - /** tree @annot - * - * works differently for type trees and term trees - */ - def annotated(annot: Tree)(using Context): Tree = - if (tree.isTerm) - Typed(tree, TypeTree(AnnotatedType(tree.tpe.widenIfUnstable, Annotation(annot)))) - else - Annotated(tree, annot) - - /** A synthetic select with that will be turned into an outer path by ExplicitOuter. - * @param levels How many outer levels to select - * @param tp The type of the destination of the outer path. 
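 *
 *  A minimal usage sketch (illustrative; `innerThis` and `outerClsType` are
 *  placeholder names):
 *
 *  {{{
 *  // selects the enclosing instance two levels out; rewritten to an outer path by ExplicitOuter
 *  val outerRef = innerThis.outerSelect(levels = 2, tp = outerClsType)
 *  }}}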
- */ - def outerSelect(levels: Int, tp: Type)(using Context): Tree = - untpd.Select(tree, OuterSelectName(EmptyTermName, levels)).withType(SkolemType(tp)) - - /** Replace Inlined nodes and InlineProxy references to underlying arguments */ - def underlyingArgument(using Context): Tree = { - val mapToUnderlying = new MapToUnderlying { - /** Should get the rhs of this binding - * Returns true if the symbol is a val or def generated by eta-expansion/inline - */ - override protected def skipLocal(sym: Symbol): Boolean = - sym.isOneOf(InlineProxy | Synthetic) - } - mapToUnderlying.transform(tree) - } - - /** Replace Ident nodes references to the underlying tree that defined them */ - def underlying(using Context): Tree = MapToUnderlying().transform(tree) - - // --- Higher order traversal methods ------------------------------- - - /** Apply `f` to each subtree of this tree */ - def foreachSubTree(f: Tree => Unit)(using Context): Unit = { - val traverser = new TreeTraverser { - def traverse(tree: Tree)(using Context) = foldOver(f(tree), tree) - } - traverser.traverse(tree) - } - - /** Is there a subtree of this tree that satisfies predicate `p`? */ - def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { - val acc = new TreeAccumulator[Boolean] { - def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) - } - acc(false, tree) - } - - /** All subtrees of this tree that satisfy predicate `p`. */ - def filterSubTrees(f: Tree => Boolean)(using Context): List[Tree] = { - val buf = mutable.ListBuffer[Tree]() - foreachSubTree { tree => if (f(tree)) buf += tree } - buf.toList - } - - /** Set this tree as the `defTree` of its symbol and return this tree */ - def setDefTree(using Context): ThisTree = { - val sym = tree.symbol - if (sym.exists) sym.defTree = tree - tree - } - - def etaExpandCFT(using Context): Tree = - def expand(target: Tree, tp: Type)(using Context): Tree = tp match - case defn.ContextFunctionType(argTypes, resType, isErased) => - val anonFun = newAnonFun( - ctx.owner, - MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType), - coord = ctx.owner.coord) - def lambdaBody(refss: List[List[Tree]]) = - expand(target.select(nme.apply).appliedToArgss(refss), resType)( - using ctx.withOwner(anonFun)) - Closure(anonFun, lambdaBody) - case _ => - target - expand(tree, tree.tpe.widen) - } - - inline val MapRecursionLimit = 10 - - extension (trees: List[Tree]) - - /** A map that expands to a recursive function. It's equivalent to - * - * flatten(trees.mapConserve(op)) - * - * and falls back to it after `MaxRecursionLimit` recursions. - * Before that it uses a simpler method that uses stackspace - * instead of heap. - * Note `op` is duplicated in the generated code, so it should be - * kept small. - */ - inline def mapInline(inline op: Tree => Tree): List[Tree] = - def recur(trees: List[Tree], count: Int): List[Tree] = - if count > MapRecursionLimit then - // use a slower implementation that avoids stack overflows - flatten(trees.mapConserve(op)) - else trees match - case tree :: rest => - val tree1 = op(tree) - val rest1 = recur(rest, count + 1) - if (tree1 eq tree) && (rest1 eq rest) then trees - else tree1 match - case Thicket(elems1) => elems1 ::: rest1 - case _ => tree1 :: rest1 - case nil => nil - recur(trees, 0) - - /** Transform statements while maintaining import contexts and expression contexts - * in the same way as Typer does. 
The code addresses additional concerns: - * - be tail-recursive where possible - * - don't re-allocate trees where nothing has changed - */ - inline def mapStatements[T]( - exprOwner: Symbol, - inline op: Tree => Context ?=> Tree, - inline wrapResult: List[Tree] => Context ?=> T)(using Context): T = - @tailrec - def loop(mapped: mutable.ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree])(using Context): T = - pending match - case stat :: rest => - val statCtx = stat match - case _: DefTree | _: ImportOrExport => ctx - case _ => ctx.exprContext(stat, exprOwner) - val stat1 = op(stat)(using statCtx) - val restCtx = stat match - case stat: Import => ctx.importContext(stat, stat.symbol) - case _ => ctx - if stat1 eq stat then - loop(mapped, unchanged, rest)(using restCtx) - else - val buf = if mapped == null then new mutable.ListBuffer[Tree] else mapped - var xc = unchanged - while xc ne pending do - buf += xc.head - xc = xc.tail - stat1 match - case Thicket(stats1) => buf ++= stats1 - case _ => buf += stat1 - loop(buf, rest, rest)(using restCtx) - case nil => - wrapResult( - if mapped == null then unchanged - else mapped.prependToList(unchanged)) - - loop(null, trees, trees) - end mapStatements - end extension - - /** A treemap that generates the same contexts as the original typer for statements. - * This means: - * - statements that are not definitions get the exprOwner as owner - * - imports are reflected in the contexts of subsequent statements - */ - class TreeMapWithPreciseStatContexts(cpy: TreeCopier = tpd.cpy) extends TreeMap(cpy): - def transformStats[T](trees: List[Tree], exprOwner: Symbol, wrapResult: List[Tree] => Context ?=> T)(using Context): T = - trees.mapStatements(exprOwner, transform(_), wrapResult) - final override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = - transformStats(trees, exprOwner, sameStats) - override def transformBlock(blk: Block)(using Context) = - transformStats(blk.stats, ctx.owner, - stats1 => ctx ?=> cpy.Block(blk)(stats1, transform(blk.expr))) - - val sameStats: List[Tree] => Context ?=> List[Tree] = stats => stats - - /** Map Inlined nodes, NamedArgs, Blocks with no statements and local references to underlying arguments. - * Also drops Inline and Block with no statements. - */ - private class MapToUnderlying extends TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match { - case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) => - tree.symbol.defTree match { - case defTree: ValOrDefDef => - val rhs = defTree.rhs - assert(!rhs.isEmpty) - transform(rhs) - case _ => tree - } - case Inlined(_, Nil, arg) => transform(arg) - case Block(Nil, arg) => transform(arg) - case NamedArg(_, arg) => transform(arg) - case tree => super.transform(tree) - } - - /** Should get the rhs of this binding */ - protected def skipLocal(sym: Symbol): Boolean = true - - /** Is this a symbol that of a local val or parameterless def for which we could get the rhs */ - private def isBinding(sym: Symbol)(using Context): Boolean = - sym.isTerm && !sym.is(Param) && !sym.owner.isClass && - !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless - } - - extension (xs: List[tpd.Tree]) - def tpes: List[Type] = xs match { - case x :: xs1 => x.tpe :: xs1.tpes - case nil => Nil - } - - /** A trait for loaders that compute trees. Currently implemented just by DottyUnpickler. 
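 *  A minimal sketch of an implementation (hypothetical, for illustration only):
 *
 *  {{{
 *  class SingleTreeProvider(t: Tree) extends TreeProvider {
 *    // the one root tree this provider knows about
 *    protected def computeRootTrees(using Context): List[Tree] = t :: Nil
 *  }
 *  }}}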
*/ - trait TreeProvider { - protected def computeRootTrees(using Context): List[Tree] - - private var myTrees: List[Tree] | Null = _ - - /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ - def rootTrees(using Context): List[Tree] = - if (ctx.settings.YretainTrees.value) { - if (myTrees == null) myTrees = computeRootTrees - myTrees.uncheckedNN - } - else computeRootTrees - - /** Get first tree defined by this provider, or EmptyTree if none exists */ - def tree(using Context): Tree = - rootTrees.headOption.getOrElse(EmptyTree) - - /** Is it possible that the tree to load contains a definition of or reference to `id`? */ - def mightContain(id: String)(using Context): Boolean = true - } - - // convert a numeric with a toXXX method - def primitiveConversion(tree: Tree, numericCls: Symbol)(using Context): Tree = { - val mname = "to".concat(numericCls.name) - val conversion = tree.tpe member(mname) - if (conversion.symbol.exists) - tree.select(conversion.symbol.termRef).ensureApplied - else if (tree.tpe.widen isRef numericCls) - tree - else { - report.warning(em"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") - Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)).withSpan(tree.span) - } - } - - /** A tree that corresponds to `Predef.classOf[$tp]` in source */ - def clsOf(tp: Type)(using Context): Tree = - if ctx.erasedTypes && !tp.isRef(defn.UnitClass) then - Literal(Constant(TypeErasure.erasure(tp))) - else - Literal(Constant(tp)) - - @tailrec - def sameTypes(trees: List[tpd.Tree], trees1: List[tpd.Tree]): Boolean = - if (trees.isEmpty) trees.isEmpty - else if (trees1.isEmpty) trees.isEmpty - else (trees.head.tpe eq trees1.head.tpe) && sameTypes(trees.tail, trees1.tail) - - /** If `tree`'s purity level is less than `level`, let-bind it so that it gets evaluated - * only once. I.e. 
produce a - * - * { val x = 'tree ; ~within('x) } - * - * instead of otherwise - * - * ~within('tree) - */ - def letBindUnless(level: TreeInfo.PurityLevel, tree: Tree)(within: Tree => Tree)(using Context): Tree = - if (exprPurity(tree) >= level) within(tree) - else { - val vdef = SyntheticValDef(TempResultName.fresh(), tree) - Block(vdef :: Nil, within(Ident(vdef.namedType))) - } - - /** Let bind `tree` unless `tree` is at least idempotent */ - def evalOnce(tree: Tree)(within: Tree => Tree)(using Context): Tree = - letBindUnless(TreeInfo.Idempotent, tree)(within) - - def runtimeCall(name: TermName, args: List[Tree])(using Context): Tree = - Ident(defn.ScalaRuntimeModule.requiredMethod(name).termRef).appliedToTermArgs(args) - - /** An extractor that pulls out type arguments */ - object MaybePoly: - def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match - case TypeApply(tree, targs) => Some(tree, targs) - case _ => Some(tree, Nil) - - object TypeArgs: - def unapply(ts: List[Tree]): Option[List[Tree]] = - if ts.nonEmpty && ts.head.isType then Some(ts) else None - - /** Split argument clauses into a leading type argument clause if it exists and - * remaining clauses - */ - def splitArgs(argss: List[List[Tree]]): (List[Tree], List[List[Tree]]) = argss match - case TypeArgs(targs) :: argss1 => (targs, argss1) - case _ => (Nil, argss) - - def joinArgs(targs: List[Tree], argss: List[List[Tree]]): List[List[Tree]] = - if targs.isEmpty then argss else targs :: argss - - /** A key to be used in a context property that tracks enclosing inlined calls */ - private val InlinedCalls = Property.Key[List[Tree]]() - - /** A key to be used in a context property that tracks the number of inlined trees */ - private val InlinedTrees = Property.Key[Counter]() - final class Counter { - var count: Int = 0 - } - - /** Record an enclosing inlined call. - * EmptyTree calls (for parameters) cancel the next-enclosing call in the list instead of being added to it. - * We assume parameters are never nested inside parameters. - */ - override def inlineContext(call: Tree)(using Context): Context = { - // We assume enclosingInlineds is already normalized, and only process the new call with the head. - val oldIC = enclosingInlineds - - val newIC = - if call.isEmpty then - oldIC match - case t1 :: ts2 => ts2 - case _ => oldIC - else - call :: oldIC - - val ctx1 = ctx.fresh.setProperty(InlinedCalls, newIC) - if oldIC.isEmpty then ctx1.setProperty(InlinedTrees, new Counter) else ctx1 - } - - /** All enclosing calls that are currently inlined, from innermost to outermost. - */ - def enclosingInlineds(using Context): List[Tree] = - ctx.property(InlinedCalls).getOrElse(Nil) - - /** Record inlined trees */ - def addInlinedTrees(n: Int)(using Context): Unit = - ctx.property(InlinedTrees).foreach(_.count += n) - - /** Check if the limit on the number of inlined trees has been reached */ - def reachedInlinedTreesLimit(using Context): Boolean = - ctx.property(InlinedTrees) match - case Some(c) => c.count > ctx.settings.XmaxInlinedTrees.value - case None => false - - /** The source file where the symbol of the `inline` method referred to by `call` - * is defined - */ - def sourceFile(call: Tree)(using Context): SourceFile = call.symbol.source - - /** Desugar identifier into a select node. Return the tree itself if not possible */ - def desugarIdent(tree: Ident)(using Context): RefTree = { - val qual = desugarIdentPrefix(tree) - if (qual.isEmpty) tree - else qual.select(tree.symbol) - } - - /** Recover identifier prefix (e.g. 
this) if it exists */ - def desugarIdentPrefix(tree: Ident)(using Context): Tree = tree.tpe match { - case TermRef(prefix: TermRef, _) => - prefix.info match - case mt: MethodType if mt.paramInfos.isEmpty && mt.resultType.typeSymbol.is(Module) => - ref(mt.resultType.typeSymbol.sourceModule) - case _ => - ref(prefix) - case TermRef(prefix: ThisType, _) => - This(prefix.cls) - case _ => - EmptyTree - } - - /** - * The symbols that are imported with `expr.name` - * - * @param expr The base of the import statement - * @param name The name that is being imported. - * @return All the symbols that would be imported with `expr.name`. - */ - def importedSymbols(expr: Tree, name: Name)(using Context): List[Symbol] = { - def lookup(name: Name): Symbol = expr.tpe.member(name).symbol - val symbols = - List(lookup(name.toTermName), - lookup(name.toTypeName), - lookup(name.moduleClassName), - lookup(name.sourceModuleName)) - - symbols.map(_.sourceSymbol).filter(_.exists).distinct - } - - /** - * All the symbols that are imported by the first selector of `imp` that matches - * `selectorPredicate`. - * - * @param imp The import statement to analyze - * @param selectorPredicate A test to find the selector to use. - * @return The symbols imported. - */ - def importedSymbols(imp: Import, - selectorPredicate: untpd.ImportSelector -> Boolean = util.common.alwaysTrue) - (using Context): List[Symbol] = - imp.selectors.find(selectorPredicate) match - case Some(sel) => importedSymbols(imp.expr, sel.name) - case _ => Nil - - /** - * The list of select trees that resolve to the same symbols as the ones that are imported - * by `imp`. - */ - def importSelections(imp: Import)(using Context): List[Select] = { - def imported(sym: Symbol, id: untpd.Ident, rename: Option[untpd.Ident]): List[Select] = { - // Give a zero-extent position to the qualifier to prevent it from being included several - // times in results in the language server. - val noPosExpr = focusPositions(imp.expr) - val selectTree = Select(noPosExpr, sym.name).withSpan(id.span) - rename match { - case None => - selectTree :: Nil - case Some(rename) => - // Get the type of the symbol that is actually selected, and construct a select - // node with the new name and the type of the real symbol. 
- val name = if (sym.name.isTypeName) rename.name.toTypeName else rename.name - val actual = Select(noPosExpr, sym.name) - val renameTree = Select(noPosExpr, name).withSpan(rename.span).withType(actual.tpe) - selectTree :: renameTree :: Nil - } - } - - imp.selectors.flatMap { sel => - if sel.isWildcard then Nil - else - val renamedOpt = sel.renamed match - case renamed: untpd.Ident => Some(renamed) - case untpd.EmptyTree => None - importedSymbols(imp.expr, sel.name).flatMap { sym => - imported(sym, sel.imported, renamedOpt) - } - } - } - - /** Creates the tuple type tree repesentation of the type trees in `ts` */ - def tupleTypeTree(elems: List[Tree])(using Context): Tree = { - val arity = elems.length - if arity <= Definitions.MaxTupleArity then - val tupleTp = defn.TupleType(arity) - if tupleTp != null then - AppliedTypeTree(TypeTree(tupleTp), elems) - else nestedPairsTypeTree(elems) - else nestedPairsTypeTree(elems) - } - - /** Creates the nested pairs type tree repesentation of the type trees in `ts` */ - def nestedPairsTypeTree(ts: List[Tree])(using Context): Tree = - ts.foldRight[Tree](TypeTree(defn.EmptyTupleModule.termRef))((x, acc) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), x :: acc :: Nil)) - - /** Replaces all positions in `tree` with zero-extent positions */ - private def focusPositions(tree: Tree)(using Context): Tree = { - val transformer = new tpd.TreeMap { - override def transform(tree: Tree)(using Context): Tree = - super.transform(tree).withSpan(tree.span.focus) - } - transformer.transform(tree) - } - - /** Convert a list of trees to a vararg-compatible tree. - * Used to make arguments for methods that accept varargs. - */ - def repeated(trees: List[Tree], tpt: Tree)(using Context): Tree = - ctx.typeAssigner.arrayToRepeated(JavaSeqLiteral(trees, tpt)) - - /** Create a tree representing a list containing all - * the elements of the argument list. A "list of tree to - * tree of list" conversion. - * - * @param trees the elements the list represented by - * the resulting tree should contain. - * @param tpe the type of the elements of the resulting list. 
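 *
 *  A minimal usage sketch (illustrative; `stringArgs` is a placeholder list of
 *  trees of type String):
 *
 *  {{{
 *  // builds the equivalent of `List.apply[String](stringArgs*)`
 *  val listTree = mkList(stringArgs, TypeTree(defn.StringType))
 *  }}}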
- * - */ - def mkList(trees: List[Tree], tpt: Tree)(using Context): Tree = - ref(defn.ListModule).select(nme.apply) - .appliedToTypeTree(tpt) - .appliedToVarargs(trees, tpt) - - - protected def FunProto(args: List[Tree], resType: Type)(using Context) = - ProtoTypes.FunProtoTyped(args, resType)(ctx.typer, ApplyKind.Regular) -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala deleted file mode 100644 index a6d3bc5a072c..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala +++ /dev/null @@ -1,829 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import Types._, Contexts._, Constants._, Names._, Flags._ -import dotty.tools.dotc.typer.ProtoTypes -import Symbols._, StdNames._, Trees._ -import util.{Property, SourceFile, NoSource} -import util.Spans.Span -import annotation.constructorOnly -import annotation.internal.sharable -import Decorators._ -import annotation.retains -import language.experimental.pureFunctions - -object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { - - // ----- Tree cases that exist in untyped form only ------------------ - - abstract class OpTree(implicit @constructorOnly src: SourceFile) extends Tree { - def op: Ident - override def isTerm: Boolean = op.isTerm - override def isType: Boolean = op.isType - } - - /** A typed subtree of an untyped tree needs to be wrapped in a TypedSplice - * @param owner The current owner at the time the tree was defined - * @param isExtensionReceiver The splice was created from the receiver `e` in an extension - * method call `e.f(...)` - */ - abstract case class TypedSplice(splice: tpd.Tree)(val owner: Symbol, val isExtensionReceiver: Boolean)(implicit @constructorOnly src: SourceFile) extends ProxyTree { - def forwardTo: tpd.Tree = splice - override def toString = - def ext = if isExtensionReceiver then ", isExtensionReceiver = true" else "" - s"TypedSplice($splice$ext)" - } - - object TypedSplice { - def apply(tree: tpd.Tree, isExtensionReceiver: Boolean = false)(using Context): TypedSplice = - val owner = ctx.owner - given SourceFile = ctx.source - new TypedSplice(tree)(owner, isExtensionReceiver) {} - } - - /** mods object name impl */ - case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) - extends MemberDef { - type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef - def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) - } - - /** An untyped template with a derives clause. Derived parents are added to the end - * of the `parents` list. `derivedCount` keeps track of how many there are. 
- * This representation was chosen because it balances two concerns: - * - maximize overlap between DerivingTemplate and Template for code streamlining - * - keep invariant that elements of untyped trees align with source positions - */ - class DerivingTemplate(constr: DefDef, parentsOrDerived: List[Tree], self: ValDef, preBody: LazyTreeList, derivedCount: Int)(implicit @constructorOnly src: SourceFile) - extends Template(constr, parentsOrDerived, self, preBody) { - override val parents = parentsOrDerived.dropRight(derivedCount) - override val derived = parentsOrDerived.takeRight(derivedCount) - } - - case class ParsedTry(expr: Tree, handler: Tree, finalizer: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - - case class SymbolLit(str: String)(implicit @constructorOnly src: SourceFile) extends TermTree - - /** An interpolated string - * @param segments a list of two element tickets consisting of string literal and argument tree, - * possibly with a simple string literal as last element of the list - */ - case class InterpolatedString(id: TermName, segments: List[Tree])(implicit @constructorOnly src: SourceFile) - extends TermTree - - /** A function type or closure */ - case class Function(args: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { - override def isTerm: Boolean = body.isTerm - override def isType: Boolean = body.isType - } - - /** A function type or closure with `implicit`, `erased`, or `given` modifiers */ - class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers)(implicit @constructorOnly src: SourceFile) - extends Function(args, body) - - /** A polymorphic function type */ - case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { - override def isTerm = body.isTerm - override def isType = body.isType - } - - /** A function created from a wildcard expression - * @param placeholderParams a list of definitions of synthetic parameters. - * @param body the function body where wildcards are replaced by - * references to synthetic parameters. - * This is equivalent to Function, except that forms a special case for the overlapping - * positions tests. 
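 *
 *  A minimal sketch of the shape built for the source expression `_ + 1`
 *  (illustrative only; assumes a given SourceFile is in scope, and the real
 *  placeholder parameter name is compiler-generated):
 *
 *  {{{
 *  val param = ValDef(nme.syntheticParamName(1), TypeTree(), EmptyTree)
 *  new WildcardFunction(param :: Nil,
 *    InfixOp(Ident(param.name), Ident("+".toTermName), Number("1", NumberKind.Whole(10))))
 *  }}}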
- */ - class WildcardFunction(placeholderParams: List[ValDef], body: Tree)(implicit @constructorOnly src: SourceFile) - extends Function(placeholderParams, body) - - case class InfixOp(left: Tree, op: Ident, right: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree - case class PostfixOp(od: Tree, op: Ident)(implicit @constructorOnly src: SourceFile) extends OpTree - case class PrefixOp(op: Ident, od: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree - case class Parens(t: Tree)(implicit @constructorOnly src: SourceFile) extends ProxyTree { - def forwardTo: Tree = t - } - case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree { - override def isTerm: Boolean = trees.isEmpty || trees.head.isTerm - override def isType: Boolean = !isTerm - } - case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class Quote(quoted: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class Splice(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree { - def isInBraces: Boolean = span.end != expr.span.end - } - case class ForYield(enums: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class ForDo(enums: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class GenFrom(pat: Tree, expr: Tree, checkMode: GenCheckMode)(implicit @constructorOnly src: SourceFile) extends Tree - case class GenAlias(pat: Tree, expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree - case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree - case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree - case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree - case class Into(tpt: Tree)(implicit @constructorOnly src: SourceFile) extends Tree - case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree - - case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { - // TODO: Make bound a typed tree? 
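  // Illustrative correspondence to source forms (not exhaustive):
  //   import p.A          ~> ImportSelector(Ident(A))
  //   import p.{A as B}   ~> ImportSelector(Ident(A), renamed = Ident(B))
  //   import p.given      ~> the `imported` ident has an empty name, so `isGiven` holds
  //   import p.*          ~> the `imported` name is nme.WILDCARD, so `isWildcard` holds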
- - /** It's a `given` selector */ - val isGiven: Boolean = imported.name.isEmpty - - /** It's a `given` or `_` selector */ - val isWildcard: Boolean = isGiven || imported.name == nme.WILDCARD - - /** The imported name, EmptyTermName if it's a given selector */ - val name: TermName = imported.name.asInstanceOf[TermName] - - /** The renamed part (which might be `_`), if present, or `name`, if missing */ - val rename: TermName = renamed match - case Ident(rename: TermName) => rename - case _ => name - } - - case class Number(digits: String, kind: NumberKind)(implicit @constructorOnly src: SourceFile) extends TermTree - - enum NumberKind { - case Whole(radix: Int) - case Decimal - case Floating - } - - /** {x1, ..., xN} T (only relevant under captureChecking) */ - case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree - - /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ - case class DependentTypeTree(tp: List[Symbol] -> Context ?-> Type)(implicit @constructorOnly src: SourceFile) extends Tree - - @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { - override def isEmpty: Boolean = true - } - - def WildcardTypeBoundsTree()(using src: SourceFile): TypeBoundsTree = TypeBoundsTree(EmptyTree, EmptyTree, EmptyTree) - object WildcardTypeBoundsTree: - def unapply(tree: untpd.Tree): Boolean = tree match - case TypeBoundsTree(EmptyTree, EmptyTree, _) => true - case _ => false - - - /** A block generated by the XML parser, only treated specially by - * `Positioned#checkPos` */ - class XMLBlock(stats: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends Block(stats, expr) - - /** An enum to control checking or filtering of patterns in GenFrom trees */ - enum GenCheckMode { - case Ignore // neither filter nor check since filtering was done before - case Check // check that pattern is irrefutable - case CheckAndFilter // both check and filter (transitional period starting with 3.2) - case FilterNow // filter out non-matching elements if we are not in 3.2 or later - case FilterAlways // filter out non-matching elements since pattern is prefixed by `case` - } - - // ----- Modifiers ----------------------------------------------------- - /** Mod is intended to record syntactic information about modifiers, it's - * NOT a replacement of FlagSet. - * - * For any query about semantic information, check `flags` instead. 
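 *
 *  A minimal sketch (illustrative; `mods` is a placeholder Modifiers value):
 *
 *  {{{
 *  mods.is(Private)                    // semantic query, via flags
 *  mods.hasMod(classOf[Mod.Private])   // syntactic query: was `private` written in source?
 *  }}}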
- */ - sealed abstract class Mod(val flags: FlagSet)(implicit @constructorOnly src: SourceFile) - extends Positioned - - object Mod { - case class Private()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Private) - - case class Protected()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Protected) - - case class Var()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Mutable) - - case class Implicit()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Implicit) - - case class Given()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Given) - - case class Erased()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Erased) - - case class Final()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Final) - - case class Sealed()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Sealed) - - case class Opaque()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Opaque) - - case class Open()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Open) - - case class Override()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Override) - - case class Abstract()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Abstract) - - case class Lazy()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Lazy) - - case class Inline()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Inline) - - case class Transparent()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Transparent) - - case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) - - /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ - case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) - } - - /** Modifiers and annotations for definitions - * - * @param flags The set flags - * @param privateWithin If a private or protected has is followed by a - * qualifier [q], the name q, "" as a typename otherwise. 
- * @param annotations The annotations preceding the modifiers - */ - case class Modifiers ( - flags: FlagSet = EmptyFlags, - privateWithin: TypeName = tpnme.EMPTY, - annotations: List[Tree] = Nil, - mods: List[Mod] = Nil) { - - def is(flag: Flag): Boolean = flags.is(flag) - def is(flag: Flag, butNot: FlagSet): Boolean = flags.is(flag, butNot = butNot) - def isOneOf(fs: FlagSet): Boolean = flags.isOneOf(fs) - def isOneOf(fs: FlagSet, butNot: FlagSet): Boolean = flags.isOneOf(fs, butNot = butNot) - def isAllOf(fc: FlagSet): Boolean = flags.isAllOf(fc) - - def | (fs: FlagSet): Modifiers = withFlags(flags | fs) - def & (fs: FlagSet): Modifiers = withFlags(flags & fs) - def &~(fs: FlagSet): Modifiers = withFlags(flags &~ fs) - - def toTypeFlags: Modifiers = withFlags(flags.toTypeFlags) - def toTermFlags: Modifiers = withFlags(flags.toTermFlags) - - def withFlags(flags: FlagSet): Modifiers = - if (this.flags == flags) this - else copy(flags = flags) - - def withoutFlags(flags: FlagSet): Modifiers = - if (this.isOneOf(flags)) - Modifiers(this.flags &~ flags, this.privateWithin, this.annotations, this.mods.filterNot(_.flags.isOneOf(flags))) - else this - - def withAddedMod(mod: Mod): Modifiers = - if (mods.exists(_ eq mod)) this - else withMods(mods :+ mod) - - private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean = - flags1.isEmpty || flags2.isEmpty - || flags1.isTermFlags && flags2.isTermFlags - || flags1.isTypeFlags && flags2.isTypeFlags - - /** Add `flags` to thos modifier set, checking that there are no type/term conflicts. - * If there are conflicts, issue an error and return the modifiers consisting of - * the added flags only. The reason to do it this way is that the added flags usually - * describe the core of a construct whereas the existing set are the modifiers - * given in the source. - */ - def withAddedFlags(flags: FlagSet, span: Span)(using Context): Modifiers = - if this.flags.isAllOf(flags) then this - else if compatible(this.flags, flags) then this | flags - else - val what = if flags.isTermFlags then "values" else "types" - report.error(em"${(flags & ModifierFlags).flagsString} $what cannot be ${this.flags.flagsString}", ctx.source.atSpan(span)) - Modifiers(flags) - - /** Modifiers with given list of Mods. It is checked that - * all modifiers are already accounted for in `flags` and `privateWithin`. 
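 *
 *  A minimal sketch (illustrative only; assumes a given SourceFile is in scope):
 *
 *  {{{
 *  // ok: the Private flag accounts for the syntactic Mod.Private entry
 *  Modifiers(Private).withMods(List(Mod.Private()))
 *  }}}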
- */ - def withMods(ms: List[Mod]): Modifiers = - if (mods eq ms) this - else { - if (ms.nonEmpty) - for (m <- ms) - assert(flags.isAllOf(m.flags) - || m.isInstanceOf[Mod.Private] && !privateWithin.isEmpty - || (m.isInstanceOf[Mod.Abstract] || m.isInstanceOf[Mod.Override]) && flags.is(AbsOverride), - s"unaccounted modifier: $m in $this with flags ${flags.flagsString} when adding $ms") - copy(mods = ms) - } - - def withAddedAnnotation(annot: Tree): Modifiers = - if (annotations.exists(_ eq annot)) this - else withAnnotations(annotations :+ annot) - - def withAnnotations(annots: List[Tree]): Modifiers = - if (annots eq annotations) this - else copy(annotations = annots) - - def withPrivateWithin(pw: TypeName): Modifiers = - if (pw.isEmpty) this - else copy(privateWithin = pw) - - def hasFlags: Boolean = flags != EmptyFlags - def hasAnnotations: Boolean = annotations.nonEmpty - def hasPrivateWithin: Boolean = privateWithin != tpnme.EMPTY - def hasMod(cls: Class[?]) = mods.exists(_.getClass == cls) - - private def isEnum = is(Enum, butNot = JavaDefined) - - def isEnumCase: Boolean = isEnum && is(Case) - def isEnumClass: Boolean = isEnum && !is(Case) - } - - @sharable val EmptyModifiers: Modifiers = Modifiers() - - // ----- TypeTrees that refer to other tree's symbols ------------------- - - /** A type tree that gets its type from some other tree's symbol. Enters the - * type tree in the References attachment of the `from` tree as a side effect. - */ - abstract class DerivedTypeTree(implicit @constructorOnly src: SourceFile) extends TypeTree { - - private var myWatched: Tree = EmptyTree - - /** The watched tree; used only for printing */ - def watched: Tree = myWatched - - /** Install the derived type tree as a dependency on `original` */ - def watching(original: DefTree): this.type = { - myWatched = original - val existing = original.attachmentOrElse(References, Nil) - original.putAttachment(References, this :: existing) - this - } - - /** Install the derived type tree as a dependency on `sym` */ - def watching(sym: Symbol): this.type = withAttachment(OriginalSymbol, sym) - - /** A hook to ensure that all necessary symbols are completed so that - * OriginalSymbol attachments are propagated to this tree - */ - def ensureCompletions(using Context): Unit = () - - /** The method that computes the tree with the derived type */ - def derivedTree(originalSym: Symbol)(using Context): tpd.Tree - } - - /** Property key containing TypeTrees whose type is computed - * from the symbol in this type. These type trees have marker trees - * TypeRefOfSym or InfoOfSym as their originals. - */ - val References: Property.Key[List[DerivedTypeTree]] = Property.Key() - - /** Property key for TypeTrees marked with TypeRefOfSym or InfoOfSym - * which contains the symbol of the original tree from which this - * TypeTree is derived. 
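 *
 *  A minimal sketch (illustrative; `dtt` is a placeholder DerivedTypeTree that was
 *  set up via `watching(sym)`):
 *
 *  {{{
 *  val original: Option[Symbol] = dtt.getAttachment(OriginalSymbol)
 *  }}}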
- */ - val OriginalSymbol: Property.Key[Symbol] = Property.Key() - - /** Property key for contextual Apply trees of the form `fn given arg` */ - val KindOfApply: Property.StickyKey[ApplyKind] = Property.StickyKey() - - // ------ Creation methods for untyped only ----------------- - - def Ident(name: Name)(implicit src: SourceFile): Ident = new Ident(name) - def SearchFailureIdent(name: Name, explanation: -> String)(implicit src: SourceFile): SearchFailureIdent = new SearchFailureIdent(name, explanation) - def Select(qualifier: Tree, name: Name)(implicit src: SourceFile): Select = new Select(qualifier, name) - def SelectWithSig(qualifier: Tree, name: Name, sig: Signature)(implicit src: SourceFile): Select = new SelectWithSig(qualifier, name, sig) - def This(qual: Ident)(implicit src: SourceFile): This = new This(qual) - def Super(qual: Tree, mix: Ident)(implicit src: SourceFile): Super = new Super(qual, mix) - def Apply(fun: Tree, args: List[Tree])(implicit src: SourceFile): Apply = new Apply(fun, args) - def TypeApply(fun: Tree, args: List[Tree])(implicit src: SourceFile): TypeApply = new TypeApply(fun, args) - def Literal(const: Constant)(implicit src: SourceFile): Literal = new Literal(const) - def New(tpt: Tree)(implicit src: SourceFile): New = new New(tpt) - def Typed(expr: Tree, tpt: Tree)(implicit src: SourceFile): Typed = new Typed(expr, tpt) - def NamedArg(name: Name, arg: Tree)(implicit src: SourceFile): NamedArg = new NamedArg(name, arg) - def Assign(lhs: Tree, rhs: Tree)(implicit src: SourceFile): Assign = new Assign(lhs, rhs) - def Block(stats: List[Tree], expr: Tree)(implicit src: SourceFile): Block = new Block(stats, expr) - def If(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new If(cond, thenp, elsep) - def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new InlineIf(cond, thenp, elsep) - def Closure(env: List[Tree], meth: Tree, tpt: Tree)(implicit src: SourceFile): Closure = new Closure(env, meth, tpt) - def Match(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new Match(selector, cases) - def InlineMatch(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new InlineMatch(selector, cases) - def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit src: SourceFile): CaseDef = new CaseDef(pat, guard, body) - def Labeled(bind: Bind, expr: Tree)(implicit src: SourceFile): Labeled = new Labeled(bind, expr) - def Return(expr: Tree, from: Tree)(implicit src: SourceFile): Return = new Return(expr, from) - def WhileDo(cond: Tree, body: Tree)(implicit src: SourceFile): WhileDo = new WhileDo(cond, body) - def Try(expr: Tree, cases: List[CaseDef], finalizer: Tree)(implicit src: SourceFile): Try = new Try(expr, cases, finalizer) - def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): SeqLiteral = new SeqLiteral(elems, elemtpt) - def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt) - def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit src: SourceFile): Inlined = new Inlined(call, bindings, expansion) - def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() - def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() - def SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) - def RefinedTypeTree(tpt: Tree, refinements: List[Tree])(implicit src: SourceFile): RefinedTypeTree = 
new RefinedTypeTree(tpt, refinements) - def AppliedTypeTree(tpt: Tree, args: List[Tree])(implicit src: SourceFile): AppliedTypeTree = new AppliedTypeTree(tpt, args) - def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(implicit src: SourceFile): LambdaTypeTree = new LambdaTypeTree(tparams, body) - def TermLambdaTypeTree(params: List[ValDef], body: Tree)(implicit src: SourceFile): TermLambdaTypeTree = new TermLambdaTypeTree(params, body) - def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): MatchTypeTree = new MatchTypeTree(bound, selector, cases) - def ByNameTypeTree(result: Tree)(implicit src: SourceFile): ByNameTypeTree = new ByNameTypeTree(result) - def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(implicit src: SourceFile): TypeBoundsTree = new TypeBoundsTree(lo, hi, alias) - def Bind(name: Name, body: Tree)(implicit src: SourceFile): Bind = new Bind(name, body) - def Alternative(trees: List[Tree])(implicit src: SourceFile): Alternative = new Alternative(trees) - def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree])(implicit src: SourceFile): UnApply = new UnApply(fun, implicits, patterns) - def ValDef(name: TermName, tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): ValDef = new ValDef(name, tpt, rhs) - def DefDef(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): DefDef = new DefDef(name, paramss, tpt, rhs) - def TypeDef(name: TypeName, rhs: Tree)(implicit src: SourceFile): TypeDef = new TypeDef(name, rhs) - def Template(constr: DefDef, parents: List[Tree], derived: List[Tree], self: ValDef, body: LazyTreeList)(implicit src: SourceFile): Template = - if (derived.isEmpty) new Template(constr, parents, self, body) - else new DerivingTemplate(constr, parents ++ derived, self, body, derived.length) - def Import(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Import = new Import(expr, selectors) - def Export(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Export = new Export(expr, selectors) - def PackageDef(pid: RefTree, stats: List[Tree])(implicit src: SourceFile): PackageDef = new PackageDef(pid, stats) - def Annotated(arg: Tree, annot: Tree)(implicit src: SourceFile): Annotated = new Annotated(arg, annot) - def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(implicit src: SourceFile): Hole = new Hole(isTermHole, idx, args, content, tpt) - - // ------ Additional creation methods for untyped only ----------------- - - /** new T(args1)...(args_n) - * ==> - * new T.[Ts](args1)...(args_n) - * - * where `Ts` are the class type arguments of `T` or its class type alias. - * Note: we also keep any type arguments as parts of `T`. This is necessary to allow - * navigation into these arguments from the IDE, and to do the right thing in - * PrepareInlineable. - */ - def New(tpt: Tree, argss: List[List[Tree]])(using Context): Tree = - ensureApplied(argss.foldLeft(makeNew(tpt))(Apply(_, _))) - - /** A new expression with constrictor and possibly type arguments. See - * `New(tpt, argss)` for details. 
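 *
 *  A minimal usage sketch (illustrative; `appliedC` is a placeholder
 *  AppliedTypeTree for `C[Int]`):
 *
 *  {{{
 *  // builds roughly TypeApply(Select(New(C), nme.CONSTRUCTOR), List(Int));
 *  // argument lists are applied by the caller, see `New(tpt, argss)`
 *  val nu = makeNew(appliedC)
 *  }}}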
- */ - def makeNew(tpt: Tree)(using Context): Tree = { - val (tycon, targs) = tpt match { - case AppliedTypeTree(tycon, targs) => - (tycon, targs) - case TypedSplice(tpt1: tpd.Tree) => - val argTypes = tpt1.tpe.dealias.argTypesLo - def wrap(tpe: Type) = TypeTree(tpe).withSpan(tpt.span) - (tpt, argTypes.map(wrap)) - case _ => - (tpt, Nil) - } - val nu: Tree = Select(New(tycon), nme.CONSTRUCTOR) - if (targs.nonEmpty) TypeApply(nu, targs) else nu - } - - def Block(stat: Tree, expr: Tree)(implicit src: SourceFile): Block = - Block(stat :: Nil, expr) - - def Apply(fn: Tree, arg: Tree)(implicit src: SourceFile): Apply = - Apply(fn, arg :: Nil) - - def ensureApplied(tpt: Tree)(implicit src: SourceFile): Tree = tpt match { - case _: Apply => tpt - case _ => Apply(tpt, Nil) - } - - def AppliedTypeTree(tpt: Tree, arg: Tree)(implicit src: SourceFile): AppliedTypeTree = - AppliedTypeTree(tpt, arg :: Nil) - - def TypeTree(tpe: Type)(using Context): TypedSplice = - TypedSplice(TypeTree().withTypeUnchecked(tpe)) - - def InferredTypeTree(tpe: Type)(using Context): TypedSplice = - TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe)) - - def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())) - - def ref(tp: NamedType)(using Context): Tree = - TypedSplice(tpd.ref(tp)) - - def ref(sym: Symbol)(using Context): Tree = - TypedSplice(tpd.ref(sym)) - - def rawRef(tp: NamedType)(using Context): Tree = - if tp.typeParams.isEmpty then ref(tp) - else AppliedTypeTree(ref(tp), tp.typeParams.map(_ => WildcardTypeBoundsTree())) - - def rootDot(name: Name)(implicit src: SourceFile): Select = Select(Ident(nme.ROOTPKG), name) - def scalaDot(name: Name)(implicit src: SourceFile): Select = Select(rootDot(nme.scala), name) - def scalaAnnotationDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.annotation), name) - def scalaRuntimeDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.runtime), name) - def scalaUnit(implicit src: SourceFile): Select = scalaDot(tpnme.Unit) - def scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) - def javaDotLangDot(name: Name)(implicit src: SourceFile): Select = Select(Select(Ident(nme.java), nme.lang), name) - - def captureRoot(using Context): Select = - Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) - - def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = - DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) - - def emptyConstructor(using Context): DefDef = - makeConstructor(Nil, Nil) - - def makeSelfDef(name: TermName, tpt: Tree)(using Context): ValDef = - ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal) - - def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match { - case t :: Nil => Parens(t) - case _ => Tuple(ts) - } - - def makeTuple(ts: List[Tree])(using Context): Tree = ts match { - case t :: Nil => t - case _ => Tuple(ts) - } - - def makeAndType(left: Tree, right: Tree)(using Context): AppliedTypeTree = - AppliedTypeTree(ref(defn.andType.typeRef), left :: right :: Nil) - - def makeParameter(pname: TermName, tpe: Tree, mods: Modifiers, isBackquoted: Boolean = false)(using Context): ValDef = { - val vdef = ValDef(pname, tpe, EmptyTree) - if (isBackquoted) vdef.pushAttachment(Backquoted, ()) - vdef.withMods(mods | Param) - } - - def makeSyntheticParameter(n: Int = 1, tpt: Tree | Null = null, flags: FlagSet = SyntheticTermParam)(using Context): ValDef = - ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, 
EmptyTree) - .withFlags(flags) - - def lambdaAbstract(params: List[ValDef] | List[TypeDef], tpt: Tree)(using Context): Tree = - params match - case Nil => tpt - case (vd: ValDef) :: _ => TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], tpt) - case _ => LambdaTypeTree(params.asInstanceOf[List[TypeDef]], tpt) - - def lambdaAbstractAll(paramss: List[List[ValDef] | List[TypeDef]], tpt: Tree)(using Context): Tree = - paramss.foldRight(tpt)(lambdaAbstract) - - /** A reference to given definition. If definition is a repeated - * parameter, the reference will be a repeated argument. - */ - def refOfDef(tree: MemberDef)(using Context): Tree = tree match { - case ValDef(_, PostfixOp(_, Ident(tpnme.raw.STAR)), _) => repeated(Ident(tree.name)) - case _ => Ident(tree.name) - } - - /** A repeated argument such as `arg: _*` */ - def repeated(arg: Tree)(using Context): Typed = Typed(arg, Ident(tpnme.WILDCARD_STAR)) - - -// --------- Copier/Transformer/Accumulator classes for untyped trees ----- - - def localCtx(tree: Tree)(using Context): Context = ctx - - override val cpy: UntypedTreeCopier = UntypedTreeCopier() - - class UntypedTreeCopier extends TreeCopier { - - def postProcess(tree: Tree, copied: Tree): copied.ThisTree[Untyped] = - copied.asInstanceOf[copied.ThisTree[Untyped]] - - def postProcess(tree: Tree, copied: MemberDef): copied.ThisTree[Untyped] = { - tree match { - case tree: MemberDef => copied.withMods(tree.rawMods) - case _ => copied - } - }.asInstanceOf[copied.ThisTree[Untyped]] - - def ModuleDef(tree: Tree)(name: TermName, impl: Template)(using Context): ModuleDef = tree match { - case tree: ModuleDef if (name eq tree.name) && (impl eq tree.impl) => tree - case _ => finalize(tree, untpd.ModuleDef(name, impl)(tree.source)) - } - def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree)(using Context): TermTree = tree match { - case tree: ParsedTry if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree - case _ => finalize(tree, untpd.ParsedTry(expr, handler, finalizer)(tree.source)) - } - def SymbolLit(tree: Tree)(str: String)(using Context): TermTree = tree match { - case tree: SymbolLit if str == tree.str => tree - case _ => finalize(tree, untpd.SymbolLit(str)(tree.source)) - } - def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree])(using Context): TermTree = tree match { - case tree: InterpolatedString if (id eq tree.id) && (segments eq tree.segments) => tree - case _ => finalize(tree, untpd.InterpolatedString(id, segments)(tree.source)) - } - def Function(tree: Tree)(args: List[Tree], body: Tree)(using Context): Tree = tree match { - case tree: Function if (args eq tree.args) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.Function(args, body)(tree.source)) - } - def PolyFunction(tree: Tree)(targs: List[Tree], body: Tree)(using Context): Tree = tree match { - case tree: PolyFunction if (targs eq tree.targs) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.PolyFunction(targs, body)(tree.source)) - } - def InfixOp(tree: Tree)(left: Tree, op: Ident, right: Tree)(using Context): Tree = tree match { - case tree: InfixOp if (left eq tree.left) && (op eq tree.op) && (right eq tree.right) => tree - case _ => finalize(tree, untpd.InfixOp(left, op, right)(tree.source)) - } - def PostfixOp(tree: Tree)(od: Tree, op: Ident)(using Context): Tree = tree match { - case tree: PostfixOp if (od eq tree.od) && (op eq tree.op) => tree - case _ => finalize(tree, untpd.PostfixOp(od, op)(tree.source)) - } - 
def PrefixOp(tree: Tree)(op: Ident, od: Tree)(using Context): Tree = tree match { - case tree: PrefixOp if (op eq tree.op) && (od eq tree.od) => tree - case _ => finalize(tree, untpd.PrefixOp(op, od)(tree.source)) - } - def Parens(tree: Tree)(t: Tree)(using Context): ProxyTree = tree match { - case tree: Parens if t eq tree.t => tree - case _ => finalize(tree, untpd.Parens(t)(tree.source)) - } - def Tuple(tree: Tree)(trees: List[Tree])(using Context): Tree = tree match { - case tree: Tuple if trees eq tree.trees => tree - case _ => finalize(tree, untpd.Tuple(trees)(tree.source)) - } - def Throw(tree: Tree)(expr: Tree)(using Context): TermTree = tree match { - case tree: Throw if expr eq tree.expr => tree - case _ => finalize(tree, untpd.Throw(expr)(tree.source)) - } - def Quote(tree: Tree)(quoted: Tree)(using Context): Tree = tree match { - case tree: Quote if quoted eq tree.quoted => tree - case _ => finalize(tree, untpd.Quote(quoted)(tree.source)) - } - def Splice(tree: Tree)(expr: Tree)(using Context): Tree = tree match { - case tree: Splice if expr eq tree.expr => tree - case _ => finalize(tree, untpd.Splice(expr)(tree.source)) - } - def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match { - case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree - case _ => finalize(tree, untpd.ForYield(enums, expr)(tree.source)) - } - def ForDo(tree: Tree)(enums: List[Tree], body: Tree)(using Context): TermTree = tree match { - case tree: ForDo if (enums eq tree.enums) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.ForDo(enums, body)(tree.source)) - } - def GenFrom(tree: Tree)(pat: Tree, expr: Tree, checkMode: GenCheckMode)(using Context): Tree = tree match { - case tree: GenFrom if (pat eq tree.pat) && (expr eq tree.expr) && (checkMode == tree.checkMode) => tree - case _ => finalize(tree, untpd.GenFrom(pat, expr, checkMode)(tree.source)) - } - def GenAlias(tree: Tree)(pat: Tree, expr: Tree)(using Context): Tree = tree match { - case tree: GenAlias if (pat eq tree.pat) && (expr eq tree.expr) => tree - case _ => finalize(tree, untpd.GenAlias(pat, expr)(tree.source)) - } - def ContextBounds(tree: Tree)(bounds: TypeBoundsTree, cxBounds: List[Tree])(using Context): TypTree = tree match { - case tree: ContextBounds if (bounds eq tree.bounds) && (cxBounds eq tree.cxBounds) => tree - case _ => finalize(tree, untpd.ContextBounds(bounds, cxBounds)(tree.source)) - } - def PatDef(tree: Tree)(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(using Context): Tree = tree match { - case tree: PatDef if (mods eq tree.mods) && (pats eq tree.pats) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree - case _ => finalize(tree, untpd.PatDef(mods, pats, tpt, rhs)(tree.source)) - } - def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match - case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree - case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) - def Into(tree: Tree)(tpt: Tree)(using Context): Tree = tree match - case tree: Into if tpt eq tree.tpt => tree - case _ => finalize(tree, untpd.Into(tpt)(tree.source)) - def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { - case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree - case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) - } - 
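All of the copier methods above share one discipline: compare each child against the corresponding child of the original tree by reference (`eq`), hand back the original tree untouched when nothing changed, and only otherwise build a fresh node and pass it to `finalize`, whose `postProcess` step (shown earlier for `MemberDef`) reattaches information such as the original modifiers. Below is a minimal, self-contained sketch of that reuse-if-unchanged pattern on a toy AST; the names `Node`, `Lit`, `Add`, `copyAdd` and `mapLits` are illustrative only and are not part of dotty.

sealed trait Node
case class Lit(value: Int) extends Node
case class Add(left: Node, right: Node) extends Node

// Copier: rebuild an Add only when one of its children actually changed,
// otherwise hand back the original node so its identity is preserved.
def copyAdd(tree: Add)(left: Node, right: Node): Add =
  if (left eq tree.left) && (right eq tree.right) then tree
  else Add(left, right)

// Transformer in the style of the TreeMap classes further below: recurse into
// the children first, then let the copier decide whether a new node is needed.
def mapLits(tree: Node)(f: Int => Int): Node = tree match
  case add @ Add(l, r) =>
    copyAdd(add)(mapLits(l)(f), mapLits(r)(f))
  case Lit(v) =>
    val v1 = f(v)
    if v1 == v then tree else Lit(v1)

@main def copierDemo(): Unit =
  val t = Add(Lit(1), Add(Lit(2), Lit(3)))
  assert(mapLits(t)(identity) eq t)   // nothing changed: the whole tree is reused
  assert(mapLits(t)(_ + 1) ne t)      // a leaf changed: the affected spine is rebuilt

The payoff of this discipline is structural sharing: a transformation that changes nothing allocates nothing, and untouched subtrees keep their identity, so whatever is attached to them is preserved.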
def Number(tree: Tree)(digits: String, kind: NumberKind)(using Context): Tree = tree match { - case tree: Number if (digits == tree.digits) && (kind == tree.kind) => tree - case _ => finalize(tree, untpd.Number(digits, kind)) - } - def CapturingTypeTree(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match - case tree: CapturingTypeTree if (refs eq tree.refs) && (parent eq tree.parent) => tree - case _ => finalize(tree, untpd.CapturingTypeTree(refs, parent)) - - def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree match { - case tree: TypedSplice if splice `eq` tree.splice => tree - case _ => finalize(tree, untpd.TypedSplice(splice)(using ctx)) - } - def MacroTree(tree: Tree)(expr: Tree)(using Context): Tree = tree match { - case tree: MacroTree if expr `eq` tree.expr => tree - case _ => finalize(tree, untpd.MacroTree(expr)(tree.source)) - } - } - - abstract class UntypedTreeMap(cpy: UntypedTreeCopier = untpd.cpy) extends TreeMap(cpy) { - override def transformMoreCases(tree: Tree)(using Context): Tree = tree match { - case ModuleDef(name, impl) => - cpy.ModuleDef(tree)(name, transformSub(impl)) - case tree: DerivingTemplate => - cpy.Template(tree)(transformSub(tree.constr), transform(tree.parents), - transform(tree.derived), transformSub(tree.self), transformStats(tree.body, tree.symbol)) - case ParsedTry(expr, handler, finalizer) => - cpy.ParsedTry(tree)(transform(expr), transform(handler), transform(finalizer)) - case SymbolLit(str) => - cpy.SymbolLit(tree)(str) - case InterpolatedString(id, segments) => - cpy.InterpolatedString(tree)(id, segments.mapConserve(transform)) - case Function(args, body) => - cpy.Function(tree)(transform(args), transform(body)) - case PolyFunction(targs, body) => - cpy.PolyFunction(tree)(transform(targs), transform(body)) - case InfixOp(left, op, right) => - cpy.InfixOp(tree)(transform(left), op, transform(right)) - case PostfixOp(od, op) => - cpy.PostfixOp(tree)(transform(od), op) - case PrefixOp(op, od) => - cpy.PrefixOp(tree)(op, transform(od)) - case Parens(t) => - cpy.Parens(tree)(transform(t)) - case Tuple(trees) => - cpy.Tuple(tree)(transform(trees)) - case Throw(expr) => - cpy.Throw(tree)(transform(expr)) - case Quote(t) => - cpy.Quote(tree)(transform(t)) - case Splice(expr) => - cpy.Splice(tree)(transform(expr)) - case ForYield(enums, expr) => - cpy.ForYield(tree)(transform(enums), transform(expr)) - case ForDo(enums, body) => - cpy.ForDo(tree)(transform(enums), transform(body)) - case GenFrom(pat, expr, checkMode) => - cpy.GenFrom(tree)(transform(pat), transform(expr), checkMode) - case GenAlias(pat, expr) => - cpy.GenAlias(tree)(transform(pat), transform(expr)) - case ContextBounds(bounds, cxBounds) => - cpy.ContextBounds(tree)(transformSub(bounds), transform(cxBounds)) - case PatDef(mods, pats, tpt, rhs) => - cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) - case ExtMethods(paramss, methods) => - cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) - case Into(tpt) => - cpy.Into(tree)(transform(tpt)) - case ImportSelector(imported, renamed, bound) => - cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) - case Number(_, _) | TypedSplice(_) => - tree - case MacroTree(expr) => - cpy.MacroTree(tree)(transform(expr)) - case CapturingTypeTree(refs, parent) => - cpy.CapturingTypeTree(tree)(transform(refs), transform(parent)) - case _ => - super.transformMoreCases(tree) - } - } - - abstract class UntypedTreeAccumulator[X] extends 
TreeAccumulator[X] { - self: UntypedTreeAccumulator[X] @retains(caps.cap) => - override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { - case ModuleDef(name, impl) => - this(x, impl) - case tree: DerivingTemplate => - this(this(this(this(this(x, tree.constr), tree.parents), tree.derived), tree.self), tree.body) - case ParsedTry(expr, handler, finalizer) => - this(this(this(x, expr), handler), finalizer) - case SymbolLit(str) => - x - case InterpolatedString(id, segments) => - this(x, segments) - case Function(args, body) => - this(this(x, args), body) - case PolyFunction(targs, body) => - this(this(x, targs), body) - case InfixOp(left, op, right) => - this(this(this(x, left), op), right) - case PostfixOp(od, op) => - this(this(x, od), op) - case PrefixOp(op, od) => - this(this(x, op), od) - case Parens(t) => - this(x, t) - case Tuple(trees) => - this(x, trees) - case Throw(expr) => - this(x, expr) - case Quote(t) => - this(x, t) - case Splice(expr) => - this(x, expr) - case ForYield(enums, expr) => - this(this(x, enums), expr) - case ForDo(enums, body) => - this(this(x, enums), body) - case GenFrom(pat, expr, _) => - this(this(x, pat), expr) - case GenAlias(pat, expr) => - this(this(x, pat), expr) - case ContextBounds(bounds, cxBounds) => - this(this(x, bounds), cxBounds) - case PatDef(mods, pats, tpt, rhs) => - this(this(this(x, pats), tpt), rhs) - case ExtMethods(paramss, methods) => - this(paramss.foldLeft(x)(apply), methods) - case Into(tpt) => - this(x, tpt) - case ImportSelector(imported, renamed, bound) => - this(this(this(x, imported), renamed), bound) - case Number(_, _) => - x - case TypedSplice(splice) => - this(x, splice) - case MacroTree(expr) => - this(x, expr) - case CapturingTypeTree(refs, parent) => - this(this(x, refs), parent) - case _ => - super.foldMoreCases(x, tree) - } - } - - abstract class UntypedTreeTraverser extends UntypedTreeAccumulator[Unit] { - def traverse(tree: Tree)(using Context): Unit - def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) - protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) - } - - /** Fold `f` over all tree nodes, in depth-first, prefix order */ - class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X] { - def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) - } - - /** Is there a subtree of this tree that satisfies predicate `p`? 
*/ - extension (tree: Tree) def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { - val acc = new UntypedTreeAccumulator[Boolean] { - def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) - } - acc(false, tree) - } - - protected def FunProto(args: List[Tree], resType: Type)(using Context) = - ProtoTypes.FunProto(args, resType)(ctx.typer, ApplyKind.Regular) -} diff --git a/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala b/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala deleted file mode 100644 index 56b3f5ba5047..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala +++ /dev/null @@ -1,19 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.* - -/** A one-element cache for the boxed version of an unboxed capturing type */ -class BoxedTypeCache: - private var boxed: Type = compiletime.uninitialized - private var unboxed: Type = NoType - - def apply(tp: AnnotatedType)(using Context): Type = - if tp ne unboxed then - unboxed = tp - val CapturingType(parent, refs) = tp: @unchecked - boxed = CapturingType(parent, refs, boxed = true) - boxed -end BoxedTypeCache \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala deleted file mode 100644 index 67222f07efbb..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala +++ /dev/null @@ -1,77 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.*, Annotations.* -import ast.Trees.* -import ast.{tpd, untpd} -import Decorators.* -import config.Printers.capt -import printing.Printer -import printing.Texts.Text -import annotation.retains - -/** An annotation representing a capture set and whether it is boxed. - * It simulates a normal @retains annotation except that it is more efficient, - * supports variables as capture sets, and adds a `boxed` flag. - * These annotations are created during capture checking. Before that - * there are only regular @retains and @retainsByName annotations. 
- * @param refs the capture set - * @param boxed whether the type carrying the annotation is boxed - * @param cls the underlying class (either annotation.retains or annotation.retainsByName) - */ -case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) extends Annotation: - import CaptureAnnotation.* - import tpd.* - - /** A cache for boxed version of a capturing type with this annotation */ - val boxedType = BoxedTypeCache() - - /** Reconstitute annotation tree from capture set */ - override def tree(using Context) = - val elems = refs.elems.toList.map { - case cr: TermRef => ref(cr) - case cr: TermParamRef => untpd.Ident(cr.paramName).withType(cr) - case cr: ThisType => This(cr.cls) - } - val arg = repeated(elems, TypeTree(defn.AnyType)) - New(symbol.typeRef, arg :: Nil) - - override def symbol(using Context) = cls - - override def derivedAnnotation(tree: Tree)(using Context): Annotation = this - - def derivedAnnotation(refs: CaptureSet, boxed: Boolean)(using Context): Annotation = - if (this.refs eq refs) && (this.boxed == boxed) then this - else CaptureAnnotation(refs, boxed)(cls) - - override def sameAnnotation(that: Annotation)(using Context): Boolean = that match - case CaptureAnnotation(refs, boxed) => - this.refs == refs && this.boxed == boxed && this.symbol == that.symbol - case _ => false - - override def mapWith(tm: TypeMap @retains(caps.cap))(using Context) = - val elems = refs.elems.toList - val elems1 = elems.mapConserve(tm) - if elems1 eq elems then this - else if elems1.forall(_.isInstanceOf[CaptureRef]) - then derivedAnnotation(CaptureSet(elems1.asInstanceOf[List[CaptureRef]]*), boxed) - else EmptyAnnotation - - override def refersToParamOf(tl: TermLambda)(using Context): Boolean = - refs.elems.exists { - case TermParamRef(tl1, _) => tl eq tl1 - case _ => false - } - - override def toText(printer: Printer): Text = refs.toText(printer) - - override def hash: Int = - (refs.hashCode << 1) | (if boxed then 1 else 0) - - override def eql(that: Annotation) = that match - case that: CaptureAnnotation => (this.refs eq that.refs) && (this.boxed == that.boxed) - case _ => false - -end CaptureAnnotation diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala deleted file mode 100644 index 0ede1825e611..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala +++ /dev/null @@ -1,256 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* -import ast.{tpd, untpd} -import Decorators.*, NameOps.* -import config.Printers.capt -import util.Property.Key -import tpd.* -import config.Feature - -private val Captures: Key[CaptureSet] = Key() -private val BoxedType: Key[BoxedTypeCache] = Key() - -/** The arguments of a @retains or @retainsByName annotation */ -private[cc] def retainedElems(tree: Tree)(using Context): List[Tree] = tree match - case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems - case _ => Nil - -/** An exception thrown if a @retains argument is not syntactically a CaptureRef */ -class IllegalCaptureRef(tpe: Type) extends Exception - -extension (tree: Tree) - - /** Map tree with CaptureRef type to its type, throw IllegalCaptureRef otherwise */ - def toCaptureRef(using Context): CaptureRef = tree.tpe match - case ref: CaptureRef => ref - case tpe => throw IllegalCaptureRef(tpe) - - /** Convert a @retains or @retainsByName annotation tree to the capture set it represents. 
- * For efficience, the result is cached as an Attachment on the tree. - */ - def toCaptureSet(using Context): CaptureSet = - tree.getAttachment(Captures) match - case Some(refs) => refs - case None => - val refs = CaptureSet(retainedElems(tree).map(_.toCaptureRef)*) - .showing(i"toCaptureSet $tree --> $result", capt) - tree.putAttachment(Captures, refs) - refs - - /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of - * a by name parameter type, turning the latter into an impure by name parameter type. - */ - def adaptByNameArgUnderPureFuns(using Context): Tree = - if Feature.pureFunsEnabledSomewhere then - val rbn = defn.RetainsByNameAnnot - Annotated(tree, - New(rbn.typeRef).select(rbn.primaryConstructor).appliedTo( - Typed( - SeqLiteral(ref(defn.captureRoot) :: Nil, TypeTree(defn.AnyType)), - TypeTree(defn.RepeatedParamType.appliedTo(defn.AnyType)) - ) - ) - ) - else tree - -extension (tp: Type) - - /** @pre `tp` is a CapturingType */ - def derivedCapturingType(parent: Type, refs: CaptureSet)(using Context): Type = tp match - case tp @ CapturingType(p, r) => - if (parent eq p) && (refs eq r) then tp - else CapturingType(parent, refs, tp.isBoxed) - - /** If this is a unboxed capturing type with nonempty capture set, its boxed version. - * Or, if type is a TypeBounds of capturing types, the version where the bounds are boxed. - * The identity for all other types. - */ - def boxed(using Context): Type = tp.dealias match - case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => - tp.annot match - case ann: CaptureAnnotation => - ann.boxedType(tp) - case ann => - ann.tree.getAttachment(BoxedType) match - case None => ann.tree.putAttachment(BoxedType, BoxedTypeCache()) - case _ => - ann.tree.attachment(BoxedType)(tp) - case tp: RealTypeBounds => - tp.derivedTypeBounds(tp.lo.boxed, tp.hi.boxed) - case _ => - tp - - /** If `sym` is a type parameter, the boxed version of `tp`, otherwise `tp` */ - def boxedIfTypeParam(sym: Symbol)(using Context) = - if sym.is(TypeParam) then tp.boxed else tp - - /** The boxed version of `tp`, unless `tycon` is a function symbol */ - def boxedUnlessFun(tycon: Type)(using Context) = - if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionSymbol(tycon.typeSymbol) - then tp - else tp.boxed - - /** The capture set consisting of all top-level captures of `tp` that appear under a box. - * Unlike for `boxed` this also considers parents of capture types, unions and - * intersections, and type proxies other than abstract types. - */ - def boxedCaptureSet(using Context): CaptureSet = - def getBoxed(tp: Type): CaptureSet = tp match - case tp @ CapturingType(parent, refs) => - val pcs = getBoxed(parent) - if tp.isBoxed then refs ++ pcs else pcs - case tp: TypeRef if tp.symbol.isAbstractType => CaptureSet.empty - case tp: TypeProxy => getBoxed(tp.superType) - case tp: AndType => getBoxed(tp.tp1) ** getBoxed(tp.tp2) - case tp: OrType => getBoxed(tp.tp1) ++ getBoxed(tp.tp2) - case _ => CaptureSet.empty - getBoxed(tp) - - /** Is the boxedCaptureSet of this type nonempty? */ - def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty - - /** If this type is a capturing type, the version with boxed statues as given by `boxed`. - * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing - * type that captures the TermRef. 
- */ - def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match - case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => - val refs1 = tp match - case ref: CaptureRef if ref.isTracked => ref.singletonCaptureSet - case _ => refs - CapturingType(parent, refs1, boxed) - case _ => - tp - - /** Map capturing type to their parents. Capturing types accessible - * via dealising are also stripped. - */ - def stripCapturing(using Context): Type = tp.dealiasKeepAnnots match - case CapturingType(parent, _) => - parent.stripCapturing - case atd @ AnnotatedType(parent, annot) => - atd.derivedAnnotatedType(parent.stripCapturing, annot) - case _ => - tp - - /** Under pureFunctions, map regular function type to impure function type - */ - def adaptFunctionTypeUnderPureFuns(using Context): Type = tp match - case AppliedType(fn, args) - if Feature.pureFunsEnabledSomewhere && defn.isFunctionClass(fn.typeSymbol) => - val fname = fn.typeSymbol.name - defn.FunctionType( - fname.functionArity, - isContextual = fname.isContextFunction, - isErased = fname.isErasedFunction, - isImpure = true).appliedTo(args) - case _ => - tp - - /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of - * a by name parameter type, turning the latter into an impure by name parameter type. - */ - def adaptByNameArgUnderPureFuns(using Context): Type = - if Feature.pureFunsEnabledSomewhere then - AnnotatedType(tp, - CaptureAnnotation(CaptureSet.universal, boxed = false)(defn.RetainsByNameAnnot)) - else - tp - - def isCapturingType(using Context): Boolean = - tp match - case CapturingType(_, _) => true - case _ => false - - /** Is type known to be always pure by its class structure, - * so that adding a capture set to it would not make sense? - */ - def isAlwaysPure(using Context): Boolean = tp.dealias match - case tp: (TypeRef | AppliedType) => - val sym = tp.typeSymbol - if sym.isClass then sym.isPureClass - else tp.superType.isAlwaysPure - case CapturingType(parent, refs) => - parent.isAlwaysPure || refs.isAlwaysEmpty - case tp: TypeProxy => - tp.superType.isAlwaysPure - case tp: AndType => - tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure - case tp: OrType => - tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure - case _ => - false - -extension (cls: ClassSymbol) - - def pureBaseClass(using Context): Option[Symbol] = - cls.baseClasses.find(bc => - defn.pureBaseClasses.contains(bc) - || { - val selfType = bc.givenSelfType - selfType.exists && selfType.captureSet.isAlwaysEmpty - }) - -extension (sym: Symbol) - - /** A class is pure if: - * - one its base types has an explicitly declared self type with an empty capture set - * - or it is a value class - * - or it is an exception - * - or it is one of Nothing, Null, or String - */ - def isPureClass(using Context): Boolean = sym match - case cls: ClassSymbol => - cls.pureBaseClass.isDefined || defn.pureSimpleClasses.contains(cls) - case _ => - false - - /** Does this symbol allow results carrying the universal capability? - * Currently this is true only for function type applies (since their - * results are unboxed) and `erasedValue` since this function is magic in - * that is allows to conjure global capabilies from nothing (aside: can we find a - * more controlled way to achieve this?). - * But it could be generalized to other functions that so that they can take capability - * classes as arguments. 
- */ - def allowsRootCapture(using Context): Boolean = - sym == defn.Compiletime_erasedValue - || defn.isFunctionClass(sym.maybeOwner) - - /** When applying `sym`, would the result type be unboxed? - * This is the case if the result type contains a top-level reference to an enclosing - * class or method type parameter and the method does not allow root capture. - * If the type parameter is instantiated to a boxed type, that type would - * have to be unboxed in the method's result. - */ - def unboxesResult(using Context): Boolean = - def containsEnclTypeParam(tp: Type): Boolean = tp.strippedDealias match - case tp @ TypeRef(pre: ThisType, _) => tp.symbol.is(Param) - case tp: TypeParamRef => true - case tp: AndOrType => containsEnclTypeParam(tp.tp1) || containsEnclTypeParam(tp.tp2) - case tp: RefinedType => containsEnclTypeParam(tp.parent) || containsEnclTypeParam(tp.refinedInfo) - case _ => false - containsEnclTypeParam(sym.info.finalResultType) - && !sym.allowsRootCapture - && sym != defn.Caps_unsafeBox - && sym != defn.Caps_unsafeUnbox - -extension (tp: AnnotatedType) - /** Is this a boxed capturing type? */ - def isBoxed(using Context): Boolean = tp.annot match - case ann: CaptureAnnotation => ann.boxed - case _ => false - -extension (ts: List[Type]) - /** Equivalent to ts.mapconserve(_.boxedUnlessFun(tycon)) but more efficient where - * it is the identity. - */ - def boxedUnlessFun(tycon: Type)(using Context) = - if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionClass(tycon.typeSymbol) - then ts - else ts.mapconserve(_.boxed) - diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala deleted file mode 100644 index 2072b43089fb..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala +++ /dev/null @@ -1,902 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Flags.*, Contexts.*, Decorators.* -import config.Printers.capt -import Annotations.Annotation -import annotation.threadUnsafe -import annotation.constructorOnly -import annotation.internal.sharable -import reporting.trace -import printing.{Showable, Printer} -import printing.Texts.* -import util.{SimpleIdentitySet, Property} -import util.common.alwaysTrue -import scala.collection.mutable -import config.Config.ccAllowUnsoundMaps -import language.experimental.pureFunctions -import annotation.retains - -/** A class for capture sets. Capture sets can be constants or variables. - * Capture sets support inclusion constraints <:< where <:< is subcapturing. - * - * They also allow - * - mapping with functions from elements to capture sets - * - filtering with predicates on elements - * - intersecting wo capture sets - * - * That is, constraints can be of the forms - * - * cs1 <:< cs2 - * cs1 = ∪ {f(x) | x ∈ cs2} where f is a function from capture references to capture sets. - * cs1 = ∪ {x | x ∈ cs2, p(x)} where p is a predicate on capture references - * cs1 = cs2 ∩ cs2 - * - * We call the resulting constraint system "monadic set constraints". - * To support capture propagation across maps, mappings are supported only - * if the mapped function is either a bijection or if it is idempotent - * on capture references (c.f. doc comment on `map` below). - */ -sealed abstract class CaptureSet extends Showable, Pure: - import CaptureSet.* - - /** The elements of this capture set. For capture variables, - * the elements known so far. - */ - def elems: Refs - - /** Is this capture set constant (i.e. 
not an unsolved capture variable)? - * Solved capture variables count as constant. - */ - def isConst: Boolean - - /** Is this capture set always empty? For unsolved capture veriables, returns - * always false. - */ - def isAlwaysEmpty: Boolean - - /** Is this capture set definitely non-empty? */ - final def isNotEmpty: Boolean = !elems.isEmpty - - /** Convert to Const. @pre: isConst */ - def asConst: Const = this match - case c: Const => c - case v: Var => - assert(v.isConst) - Const(v.elems) - - /** Cast to variable. @pre: !isConst */ - def asVar: Var = - assert(!isConst) - asInstanceOf[Var] - - /** Does this capture set contain the root reference `*` as element? */ - final def isUniversal(using Context) = - elems.exists { - case ref: TermRef => ref.symbol == defn.captureRoot - case _ => false - } - - /** Add new elements to this capture set if allowed. - * @pre `newElems` is not empty and does not overlap with `this.elems`. - * Constant capture sets never allow to add new elements. - * Variables allow it if and only if the new elements can be included - * in all their dependent sets. - * @param origin The set where the elements come from, or `empty` if not known. - * @return CompareResult.OK if elements were added, or a conflicting - * capture set that prevents addition otherwise. - */ - protected def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult - - /** If this is a variable, add `cs` as a dependent set */ - protected def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult - - /** If `cs` is a variable, add this capture set as one of its dependent sets */ - protected def addAsDependentTo(cs: CaptureSet)(using Context): this.type = - cs.addDependent(this)(using ctx, UnrecordedState) - this - - /** Try to include all references of `elems` that are not yet accounted for by this - * capture set. Inclusion is via `addNewElems`. - * @param origin The set where the elements come from, or `empty` if not known. - * @return CompareResult.OK if all unaccounted elements could be added, - * capture set that prevents addition otherwise. - */ - protected final def tryInclude(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val unaccounted = elems.filter(!accountsFor(_)) - if unaccounted.isEmpty then CompareResult.OK - else addNewElems(unaccounted, origin) - - /** Equivalent to `tryInclude({elem}, origin)`, but more efficient */ - protected final def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = - if accountsFor(elem) then CompareResult.OK - else addNewElems(elem.singletonCaptureSet.elems, origin) - - /* x subsumes y if x is the same as y, or x is a this reference and y refers to a field of x */ - extension (x: CaptureRef) private def subsumes(y: CaptureRef) = - (x eq y) - || y.match - case y: TermRef => y.prefix eq x - case _ => false - - /** {x} <:< this where <:< is subcapturing, but treating all variables - * as frozen. - */ - def accountsFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true) { - elems.exists(_.subsumes(x)) - || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK - } - - /** A more optimistic version of accountsFor, which does not take variable supersets - * of the `x` reference into account. A set might account for `x` if it accounts - * for `x` in a state where we assume all supersets of `x` have just the elements - * known at this point. 
On the other hand if x's capture set has no known elements, - * a set `cs` might account for `x` only if it subsumes `x` or it contains the - * root capability `*`. - */ - def mightAccountFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { - elems.exists(elem => elem.subsumes(x) || elem.isRootCapability) - || !x.isRootCapability - && { - val elems = x.captureSetOfInfo.elems - !elems.isEmpty && elems.forall(mightAccountFor) - } - } - - /** A more optimistic version of subCaptures used to choose one of two typing rules - * for selections and applications. `cs1 mightSubcapture cs2` if `cs2` might account for - * every element currently known to be in `cs1`. - */ - def mightSubcapture(that: CaptureSet)(using Context): Boolean = - elems.forall(that.mightAccountFor) - - /** The subcapturing test. - * @param frozen if true, no new variables or dependent sets are allowed to - * be added when making this test. An attempt to add either - * will result in failure. - */ - final def subCaptures(that: CaptureSet, frozen: Boolean)(using Context): CompareResult = - subCaptures(that)(using ctx, if frozen then FrozenState else VarState()) - - /** The subcapturing test, using a given VarState */ - private def subCaptures(that: CaptureSet)(using Context, VarState): CompareResult = - def recur(elems: List[CaptureRef]): CompareResult = elems match - case elem :: elems1 => - var result = that.tryInclude(elem, this) - if !result.isOK && !elem.isRootCapability && summon[VarState] != FrozenState then - result = elem.captureSetOfInfo.subCaptures(that) - if result.isOK then - recur(elems1) - else - varState.rollBack() - result - case Nil => - addDependent(that) - recur(elems.toList) - .showing(i"subcaptures $this <:< $that = $result", capt)(using null) - - /** Two capture sets are considered =:= equal if they mutually subcapture each other - * in a frozen state. - */ - def =:= (that: CaptureSet)(using Context): Boolean = - this.subCaptures(that, frozen = true).isOK - && that.subCaptures(this, frozen = true).isOK - - /** The smallest capture set (via <:<) that is a superset of both - * `this` and `that` - */ - def ++ (that: CaptureSet)(using Context): CaptureSet = - if this.subCaptures(that, frozen = true).isOK then that - else if that.subCaptures(this, frozen = true).isOK then this - else if this.isConst && that.isConst then Const(this.elems ++ that.elems) - else Var(this.elems ++ that.elems).addAsDependentTo(this).addAsDependentTo(that) - - /** The smallest superset (via <:<) of this capture set that also contains `ref`. 
- */ - def + (ref: CaptureRef)(using Context): CaptureSet = - this ++ ref.singletonCaptureSet - - /** The largest capture set (via <:<) that is a subset of both `this` and `that` - */ - def **(that: CaptureSet)(using Context): CaptureSet = - if this.subCaptures(that, frozen = true).isOK then this - else if that.subCaptures(this, frozen = true).isOK then that - else if this.isConst && that.isConst then Const(elemIntersection(this, that)) - else Intersected(this, that) - - /** The largest subset (via <:<) of this capture set that does not account for - * any of the elements in the constant capture set `that` - */ - def -- (that: CaptureSet.Const)(using Context): CaptureSet = - val elems1 = elems.filter(!that.accountsFor(_)) - if elems1.size == elems.size then this - else if this.isConst then Const(elems1) - else Diff(asVar, that) - - /** The largest subset (via <:<) of this capture set that does not account for `ref` */ - def - (ref: CaptureRef)(using Context): CaptureSet = - this -- ref.singletonCaptureSet - - /** The largest subset (via <:<) of this capture set that only contains elements - * for which `p` is true. - */ - def filter(p: (c: Context) ?-> (CaptureRef -> Boolean) @retains(c))(using Context): CaptureSet = - if this.isConst then - val elems1 = elems.filter(p) - if elems1 == elems then this - else Const(elems.filter(p)) - else Filtered(asVar, p) - - /** Capture set obtained by applying `tm` to all elements of the current capture set - * and joining the results. If the current capture set is a variable, the same - * transformation is applied to all future additions of new elements. - * - * Note: We have a problem how we handle the situation where we have a mapped set - * - * cs2 = tm(cs1) - * - * and then the propagation solver adds a new element `x` to `cs2`. What do we - * know in this case about `cs1`? We can answer this question in a sound way only - * if `tm` is a bijection on capture references or it is idempotent on capture references. - * (see definition in IdempotentCapRefMap). - * If `tm` is a bijection we know that `tm^-1(x)` must be in `cs1`. If `tm` is idempotent - * one possible solution is that `x` is in `cs1`, which is what we assume in this case. - * That strategy is sound but not complete. - * - * If `tm` is some other map, we don't know how to handle this case. For now, - * we simply refuse to handle other maps. If they do need to be handled, - * `OtherMapped` provides some approximation to a solution, but it is neither - * sound nor complete. - */ - def map(tm: TypeMap)(using Context): CaptureSet = tm match - case tm: BiTypeMap => - val mappedElems = elems.map(tm.forward) - if isConst then - if mappedElems == elems then this - else Const(mappedElems) - else BiMapped(asVar, tm, mappedElems) - case tm: IdentityCaptRefMap => - this - case _ => - val mapped = mapRefs(elems, tm, tm.variance) - if isConst then - if mapped.isConst && mapped.elems == elems then this - else mapped - else Mapped(asVar, tm, tm.variance, mapped) - - /** A mapping resulting from substituting parameters of a BindingType to a list of types */ - def substParams(tl: BindingType, to: List[Type])(using Context) = - map(Substituters.SubstParamsMap(tl, to).detach) - - /** Invoke handler if this set has (or later aquires) the root capability `*` */ - def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = - if isUniversal then handler() - this - - /** An upper approximation of this capture set, i.e. a constant set that is - * subcaptured by this set. 
If the current set is a variable - * it is the intersection of all upper approximations of known supersets - * of the variable. - * The upper approximation is meaningful only if it is constant. If not, - * `upperApprox` can return an arbitrary capture set variable. - * `upperApprox` is used in `solve`. - */ - protected def upperApprox(origin: CaptureSet)(using Context): CaptureSet - - /** Assuming set this set dependds on was just solved to be constant, propagate this info - * to this set. This might result in the set being solved to be constant - * itself. - */ - protected def propagateSolved()(using Context): Unit = () - - /** This capture set with a description that tells where it comes from */ - def withDescription(description: String): CaptureSet - - /** The provided description (using `withDescription`) for this capture set or else "" */ - def description: String - - /** A regular @retains or @retainsByName annotation with the elements of this set as arguments. */ - def toRegularAnnotation(cls: Symbol)(using Context): Annotation = - Annotation(CaptureAnnotation(this, boxed = false)(cls).tree) - - override def toText(printer: Printer): Text = - Str("{") ~ Text(elems.toList.map(printer.toTextCaptureRef), ", ") ~ Str("}") ~~ description - -object CaptureSet: - type Refs = SimpleIdentitySet[CaptureRef] - type Vars = SimpleIdentitySet[Var] - type Deps = SimpleIdentitySet[CaptureSet] - - @sharable private var varId = 0 - - /** If set to `true`, capture stack traces that tell us where sets are created */ - private final val debugSets = false - - private val emptySet = SimpleIdentitySet.empty - - /** The empty capture set `{}` */ - val empty: CaptureSet.Const = Const(emptySet) - - /** The universal capture set `{*}` */ - def universal(using Context): CaptureSet = - defn.captureRoot.termRef.singletonCaptureSet - - /** Used as a recursion brake */ - @sharable private[dotc] val Pending = Const(SimpleIdentitySet.empty) - - def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = - if elems.isEmpty then empty - else Const(SimpleIdentitySet(elems.map(_.normalizedRef)*)) - - def apply(elems: Refs)(using Context): CaptureSet.Const = - if elems.isEmpty then empty else Const(elems) - - /** The subclass of constant capture sets with given elements `elems` */ - class Const private[CaptureSet] (val elems: Refs, val description: String = "") extends CaptureSet: - def isConst = true - def isAlwaysEmpty = elems.isEmpty - - def addNewElems(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - CompareResult.fail(this) - - def addDependent(cs: CaptureSet)(using Context, VarState) = CompareResult.OK - - def upperApprox(origin: CaptureSet)(using Context): CaptureSet = this - - def withDescription(description: String): Const = Const(elems, description) - - override def toString = elems.toString - end Const - - /** The subclass of captureset variables with given initial elements */ - class Var(initialElems: Refs = emptySet) extends CaptureSet: - - /** A unique identification number for diagnostics */ - val id = - varId += 1 - varId - - /** A variable is solved if it is aproximated to a from-then-on constant set. */ - private var isSolved: Boolean = false - - /** The elements currently known to be in the set */ - var elems: Refs = initialElems - - /** The sets currently known to be dependent sets (i.e. new additions to this set - * are propagated to these dependent sets.) 
- */ - var deps: Deps = emptySet - - def isConst = isSolved - def isAlwaysEmpty = false - - /** A handler to be invoked if the root reference `*` is added to this set - * The handler is pure in the sense that it will only output diagnostics. - */ - var rootAddedHandler: () -> Context ?-> Unit = () => () - - var description: String = "" - - /** Record current elements in given VarState provided it does not yet - * contain an entry for this variable. - */ - private def recordElemsState()(using VarState): Boolean = - varState.getElems(this) match - case None => varState.putElems(this, elems) - case _ => true - - /** Record current dependent sets in given VarState provided it does not yet - * contain an entry for this variable. - */ - private[CaptureSet] def recordDepsState()(using VarState): Boolean = - varState.getDeps(this) match - case None => varState.putDeps(this, deps) - case _ => true - - /** Reset elements to what was recorded in `state` */ - def resetElems()(using state: VarState): Unit = - elems = state.elems(this) - - /** Reset dependent sets to what was recorded in `state` */ - def resetDeps()(using state: VarState): Unit = - deps = state.deps(this) - - def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - if !isConst && recordElemsState() then - elems ++= newElems - if isUniversal then rootAddedHandler() - // assert(id != 2 || elems.size != 2, this) - (CompareResult.OK /: deps) { (r, dep) => - r.andAlso(dep.tryInclude(newElems, this)) - } - else // fail if variable is solved or given VarState is frozen - CompareResult.fail(this) - - def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult = - if (cs eq this) || cs.isUniversal || isConst then - CompareResult.OK - else if recordDepsState() then - deps += cs - CompareResult.OK - else - CompareResult.fail(this) - - override def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = - rootAddedHandler = handler - super.disallowRootCapability(handler) - - private var computingApprox = false - - /** Roughly: the intersection of all constant known supersets of this set. - * The aim is to find an as-good-as-possible constant set that is a superset - * of this set. The universal set {*} is a sound fallback. - */ - final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = - if computingApprox then universal - else if isConst then this - else - computingApprox = true - try computeApprox(origin).ensuring(_.isConst) - finally computingApprox = false - - /** The intersection of all upper approximations of dependent sets */ - protected def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - (universal /: deps) { (acc, sup) => acc ** sup.upperApprox(this) } - - /** Widen the variable's elements to its upper approximation and - * mark it as constant from now on. This is used for contra-variant type variables - * in the results of defs and vals. 
- */ - def solve()(using Context): Unit = - if !isConst then - val approx = upperApprox(empty) - //println(i"solving var $this $approx ${approx.isConst} deps = ${deps.toList}") - val newElems = approx.elems -- elems - if newElems.isEmpty || addNewElems(newElems, empty)(using ctx, VarState()).isOK then - markSolved() - - /** Mark set as solved and propagate this info to all dependent sets */ - def markSolved()(using Context): Unit = - isSolved = true - deps.foreach(_.propagateSolved()) - - def withDescription(description: String): this.type = - this.description = - if this.description.isEmpty then description - else s"${this.description} and $description" - this - - /** Used for diagnostics and debugging: A string that traces the creation - * history of a variable by following source links. Each variable on the - * path is characterized by the variable's id and the first letter of the - * variable's class name. The path ends in a plain variable with letter `V` that - * is not derived from some other variable. - */ - protected def ids(using Context): String = - val trail = this.match - case dv: DerivedVar => dv.source.ids - case _ => "" - s"$id${getClass.getSimpleName.nn.take(1)}$trail" - - /** Adds variables to the ShownVars context property if that exists, which - * establishes a record of all variables printed in an error message. - * Prints variables wih ids under -Ycc-debug. - */ - override def toText(printer: Printer): Text = inContext(printer.printerContext) { - for vars <- ctx.property(ShownVars) do vars += this - super.toText(printer) ~ (Str(ids) provided !isConst && ctx.settings.YccDebug.value) - } - - override def toString = s"Var$id$elems" - end Var - - /** A variable that is derived from some other variable via a map or filter. */ - abstract class DerivedVar(initialElems: Refs)(using @constructorOnly ctx: Context) - extends Var(initialElems): - - // For debugging: A trace where a set was created. Note that logically it would make more - // sense to place this variable in Mapped, but that runs afoul of the initializatuon checker. - val stack = if debugSets && this.isInstanceOf[Mapped] then (new Throwable).getStackTrace().nn.take(20) else null - - /** The variable from which this variable is derived */ - def source: Var - - addAsDependentTo(source) - - override def propagateSolved()(using Context) = - if source.isConst && !isConst then markSolved() - end DerivedVar - - /** A variable that changes when `source` changes, where all additional new elements are mapped - * using ∪ { tm(x) | x <- source.elems }. - * @param source the original set that is mapped - * @param tm the type map, which is assumed to be idempotent on capture refs - * (except if ccUnsoundMaps is enabled) - * @param variance the assumed variance with which types with capturesets of size >= 2 are approximated - * (i.e. co: full capture set, contra: empty set, nonvariant is not allowed.) - * @param initial The initial mappings of source's elements at the point the Mapped set is created. 
- */ - class Mapped private[CaptureSet] - (val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context) - extends DerivedVar(initial.elems): - addAsDependentTo(initial) // initial mappings could change by propagation - - private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] - - assert(ccAllowUnsoundMaps || mapIsIdempotent, tm.getClass) - - private def whereCreated(using Context): String = - if stack == null then "" - else i""" - |Stack trace of variable creation:" - |${stack.mkString("\n")}""" - - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val added = - if origin eq source then // elements have to be mapped - mapRefs(newElems, tm, variance) - else - // elements are added by subcapturing propagation with this Mapped set - // as superset; no mapping is necessary or allowed. - Const(newElems) - super.addNewElems(added.elems, origin) - .andAlso { - if added.isConst then CompareResult.OK - else if added.asVar.recordDepsState() then { addAsDependentTo(added); CompareResult.OK } - else CompareResult.fail(this) - } - .andAlso { - if (origin ne source) && (origin ne initial) && mapIsIdempotent then - // `tm` is idempotent, propagate back elems from image set. - // This is sound, since we know that for `r in newElems: tm(r) = r`, hence - // `r` is _one_ possible solution in `source` that would make an `r` appear in this set. - // It's not necessarily the only possible solution, so the scheme is incomplete. - source.tryInclude(newElems, this) - else if !mapIsIdempotent && variance <= 0 && !origin.isConst && (origin ne initial) && (origin ne source) then - // The map is neither a BiTypeMap nor an idempotent type map. - // In that case there's no much we can do. - // The scheme then does not propagate added elements back to source and rejects adding - // elements from variable sources in contra- and non-variant positions. In essence, - // we approximate types resulting from such maps by returning a possible super type - // from the actual type. But this is neither sound nor complete. - report.warning(em"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") - CompareResult.fail(this) - else - CompareResult.OK - } - - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - if source eq origin then - // it's a mapping of origin, so not a superset of `origin`, - // therefore don't contribute to the intersection. - universal - else - source.upperApprox(this).map(tm) - - override def propagateSolved()(using Context) = - if initial.isConst then super.propagateSolved() - - override def toString = s"Mapped$id($source, elems = $elems)" - end Mapped - - /** A mapping where the type map is required to be a bijection. - * Parameters as in Mapped. 
- */ - final class BiMapped private[CaptureSet] - (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) - extends DerivedVar(initialElems): - - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - if origin eq source then - super.addNewElems(newElems.map(bimap.forward), origin) - else - super.addNewElems(newElems, origin) - .andAlso { - source.tryInclude(newElems.map(bimap.backward), this) - .showing(i"propagating new elems ${CaptureSet(newElems)} backward from $this to $source", capt)(using null) - } - - /** For a BiTypeMap, supertypes of the mapped type also constrain - * the source via the inverse type mapping and vice versa. That is, if - * B = f(A) and B <: C, then A <: f^-1(C), so C should flow into - * the upper approximation of A. - * Conversely if A <: C2, then we also know that B <: f(C2). - * These situations are modeled by the two branches of the conditional below. - */ - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - val supApprox = super.computeApprox(this) - if source eq origin then supApprox.map(bimap.inverseTypeMap.detach) - else source.upperApprox(this).map(bimap) ** supApprox - - override def toString = s"BiMapped$id($source, elems = $elems)" - end BiMapped - - /** A variable with elements given at any time as { x <- source.elems | p(x) } */ - class Filtered private[CaptureSet] - (val source: Var, p: (c: Context) ?-> (CaptureRef -> Boolean) @retains(c))(using @constructorOnly ctx: Context) - extends DerivedVar(source.elems.filter(p)): - - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val filtered = newElems.filter(p) - if origin eq source then - super.addNewElems(filtered, origin) - else - // Filtered elements have to be back-propagated to source. - // Elements that don't satisfy `p` are not allowed. - super.addNewElems(newElems, origin) - .andAlso { - if filtered.size == newElems.size then source.tryInclude(newElems, this) - else CompareResult.fail(this) - } - - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - if source eq origin then - // it's a filter of origin, so not a superset of `origin`, - // therefore don't contribute to the intersection. - universal - else - source.upperApprox(this).filter(p) - - override def toString = s"${getClass.getSimpleName}$id($source, elems = $elems)" - end Filtered - - /** A variable with elements given at any time as { x <- source.elems | !other.accountsFor(x) } */ - class Diff(source: Var, other: Const)(using @constructorOnly ctx: Context) - extends Filtered(source, !other.accountsFor(_)) - - class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using @constructorOnly ctx: Context) - extends Var(elemIntersection(cs1, cs2)): - addAsDependentTo(cs1) - addAsDependentTo(cs2) - deps += cs1 - deps += cs2 - - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val added = - if origin eq cs1 then newElems.filter(cs2.accountsFor) - else if origin eq cs2 then newElems.filter(cs1.accountsFor) - else newElems - // If origin is not cs1 or cs2, then newElems will be propagated to - // cs1, cs2 since they are in deps. 
- super.addNewElems(added, origin) - - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - if (origin eq cs1) || (origin eq cs2) then - // it's a combination of origin with some other set, so not a superset of `origin`, - // therefore don't contribute to the intersection. - universal - else - CaptureSet(elemIntersection(cs1.upperApprox(this), cs2.upperApprox(this))) - - override def propagateSolved()(using Context) = - if cs1.isConst && cs2.isConst && !isConst then markSolved() - end Intersected - - def elemIntersection(cs1: CaptureSet, cs2: CaptureSet)(using Context): Refs = - cs1.elems.filter(cs2.mightAccountFor) ++ cs2.elems.filter(cs1.mightAccountFor) - - /** Extrapolate tm(r) according to `variance`. Let r1 be the result of tm(r). - * - If r1 is a tracked CaptureRef, return {r1} - * - If r1 has an empty capture set, return {} - * - Otherwise, - * - if the variance is covariant, return r1's capture set - * - if the variance is contravariant, return {} - * - Otherwise assertion failure - */ - def extrapolateCaptureRef(r: CaptureRef, tm: TypeMap, variance: Int)(using Context): CaptureSet = - val r1 = tm(r) - val upper = r1.captureSet - def isExact = - upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) - if variance > 0 || isExact then upper - else if variance < 0 then CaptureSet.empty - else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") - - /** Apply `f` to each element in `xs`, and join result sets with `++` */ - def mapRefs(xs: Refs, f: CaptureRef => CaptureSet)(using Context): CaptureSet = - ((empty: CaptureSet) /: xs)((cs, x) => cs ++ f(x)) - - /** Apply extrapolated `tm` to each element in `xs`, and join result sets with `++` */ - def mapRefs(xs: Refs, tm: TypeMap, variance: Int)(using Context): CaptureSet = - mapRefs(xs, extrapolateCaptureRef(_, tm, variance)) - - /** Return true iff - * - arg1 is a TypeBounds >: CL T <: CH T of two capturing types with equal parents. - * - arg2 is a capturing type CA U - * - CH <: CA <: CL - * In other words, we can unify CL, CH and CA. - */ - def subCapturesRange(arg1: TypeBounds, arg2: Type)(using Context): Boolean = arg1 match - case TypeBounds(CapturingType(lo, loRefs), CapturingType(hi, hiRefs)) if lo =:= hi => - given VarState = VarState() - val cs2 = arg2.captureSet - hiRefs.subCaptures(cs2).isOK && cs2.subCaptures(loRefs).isOK - case _ => - false - - /** A TypeMap with the property that every capture reference in the image - * of the map is mapped to itself. I.e. for all capture references r1, r2, - * if M(r1) == r2 then M(r2) == r2. - */ - trait IdempotentCaptRefMap extends TypeMap - - /** A TypeMap that is the identity on capture references */ - trait IdentityCaptRefMap extends TypeMap - - type CompareResult = CompareResult.TYPE - - /** The result of subcapturing comparisons is an opaque type CompareResult.TYPE. - * This is either OK, indicating success, or - * another capture set, indicating failure. The failure capture set - * is the one that did not allow propagaton of elements into it. 
- */ - object CompareResult: - opaque type TYPE = CaptureSet - val OK: TYPE = Const(emptySet) - def fail(cs: CaptureSet): TYPE = cs - - extension (result: TYPE) - /** The result is OK */ - def isOK: Boolean = result eq OK - /** If not isOK, the blocking capture set */ - def blocking: CaptureSet = result - inline def andAlso(op: Context ?=> TYPE)(using Context): TYPE = if result.isOK then op else result - def show(using Context): String = if result.isOK then "OK" else i"$result" - end CompareResult - - /** A VarState serves as a snapshot mechanism that can undo - * additions of elements or super sets if an operation fails - */ - class VarState: - - /** A map from captureset variables to their elements at the time of the snapshot. */ - private val elemsMap: util.EqHashMap[Var, Refs] = new util.EqHashMap - - /** A map from captureset variables to their dependent sets at the time of the snapshot. */ - private val depsMap: util.EqHashMap[Var, Deps] = new util.EqHashMap - - /** The recorded elements of `v` (it's required that a recording was made) */ - def elems(v: Var): Refs = elemsMap(v) - - /** Optionally the recorded elements of `v`, None if nothing was recorded for `v` */ - def getElems(v: Var): Option[Refs] = elemsMap.get(v) - - /** Record elements, return whether this was allowed. - * By default, recording is allowed but the special state FrozenState - * overrides this. - */ - def putElems(v: Var, elems: Refs): Boolean = { elemsMap(v) = elems; true } - - /** The recorded dependent sets of `v` (it's required that a recording was made) */ - def deps(v: Var): Deps = depsMap(v) - - /** Optionally the recorded dependent sets of `v`, None if nothing was recorded for `v` */ - def getDeps(v: Var): Option[Deps] = depsMap.get(v) - - /** Record dependent sets, return whether this was allowed. - * By default, recording is allowed but the special state FrozenState - * overrides this. - */ - def putDeps(v: Var, deps: Deps): Boolean = { depsMap(v) = deps; true } - - /** Roll back global state to what was recorded in this VarState */ - def rollBack(): Unit = - elemsMap.keysIterator.foreach(_.resetElems()(using this)) - depsMap.keysIterator.foreach(_.resetDeps()(using this)) - end VarState - - /** A special state that does not allow to record elements or dependent sets. - * In effect this means that no new elements or dependent sets can be added - * in this state (since the previous state cannot be recorded in a snapshot) - */ - @sharable - object FrozenState extends VarState: - override def putElems(v: Var, refs: Refs) = false - override def putDeps(v: Var, deps: Deps) = false - override def rollBack(): Unit = () - - @sharable - /** A special state that turns off recording of elements. Used only - * in `addSub` to prevent cycles in recordings. 
- */ - private object UnrecordedState extends VarState: - override def putElems(v: Var, refs: Refs) = true - override def putDeps(v: Var, deps: Deps) = true - override def rollBack(): Unit = () - - /** The current VarState, as passed by the implicit context */ - def varState(using state: VarState): VarState = state - - /* Not needed: - def ofClass(cinfo: ClassInfo, argTypes: List[Type])(using Context): CaptureSet = - CaptureSet.empty - def captureSetOf(tp: Type): CaptureSet = tp match - case tp: TypeRef if tp.symbol.is(ParamAccessor) => - def mapArg(accs: List[Symbol], tps: List[Type]): CaptureSet = accs match - case acc :: accs1 if tps.nonEmpty => - if acc == tp.symbol then tps.head.captureSet - else mapArg(accs1, tps.tail) - case _ => - empty - mapArg(cinfo.cls.paramAccessors, argTypes) - case _ => - tp.captureSet - val css = - for - parent <- cinfo.parents if parent.classSymbol == defn.RetainingClass - arg <- parent.argInfos - yield captureSetOf(arg) - css.foldLeft(empty)(_ ++ _) - */ - - /** The capture set of the type underlying a CaptureRef */ - def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match - case ref: TermRef if ref.isRootCapability => ref.singletonCaptureSet - case _ => ofType(ref.underlying) - - /** Capture set of a type */ - def ofType(tp: Type)(using Context): CaptureSet = - def recur(tp: Type): CaptureSet = tp.dealias match - case tp: TermRef => - tp.captureSet - case tp: TermParamRef => - tp.captureSet - case _: TypeRef => - if tp.classSymbol.hasAnnotation(defn.CapabilityAnnot) then universal else empty - case _: TypeParamRef => - empty - case CapturingType(parent, refs) => - recur(parent) ++ refs - case AppliedType(tycon, args) => - val cs = recur(tycon) - tycon.typeParams match - case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args) - case _ => cs - case tp: TypeProxy => - recur(tp.underlying) - case AndType(tp1, tp2) => - recur(tp1) ** recur(tp2) - case OrType(tp1, tp2) => - recur(tp1) ++ recur(tp2) - case _ => - empty - recur(tp) - .showing(i"capture set of $tp = $result", capt) - - private val ShownVars: Property.Key[mutable.Set[Var]] = Property.Key() - - /** Perform `op`. Under -Ycc-debug, collect and print info about all variables reachable - * via `(_.deps)*` from the variables that were shown in `op`. - */ - def withCaptureSetsExplained[T](op: Context ?=> T)(using ctx: Context): T = - if ctx.settings.YccDebug.value then - val shownVars = mutable.Set[Var]() - inContext(ctx.withProperty(ShownVars, Some(shownVars))) { - try op - finally - val reachable = mutable.Set[Var]() - val todo = mutable.Queue[Var]() ++= shownVars - def incl(cv: Var): Unit = - if !reachable.contains(cv) then todo += cv - while todo.nonEmpty do - val cv = todo.dequeue() - if !reachable.contains(cv) then - reachable += cv - cv.deps.foreach { - case cv: Var => incl(cv) - case _ => - } - cv match - case cv: DerivedVar => incl(cv.source) - case _ => - val allVars = reachable.toArray.sortBy(_.id) - println(i"Capture set dependencies:") - for cv <- allVars do - println(i" ${cv.show.padTo(20, ' ')} :: ${cv.deps.toList}%, %") - } - else op -end CaptureSet diff --git a/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala b/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala deleted file mode 100644 index e9862f1f20b8..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala +++ /dev/null @@ -1,72 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.* - -/** A (possibly boxed) capturing type. 
This is internally represented as an annotated type with a @retains
- *  or @retainsByName annotation, but the extractor will succeed only at phase CheckCaptures.
- *  That way, we can ignore capturing information until phase CheckCaptures since it is
- *  wrapped in a plain annotation.
- *
- *  The same trick does not work for the boxing information. Boxing is context dependent, so
- *  we have to add that information in the Setup step preceding CheckCaptures. Boxes are
- *  added for all type arguments of methods. For type arguments of applied types a different
- *  strategy is used where we box arguments of applied types that are not functions when
- *  accessing the argument.
- *
- *  An alternative strategy would add boxes also to arguments of applied types during setup.
- *  But this would have to be done for all possibly accessible types from the compiled units
- *  as well as their dependencies. It's difficult to do this in a DenotationTransformer without
- *  accidentally forcing symbol infos. That's why this alternative was not implemented.
- *  If we were to go back on this, it would make sense to also treat capturing types differently
- *  from annotations and to generate them all during Setup and in DenotationTransformers.
- */
-object CapturingType:
-
-  /** Smart constructor that drops empty capture sets and fuses compatible capturing types.
-   *  An outer capturing type A can be fused with an inner capturing type B if their
-   *  boxing status is the same or if A is boxed.
-   */
-  def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type =
-    if refs.isAlwaysEmpty then parent
-    else parent match
-      case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed =>
-        apply(parent1, refs ++ refs1, boxed)
-      case _ =>
-        AnnotatedType(parent, CaptureAnnotation(refs, boxed)(defn.RetainsAnnot))
-
-  /** An extractor that succeeds only during the CheckCaptures phase. Boxing status is
-   *  returned separately by CaptureOps.isBoxed.
-   */
-  def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] =
-    if ctx.phase == Phases.checkCapturesPhase
-      && tp.annot.symbol == defn.RetainsAnnot
-      && !ctx.mode.is(Mode.IgnoreCaptures)
-    then
-      EventuallyCapturingType.unapply(tp)
-    else None
-
-end CapturingType
-
-/** An extractor for types that will be capturing types at phase CheckCaptures. Also
- *  included are types that indicate captures on enclosing call-by-name parameters
- *  before phase ElimByName.
- */ -object EventuallyCapturingType: - - def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = - val sym = tp.annot.symbol - if sym == defn.RetainsAnnot || sym == defn.RetainsByNameAnnot then - tp.annot match - case ann: CaptureAnnotation => - Some((tp.parent, ann.refs)) - case ann => - try Some((tp.parent, ann.tree.toCaptureSet)) - catch case ex: IllegalCaptureRef => None - else None - -end EventuallyCapturingType - - diff --git a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala deleted file mode 100644 index ce3f788202b6..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala +++ /dev/null @@ -1,1039 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Phases.*, DenotTransformers.*, SymDenotations.* -import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* -import Types.*, StdNames.*, Denotations.* -import config.Printers.{capt, recheckr} -import config.{Config, Feature} -import ast.{tpd, untpd, Trees} -import Trees.* -import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents} -import typer.Checking.{checkBounds, checkAppliedTypesIn} -import util.{SimpleIdentitySet, EqHashMap, SrcPos} -import transform.SymUtils.* -import transform.{Recheck, PreRecheck} -import Recheck.* -import scala.collection.mutable -import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} -import StdNames.nme -import NameKinds.DefaultGetterName -import reporting.trace -import language.experimental.pureFunctions - -/** The capture checker */ -object CheckCaptures: - import ast.tpd.* - - class Pre extends PreRecheck, SymTransformer: - - override def isEnabled(using Context) = true - - /** Reset `private` flags of parameter accessors so that we can refine them - * in Setup if they have non-empty capture sets. Special handling of some - * symbols defined for case classes. - */ - def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then - sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate) - else if Synthetics.needsTransform(sym) then - Synthetics.transformToCC(sym) - else - sym - end Pre - - /** A class describing environments. - * @param owner the current owner - * @param nestedInOwner true if the environment is a temporary one nested in the owner's environment, - * and does not have a different actual owner symbol (this happens when doing box adaptation). - * @param captured the caputure set containing all references to tracked free variables outside of boxes - * @param isBoxed true if the environment is inside a box (in which case references are not counted) - * @param outer0 the next enclosing environment - */ - case class Env( - owner: Symbol, - nestedInOwner: Boolean, - captured: CaptureSet, - isBoxed: Boolean, - outer0: Env | Null - ): - def outer = outer0.nn - - def isOutermost = outer0 == null - - /** If an environment is open it tracks free references */ - def isOpen = !captured.isAlwaysEmpty && !isBoxed - end Env - - /** Similar normal substParams, but this is an approximating type map that - * maps parameters in contravariant capture sets to the empty set. - * TODO: check what happens with non-variant. 
- */ - final class SubstParamsMap(from: BindingType, to: List[Type])(using DetachedContext) - extends ApproximatingTypeMap, IdempotentCaptRefMap: - def apply(tp: Type): Type = tp match - case tp: ParamRef => - if tp.binder == from then to(tp.paramNum) else tp - case tp: NamedType => - if tp.prefix `eq` NoPrefix then tp - else tp.derivedSelect(apply(tp.prefix)) - case _: ThisType => - tp - case _ => - mapOver(tp) - - /** Check that a @retains annotation only mentions references that can be tracked. - * This check is performed at Typer. - */ - def checkWellformed(ann: Tree)(using Context): Unit = - for elem <- retainedElems(ann) do - elem.tpe match - case ref: CaptureRef => - if !ref.canBeTracked then - report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) - case tpe => - report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) - - /** If `tp` is a capturing type, check that all references it mentions have non-empty - * capture sets. Also: warn about redundant capture annotations. - * This check is performed after capture sets are computed in phase cc. - */ - def checkWellformedPost(tp: Type, pos: SrcPos)(using Context): Unit = tp match - case CapturingType(parent, refs) => - for ref <- refs.elems do - if ref.captureSetOfInfo.elems.isEmpty then - report.error(em"$ref cannot be tracked since its capture set is empty", pos) - else if parent.captureSet.accountsFor(ref) then - report.warning(em"redundant capture: $parent already accounts for $ref", pos) - case _ => - - /** Warn if `ann`, which is a tree of a @retains annotation, defines some elements that - * are already accounted for by other elements of the same annotation. - * Note: We need to perform the check on the original annotation rather than its - * capture set since the conversion to a capture set already eliminates redundant elements. - */ - def warnIfRedundantCaptureSet(ann: Tree)(using Context): Unit = - // The lists `elems(i) :: prev.reverse :: elems(0),...,elems(i-1),elems(i+1),elems(n)` - // where `n == elems.length-1`, i <- 0..n`. - // I.e. - // choices(Nil, elems) = [[elems(i), elems(0), ..., elems(i-1), elems(i+1), .... elems(n)] | i <- 0..n] - def choices(prev: List[Tree], elems: List[Tree]): List[List[Tree]] = elems match - case Nil => Nil - case elem :: elems => - List(elem :: (prev reverse_::: elems)) ++ choices(elem :: prev, elems) - for case first :: others <- choices(Nil, retainedElems(ann)) do - val firstRef = first.toCaptureRef - val remaining = CaptureSet(others.map(_.toCaptureRef)*) - if remaining.accountsFor(firstRef) then - report.warning(em"redundant capture: $remaining already accounts for $firstRef", ann.srcPos) - -class CheckCaptures extends Recheck, SymTransformer: - thisPhase => - - import ast.tpd.* - import CheckCaptures.* - - def phaseName: String = "cc" - override def isEnabled(using Context) = true - - def newRechecker()(using Context) = CaptureChecker(ctx.detach) - - override def run(using Context): Unit = - if Feature.ccEnabled then - checkOverrides.traverse(ctx.compilationUnit.tpdTree) - super.run - - override def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) - else super.transformSym(sym) - - /** Check overrides again, taking capture sets into account. - * TODO: Can we avoid doing overrides checks twice? 
- * We need to do them here since only at this phase CaptureTypes are relevant - * But maybe we can then elide the check during the RefChecks phase under captureChecking? - */ - def checkOverrides = new TreeTraverser: - def traverse(t: Tree)(using Context) = - t match - case t: Template => checkAllOverrides(ctx.owner.asClass) - case _ => - traverseChildren(t) - - class CaptureChecker(ictx: DetachedContext) extends Rechecker(ictx): - import ast.tpd.* - - override def keepType(tree: Tree) = - super.keepType(tree) - || tree.isInstanceOf[Try] // type of `try` needs tp be checked for * escapes - - /** Instantiate capture set variables appearing contra-variantly to their - * upper approximation. - */ - private def interpolator(startingVariance: Int = 1)(using Context) = new TypeTraverser: - variance = startingVariance - override def traverse(t: Type) = - t match - case CapturingType(parent, refs: CaptureSet.Var) => - if variance < 0 then - capt.println(i"solving $t") - refs.solve() - traverse(parent) - case t @ RefinedType(_, nme.apply, rinfo) if defn.isFunctionOrPolyType(t) => - traverse(rinfo) - case tp: TypeVar => - case tp: TypeRef => - traverse(tp.prefix) - case _ => - traverseChildren(t) - - /** If `tpt` is an inferred type, interpolate capture set variables appearing contra- - * variantly in it. - */ - private def interpolateVarsIn(tpt: Tree)(using Context): Unit = - if tpt.isInstanceOf[InferredTypeTree] then - interpolator().traverse(tpt.knownType) - .showing(i"solved vars in ${tpt.knownType}", capt)(using null) - - /** Assert subcapturing `cs1 <: cs2` */ - def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) = - assert(cs1.subCaptures(cs2, frozen = false).isOK, i"$cs1 is not a subset of $cs2") - - /** Check subcapturing `{elem} <: cs`, report error on failure */ - def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos)(using Context) = - val res = elem.singletonCaptureSet.subCaptures(cs, frozen = false) - if !res.isOK then - report.error(em"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) - - /** Check subcapturing `cs1 <: cs2`, report error on failure */ - def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos)(using Context) = - val res = cs1.subCaptures(cs2, frozen = false) - if !res.isOK then - def header = - if cs1.elems.size == 1 then i"reference ${cs1.elems.toList}%, % is not" - else i"references $cs1 are not all" - report.error(em"$header included in allowed capture set ${res.blocking}", pos) - - /** The current environment */ - private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null) - - private val myCapturedVars: util.EqHashMap[Symbol, CaptureSet] = EqHashMap() - - /** If `sym` is a class or method nested inside a term, a capture set variable representing - * the captured variables of the environment associated with `sym`. 
-     */
-    def capturedVars(sym: Symbol)(using Context) =
-      myCapturedVars.getOrElseUpdate(sym,
-        if sym.ownersIterator.exists(_.isTerm) then CaptureSet.Var()
-        else CaptureSet.empty)
-
-    /** For all nested environments up to `limit` perform `op` */
-    def forallOuterEnvsUpTo(limit: Symbol)(op: Env => Unit)(using Context): Unit =
-      def recur(env: Env): Unit =
-        if env.isOpen && env.owner != limit then
-          op(env)
-          if !env.isOutermost then
-            var nextEnv = env.outer
-            if env.owner.isConstructor then
-              if nextEnv.owner != limit && !nextEnv.isOutermost then
-                recur(nextEnv.outer)
-            else recur(nextEnv)
-      recur(curEnv)
-
-    /** Include `sym` in the capture sets of all enclosing environments nested in the
-     *  environment in which `sym` is defined.
-     */
-    def markFree(sym: Symbol, pos: SrcPos)(using Context): Unit =
-      if sym.exists then
-        val ref = sym.termRef
-        if ref.isTracked then
-          forallOuterEnvsUpTo(sym.enclosure) { env =>
-            capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}")
-            checkElem(ref, env.captured, pos)
-          }
-
-    /** Make sure (projected) `cs` is a subset of the capture sets of all enclosing
-     *  environments. At each stage, only include references from `cs` that are outside
-     *  the environment's owner.
-     */
-    def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit =
-      if !cs.isAlwaysEmpty then
-        forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env =>
-          val included = cs.filter {
-            case ref: TermRef =>
-              (env.nestedInOwner || env.owner != ref.symbol.owner)
-              && env.owner.isContainedIn(ref.symbol.owner)
-            case ref: ThisType =>
-              (env.nestedInOwner || env.owner != ref.cls)
-              && env.owner.isContainedIn(ref.cls)
-            case _ => false
-          }
-          capt.println(i"Include call capture $included in ${env.owner}")
-          checkSubset(included, env.captured, pos)
-        }
-
-    /** Include references captured by the called method in the current environment stack */
-    def includeCallCaptures(sym: Symbol, pos: SrcPos)(using Context): Unit =
-      if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos)
-
-    override def recheckIdent(tree: Ident)(using Context): Type =
-      if tree.symbol.is(Method) then includeCallCaptures(tree.symbol, tree.srcPos)
-      else markFree(tree.symbol, tree.srcPos)
-      super.recheckIdent(tree)
-
-    /** A specialized implementation of the selection rule.
-     *
-     *  E |- f: Cf f { m: Cr R }
-     *  ------------------------
-     *  E |- f.m: C R
-     *
-     *  The implementation picks as `C` one of `{f}` or `Cr`, depending on the
-     *  outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr
-     *  and Cr otherwise.
-     */
-    override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = {
-      def disambiguate(denot: Denotation): Denotation = denot match
-        case MultiDenotation(denot1, denot2) =>
-          // This case can arise when we try to merge multiple types that have different
-          // capture sets on some part. For instance an asSeenFrom might produce
-          // a bi-mapped capture set arising from a substitution. Applying the same substitution
-          // to the same type twice will nevertheless produce different capture sets, which can
-          // lead to a failure in disambiguation since neither alternative is better than the
-          // other in a frozen constraint. An example test case is disambiguate-select.scala.
-          // We address the problem by disambiguating while ignoring all capture sets as a fallback.
- withMode(Mode.IgnoreCaptures) { - disambiguate(denot1).meet(disambiguate(denot2), qualType) - } - case _ => denot - - val selType = recheckSelection(tree, qualType, name, disambiguate) - val selCs = selType.widen.captureSet - if selCs.isAlwaysEmpty || selType.widen.isBoxedCapturing || qualType.isBoxedCapturing then - selType - else - val qualCs = qualType.captureSet - capt.println(i"intersect $qualType, ${selType.widen}, $qualCs, $selCs in $tree") - if qualCs.mightSubcapture(selCs) - && !selCs.mightSubcapture(qualCs) - && !pt.stripCapturing.isInstanceOf[SingletonType] - then - selType.widen.stripCapturing.capturing(qualCs) - .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) - else - selType - }//.showing(i"recheck sel $tree, $qualType = $result") - - /** A specialized implementation of the apply rule. - * - * E |- f: Cf (Ra -> Cr Rr) - * E |- a: Ca Ra - * ------------------------ - * E |- f a: C Rr - * - * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the - * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr - * and Cr otherwise. - */ - override def recheckApply(tree: Apply, pt: Type)(using Context): Type = - val meth = tree.fun.symbol - includeCallCaptures(meth, tree.srcPos) - def mapArgUsing(f: Type => Type) = - val arg :: Nil = tree.args: @unchecked - val argType0 = f(recheckStart(arg, pt)) - val argType = super.recheckFinish(argType0, arg, pt) - super.recheckFinish(argType, tree, pt) - - if meth == defn.Caps_unsafeBox then - mapArgUsing(_.forceBoxStatus(true)) - else if meth == defn.Caps_unsafeUnbox then - mapArgUsing(_.forceBoxStatus(false)) - else if meth == defn.Caps_unsafeBoxFunArg then - mapArgUsing { - case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual, isErased) => - defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual, isErased) - } - else - super.recheckApply(tree, pt) match - case appType @ CapturingType(appType1, refs) => - tree.fun match - case Select(qual, _) - if !tree.fun.symbol.isConstructor - && !qual.tpe.isBoxedCapturing - && !tree.args.exists(_.tpe.isBoxedCapturing) - && qual.tpe.captureSet.mightSubcapture(refs) - && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) - => - val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => - cs ++ arg.tpe.captureSet) - appType.derivedCapturingType(appType1, callCaptures) - .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) - case _ => appType - case appType => appType - end recheckApply - - /** Handle an application of method `sym` with type `mt` to arguments of types `argTypes`. - * This means: - * - Instantiate result type with actual arguments - * - If call is to a constructor: - * - remember types of arguments corresponding to tracked - * parameters in refinements. - * - add capture set of instantiated class to capture set of result type. - */ - override def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = - val ownType = - if mt.isResultDependent then SubstParamsMap(mt, argTypes)(mt.resType) - else mt.resType - - if sym.isConstructor then - val cls = sym.owner.asClass - - /** First half of result pair: - * Refine the type of a constructor call `new C(t_1, ..., t_n)` - * to C{val x_1: T_1, ..., x_m: T_m} where x_1, ..., x_m are the tracked - * parameters of C and T_1, ..., T_m are the types of the corresponding arguments. 
- * - * Second half: union of all capture sets of arguments to tracked parameters. - */ - def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = - mt.paramNames.lazyZip(argTypes).foldLeft((core, initCs)) { (acc, refine) => - val (core, allCaptures) = acc - val (getterName, argType) = refine - val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol - if getter.termRef.isTracked && !getter.is(Private) - then (RefinedType(core, getterName, argType), allCaptures ++ argType.captureSet) - else (core, allCaptures) - } - - def augmentConstructorType(core: Type, initCs: CaptureSet): Type = core match - case core: MethodType => - // more parameters to follow; augment result type - core.derivedLambdaType(resType = augmentConstructorType(core.resType, initCs)) - case CapturingType(parent, refs) => - // can happen for curried constructors if instantiate of a previous step - // added capture set to result. - augmentConstructorType(parent, initCs ++ refs) - case _ => - val (refined, cs) = addParamArgRefinements(core, initCs) - refined.capturing(cs) - - augmentConstructorType(ownType, CaptureSet.empty) match - case augmented: MethodType => - augmented - case augmented => - // add capture sets of class and constructor to final result of constructor call - augmented.capturing(capturedVars(cls) ++ capturedVars(sym)) - .showing(i"constr type $mt with $argTypes%, % in $cls = $result", capt) - else ownType - end instantiate - - override def recheckClosure(tree: Closure, pt: Type)(using Context): Type = - val cs = capturedVars(tree.meth.symbol) - capt.println(i"typing closure $tree with cvs $cs") - super.recheckClosure(tree, pt).capturing(cs) - .showing(i"rechecked $tree / $pt = $result", capt) - - /** Additionally to normal processing, update types of closures if the expected type - * is a function with only pure parameters. In that case, make the anonymous function - * also have the same parameters as the prototype. - * TODO: Develop a clearer rationale for this. - * TODO: Can we generalize this to arbitrary parameters? - * Currently some tests fail if we do this. (e.g. neg.../stackAlloc.scala, others) - */ - override def recheckBlock(block: Block, pt: Type)(using Context): Type = - block match - case closureDef(mdef) => - pt.dealias match - case defn.FunctionOf(ptformals, _, _, _) - if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => - // Redo setup of the anonymous function so that formal parameters don't - // get capture sets. This is important to avoid false widenings to `*` - // when taking the base type of the actual closures's dependent function - // type so that it conforms to the expected non-dependent function type. - // See withLogFile.scala for a test case. - val meth = mdef.symbol - // First, undo the previous setup which installed a completer for `meth`. 
- atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) - .installAfter(preRecheckPhase) - - // Next, update all parameter symbols to match expected formals - meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => - psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) - } - // Next, update types of parameter ValDefs - mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => - val ValDef(_, tpt, _) = param: @unchecked - tpt.rememberTypeAlways(pformal) - } - // Next, install a new completer reflecting the new parameters for the anonymous method - val mt = meth.info.asInstanceOf[MethodType] - val completer = new LazyType: - def complete(denot: SymDenotation)(using Context) = - denot.info = mt.companion(ptformals, mdef.tpt.knownType) - .showing(i"simplify info of $meth to $result", capt) - recheckDef(mdef, meth) - meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) - case _ => - case _ => - super.recheckBlock(block, pt) - - override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = - try - if !sym.is(Module) then // Modules are checked by checking the module class - super.recheckValDef(tree, sym) - finally - if !sym.is(Param) then - // Parameters with inferred types belong to anonymous methods. We need to wait - // for more info from the context, so we cannot interpolate. Note that we cannot - // expect to have all necessary info available at the point where the anonymous - // function is compiled since we do not propagate expected types into blocks. - interpolateVarsIn(tree.tpt) - - override def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Unit = - if !Synthetics.isExcluded(sym) then - val saved = curEnv - val localSet = capturedVars(sym) - if !localSet.isAlwaysEmpty then curEnv = Env(sym, nestedInOwner = false, localSet, isBoxed = false, curEnv) - try super.recheckDefDef(tree, sym) - finally - interpolateVarsIn(tree.tpt) - curEnv = saved - - /** Class-specific capture set relations: - * 1. The capture set of a class includes the capture sets of its parents. - * 2. The capture set of the self type of a class includes the capture set of the class. - * 3. The capture set of the self type of a class includes the capture set of every class parameter, - * unless the parameter is marked @constructorOnly. - */ - override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = - val saved = curEnv - val localSet = capturedVars(cls) - for parent <- impl.parents do // (1) - checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos) - if !localSet.isAlwaysEmpty then curEnv = Env(cls, nestedInOwner = false, localSet, isBoxed = false, curEnv) - try - val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") - checkSubset(localSet, thisSet, tree.srcPos) // (2) - for param <- cls.paramGetters do - if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then - checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) - for pureBase <- cls.pureBaseClass do - checkSubset(thisSet, - CaptureSet.empty.withDescription(i"of pure base class $pureBase"), - tree.srcPos) - super.recheckClassDef(tree, impl, cls) - finally - curEnv = saved - - /** If type is of the form `T @requiresCapability(x)`, - * mark `x` as free in the current environment. This is used to require the - * correct `CanThrow` capability when encountering a `throw`. 
- */ - override def recheckTyped(tree: Typed)(using Context): Type = - tree.tpt.tpe match - case AnnotatedType(_, annot) if annot.symbol == defn.RequiresCapabilityAnnot => - annot.tree match - case Apply(_, cap :: Nil) => - markFree(cap.symbol, tree.srcPos) - case _ => - case _ => - super.recheckTyped(tree) - - /* Currently not needed, since capture checking takes place after ElimByName. - * Keep around in case we need to get back to it - def recheckByNameArg(tree: Tree, pt: Type)(using Context): Type = - val closureDef(mdef) = tree: @unchecked - val arg = mdef.rhs - val localSet = CaptureSet.Var() - curEnv = Env(mdef.symbol, localSet, isBoxed = false, curEnv) - val result = - try - inContext(ctx.withOwner(mdef.symbol)) { - recheckStart(arg, pt).capturing(localSet) - } - finally curEnv = curEnv.outer - recheckFinish(result, arg, pt) - */ - - /** If expected type `pt` is boxed and the tree is a function or a reference, - * don't propagate free variables. - * Otherwise, if the result type is boxed, simulate an unboxing by - * adding all references in the boxed capture set to the current environment. - */ - override def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = - if tree.isTerm && pt.isBoxedCapturing then - val saved = curEnv - - tree match - case _: RefTree | closureDef(_) => - curEnv = Env(curEnv.owner, nestedInOwner = false, CaptureSet.Var(), isBoxed = true, curEnv) - case _ => - - try super.recheck(tree, pt) - finally curEnv = saved - else - val res = super.recheck(tree, pt) - if tree.isTerm then markFree(res.boxedCaptureSet, tree.srcPos) - res - - /** If `tree` is a reference or an application where the result type refers - * to an enclosing class or method parameter of the reference, check that the result type - * does not capture the universal capability. This is justified since the - * result type would have to be implicitly unboxed. - * TODO: Can we find a cleaner way to achieve this? Logically, this should be part - * of simulated boxing and unboxing. - */ - override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = - val typeToCheck = tree match - case _: Ident | _: Select | _: Apply | _: TypeApply if tree.symbol.unboxesResult => - tpe - case _: Try => - tpe - case _ => - NoType - def checkNotUniversal(tp: Type): Unit = tp.widenDealias match - case wtp @ CapturingType(parent, refs) => - refs.disallowRootCapability { () => - val kind = if tree.isInstanceOf[ValDef] then "mutable variable" else "expression" - report.error( - em"""The $kind's type $wtp is not allowed to capture the root capability `*`. 
- |This usually means that a capability persists longer than its allowed lifetime.""", - tree.srcPos) - } - checkNotUniversal(parent) - case _ => - checkNotUniversal(typeToCheck) - super.recheckFinish(tpe, tree, pt) - - /** Massage `actual` and `expected` types using the methods below before checking conformance */ - override def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = - val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) - val actual1 = adaptBoxed(actual, expected1, tree.srcPos) - //println(i"check conforms $actual1 <<< $expected1") - super.checkConformsExpr(actual1, expected1, tree) - - private def toDepFun(args: List[Type], resultType: Type, isContextual: Boolean, isErased: Boolean)(using Context): Type = - MethodType.companion(isContextual = isContextual, isErased = isErased)(args, resultType) - .toFunctionType(isJava = false, alwaysDependent = true) - - /** Turn `expected` into a dependent function when `actual` is dependent. */ - private def alignDependentFunction(expected: Type, actual: Type)(using Context): Type = - def recur(expected: Type): Type = expected.dealias match - case expected @ CapturingType(eparent, refs) => - CapturingType(recur(eparent), refs, boxed = expected.isBoxed) - case expected @ defn.FunctionOf(args, resultType, isContextual, isErased) - if defn.isNonRefinedFunction(expected) && defn.isFunctionType(actual) && !defn.isNonRefinedFunction(actual) => - val expected1 = toDepFun(args, resultType, isContextual, isErased) - expected1 - case _ => - expected - recur(expected) - - /** For the expected type, implement the rule outlined in #14390: - * - when checking an expression `a: Ca Ta` against an expected type `Ce Te`, - * - where the capture set `Ce` contains Cls.this, - * - and where and all method definitions enclosing `a` inside class `Cls` - * have only pure parameters, - * - add to `Ce` all references to variables or this-references in `Ca` - * that are outside `Cls`. These are all accessed through `Cls.this`, - * so we can assume they are already accounted for by `Ce` and adding - * them explicitly to `Ce` changes nothing. 
- */ - private def addOuterRefs(expected: Type, actual: Type)(using Context): Type = - def isPure(info: Type): Boolean = info match - case info: PolyType => isPure(info.resType) - case info: MethodType => info.paramInfos.forall(_.captureSet.isAlwaysEmpty) && isPure(info.resType) - case _ => true - def isPureContext(owner: Symbol, limit: Symbol): Boolean = - if owner == limit then true - else if !owner.exists then false - else isPure(owner.info) && isPureContext(owner.owner, limit) - def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = - (erefs /: erefs.elems) { (erefs, eref) => - eref match - case eref: ThisType if isPureContext(ctx.owner, eref.cls) => - erefs ++ arefs.filter { - case aref: TermRef => eref.cls.isProperlyContainedIn(aref.symbol.owner) - case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) - case _ => false - } - case _ => - erefs - } - expected match - case CapturingType(ecore, erefs) => - val erefs1 = augment(erefs, actual.captureSet) - if erefs1 ne erefs then - capt.println(i"augmented $expected from ${actual.captureSet} --> $erefs1") - expected.derivedCapturingType(ecore, erefs1) - case _ => - expected - - /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions */ - def adaptBoxed(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = - - /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) - * to `expected` type. - * It returns the adapted type along with the additionally captured variable - * during adaptation. - * @param reconstruct how to rebuild the adapted function type - */ - def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, - covariant: Boolean, boxed: Boolean, - reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = - val saved = curEnv - curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) - - try - val (eargs, eres) = expected.dealias.stripCapturing match - case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) - case expected: MethodType => (expected.paramInfos, expected.resType) - case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) - case _ => (aargs.map(_ => WildcardType), WildcardType) - val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } - val ares1 = adapt(ares, eres, covariant) - - val resTp = - if (ares1 eq ares) && (aargs1 eq aargs) then actual - else reconstruct(aargs1, ares1) - - (resTp, curEnv.captured) - finally - curEnv = saved - - /** Adapt type function type `actual` to the expected type. 
- * @see [[adaptFun]] - */ - def adaptTypeFun( - actual: Type, ares: Type, expected: Type, - covariant: Boolean, boxed: Boolean, - reconstruct: Type => Type): (Type, CaptureSet) = - val saved = curEnv - curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) - - try - val eres = expected.dealias.stripCapturing match - case RefinedType(_, _, rinfo: PolyType) => rinfo.resType - case expected: PolyType => expected.resType - case _ => WildcardType - - val ares1 = adapt(ares, eres, covariant) - - val resTp = - if ares1 eq ares then actual - else reconstruct(ares1) - - (resTp, curEnv.captured) - finally - curEnv = saved - end adaptTypeFun - - def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = - val arrow = if covariant then "~~>" else "<~~" - i"adapting $actual $arrow $expected" - - /** Destruct a capturing type `tp` to a tuple (cs, tp0, boxed), - * where `tp0` is not a capturing type. - * - * If `tp` is a nested capturing type, the return tuple always represents - * the innermost capturing type. The outer capture annotations can be - * reconstructed with the returned function. - */ - def destructCapturingType(tp: Type, reconstruct: Type -> Context ?-> Type = (x: Type) => x) // !cc! need monomorphic default argument - : (Type, CaptureSet, Boolean, Type -> Context ?-> Type) = - tp.dealias match - case tp @ CapturingType(parent, cs) => - if parent.dealias.isCapturingType then - destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) - else - (parent, cs, tp.isBoxed, reconstruct) - case actual => - (actual, CaptureSet(), false, reconstruct) - - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { - if expected.isInstanceOf[WildcardType] then actual - else - val (parent, cs, actualIsBoxed, recon: (Type -> Context ?-> Type)) = destructCapturingType(actual) - - val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing - val insertBox = needsAdaptation && covariant != actualIsBoxed - - val (parent1, cs1) = parent match { - case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => - val (parent1, leaked) = adaptFun(parent, args.init, args.last, expected, covariant, insertBox, - (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - (parent1, leaked ++ cs) - case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => - // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) - val (parent1, leaked) = adaptFun(parent, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, - (aargs1, ares1) => - rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) - .toFunctionType(isJava = false, alwaysDependent = true)) - (parent1, leaked ++ cs) - case actual: MethodType => - val (parent1, leaked) = adaptFun(parent, actual.paramInfos, actual.resType, expected, covariant, insertBox, - (aargs1, ares1) => - actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) - (parent1, leaked ++ cs) - case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => - val (parent1, leaked) = adaptTypeFun(parent, rinfo.resType, expected, covariant, insertBox, - ares1 => - val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) - val actual1 = actual.derivedRefinedType(p, nme, rinfo1) - actual1 - ) - (parent1, leaked ++ cs) - case _ => - (parent, cs) - } - - 
if needsAdaptation then - val criticalSet = // the set which is not allowed to have `*` - if covariant then cs1 // can't box with `*` - else expected.captureSet // can't unbox with `*` - if criticalSet.isUniversal && expected.isValueType then - // We can't box/unbox the universal capability. Leave `actual` as it is - // so we get an error in checkConforms. This tends to give better error - // messages than disallowing the root capability in `criticalSet`. - if ctx.settings.YccDebug.value then - println(i"cannot box/unbox $actual vs $expected") - actual - else - // Disallow future addition of `*` to `criticalSet`. - criticalSet.disallowRootCapability { () => - report.error( - em"""$actual cannot be box-converted to $expected - |since one of their capture sets contains the root capability `*`""", - pos) - } - if !insertBox then // unboxing - markFree(criticalSet, pos) - recon(CapturingType(parent1, cs1, !actualIsBoxed)) - else - recon(CapturingType(parent1, cs1, actualIsBoxed)) - } - - var actualw = actual.widenDealias - actual match - case ref: CaptureRef if ref.isTracked => - actualw match - case CapturingType(p, refs) => - actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) - // given `a: C T`, improve `C T` to `{a} T` - case _ => - case _ => - val adapted = adapt(actualw, expected, covariant = true) - if adapted ne actualw then - capt.println(i"adapt boxed $actual vs $expected ===> $adapted") - adapted - else actual - end adaptBoxed - - override def checkUnit(unit: CompilationUnit)(using Context): Unit = - Setup(preRecheckPhase, thisPhase, recheckDef) - .traverse(ctx.compilationUnit.tpdTree) - //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") - withCaptureSetsExplained { - super.checkUnit(unit) - checkSelfTypes(unit.tpdTree) - postCheck(unit.tpdTree) - if ctx.settings.YccDebug.value then - show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing - } - - /** Check that self types of subclasses conform to self types of super classes. - * (See comment below how this is achieved). The check assumes that classes - * without an explicit self type have the universal capture set `{*}` on the - * self type. If a class without explicit self type is not `effectivelyFinal` - * it is checked that the inferred self type is universal, in order to assure - * that joint and separate compilation give the same result. - */ - def checkSelfTypes(unit: tpd.Tree)(using Context): Unit = - val parentTrees = mutable.HashMap[Symbol, List[Tree]]() - unit.foreachSubTree { - case cdef @ TypeDef(_, impl: Template) => parentTrees(cdef.symbol) = impl.parents - case _ => - } - // Perform self type checking. The problem here is that `checkParents` compares a - // self type of a subclass with the result of an asSeenFrom of the self type of the - // superclass. That's no good. We need to constrain the original superclass self type - // capture set, not the set mapped by asSeenFrom. - // - // Instead, we proceed from parent classes to child classes. For every class - // we first check its parents, and then interpolate the self type to an - // upper approximation that satisfies all constraints on its capture set. - // That means all capture sets of parent self types are constants, so mapping - // them with asSeenFrom is OK. 
- while parentTrees.nonEmpty do - val roots = parentTrees.keysIterator.filter { - cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) - } - assert(roots.nonEmpty) - for case root: ClassSymbol <- roots do - checkSelfAgainstParents(root, root.baseClasses) - val selfType = root.asClass.classInfo.selfType - interpolator(startingVariance = -1).traverse(selfType) - if !root.isEffectivelySealed then - def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = - cls.baseClasses.tail.exists { psym => - val selfType = psym.asClass.givenSelfType - selfType.exists && selfType.captureSet.elems == refs.elems - } - selfType match - case CapturingType(_, refs: CaptureSet.Var) - if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => - // Forbid inferred self types unless they are already implied by an explicit - // self type in a parent. - report.error( - em"""$root needs an explicitly declared self type since its - |inferred self type $selfType - |is not visible in other compilation units that define subclasses.""", - root.srcPos) - case _ => - parentTrees -= root - capt.println(i"checked $root with $selfType") - end checkSelfTypes - - /** Heal ill-formed capture sets in the type parameter. - * - * We can push parameter refs into a capture set in type parameters - * that this type parameter can't see. - * For example, when capture checking the following expression: - * - * def usingLogFile[T](op: (f: {*} File) => T): T = ... - * - * usingLogFile[box ?1 () -> Unit] { (f: {*} File) => () => { f.write(0) } } - * - * We may propagate `f` into ?1, making ?1 ill-formed. - * This also causes soundness issues, since `f` in ?1 should be widened to `*`, - * giving rise to an error that `*` cannot be included in a boxed capture set. - * - * To solve this, we still allow ?1 to capture parameter refs like `f`, but - * compensate this by pushing the widened capture set of `f` into ?1. - * This solves the soundness issue caused by the ill-formness of ?1. - */ - private def healTypeParam(tree: Tree)(using Context): Unit = - val checker = new TypeTraverser: - private def isAllowed(ref: CaptureRef): Boolean = ref match - case ref: TermParamRef => allowed.contains(ref) - case _ => true - - // Widen the given term parameter refs x₁ : C₁ S₁ , ⋯ , xₙ : Cₙ Sₙ to their capture sets C₁ , ⋯ , Cₙ. - // - // If in these capture sets there are any capture references that are term parameter references we should avoid, - // we will widen them recursively. 
- private def widenParamRefs(refs: List[TermParamRef]): List[CaptureSet] = - @scala.annotation.tailrec - def recur(todos: List[TermParamRef], acc: List[CaptureSet]): List[CaptureSet] = - todos match - case Nil => acc - case ref :: rem => - val cs = ref.captureSetOfInfo - val nextAcc = cs.filter(isAllowed(_)) :: acc - val nextRem: List[TermParamRef] = (cs.elems.toList.filter(!isAllowed(_)) ++ rem).asInstanceOf - recur(nextRem, nextAcc) - recur(refs, Nil) - - private def healCaptureSet(cs: CaptureSet): Unit = - val toInclude = widenParamRefs(cs.elems.toList.filter(!isAllowed(_)).asInstanceOf) - toInclude.foreach(checkSubset(_, cs, tree.srcPos)) - - private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty - - def traverse(tp: Type) = - tp match - case CapturingType(parent, refs) => - healCaptureSet(refs) - traverse(parent) - case tp @ RefinedType(parent, rname, rinfo: MethodType) if defn.isFunctionType(tp) => - traverse(rinfo) - case tp: TermLambda => - val saved = allowed - try - tp.paramRefs.foreach(allowed += _) - traverseChildren(tp) - finally allowed = saved - case _ => - traverseChildren(tp) - - if tree.isInstanceOf[InferredTypeTree] then - checker.traverse(tree.knownType) - end healTypeParam - - /** Perform the following kinds of checks - * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. - * - Check that externally visible `val`s or `def`s have empty capture sets. If not, - * suggest an explicit type. This is so that separate compilation (where external - * symbols have empty capture sets) gives the same results as joint compilation. - * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. - * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. - */ - def postCheck(unit: tpd.Tree)(using Context): Unit = - unit.foreachSubTree { - case _: InferredTypeTree => - case tree: TypeTree if !tree.span.isZeroExtent => - tree.knownType.foreachPart { tp => - checkWellformedPost(tp, tree.srcPos) - tp match - case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot => - warnIfRedundantCaptureSet(annot.tree) - case _ => - } - case t: ValOrDefDef - if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) => - val sym = t.symbol - val isLocal = - sym.owner.ownersIterator.exists(_.isTerm) - || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) - def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly - sym.is(Private) // private symbols can always have inferred types - || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be - // too annoying. This is a hole since a defualt getter's result type - // might leak into a type variable. 
- || // non-local symbols cannot have inferred types since external capture types are not inferred - isLocal // local symbols still need explicit types if - && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference - def isNotPureThis(ref: CaptureRef) = ref match { - case ref: ThisType => !ref.cls.isPureClass - case _ => true - } - if !canUseInferred then - val inferred = t.tpt.knownType - def checkPure(tp: Type) = tp match - case CapturingType(_, refs) - if !refs.elems.filter(isNotPureThis).isEmpty => - val resultStr = if t.isInstanceOf[DefDef] then " result" else "" - report.error( - em"""Non-local $sym cannot have an inferred$resultStr type - |$inferred - |with non-empty capture set $refs. - |The type needs to be declared explicitly.""".withoutDisambiguation(), - t.srcPos) - case _ => - inferred.foreachPart(checkPure, StopAt.Static) - case t @ TypeApply(fun, args) => - fun.knownType.widen match - case tl: PolyType => - val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => - arg.withType(arg.knownType.forceBoxStatus( - bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) - } - checkBounds(normArgs, tl) - case _ => - - args.foreach(healTypeParam(_)) - case _ => - } - if !ctx.reporter.errorsReported then - // We dont report errors here if previous errors were reported, because other - // errors often result in bad applied types, but flagging these bad types gives - // often worse error messages than the original errors. - val checkApplied = new TreeTraverser: - def traverse(t: Tree)(using Context) = t match - case tree: InferredTypeTree => - case tree: New => - case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) - case _ => traverseChildren(t) - checkApplied.traverse(unit) - end CaptureChecker -end CheckCaptures diff --git a/tests/pos-with-compiler-cc/dotc/cc/Setup.scala b/tests/pos-with-compiler-cc/dotc/cc/Setup.scala deleted file mode 100644 index a91831022984..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/Setup.scala +++ /dev/null @@ -1,482 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core._ -import Phases.*, DenotTransformers.*, SymDenotations.* -import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* -import Types.*, StdNames.* -import config.Printers.capt -import ast.tpd -import transform.Recheck.* -import CaptureSet.IdentityCaptRefMap -import Synthetics.isExcluded - -/** A tree traverser that prepares a compilation unit to be capture checked. - * It does the following: - * - For every inferred type, drop any retains annotations, - * add capture sets to all its parts, add refinements to class types and function types. - * (c.f. mapInferred) - * - For explicit capturing types, expand throws aliases to the underlying (pure) function, - * and add some implied capture sets to curried functions (c.f. expandThrowsAlias, expandAbbreviations). - * - Add capture sets to self types of classes and objects, unless the self type was written explicitly. - * - Box the types of mutable variables and type arguments to methods (type arguments of types - * are boxed on access). - * - Link the external types of val and def symbols with the inferred types based on their parameter symbols. 
- */ -class Setup( - preRecheckPhase: DenotTransformer, - thisPhase: DenotTransformer, - recheckDef: (tpd.ValOrDefDef, Symbol) => Context ?=> Unit) -extends tpd.TreeTraverser: - import tpd.* - - /** Create dependent function with underlying function class `tycon` and given - * arguments `argTypes` and result `resType`. - */ - private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = - MethodType.companion( - isContextual = defn.isContextFunctionClass(tycon.classSymbol), - isErased = defn.isErasedFunctionClass(tycon.classSymbol) - )(argTypes, resType) - .toFunctionType(isJava = false, alwaysDependent = true) - - /** If `tp` is an unboxed capturing type or a function returning an unboxed capturing type, - * convert it to be boxed. - */ - private def box(tp: Type)(using Context): Type = - def recur(tp: Type): Type = tp.dealias match - case tp @ CapturingType(parent, refs) if !tp.isBoxed => - tp.boxed - case tp1 @ AppliedType(tycon, args) if defn.isNonRefinedFunction(tp1) => - val res = args.last - val boxedRes = recur(res) - if boxedRes eq res then tp - else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) - case tp1 @ RefinedType(_, _, rinfo) if defn.isFunctionType(tp1) => - val boxedRinfo = recur(rinfo) - if boxedRinfo eq rinfo then tp - else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) - case tp1: MethodOrPoly => - val res = tp1.resType - val boxedRes = recur(res) - if boxedRes eq res then tp - else tp1.derivedLambdaType(resType = boxedRes) - case _ => tp - tp match - case tp: MethodOrPoly => tp // don't box results of methods outside refinements - case _ => recur(tp) - - /** Perform the following transformation steps everywhere in a type: - * 1. Drop retains annotations - * 2. Turn plain function types into dependent function types, so that - * we can refer to their parameters in capture sets. Currently this is - * only done at the toplevel, i.e. for function types that are not - * themselves argument types of other function types. Without this restriction - * pos.../lists.scala and pos/...curried-shorthands.scala fail. - * Need to figure out why. - * 3. Refine other class types C by adding capture set variables to their parameter getters - * (see addCaptureRefinements) - * 4. Add capture set variables to all types that can be tracked - * - * Polytype bounds are only cleaned using step 1, but not otherwise transformed. - */ - private def mapInferred(using DetachedContext) = new TypeMap: - - /** Drop @retains annotations everywhere */ - object cleanup extends TypeMap: - def apply(t: Type) = t match - case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => - apply(parent) - case _ => - mapOver(t) - - /** Refine a possibly applied class type C where the class has tracked parameters - * x_1: T_1, ..., x_n: T_n to C { val x_1: CV_1 T_1, ..., val x_n: CV_n T_n } - * where CV_1, ..., CV_n are fresh capture sets. - */ - def addCaptureRefinements(tp: Type): Type = tp match - case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => - tp.typeSymbol match - case cls: ClassSymbol - if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => - // We assume that Java classes can refer to capturing Scala types only indirectly, - // using type parameters. Hence, no need to refine them. 
- cls.paramGetters.foldLeft(tp) { (core, getter) => - if getter.termRef.isTracked then - val getterType = tp.memberInfo(getter).strippedDealias - RefinedType(core, getter.name, CapturingType(getterType, CaptureSet.Var())) - .showing(i"add capture refinement $tp --> $result", capt) - else - core - } - case _ => tp - case _ => tp - - private def superTypeIsImpure(tp: Type): Boolean = { - tp.dealias match - case CapturingType(_, refs) => - !refs.isAlwaysEmpty - case tp: (TypeRef | AppliedType) => - val sym = tp.typeSymbol - if sym.isClass then - sym == defn.AnyClass - // we assume Any is a shorthand of {*} Any, so if Any is an upper - // bound, the type is taken to be impure. - else superTypeIsImpure(tp.superType) - case tp: (RefinedOrRecType | MatchType) => - superTypeIsImpure(tp.underlying) - case tp: AndType => - superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) - case tp: OrType => - superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) - case _ => - false - }.showing(i"super type is impure $tp = $result", capt) - - /** Should a capture set variable be added on type `tp`? */ - def needsVariable(tp: Type): Boolean = { - tp.typeParams.isEmpty && tp.match - case tp: (TypeRef | AppliedType) => - val tp1 = tp.dealias - if tp1 ne tp then needsVariable(tp1) - else - val sym = tp1.typeSymbol - if sym.isClass then - !sym.isPureClass && sym != defn.AnyClass - else superTypeIsImpure(tp1) - case tp: (RefinedOrRecType | MatchType) => - needsVariable(tp.underlying) - case tp: AndType => - needsVariable(tp.tp1) && needsVariable(tp.tp2) - case tp: OrType => - needsVariable(tp.tp1) || needsVariable(tp.tp2) - case CapturingType(parent, refs) => - needsVariable(parent) - && refs.isConst // if refs is a variable, no need to add another - && !refs.isUniversal // if refs is {*}, an added variable would not change anything - case _ => - false - }.showing(i"can have inferred capture $tp = $result", capt) - - /** Add a capture set variable to `tp` if necessary, or maybe pull out - * an embedded capture set variable from a part of `tp`. - */ - def addVar(tp: Type) = tp match - case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => - CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) - case tp: RecType => - tp.parent match - case parent @ CapturingType(parent1, refs) => - CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) - case _ => - tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created - // by `mapInferred`. Hence if the underlying type admits capture variables - // a variable was already added, and the first case above would apply. 
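// For illustration, a rough sketch of what the two cases above do: an existing
// capture set is hoisted out of the enclosing refined or recursive type, so
// that, schematically,
//     ({c} P) { val x: T }    becomes    {c} (P { val x: T })
// and the capture set ends up on the outermost type constructor.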
- case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => - assert(refs1.asVar.elems.isEmpty) - assert(refs2.asVar.elems.isEmpty) - assert(tp1.isBoxed == tp2.isBoxed) - CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) - case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => - assert(refs1.asVar.elems.isEmpty) - assert(refs2.asVar.elems.isEmpty) - assert(tp1.isBoxed == tp2.isBoxed) - CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) - case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => - CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) - case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => - CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) - case _ if needsVariable(tp) => - val cs = tp.dealias match - case CapturingType(_, refs) => CaptureSet.Var(refs.elems) - case _ => CaptureSet.Var() - CapturingType(tp, cs) - case _ => - tp - - private var isTopLevel = true - - private def mapNested(ts: List[Type]): List[Type] = - val saved = isTopLevel - isTopLevel = false - try ts.mapConserve(this) finally isTopLevel = saved - - def apply(t: Type) = - val tp = expandThrowsAlias(t) - val tp1 = tp match - case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => - // Drop explicit retains annotations - apply(parent) - case tp @ AppliedType(tycon, args) => - val tycon1 = this(tycon) - if defn.isNonRefinedFunction(tp) then - // Convert toplevel generic function types to dependent functions - val args0 = args.init - var res0 = args.last - val args1 = mapNested(args0) - val res1 = this(res0) - if isTopLevel then - depFun(tycon1, args1, res1) - .showing(i"add function refinement $tp --> $result", capt) - else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then - tp - else - tp.derivedAppliedType(tycon1, args1 :+ res1) - else - tp.derivedAppliedType(tycon1, args.mapConserve(arg => this(arg))) - case tp @ RefinedType(core, rname, rinfo) if defn.isFunctionType(tp) => - val rinfo1 = apply(rinfo) - if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) - else tp - case tp: MethodType => - tp.derivedLambdaType( - paramInfos = mapNested(tp.paramInfos), - resType = this(tp.resType)) - case tp: TypeLambda => - // Don't recurse into parameter bounds, just cleanup any stray retains annotations - tp.derivedLambdaType( - paramInfos = tp.paramInfos.mapConserve(cleanup(_).bounds), - resType = this(tp.resType)) - case _ => - mapOver(tp) - addVar(addCaptureRefinements(tp1)) - end apply - end mapInferred - - private def transformInferredType(tp: Type, boxed: Boolean)(using Context): Type = - val tp1 = mapInferred(tp) - if boxed then box(tp1) else tp1 - - /** Expand some aliases of function types to the underlying functions. - * Right now, these are only $throws aliases, but this could be generalized. 
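// For illustration, a rough sketch of the expansion performed by
// expandThrowsAlias below, assuming the standard library's `$throws` alias:
// a type such as
//     R $throws IOException
// is rewritten to an erased contextual function over the capability, roughly
//     CanThrow[IOException] ?-> R
// so that the ability to throw becomes an (erased) context parameter.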
- */ - private def expandThrowsAlias(tp: Type)(using Context) = tp match - case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => - // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` - defn.FunctionOf(defn.CanThrowClass.typeRef.appliedTo(exc) :: Nil, res, isContextual = true, isErased = true) - case _ => tp - - private def expandThrowsAliases(using DetachedContext) = new TypeMap: - def apply(t: Type) = t match - case _: AppliedType => - val t1 = expandThrowsAlias(t) - if t1 ne t then apply(t1) else mapOver(t) - case _: LazyRef => - t - case t @ AnnotatedType(t1, ann) => - // Don't map capture sets, since that would implicitly normalize sets that - // are not well-formed. - t.derivedAnnotatedType(apply(t1), ann) - case _ => - mapOver(t) - - /** Fill in capture sets of curried function types from left to right, using - * a combination of the following two rules: - * - * 1. Expand `{c} (x: A) -> (y: B) -> C` - * to `{c} (x: A) -> {c} (y: B) -> C` - * 2. Expand `(x: A) -> (y: B) -> C` where `x` is tracked - * to `(x: A) -> {x} (y: B) -> C` - * - * TODO: Should we also propagate capture sets to the left? - */ - private def expandAbbreviations(using DetachedContext) = new TypeMap: - - /** Propagate `outerCs` as well as all tracked parameters as capture set to the result type - * of the dependent function type `tp`. - */ - def propagateDepFunctionResult(tp: Type, outerCs: CaptureSet): Type = tp match - case RefinedType(parent, nme.apply, rinfo: MethodType) => - val localCs = CaptureSet(rinfo.paramRefs.filter(_.isTracked)*) - val rinfo1 = rinfo.derivedLambdaType( - resType = propagateEnclosing(rinfo.resType, CaptureSet.empty, outerCs ++ localCs)) - if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) - else tp - - /** If `tp` is a function type: - * - add `outerCs` as its capture set, - * - propagate `currentCs`, `outerCs`, and all tracked parameters of `tp` to the right. 
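// A worked example of the two expansion rules stated above for
// expandAbbreviations (a sketch; `c` stands for some tracked capability and
// `A`, `B`, `C` for ordinary class types):
//   rule 1:  {c} (x: A) -> (y: B) -> C    expands to  {c} (x: A) -> {c} (y: B) -> C
//   rule 2:  (x: {*} A) -> (y: B) -> C    expands to  (x: {*} A) -> {x} (y: B) -> C
// In rule 2 the parameter `x` counts as tracked, for instance because its type
// `{*} A` carries a non-empty capture set, so `x` is added to the capture set
// of the inner function.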
- */ - def propagateEnclosing(tp: Type, currentCs: CaptureSet, outerCs: CaptureSet): Type = tp match - case tp @ AppliedType(tycon, args) if defn.isFunctionClass(tycon.typeSymbol) => - val tycon1 = this(tycon) - val args1 = args.init.mapConserve(this) - val tp1 = - if args1.exists(!_.captureSet.isAlwaysEmpty) then - val propagated = propagateDepFunctionResult( - depFun(tycon, args1, args.last), currentCs ++ outerCs) - propagated match - case RefinedType(_, _, mt: MethodType) => - if mt.isCaptureDependent then propagated - else - // No need to introduce dependent type, switch back to generic function type - tp.derivedAppliedType(tycon1, args1 :+ mt.resType) - else - val resType1 = propagateEnclosing( - args.last, CaptureSet.empty, currentCs ++ outerCs) - tp.derivedAppliedType(tycon1, args1 :+ resType1) - tp1.capturing(outerCs) - case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionType(tp) => - propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) - .capturing(outerCs) - case _ => - mapOver(tp) - - def apply(tp: Type): Type = tp match - case CapturingType(parent, cs) => - tp.derivedCapturingType(propagateEnclosing(parent, cs, CaptureSet.empty), cs) - case _ => - propagateEnclosing(tp, CaptureSet.empty, CaptureSet.empty) - end expandAbbreviations - - private def transformExplicitType(tp: Type, boxed: Boolean)(using Context): Type = - val tp1 = expandThrowsAliases(if boxed then box(tp) else tp) - if tp1 ne tp then capt.println(i"expanded: $tp --> $tp1") - if ctx.settings.YccNoAbbrev.value then tp1 - else expandAbbreviations(tp1) - - /** Transform type of type tree, and remember the transformed type as the type the tree */ - private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = - if !tree.hasRememberedType then - tree.rememberType( - if tree.isInstanceOf[InferredTypeTree] && !exact - then transformInferredType(tree.tpe, boxed) - else transformExplicitType(tree.tpe, boxed)) - - /** Substitute parameter symbols in `from` to paramRefs in corresponding - * method or poly types `to`. We use a single BiTypeMap to do everything. 
- * @param from a list of lists of type or term parameter symbols of a curried method - * @param to a list of method or poly types corresponding one-to-one to the parameter lists - */ - private class SubstParams(from: List[List[Symbol]], to: List[LambdaType])(using DetachedContext) - extends DeepTypeMap, BiTypeMap: - - def apply(t: Type): Type = t match - case t: NamedType => - val sym = t.symbol - def outer(froms: List[List[Symbol]], tos: List[LambdaType]): Type = - def inner(from: List[Symbol], to: List[ParamRef]): Type = - if from.isEmpty then outer(froms.tail, tos.tail) - else if sym eq from.head then to.head - else inner(from.tail, to.tail) - if tos.isEmpty then t - else inner(froms.head, tos.head.paramRefs) - outer(from, to) - case _ => - mapOver(t) - - def inverse(t: Type): Type = t match - case t: ParamRef => - def recur(from: List[LambdaType], to: List[List[Symbol]]): Type = - if from.isEmpty then t - else if t.binder eq from.head then to.head(t.paramNum).namedType - else recur(from.tail, to.tail) - recur(to, from) - case _ => - mapOver(t) - end SubstParams - - /** Update info of `sym` for CheckCaptures phase only */ - private def updateInfo(sym: Symbol, info: Type)(using Context) = - sym.updateInfoBetween(preRecheckPhase, thisPhase, info) - - def traverse(tree: Tree)(using Context): Unit = - tree match - case tree: DefDef => - if isExcluded(tree.symbol) then - return - tree.tpt match - case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => - tree.paramss.foreach(traverse) - transformTT(tpt, boxed = false, exact = true) - traverse(tree.rhs) - //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") - case _ => - traverseChildren(tree) - case tree @ ValDef(_, tpt: TypeTree, _) => - transformTT(tpt, - boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed - exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set - ) - traverse(tree.rhs) - case tree @ TypeApply(fn, args) => - traverse(fn) - for case arg: TypeTree <- args do - transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed - case _ => - traverseChildren(tree) - tree match - case tree: TypeTree => - transformTT(tree, boxed = false, exact = false) // other types are not boxed - case tree: ValOrDefDef => - val sym = tree.symbol - - // replace an existing symbol info with inferred types where capture sets of - // TypeParamRefs and TermParamRefs put in correspondence by BiTypeMaps with the - // capture sets of the types of the method's parameter symbols and result type. 
- def integrateRT( - info: Type, // symbol info to replace - psymss: List[List[Symbol]], // the local (type and term) parameter symbols corresponding to `info` - prevPsymss: List[List[Symbol]], // the local parameter symbols seen previously in reverse order - prevLambdas: List[LambdaType] // the outer method and polytypes generated previously in reverse order - ): Type = - info match - case mt: MethodOrPoly => - val psyms = psymss.head - mt.companion(mt.paramNames)( - mt1 => - if !psyms.exists(_.isUpdatedAfter(preRecheckPhase)) && !mt.isParamDependent && prevLambdas.isEmpty then - mt.paramInfos - else - val subst = SubstParams(psyms :: prevPsymss, mt1 :: prevLambdas) - psyms.map(psym => subst(psym.info).asInstanceOf[mt.PInfo]), - mt1 => - integrateRT(mt.resType, psymss.tail, psyms :: prevPsymss, mt1 :: prevLambdas) - ) - case info: ExprType => - info.derivedExprType(resType = - integrateRT(info.resType, psymss, prevPsymss, prevLambdas)) - case _ => - val restp = tree.tpt.knownType - if prevLambdas.isEmpty then restp - else SubstParams(prevPsymss, prevLambdas)(restp) - - if tree.tpt.hasRememberedType && !sym.isConstructor then - val newInfo = integrateRT(sym.info, sym.paramSymss, Nil, Nil) - .showing(i"update info $sym: ${sym.info} --> $result", capt) - if newInfo ne sym.info then - val completer = new LazyType: - def complete(denot: SymDenotation)(using Context) = - denot.info = newInfo - recheckDef(tree, sym) - updateInfo(sym, completer) - case tree: Bind => - val sym = tree.symbol - updateInfo(sym, transformInferredType(sym.info, boxed = false)) - case tree: TypeDef => - tree.symbol match - case cls: ClassSymbol => - val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then - // add capture set to self type of nested classes if no self type is given explicitly - val localRefs = CaptureSet.Var() - val newInfo = ClassInfo(prefix, cls, ps, decls, - CapturingType(cinfo.selfType, localRefs) - .showing(i"inferred self type for $cls: $result", capt)) - updateInfo(cls, newInfo) - cls.thisType.asInstanceOf[ThisType].invalidateCaches() - if cls.is(ModuleClass) then - // if it's a module, the capture set of the module reference is the capture set of the self type - val modul = cls.sourceModule - updateInfo(modul, CapturingType(modul.info, localRefs)) - modul.termRef.invalidateCaches() - case _ => - val info = atPhase(preRecheckPhase)(tree.symbol.info) - val newInfo = transformExplicitType(info, boxed = false) - if newInfo ne info then - updateInfo(tree.symbol, newInfo) - capt.println(i"update info of ${tree.symbol} from $info to $newInfo") - case _ => - end traverse -end Setup diff --git a/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala b/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala deleted file mode 100644 index dacbd27e0f35..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala +++ /dev/null @@ -1,189 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Symbols.*, SymDenotations.*, Contexts.*, Flags.*, Types.*, Decorators.* -import StdNames.nme -import Names.Name -import NameKinds.DefaultGetterName -import Phases.checkCapturesPhase -import config.Printers.capt - -/** Classification and transformation methods for synthetic - * case class methods that need to be treated specially. - * In particular, compute capturing types for some of these methods which - * have inferred (result-)types that need to be established under separate - * compilation. 
- */ -object Synthetics: - private def isSyntheticCopyMethod(sym: SymDenotation)(using Context) = - sym.name == nme.copy && sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) - - private def isSyntheticCompanionMethod(sym: SymDenotation, names: Name*)(using Context): Boolean = - names.contains(sym.name) && sym.is(Synthetic) && sym.owner.is(Module) && sym.owner.companionClass.is(Case) - - private def isSyntheticCopyDefaultGetterMethod(sym: SymDenotation)(using Context) = sym.name match - case DefaultGetterName(nme.copy, _) => sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) - case _ => false - - /** Is `sym` a synthetic apply, copy, or copy default getter method? - * The types of these symbols are transformed in a special way without - * looking at the definitions's RHS - */ - def needsTransform(symd: SymDenotation)(using Context): Boolean = - isSyntheticCopyMethod(symd) - || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) - || isSyntheticCopyDefaultGetterMethod(symd) - || (symd.symbol eq defn.Object_eq) - || (symd.symbol eq defn.Object_ne) - - /** Method is excluded from regular capture checking. - * Excluded are synthetic class members - * - that override a synthesized case class symbol, or - * - the fromProduct method, or - * - members transformed specially as indicated by `needsTransform`. - */ - def isExcluded(sym: Symbol)(using Context): Boolean = - sym.is(Synthetic) - && sym.owner.isClass - && ( defn.caseClassSynthesized.exists( - ccsym => sym.overriddenSymbol(ccsym.owner.asClass) == ccsym) - || isSyntheticCompanionMethod(sym, nme.fromProduct) - || needsTransform(sym)) - - /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. - * An apply method in a case class like this: - * case class CC(a: {d} A, b: B, {*} c: C) - * would get type - * def apply(a': {d} A, b: B, {*} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } - * where `'` is used to indicate the difference between parameter symbol and refinement name. - * Analogous for the copy method. 
- */ - private def addCaptureDeps(info: Type)(using Context): Type = info match - case info: MethodType => - val trackedParams = info.paramRefs.filter(atPhase(checkCapturesPhase)(_.isTracked)) - def augmentResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = augmentResult(tp.resType)) - case _ => - val refined = trackedParams.foldLeft(tp) { (parent, pref) => - RefinedType(parent, pref.paramName, - CapturingType( - atPhase(ctx.phase.next)(pref.underlying.stripCapturing), - CaptureSet(pref))) - } - CapturingType(refined, CaptureSet(trackedParams*)) - if trackedParams.isEmpty then info - else augmentResult(info).showing(i"augment apply/copy type $info to $result", capt) - case info: PolyType => - info.derivedLambdaType(resType = addCaptureDeps(info.resType)) - case _ => - info - - /** Drop capture dependencies from the type of `apply` or `copy` method of a case class */ - private def dropCaptureDeps(tp: Type)(using Context): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = dropCaptureDeps(tp.resType)) - case CapturingType(parent, _) => - dropCaptureDeps(parent) - case RefinedType(parent, _, _) => - dropCaptureDeps(parent) - case _ => - tp - - /** Add capture information to the type of the default getter of a case class copy method */ - private def addDefaultGetterCapture(info: Type, owner: Symbol, idx: Int)(using Context): Type = info match - case info: MethodOrPoly => - info.derivedLambdaType(resType = addDefaultGetterCapture(info.resType, owner, idx)) - case info: ExprType => - info.derivedExprType(addDefaultGetterCapture(info.resType, owner, idx)) - case EventuallyCapturingType(parent, _) => - addDefaultGetterCapture(parent, owner, idx) - case info @ AnnotatedType(parent, annot) => - info.derivedAnnotatedType(addDefaultGetterCapture(parent, owner, idx), annot) - case _ if idx < owner.asClass.paramGetters.length => - val param = owner.asClass.paramGetters(idx) - val pinfo = param.info - atPhase(ctx.phase.next) { - if pinfo.captureSet.isAlwaysEmpty then info - else CapturingType(pinfo.stripCapturing, CaptureSet(param.termRef)) - } - case _ => - info - - /** Drop capture information from the type of the default getter of a case class copy method */ - private def dropDefaultGetterCapture(info: Type)(using Context): Type = info match - case info: MethodOrPoly => - info.derivedLambdaType(resType = dropDefaultGetterCapture(info.resType)) - case CapturingType(parent, _) => - parent - case info @ AnnotatedType(parent, annot) => - info.derivedAnnotatedType(dropDefaultGetterCapture(parent), annot) - case _ => - info - - /** Augment an unapply of type `(x: C): D` to `(x: {*} C): {x} D` */ - private def addUnapplyCaptures(info: Type)(using Context): Type = info match - case info: MethodType => - val paramInfo :: Nil = info.paramInfos: @unchecked - val newParamInfo = - CapturingType(paramInfo, CaptureSet.universal) - val trackedParam = info.paramRefs.head - def newResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = newResult(tp.resType)) - case _ => - CapturingType(tp, CaptureSet(trackedParam)) - info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = newResult(info.resType)) - .showing(i"augment unapply type $info to $result", capt) - case info: PolyType => - info.derivedLambdaType(resType = addUnapplyCaptures(info.resType)) - - /** Drop added capture information from the type of an `unapply` */ - private def dropUnapplyCaptures(info: Type)(using Context): Type = info match - case 
info: MethodType => - info.paramInfos match - case CapturingType(oldParamInfo, _) :: Nil => - def oldResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = oldResult(tp.resType)) - case CapturingType(tp, _) => - tp - info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) - case _ => - info - case info: PolyType => - info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) - - /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method - * of a case class, transform it to account for capture information. - * The method is run in phase CheckCaptures.Pre - * @pre needsTransform(sym) - */ - def transformToCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match - case DefaultGetterName(nme.copy, n) => - sym.copySymDenotation(info = addDefaultGetterCapture(sym.info, sym.owner, n)) - case nme.unapply => - sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) - case nme.apply | nme.copy => - sym.copySymDenotation(info = addCaptureDeps(sym.info)) - case n if n == nme.eq || n == nme.ne => - sym.copySymDenotation(info = - MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) - - /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method - * of a case class, transform it back to what it was before the CC phase. - * @pre needsTransform(sym) - */ - def transformFromCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match - case DefaultGetterName(nme.copy, n) => - sym.copySymDenotation(info = dropDefaultGetterCapture(sym.info)) - case nme.unapply => - sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) - case nme.apply | nme.copy => - sym.copySymDenotation(info = dropCaptureDeps(sym.info)) - case n if n == nme.eq || n == nme.ne => - sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) - -end Synthetics \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala deleted file mode 100644 index 51b261583feb..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools -package dotc.classpath - -import scala.language.unsafeNulls - -import java.net.URL -import scala.collection.mutable.ArrayBuffer -import scala.collection.immutable.ArraySeq -import dotc.util - -import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, EfficientClassPath } - -/** - * A classpath unifying multiple class- and sourcepath entries. - * The Classpath can obtain entries for classes and sources independently - * so it tries to do operations quite optimally - iterating only these collections - * which are needed in the given moment and only as far as it's necessary. 
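// A hypothetical usage sketch (the createAggregate factory is defined in the
// companion object near the end of this file; `dirCp` and `jarCp` are made-up
// names standing for any two ClassPath instances):
//   val cp = AggregateClassPath.createAggregate(dirCp, jarCp)
//   cp.findClassFile("scala.collection.immutable.List")  // first entry that has it wins
// Nested AggregateClassPath arguments are flattened into a single aggregate
// rather than being wrapped again.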
- * - * @param aggregates classpath instances containing entries which this class processes - */ -case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { - override def findClassFile(className: String): Option[AbstractFile] = { - val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClassFile(className)).collectFirst { - case Some(x) => x - } - } - private val packageIndex: collection.mutable.Map[String, Seq[ClassPath]] = collection.mutable.Map() - private def aggregatesForPackage(pkg: PackageName): Seq[ClassPath] = packageIndex.synchronized { - packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) - } - - override def findClass(className: String): Option[ClassRepresentation] = { - val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - - def findEntry(isSource: Boolean): Option[ClassRepresentation] = - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { - case Some(s: SourceFileEntry) if isSource => s - case Some(s: ClassFileEntry) if !isSource => s - } - - val classEntry = findEntry(isSource = false) - val sourceEntry = findEntry(isSource = true) - - (classEntry, sourceEntry) match { - case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file)) - case (c @ Some(_), _) => c - case (_, s) => s - } - } - - override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) - - override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct - - override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) - - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { - val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct - aggregatedPackages - } - - override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = - getDistinctEntries(_.classes(inPackage)) - - override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = - getDistinctEntries(_.sources(inPackage)) - - override private[dotty] def hasPackage(pkg: PackageName): Boolean = aggregates.exists(_.hasPackage(pkg)) - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = { - val packages: java.util.HashSet[PackageEntry] = new java.util.HashSet[PackageEntry]() - val classesAndSourcesBuffer = collection.mutable.ArrayBuffer[ClassRepresentation]() - val onPackage: PackageEntry => Unit = packages.add(_) - val onClassesAndSources: ClassRepresentation => Unit = classesAndSourcesBuffer += _ - - aggregates.foreach { cp => - try { - cp match { - case ecp: EfficientClassPath => - ecp.list(inPackage, onPackage, onClassesAndSources) - case _ => - val entries = cp.list(inPackage) - entries._1.foreach(entry => packages.add(entry)) - classesAndSourcesBuffer ++= entries._2 - } - } catch { - case ex: java.io.IOException => - val e = FatalError(ex.getMessage) - e.initCause(ex) - throw e - } - } - - val distinctPackages: Seq[PackageEntry] = { - val arr = packages.toArray(new Array[PackageEntry](packages.size())) - ArraySeq.unsafeWrapArray(arr) - } - val distinctClassesAndSources = mergeClassesAndSources(classesAndSourcesBuffer) - ClassPathEntries(distinctPackages, distinctClassesAndSources) - } - - /** - * Returns only one entry for each name. If there's both a source and a class entry, it - * creates an entry containing both of them. 
If there would be more than one class or source - * entries for the same class it always would use the first entry of each type found on a classpath. - */ - private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { - // based on the implementation from MergedClassPath - var count = 0 - val indices = util.HashMap[String, Int]() - val mergedEntries = new ArrayBuffer[ClassRepresentation](entries.size) - for { - entry <- entries - } { - val name = entry.name - if (indices.contains(name)) { - val index = indices(name) - val existing = mergedEntries(index) - - if (existing.binary.isEmpty && entry.binary.isDefined) - mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get) - if (existing.source.isEmpty && entry.source.isDefined) - mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get) - } - else { - indices(name) = count - mergedEntries += entry - count += 1 - } - } - if (mergedEntries.isEmpty) Nil else mergedEntries.toIndexedSeq - } - - private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { - val seenNames = util.HashSet[String]() - val entriesBuffer = new ArrayBuffer[EntryType](1024) - for { - cp <- aggregates - entry <- getEntries(cp) if !seenNames.contains(entry.name) - } - { - entriesBuffer += entry - seenNames += entry.name - } - entriesBuffer.toIndexedSeq - } -} - -object AggregateClassPath { - def createAggregate(parts: ClassPath*): ClassPath = { - val elems = new ArrayBuffer[ClassPath]() - parts foreach { - case AggregateClassPath(ps) => elems ++= ps - case p => elems += p - } - if (elems.size == 1) elems.head - else AggregateClassPath(elems.toIndexedSeq) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala deleted file mode 100644 index 176b6acf9c6c..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. 
- */ -package dotty.tools.dotc.classpath - -import dotty.tools.io.AbstractFile -import dotty.tools.io.ClassRepresentation - -case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { - def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) -} - -object ClassPathEntries { - val empty = ClassPathEntries(Seq.empty, Seq.empty) -} - -trait ClassFileEntry extends ClassRepresentation { - def file: AbstractFile -} - -trait SourceFileEntry extends ClassRepresentation { - def file: AbstractFile -} - -case class PackageName(dottedString: String) { - val dirPathTrailingSlashJar: String = FileUtils.dirPathInJar(dottedString) + "/" - - val dirPathTrailingSlash: String = - if (java.io.File.separatorChar == '/') - dirPathTrailingSlashJar - else - FileUtils.dirPath(dottedString) + java.io.File.separator - - def isRoot: Boolean = dottedString.isEmpty - - def entryName(entry: String): String = { - if (isRoot) entry else { - val builder = new java.lang.StringBuilder(dottedString.length + 1 + entry.length) - builder.append(dottedString) - builder.append('.') - builder.append(entry) - builder.toString - } - } -} - -trait PackageEntry { - def name: String -} - -private[dotty] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry { - final def fileName: String = file.name - def name: String = FileUtils.stripClassExtension(file.name) // class name - - def binary: Option[AbstractFile] = Some(file) - def source: Option[AbstractFile] = None -} - -private[dotty] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry { - final def fileName: String = file.name - def name: String = FileUtils.stripSourceExtension(file.name) - - def binary: Option[AbstractFile] = None - def source: Option[AbstractFile] = Some(file) -} - -private[dotty] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepresentation { - final def fileName: String = classFile.name - def name: String = FileUtils.stripClassExtension(classFile.name) - - def binary: Option[AbstractFile] = Some(classFile) - def source: Option[AbstractFile] = Some(srcFile) -} - -private[dotty] case class PackageEntryImpl(name: String) extends PackageEntry - -private[dotty] trait NoSourcePaths { - def asSourcePathString: String = "" - private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = Seq.empty -} - -private[dotty] trait NoClassPaths { - def findClassFile(className: String): Option[AbstractFile] = None - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = Seq.empty -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala deleted file mode 100644 index ac8b69381938..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc.classpath - -import dotty.tools.io.{AbstractFile, VirtualDirectory} -import FileUtils._ -import dotty.tools.io.ClassPath -import dotty.tools.dotc.core.Contexts._ - -/** - * Provides factory methods for classpath. When creating classpath instances for a given path, - * it uses proper type of classpath depending on a types of particular files containing sources or classes. 
- */ -class ClassPathFactory { - /** - * Create a new classpath based on the abstract file. - */ - def newClassPath(file: AbstractFile)(using Context): ClassPath = ClassPathFactory.newClassPath(file) - - /** - * Creators for sub classpaths which preserve this context. - */ - def sourcesInPath(path: String)(using Context): List[ClassPath] = - for { - file <- expandPath(path, expandStar = false) - dir <- Option(AbstractFile getDirectory file) - } - yield createSourcePath(dir) - - - def expandPath(path: String, expandStar: Boolean = true): List[String] = dotty.tools.io.ClassPath.expandPath(path, expandStar) - - def expandDir(extdir: String): List[String] = dotty.tools.io.ClassPath.expandDir(extdir) - - def contentsOfDirsInPath(path: String)(using Context): List[ClassPath] = - for { - dir <- expandPath(path, expandStar = false) - name <- expandDir(dir) - entry <- Option(AbstractFile.getDirectory(name)) - } - yield newClassPath(entry) - - def classesInExpandedPath(path: String)(using Context): IndexedSeq[ClassPath] = - classesInPathImpl(path, expand = true).toIndexedSeq - - def classesInPath(path: String)(using Context): List[ClassPath] = classesInPathImpl(path, expand = false) - - def classesInManifest(useManifestClassPath: Boolean)(using Context): List[ClassPath] = - if (useManifestClassPath) dotty.tools.io.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url)) - else Nil - - // Internal - protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = - for { - file <- expandPath(path, expand) - dir <- { - def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None - Option(AbstractFile.getDirectory(file)).orElse(asImage) - } - } - yield newClassPath(dir) - - private def createSourcePath(file: AbstractFile)(using Context): ClassPath = - if (file.isJarOrZip) - ZipAndJarSourcePathFactory.create(file) - else if (file.isDirectory) - new DirectorySourcePath(file.file) - else - sys.error(s"Unsupported sourcepath element: $file") -} - -object ClassPathFactory { - def newClassPath(file: AbstractFile)(using Context): ClassPath = file match { - case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) - case _ => - if (file.isJarOrZip) - ZipAndJarClassPathFactory.create(file) - else if (file.isDirectory) - new DirectoryClassPath(file.file) - else - sys.error(s"Unsupported classpath element: $file") - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala deleted file mode 100644 index a5678970411b..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc.classpath - -import scala.language.unsafeNulls - -import java.io.{File => JFile} -import java.net.URL -import java.nio.file.{FileSystems, Files} - -import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} -import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, EfficientClassPath, JDK9Reflectors} -import FileUtils._ -import PlainFile.toPlainFile - -import scala.jdk.CollectionConverters._ -import scala.collection.immutable.ArraySeq -import scala.util.control.NonFatal -import language.experimental.pureFunctions - -/** - * A trait allowing to look for classpath entries in directories. It provides common logic for - * classes handling class and source files. 
- * It makes use of the fact that in the case of nested directories it's easy to find a file - * when we have a name of a package. - * It abstracts over the file representation to work with both JFile and AbstractFile. - */ -trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { - type F - - val dir: F - - protected def emptyFiles: Array[F] // avoids reifying ClassTag[F] - protected def getSubDir(dirName: String): Option[F] - protected def listChildren(dir: F, filter: Option[F -> Boolean] = (None: Option[F -> Boolean])): Array[F] // !cc! need explicit typing of default argument - protected def getName(f: F): String - protected def toAbstractFile(f: F): AbstractFile - protected def isPackage(f: F): Boolean - - protected def createFileEntry(file: AbstractFile): FileEntryType - protected def isMatchingFile(f: F): Boolean - - private def getDirectory(forPackage: PackageName): Option[F] = - if (forPackage.isRoot) - Some(dir) - else - getSubDir(forPackage.dirPathTrailingSlash) - - override private[dotty] def hasPackage(pkg: PackageName): Boolean = getDirectory(pkg).isDefined - - private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { - val dirForPackage = getDirectory(inPackage) - val nestedDirs: Array[F] = dirForPackage match { - case None => emptyFiles - case Some(directory) => listChildren(directory, Some(isPackage)) - } - ArraySeq.unsafeWrapArray(nestedDirs).map(f => PackageEntryImpl(inPackage.entryName(getName(f)))) - } - - protected def files(inPackage: PackageName): Seq[FileEntryType] = { - val dirForPackage = getDirectory(inPackage) - val files: Array[F] = dirForPackage match { - case None => emptyFiles - case Some(directory) => listChildren(directory, Some(isMatchingFile)) - } - files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq - } - - override def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = { - val dirForPackage = getDirectory(inPackage) - dirForPackage match { - case None => - case Some(directory) => - for (file <- listChildren(directory)) { - if (isPackage(file)) - onPackageEntry(PackageEntryImpl(inPackage.entryName(getName(file)))) - else if (isMatchingFile(file)) - onClassesAndSources(createFileEntry(toAbstractFile(file))) - } - } - } -} - -trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] { - type F = JFile - - protected def emptyFiles: Array[JFile] = Array.empty - protected def getSubDir(packageDirName: String): Option[JFile] = { - val packageDir = new JFile(dir, packageDirName) - if (packageDir.exists && packageDir.isDirectory) Some(packageDir) - else None - } - protected def listChildren(dir: JFile, filter: Option[JFile -> Boolean]): Array[JFile] = { - val listing = filter match { - case Some(f) => dir.listFiles(mkFileFilter(f)) - case None => dir.listFiles() - } - - if (listing != null) { - // Sort by file name for stable order of directory .class entries in package scope. - // This gives stable results ordering of base type sequences for unrelated classes - // with the same base type depth. - // - // Notably, this will stably infer`Product with Serializable` - // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order. - // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified. 
- // - // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only - // intended to improve determinism of the compiler for compiler hackers. - java.util.Arrays.sort(listing, - new java.util.Comparator[JFile] { - def compare(o1: JFile, o2: JFile) = o1.getName.compareTo(o2.getName) - }) - listing - } - else Array() - } - protected def getName(f: JFile): String = f.getName - protected def toAbstractFile(f: JFile): AbstractFile = f.toPath.toPlainFile - protected def isPackage(f: JFile): Boolean = f.isPackage - - assert(dir != null, "Directory file in DirectoryFileLookup cannot be null") - - def asURLs: Seq[URL] = Seq(dir.toURI.toURL) - def asClassPathStrings: Seq[String] = Seq(dir.getPath) -} - -object JrtClassPath { - import java.nio.file._, java.net.URI - def apply(release: Option[String]): Option[ClassPath] = { - import scala.util.Properties._ - if (!isJavaAtLeast("9")) None - else { - // Longer term we'd like an official API for this in the JDK - // Discussion: http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/thread.html#11738 - - val currentMajorVersion: Int = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() - release match { - case Some(v) if v.toInt < currentMajorVersion => - try { - val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") - if (Files.notExists(ctSym)) None - else Some(new CtSymClassPath(ctSym, v.toInt)) - } catch { - case NonFatal(_) => None - } - case _ => - try { - val fs = FileSystems.getFileSystem(URI.create("jrt:/")) - Some(new JrtClassPath(fs)) - } catch { - case _: ProviderNotFoundException | _: FileSystemNotFoundException => None - } - } - } - } -} - -/** - * Implementation `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220) - * - * https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference - * for the structure of the jrt:// filesystem. - * - * The implementation assumes that no classes exist in the empty package. - */ -final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths { - import java.nio.file.Path, java.nio.file._ - type F = Path - private val dir: Path = fs.getPath("/packages") - - // e.g. 
"java.lang" -> Seq("/modules/java.base") - private val packageToModuleBases: Map[String, Seq[Path]] = { - val ps = Files.newDirectoryStream(dir).iterator().asScala - def lookup(pack: Path): Seq[Path] = - Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList - ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap - } - - /** Empty string represents root package */ - override private[dotty] def hasPackage(pkg: PackageName): Boolean = packageToModuleBases.contains(pkg.dottedString) - - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = - packageToModuleBases.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector - - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = - if (inPackage.isRoot) Nil - else - packageToModuleBases.getOrElse(inPackage.dottedString, Nil).flatMap(x => - Files.list(x.resolve(inPackage.dirPathTrailingSlash)).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x => - ClassFileEntryImpl(x.toPlainFile)).toVector - - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = - if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) - else ClassPathEntries(packages(inPackage), classes(inPackage)) - - def asURLs: Seq[URL] = Seq(new URL("jrt:/")) - // We don't yet have a scheme to represent the JDK modules in our `-classpath`. - // java models them as entries in the new "module path", we'll probably need to follow this. - def asClassPathStrings: Seq[String] = Nil - - def findClassFile(className: String): Option[AbstractFile] = - if (!className.contains(".")) None - else { - val (inPackage, _) = separatePkgAndClassNames(className) - packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{ x => - val file = x.resolve(FileUtils.dirPath(className) + ".class") - if (Files.exists(file)) file.toPlainFile :: Nil else Nil - }.take(1).toList.headOption - } -} - -/** - * Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 - */ -final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { - import java.nio.file.Path, java.nio.file._ - - private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader) - private val root: Path = fileSystem.getRootDirectories.iterator.next - private val roots = Files.newDirectoryStream(root).iterator.asScala.toList - - // http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/011737.html - private def codeFor(major: Int): String = if (major < 10) major.toString else ('A' + (major - 10)).toChar.toString - - private val releaseCode: String = codeFor(release) - private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) // exclude `9-modules` - private val rootsForRelease: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) - - // e.g. 
"java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) - private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = { - val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() - val isJava12OrHigher = scala.util.Properties.isJavaAtLeast("12") - rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => - val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0 - if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) { - val packageDotted = p.subpath(moduleNamePathElementCount + root.getNameCount, p.getNameCount).toString.replace('/', '.') - index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p - } - }) - index - } - - /** Empty string represents root package */ - override private[dotty] def hasPackage(pkg: PackageName) = packageIndex.contains(pkg.dottedString) - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { - packageIndex.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector - } - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = { - if (inPackage.isRoot) Nil - else { - val sigFiles = packageIndex.getOrElse(inPackage.dottedString, Nil).iterator.flatMap(p => - Files.list(p).iterator.asScala.filter(_.getFileName.toString.endsWith(".sig"))) - sigFiles.map(f => ClassFileEntryImpl(f.toPlainFile)).toVector - } - } - - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = - if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) - else ClassPathEntries(packages(inPackage), classes(inPackage)) - - def asURLs: Seq[URL] = Nil - def asClassPathStrings: Seq[String] = Nil - def findClassFile(className: String): Option[AbstractFile] = { - if (!className.contains(".")) None - else { - val (inPackage, classSimpleName) = separatePkgAndClassNames(className) - packageIndex.getOrElse(inPackage, Nil).iterator.flatMap { p => - val path = p.resolve(classSimpleName + ".sig") - if (Files.exists(path)) path.toPlainFile :: Nil else Nil - }.take(1).toList.headOption - } - } -} - -case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { - override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply - - def findClassFile(className: String): Option[AbstractFile] = { - val relativePath = FileUtils.dirPath(className) - val classFile = new JFile(dir, relativePath + ".class") - if (classFile.exists) { - Some(classFile.toPath.toPlainFile) - } - else None - } - - protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) - protected def isMatchingFile(f: JFile): Boolean = f.isClass - - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) -} - -case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths { - def asSourcePathString: String = asClassPathString - - protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file) - protected def isMatchingFile(f: JFile): Boolean = endsScalaOrJava(f.getName) - - override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl.apply - - private def findSourceFile(className: String): Option[AbstractFile] = { - val relativePath = 
FileUtils.dirPath(className) - val sourceFile = LazyList("scala", "java") - .map(ext => new JFile(dir, relativePath + "." + ext)) - .collectFirst { case file if file.exists() => file } - - sourceFile.map(_.toPath.toPlainFile) - } - - private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala deleted file mode 100644 index 0f5ac16b40bf..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools -package dotc.classpath - -import scala.language.unsafeNulls - -import java.io.{File => JFile, FileFilter} -import java.net.URL -import dotty.tools.io.AbstractFile -import language.experimental.pureFunctions - -/** - * Common methods related to Java files and abstract files used in the context of classpath - */ -object FileUtils { - extension (file: AbstractFile) { - def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - - def isClass: Boolean = !file.isDirectory && file.hasExtension("class") && !file.name.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 - - def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) - - // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? - def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") - - /** - * Safe method returning a sequence containing one URL representing this file, when underlying file exists, - * and returning given default value in other case - */ - def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) - } - - extension (file: JFile) { - def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) - - def isClass: Boolean = file.isFile && file.getName.endsWith(".class") && !file.getName.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 - } - - private val SUFFIX_CLASS = ".class" - private val SUFFIX_SCALA = ".scala" - private val SUFFIX_JAVA = ".java" - private val SUFFIX_SIG = ".sig" - - def stripSourceExtension(fileName: String): String = - if (endsScala(fileName)) stripClassExtension(fileName) - else if (endsJava(fileName)) stripJavaExtension(fileName) - else throw new FatalError("Unexpected source file ending: " + fileName) - - def dirPath(forPackage: String): String = forPackage.replace('.', JFile.separatorChar) - - def dirPathInJar(forPackage: String): String = forPackage.replace('.', '/') - - inline private def ends (filename:String, suffix:String) = filename.endsWith(suffix) && filename.length > suffix.length - - def endsClass(fileName: String): Boolean = - ends (fileName, SUFFIX_CLASS) || fileName.endsWith(SUFFIX_SIG) - - def endsScalaOrJava(fileName: String): Boolean = - endsScala(fileName) || endsJava(fileName) - - def endsJava(fileName: String): Boolean = - ends (fileName, SUFFIX_JAVA) - - def endsScala(fileName: String): Boolean = - ends (fileName, SUFFIX_SCALA) - - def stripClassExtension(fileName: String): String = - fileName.substring(0, fileName.lastIndexOf('.')) - - def stripJavaExtension(fileName: String): String = - fileName.substring(0, fileName.length - 5) // equivalent of fileName.length - 
SUFFIX_JAVA.length - - // probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed - // because then some tests in partest don't pass - def mayBeValidPackage(dirName: String): Boolean = - (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') - - def mkFileFilter(f: JFile -> Boolean): FileFilter = new FileFilter { - def accept(pathname: JFile): Boolean = f(pathname) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala deleted file mode 100644 index ea7412f15d8a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc.classpath - -import dotty.tools.io.ClassPath.RootPackage - -/** - * Common methods related to package names represented as String - */ -object PackageNameUtils { - - /** - * @param fullClassName full class name with package - * @return (package, simple class name) - */ - inline def separatePkgAndClassNames(fullClassName: String): (String, String) = { - val lastDotIndex = fullClassName.lastIndexOf('.') - if (lastDotIndex == -1) - (RootPackage, fullClassName) - else - (fullClassName.substring(0, lastDotIndex).nn, fullClassName.substring(lastDotIndex + 1).nn) - } - - def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." - - /** - * `true` if `packageDottedName` is a package directly nested in `inPackage`, for example: - * - `packageContains("scala", "scala.collection")` - * - `packageContains("", "scala")` - */ - def packageContains(inPackage: String, packageDottedName: String) = { - if (packageDottedName.contains(".")) - packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length - else inPackage == "" - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala deleted file mode 100644 index ac80d543b539..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala +++ /dev/null @@ -1,55 +0,0 @@ -package dotty.tools.dotc.classpath - -import scala.language.unsafeNulls - -import dotty.tools.io.ClassRepresentation -import dotty.tools.io.{AbstractFile, VirtualDirectory} -import FileUtils._ -import java.net.URL - -import dotty.tools.io.ClassPath -import language.experimental.pureFunctions - -case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { - type F = AbstractFile - - // From AbstractFileClassLoader - private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { - var file: AbstractFile = base - val dirParts = pathParts.init.iterator - while (dirParts.hasNext) { - val dirPart = dirParts.next - file = file.lookupName(dirPart, directory = true) - if (file == null) - return null - } - file.lookupName(pathParts.last, directory = directory) - } - - protected def emptyFiles: Array[AbstractFile] = Array.empty - protected def getSubDir(packageDirName: String): Option[AbstractFile] = - Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) - protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile -> Boolean]): Array[F] = filter match { - case Some(f) => 
dir.iterator.filter(f).toArray - case _ => dir.toArray - } - def getName(f: AbstractFile): String = f.name - def toAbstractFile(f: AbstractFile): AbstractFile = f - def isPackage(f: AbstractFile): Boolean = f.isPackage - - // mimic the behavior of the old nsc.util.DirectoryClassPath - def asURLs: Seq[URL] = Seq(new URL(dir.name)) - def asClassPathStrings: Seq[String] = Seq(dir.path) - - override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply - - def findClassFile(className: String): Option[AbstractFile] = { - val relativePath = FileUtils.dirPath(className) + ".class" - Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false)) - } - - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) - - protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) - protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala deleted file mode 100644 index 865f95551a0b..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc -package classpath - -import scala.language.unsafeNulls - -import java.io.File -import java.net.URL -import java.nio.file.Files -import java.nio.file.attribute.{BasicFileAttributes, FileTime} - -import scala.annotation.tailrec -import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources} -import dotty.tools.dotc.core.Contexts._ -import FileUtils._ - -/** - * A trait providing an optional cache for classpath entries obtained from zip and jar files. - * It allows us to e.g. reduce significantly memory used by PresentationCompilers in Scala IDE - * when there are a lot of projects having a lot of common dependencies. - */ -sealed trait ZipAndJarFileLookupFactory { - private val cache = new FileBasedCache[ClassPath] - - def create(zipFile: AbstractFile)(using Context): ClassPath = - val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - if (ctx.settings.YdisableFlatCpCaching.value || zipFile.file == null) createForZipFile(zipFile, release) - else createUsingCache(zipFile, release) - - protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath - - private def createUsingCache(zipFile: AbstractFile, release: Option[String]): ClassPath = - cache.getOrCreate(zipFile.file.toPath, () => createForZipFile(zipFile, release)) -} - -/** - * Manages creation of classpath for class files placed in zip and jar files. - * It should be the only way of creating them as it provides caching. - */ -object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { - private case class ZipArchiveClassPath(zipFile: File, override val release: Option[String]) - extends ZipArchiveFileLookup[ClassFileEntryImpl] - with NoSourcePaths { - - override def findClassFile(className: String): Option[AbstractFile] = { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - file(PackageName(pkg), simpleClassName + ".class").map(_.file) - } - - // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. 
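For reference, the package-name helpers being removed above boil down to two small string functions. The sketch below restates them outside the compiler; the top-level wrapper, demo values, and `RootPackage` stand-in (for `ClassPath.RootPackage`) are invented for illustration and are not part of the deleted sources.

    // Standalone restatement of the deleted PackageNameUtils helpers (illustrative only).
    val RootPackage = ""  // stand-in for ClassPath.RootPackage

    /** Split "a.b.C" into (package prefix, simple class name). */
    def separatePkgAndClassNames(fullClassName: String): (String, String) =
      val lastDot = fullClassName.lastIndexOf('.')
      if lastDot == -1 then (RootPackage, fullClassName)
      else (fullClassName.substring(0, lastDot), fullClassName.substring(lastDot + 1))

    /** True if `packageDottedName` sits exactly one level below `inPackage`. */
    def packageContains(inPackage: String, packageDottedName: String): Boolean =
      if packageDottedName.contains(".") then
        packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length
      else inPackage == ""

    @main def packageNameDemo(): Unit =
      println(separatePkgAndClassNames("scala.collection.List"))        // (scala.collection,List)
      println(packageContains("scala", "scala.collection"))             // true: directly nested
      println(packageContains("scala", "scala.collection.immutable"))   // false: two levels down
      println(packageContains("", "scala"))                             // true: top-level package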
- override def findClass(className: String): Option[ClassRepresentation] = { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - file(PackageName(pkg), simpleClassName + ".class") - } - - override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) - - override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) - override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass - } - - /** - * This type of classpath is closely related to the support for JSR-223. - * Its usage can be observed e.g. when running: - * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala - * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry: - * Name: scala/Function2$mcFJD$sp.class - */ - private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths { - override def findClassFile(className: String): Option[AbstractFile] = { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - classes(PackageName(pkg)).find(_.name == simpleClassName).map(_.file) - } - - override def asClassPathStrings: Seq[String] = Seq(file.path) - - override def asURLs: Seq[URL] = file.toURLs() - - import ManifestResourcesClassPath.PackageFileInfo - import ManifestResourcesClassPath.PackageInfo - - /** - * A cache mapping package name to abstract file for package directory and subpackages of given package. - * - * ManifestResources can iterate through the collections of entries from e.g. remote jar file. - * We can't just specify the path to the concrete directory etc. so we can't just 'jump' into - * given package, when it's needed. On the other hand we can iterate over entries to get - * AbstractFiles, iterate over entries of these files etc. - * - * Instead of traversing a tree of AbstractFiles once and caching all entries or traversing each time, - * when we need subpackages of a given package or its classes, we traverse once and cache only packages. - * Classes for given package can be then easily loaded when they are needed. - */ - private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = { - val packages = util.HashMap[String, PackageFileInfo]() - - def getSubpackages(dir: AbstractFile): List[AbstractFile] = - (for (file <- dir if file.isPackage) yield file).toList - - @tailrec - def traverse(packagePrefix: String, - filesForPrefix: List[AbstractFile], - subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match { - case pkgFile :: remainingFiles => - val subpackages = getSubpackages(pkgFile) - val fullPkgName = packagePrefix + pkgFile.name - packages(fullPkgName) = PackageFileInfo(pkgFile, subpackages) - val newPackagePrefix = fullPkgName + "." 
- subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) - traverse(packagePrefix, remainingFiles, subpackagesQueue) - case Nil if subpackagesQueue.nonEmpty => - val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue() - traverse(packagePrefix, filesForPrefix, subpackagesQueue) - case _ => - } - - val subpackages = getSubpackages(file) - packages(ClassPath.RootPackage) = PackageFileInfo(file, subpackages) - traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) - packages - } - - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = cachedPackages.get(inPackage.dottedString) match { - case None => Seq.empty - case Some(PackageFileInfo(_, subpackages)) => - subpackages.map(packageFile => PackageEntryImpl(inPackage.entryName(packageFile.name))) - } - - override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = cachedPackages.get(inPackage.dottedString) match { - case None => Seq.empty - case Some(PackageFileInfo(pkg, _)) => - (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file)).toSeq - } - - override private[dotty] def hasPackage(pkg: PackageName) = cachedPackages.contains(pkg.dottedString) - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage)) - } - - private object ManifestResourcesClassPath { - case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile]) - case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) - } - - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = - if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) - else ZipArchiveClassPath(zipFile.file, release) - - private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { - case manifestRes: ManifestResources => - ManifestResourcesClassPath(manifestRes) - case _ => - val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile" - throw new IllegalArgumentException(errorMsg) - } -} - -/** - * Manages creation of classpath for source files placed in zip and jar files. - * It should be the only way of creating them as it provides caching. 
- */ -object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { - private case class ZipArchiveSourcePath(zipFile: File) - extends ZipArchiveFileLookup[SourceFileEntryImpl] - with NoClassPaths { - - def release: Option[String] = None - - override def asSourcePathString: String = asClassPathString - - override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) - - override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file) - override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource - } - - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) -} - -final class FileBasedCache[T] { - private case class Stamp(lastModified: FileTime, fileKey: Object) - private val cache = collection.mutable.Map.empty[java.nio.file.Path, (Stamp, T)] - - def getOrCreate(path: java.nio.file.Path, create: () => T): T = cache.synchronized { - val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) - val lastModified = attrs.lastModifiedTime() - // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp - val fileKey = attrs.fileKey() - val stamp = Stamp(lastModified, fileKey) - cache.get(path) match { - case Some((cachedStamp, cached)) if cachedStamp == stamp => cached - case _ => - val value = create() - cache.put(path, (stamp, value)) - value - } - } - - def clear(): Unit = cache.synchronized { - // TODO support closing - // cache.valuesIterator.foreach(_.close()) - cache.clear() - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala deleted file mode 100644 index e241feee8244..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc.classpath - -import scala.language.unsafeNulls - -import java.io.File -import java.net.URL - -import dotty.tools.io.{ AbstractFile, FileZipArchive } -import FileUtils._ -import dotty.tools.io.{EfficientClassPath, ClassRepresentation} - -/** - * A trait allowing to look for classpath entries of given type in zip and jar files. - * It provides common logic for classes handling class and source files. - * It's aware of things like e.g. META-INF directory which is correctly skipped. 
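The FileBasedCache removed above invalidates cached classpath entries by comparing a stamp built from the file's last-modified time and file key. Below is a minimal standalone version of that idea; the class name, demo wrapper, and the `example.jar` path are made up for illustration, while the `getOrCreate` logic mirrors the deleted code.

    import java.nio.file.{Files, Path, Paths}
    import java.nio.file.attribute.{BasicFileAttributes, FileTime}

    // Minimal restatement of the stamp-based cache idea (illustrative only).
    final class StampedCache[T]:
      private case class Stamp(lastModified: FileTime, fileKey: Object)
      private val cache = collection.mutable.Map.empty[Path, (Stamp, T)]

      def getOrCreate(path: Path, create: () => T): T = cache.synchronized {
        val attrs = Files.readAttributes(path, classOf[BasicFileAttributes])
        // fileKey may be null on some platforms; the timestamp alone then acts as the stamp
        val stamp = Stamp(attrs.lastModifiedTime(), attrs.fileKey())
        cache.get(path) match
          case Some((cachedStamp, cached)) if cachedStamp == stamp => cached  // unchanged: reuse
          case _ =>
            val value = create()
            cache.put(path, (stamp, value))                                   // new or modified: rebuild
            value
      }

    @main def stampedCacheDemo(): Unit =
      val cache = StampedCache[Long]()
      val jar = Paths.get("example.jar")  // hypothetical path; must exist when the demo runs
      println(cache.getOrCreate(jar, () => Files.size(jar)))  // computed
      println(cache.getOrCreate(jar, () => Files.size(jar)))  // served from the cache while the jar is unchanged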
- */ -trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { - val zipFile: File - def release: Option[String] - - assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null") - - override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) - override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) - - private val archive = new FileZipArchive(zipFile.toPath, release) - - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { - for { - dirEntry <- findDirEntry(inPackage).toSeq - entry <- dirEntry.iterator if entry.isPackage - } - yield PackageEntryImpl(inPackage.entryName(entry.name)) - } - - protected def files(inPackage: PackageName): Seq[FileEntryType] = - for { - dirEntry <- findDirEntry(inPackage).toSeq - entry <- dirEntry.iterator if isRequiredFileType(entry) - } - yield createFileEntry(entry) - - protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = - for { - dirEntry <- findDirEntry(inPackage) - entry <- Option(dirEntry.lookupName(name, directory = false)) - if isRequiredFileType(entry) - } - yield createFileEntry(entry) - - override def hasPackage(pkg: PackageName) = findDirEntry(pkg).isDefined - def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = - findDirEntry(inPackage) match { - case Some(dirEntry) => - for (entry <- dirEntry.iterator) { - if (entry.isPackage) - onPackageEntry(PackageEntryImpl(inPackage.entryName(entry.name))) - else if (isRequiredFileType(entry)) - onClassesAndSources(createFileEntry(entry)) - } - case None => - } - - private def findDirEntry(pkg: PackageName): Option[archive.DirEntry] = - archive.allDirs.get(pkg.dirPathTrailingSlashJar) - - protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType - protected def isRequiredFileType(file: AbstractFile): Boolean -} diff --git a/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala deleted file mode 100644 index 68c900e405da..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala +++ /dev/null @@ -1,198 +0,0 @@ -package dotty.tools.dotc -package config - -import scala.language.unsafeNulls - -import Settings._ -import core.Contexts._ -import printing.Highlighting - -import scala.util.chaining.given -import scala.PartialFunction.cond - -trait CliCommand: - - type ConcreteSettings <: CommonScalaSettings with Settings.SettingGroup - - def versionMsg: String - - def ifErrorsMsg: String - - /** The name of the command */ - def cmdName: String - - def isHelpFlag(using settings: ConcreteSettings)(using SettingsState): Boolean - - def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String - - private def explainAdvanced = """ - |-- Notes on option parsing -- - |Boolean settings are always false unless set. - |Where multiple values are accepted, they should be comma-separated. - | example: -Xplugin:plugin1,plugin2 - | means one or a comma-separated list of: - | - (partial) phase names with an optional "+" suffix to include the next phase - | - the string "all" - | example: -Xprint:all prints all phases. - | example: -Xprint:typer,mixin prints the typer and mixin phases. - | example: -Ylog:erasure+ logs the erasure phase and the phase after the erasure phase. - | This is useful because during the tree transform of phase X, we often - | already are in phase X + 1. 
- """ - - /** Distill arguments into summary detailing settings, errors and files to main */ - def distill(args: Array[String], sg: Settings.SettingGroup)(ss: SettingsState = sg.defaultState)(using Context): ArgsSummary = - - // expand out @filename to the contents of that filename - def expandedArguments = args.toList flatMap { - case x if x startsWith "@" => CommandLineParser.expandArg(x) - case x => List(x) - } - - sg.processArguments(expandedArguments, processAll = true, settingsState = ss) - end distill - - /** Creates a help message for a subset of options based on cond */ - protected def availableOptionsMsg(p: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = - // result is (Option Name, descrption\ndefault: value\nchoices: x, y, z - def help(s: Setting[?]): (String, String) = - // For now, skip the default values that do not make sense for the end user, such as 'false' for the version command. - def defaultValue = s.default match - case _: Int | _: String => s.default.toString - case _ => "" - val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices ${s.legalChoices}" else "") - (s.name, info.filter(_.nonEmpty).mkString("\n")) - end help - - val ss = settings.allSettings.filter(p).toList.sortBy(_.name) - val formatter = Columnator("", "", maxField = 30) - val fresh = ContextBase().initialCtx.fresh.setSettings(summon[SettingsState]) - formatter(List(ss.map(help) :+ ("@", "A text file containing compiler arguments (options and source files).")))(using fresh) - end availableOptionsMsg - - protected def shortUsage: String = s"Usage: $cmdName " - - protected def createUsageMsg(label: String, shouldExplain: Boolean, cond: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = - val prefix = List( - Some(shortUsage), - Some(explainAdvanced).filter(_ => shouldExplain), - Some(label + " options include:") - ).flatten.mkString("\n") - - prefix + "\n" + availableOptionsMsg(cond) - - protected def isStandard(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - !isVerbose(s) && !isWarning(s) && !isAdvanced(s) && !isPrivate(s) || s.name == "-Werror" || s.name == "-Wconf" - protected def isVerbose(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-V") && s.name != "-V" - protected def isWarning(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-W") && s.name != "-W" || s.name == "-Xlint" - protected def isAdvanced(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-X") && s.name != "-X" - protected def isPrivate(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-Y") && s.name != "-Y" - protected def shortHelp(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): String = - s.description.linesIterator.next() - protected def isHelping(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - cond(s.value) { - case ss: List[?] 
if s.isMultivalue => ss.contains("help") - case s: String => "help" == s - } - - /** Messages explaining usage and options */ - protected def usageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("where possible standard", shouldExplain = false, isStandard) - protected def vusageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("Possible verbose", shouldExplain = true, isVerbose) - protected def wusageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("Possible warning", shouldExplain = true, isWarning) - protected def xusageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("Possible advanced", shouldExplain = true, isAdvanced) - protected def yusageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("Possible private", shouldExplain = true, isPrivate) - - /** Used for the formatted output of -Xshow-phases */ - protected def phasesMessage(using Context): String = - val phases = new Compiler().phases - val formatter = Columnator("phase name", "description", maxField = 25) - formatter(phases.map(mega => mega.map(p => (p.phaseName, p.description)))) - - /** Provide usage feedback on argument summary, assuming that all settings - * are already applied in context. - * @return Either Some list of files passed as arguments or None if further processing should be interrupted. - */ - def checkUsage(summary: ArgsSummary, sourcesRequired: Boolean)(using settings: ConcreteSettings)(using SettingsState, Context): Option[List[String]] = - // Print all warnings encountered during arguments parsing - summary.warnings.foreach(report.warning(_)) - - if summary.errors.nonEmpty then - summary.errors foreach (report.error(_)) - report.echo(ifErrorsMsg) - None - else if settings.version.value then - report.echo(versionMsg) - None - else if isHelpFlag then - report.echo(helpMsg) - None - else if (sourcesRequired && summary.arguments.isEmpty) - report.echo(usageMessage) - None - else - Some(summary.arguments) - - extension [T](setting: Setting[T]) - protected def value(using ss: SettingsState): T = setting.valueIn(ss) - - extension (s: String) - def padLeft(width: Int): String = String.format(s"%${width}s", s) - - // Formatting for -help and -Vphases in two columns, handling long field1 and wrapping long field2 - class Columnator(heading1: String, heading2: String, maxField: Int, separation: Int = 2): - def apply(texts: List[List[(String, String)]])(using Context): String = StringBuilder().tap(columnate(_, texts)).toString - - private def columnate(sb: StringBuilder, texts: List[List[(String, String)]])(using Context): Unit = - import Highlighting.* - val colors = Seq(Green(_), Yellow(_), Magenta(_), Cyan(_), Red(_)) - val nocolor = texts.length == 1 - def color(index: Int): String => Highlight = if nocolor then NoColor(_) else colors(index % colors.length) - val maxCol = ctx.settings.pageWidth.value - val field1 = maxField.min(texts.flatten.map(_._1.length).filter(_ < maxField).max) // widest field under maxField - val field2 = if field1 + separation + maxField < maxCol then maxCol - field1 - separation else 0 // skinny window -> terminal wrap - val separator = " " * separation - val EOL = "\n" - def formatField1(text: String): String = if text.length <= field1 then text.padLeft(field1) else text + EOL + "".padLeft(field1) - def formatField2(text: String): String = - def loopOverField2(fld: String): List[String] = - if field2 == 0 || fld.length <= field2 
then List(fld) - else - fld.lastIndexOf(" ", field2) match - case -1 => List(fld) - case i => val (prefix, rest) = fld.splitAt(i) ; prefix :: loopOverField2(rest.trim) - text.split("\n").toList.flatMap(loopOverField2).filter(_.nonEmpty).mkString(EOL + "".padLeft(field1) + separator) - end formatField2 - def format(first: String, second: String, index: Int, colorPicker: Int => String => Highlight) = - sb.append(colorPicker(index)(formatField1(first)).show) - .append(separator) - .append(formatField2(second)) - .append(EOL): Unit - def fancy(first: String, second: String, index: Int) = format(first, second, index, color) - def plain(first: String, second: String) = format(first, second, 0, _ => NoColor(_)) - - if heading1.nonEmpty then - plain(heading1, heading2) - plain("-" * heading1.length, "-" * heading2.length) - - def emit(index: Int)(textPair: (String, String)): Unit = fancy(textPair._1, textPair._2, index) - def group(index: Int)(body: Int => Unit): Unit = - if !ctx.useColors then plain(s"{", "") - body(index) - if !ctx.useColors then plain(s"}", "") - - texts.zipWithIndex.foreach { (text, index) => - text match - case List(single) => emit(index)(single) - case Nil => - case mega => group(index)(i => mega.foreach(emit(i))) - } - end Columnator diff --git a/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala b/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala deleted file mode 100644 index 2e76561c9913..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala +++ /dev/null @@ -1,125 +0,0 @@ -package dotty.tools.dotc.config - -import java.lang.Character.isWhitespace -import java.nio.file.{Files, Paths} -import scala.annotation.tailrec -import scala.collection.mutable.ArrayBuffer -import scala.jdk.CollectionConverters.* - -/** Split a line of text using shell conventions. - */ -object CommandLineParser: - inline private val DQ = '"' - inline private val SQ = '\'' - inline private val EOF = -1 - - /** Split the line into tokens separated by whitespace. - * - * Single or double quotes can be embedded to preserve internal whitespace: - * - * `""" echo "hello, world!" """` => "echo" :: "hello, world!" :: Nil - * `""" echo hello,' 'world! """` => "echo" :: "hello, world!" :: Nil - * `""" echo \"hello, world!\" """` => "echo" :: "\"hello," :: "world!\"" :: Nil - * - * The embedded quotes are stripped. Escaping backslash is not stripped. - * - * Invoke `errorFn` with a descriptive message if an end quote is missing. - */ - def tokenize(line: String, errorFn: String => Unit): List[String] = - - var accum: List[String] = Nil - - var pos = 0 - var start = 0 - val qpos = new ArrayBuffer[Int](16) // positions of paired quotes in current token - - inline def cur = if done then EOF else line.charAt(pos): Int - inline def bump() = pos += 1 - inline def done = pos >= line.length - - // Skip to the given unescaped end quote; false on no more input. - def skipToEndQuote(q: Int): Boolean = - var escaped = false - def terminal = cur match - case _ if escaped => escaped = false ; false - case '\\' => escaped = true ; false - case `q` | EOF => true - case _ => false - while !terminal do bump() - !done - - // Skip to the next whitespace word boundary; record unescaped embedded quotes; false on missing quote. 
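    // (Illustrative aside, not part of the removed file.) The shell-style splitting documented
    // above behaves like this, reusing the examples from the doc comment:
    //
    //   tokenize(""" echo "hello, world!" """)  ==>  List("echo", "hello, world!")
    //   tokenize(""" echo hello,' 'world! """)  ==>  List("echo", "hello, world!")
    //
    // Paired unescaped quotes delimit a token and are stripped from it; whitespace inside the
    // quotes is preserved. A missing end quote is reported through `errorFn` rather than thrown,
    // and the single-argument overload wraps the message in a ParseException instead.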
- def skipToDelim(): Boolean = - var escaped = false - inline def quote() = { qpos += pos ; bump() } - @tailrec def advance(): Boolean = cur match - case _ if escaped => escaped = false ; bump() ; advance() - case '\\' => escaped = true ; bump() ; advance() - case q @ (DQ | SQ) => { quote() ; skipToEndQuote(q) } && { quote() ; advance() } - case EOF => true - case c if isWhitespace(c) => true - case _ => bump(); advance() - advance() - - def copyText(): String = - val buf = new java.lang.StringBuilder - var p = start - var i = 0 - while p < pos do - if i >= qpos.size then - buf.append(line, p, pos) - p = pos - else if p == qpos(i) then - buf.append(line, qpos(i)+1, qpos(i+1)) - p = qpos(i+1)+1 - i += 2 - else - buf.append(line, p, qpos(i)) - p = qpos(i) - buf.toString - - // the current token, stripped of any embedded quotes. - def text(): String = - val res = - if qpos.isEmpty then line.substring(start, pos) - else if qpos(0) == start && qpos(1) == pos then line.substring(start+1, pos-1) - else copyText() - qpos.clear() - res.nn - - inline def badquote() = errorFn(s"Unmatched quote [${qpos.last}](${line.charAt(qpos.last)})") - - inline def skipWhitespace() = while isWhitespace(cur) do bump() - - @tailrec def loop(): List[String] = - skipWhitespace() - start = pos - if done then - accum.reverse - else if !skipToDelim() then - badquote() - Nil - else - accum ::= text() - loop() - end loop - - loop() - end tokenize - - def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) - - /** Expands all arguments starting with @ to the contents of the file named like each argument. - */ - def expandArg(arg: String): List[String] = - val path = Paths.get(arg.stripPrefix("@")) - if !Files.exists(path) then - System.err.nn.println(s"Argument file ${path.nn.getFileName} could not be found") - Nil - else - def stripComment(s: String) = s.indexOf('#') match { case -1 => s case i => s.substring(0, i) } - val lines = Files.readAllLines(path).nn - val params = lines.asScala.map(stripComment).filter(!_.nn.isEmpty).mkString(" ") - tokenize(params) - - class ParseException(msg: String) extends RuntimeException(msg) diff --git a/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala deleted file mode 100644 index 41e123472a75..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala +++ /dev/null @@ -1,26 +0,0 @@ -package dotty.tools.dotc -package config - -import Settings._ -import core.Contexts._ - -abstract class CompilerCommand extends CliCommand: - type ConcreteSettings = ScalaSettings - - final def helpMsg(using settings: ScalaSettings)(using SettingsState, Context): String = - settings.allSettings.find(isHelping) match - case Some(s) => s.description - case _ => - if (settings.help.value) usageMessage - else if (settings.Vhelp.value) vusageMessage - else if (settings.Whelp.value) wusageMessage - else if (settings.Xhelp.value) xusageMessage - else if (settings.Yhelp.value) yusageMessage - else if (settings.showPlugins.value) ctx.base.pluginDescriptions - else if (settings.XshowPhases.value) phasesMessage - else "" - - final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean = - import settings._ - val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases) - flags.exists(_.value) || allSettings.exists(isHelping) diff --git a/tests/pos-with-compiler-cc/dotc/config/Config.scala b/tests/pos-with-compiler-cc/dotc/config/Config.scala deleted file 
mode 100644 index 02d075c8853d..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Config.scala +++ /dev/null @@ -1,256 +0,0 @@ -package dotty.tools.dotc.config - -object Config { - - inline val cacheMembersNamed = true - inline val cacheAsSeenFrom = true - inline val cacheMemberNames = true - inline val cacheImplicitScopes = true - inline val cacheMatchReduced = true - - /** If true, the `runWithOwner` operation uses a re-usable context, - * similar to explore. This requires that the context does not escape - * the call. If false, `runWithOwner` runs its operation argument - * in a fresh context. - */ - inline val reuseOwnerContexts = true - - inline val checkCacheMembersNamed = false - - /** When updating a constraint bound, check that the constrained parameter - * does not appear at the top-level of either of its bounds. - */ - inline val checkConstraintsNonCyclic = false - - /** Check that reverse dependencies in constraints are correct and complete. - * Can also be enabled using -Ycheck-constraint-deps. - */ - inline val checkConstraintDeps = false - - /** Check that each constraint resulting from a subtype test - * is satisfiable. Also check that a type variable instantiation - * satisfies its constraints. - * Note that this can fail when bad bounds are in scope, like in - * tests/neg/i4721a.scala. - */ - inline val checkConstraintsSatisfiable = false - - /** Check that each constraint is fully propagated. i.e. - * If P <: Q then the upper bound of P is a subtype of the upper bound of Q - * and the lower bound of Q is a subtype of the lower bound of P. - */ - inline val checkConstraintsPropagated = false - - /** Check that constraint bounds do not contain wildcard types */ - inline val checkNoWildcardsInConstraint = false - - /** If a constraint is over a type lambda `tl` and `tvar` is one of - * the type variables associated with `tl` in the constraint, check - * that the origin of `tvar` is a parameter of `tl`. - */ - inline val checkConsistentVars = false - - /** Check that constraints of globally committable typer states are closed. - * NOTE: When enabled, the check can cause CyclicReference errors because - * it traverses all elements of a type. Such failures were observed when - * compiling all of dotty together (source seems to be in GenBCode which - * accesses javac's settings.) - * - * It is recommended to turn this option on only when chasing down - * a TypeParamRef instantiation error. See comment in Types.TypeVar.instantiate. - */ - inline val debugCheckConstraintsClosed = false - - /** Check that no type appearing as the info of a SymDenotation contains - * skolem types. - */ - inline val checkNoSkolemsInInfo = false - - /** Check that Name#toString is not called directly from backend by analyzing - * the stack trace of each toString call on names. This is very expensive, - * so not suitable for continuous testing. But it can be used to find a problem - * when running a specific test. - */ - inline val checkBackendNames = false - - /** Check that re-used type comparers are in their initialization state */ - inline val checkTypeComparerReset = false - - /** Type comparer will fail with an assert if the upper bound - * of a constrained parameter becomes Nothing. This should be turned - * on only for specific debugging as normally instantiation to Nothing - * is not an error condition. - */ - inline val failOnInstantiationToNothing = false - - /** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set. 
- * The reason to have an option as well as the present global switch is - * that the noDoubleDef checking is done in a hotspot, and we do not - * want to incur the overhead of checking an option each time. - */ - inline val checkNoDoubleBindings = true - - /** Check positions for consistency after parsing */ - inline val checkPositions = true - - /** Check that typed trees don't point to untyped ones */ - inline val checkTreesConsistent = false - - /** Show subtype traces for all deep subtype recursions */ - inline val traceDeepSubTypeRecursions = false - - /** When explaining subtypes and this flag is set, also show the classes of the compared types. */ - inline val verboseExplainSubtype = false - - /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ - inline val fastPathForRefinedSubtype = true - - /** If this flag is set, and we compute `T1[X1]` & `T2[X2]` as a new - * upper bound of a constrained parameter, try to align the arguments by computing - * `S1 =:= S2` (which might instantiate type parameters). - * This rule is contentious because it cuts the constraint set. - * - * For more info, see the comment in `TypeComparer#glbArgs`. - */ - inline val alignArgsInAnd = true - - /** If this flag is set, higher-kinded applications are checked for validity - */ - inline val checkHKApplications = false - - /** If this flag is set, method types are checked for valid parameter references - */ - inline val checkMethodTypes = false - - /** If this flag is set, it is checked that TypeRefs don't refer directly - * to themselves. - */ - inline val checkTypeRefCycles = false - - /** If this flag is set, we check that types assigned to trees are error types only - * if some error was already reported. There are complicicated scenarios where this - * is not true. An example is TestNonCyclic in posTwice. If we remove the - * first (unused) import `import dotty.tools.dotc.core.Types.Type` in `CompilationUnit`, - * we end up assigning a CyclicReference error type to an import expression `annotation` - * before the cyclic reference is reported. What happens is that the error was reported - * as a result of a completion in a not-yet committed typerstate. So we cannot enforce - * this in all circumstances. But since it is almost always true it is useful to - * keep the Config option for debugging. - */ - inline val checkUnreportedErrors = false - - /** If this flag is set, it is checked that class type parameters are - * only references with NoPrefix or ThisTypes as prefixes. This option - * is usually disabled, because there are still some legitimate cases where - * this can arise (e.g. for pos/Map.scala, in LambdaType.integrate). - */ - inline val checkTypeParamRefs = false - - /** The recursion depth for showing a summarized string */ - inline val summarizeDepth = 2 - - /** Check that variances of lambda arguments match the - * variance of the underlying lambda class. - */ - inline val checkLambdaVariance = false - - /** Check that certain types cannot be created in erasedTypes phases. - * Note: Turning this option on will get some false negatives, since it is - * possible that And/Or types are still created during erasure as the result - * of some operation on an existing type. - */ - inline val checkUnerased = false - - /** Check that atoms-based comparisons match regular comparisons that do not - * take atoms into account. The two have to give the same results, since - * atoms comparison is intended to be just an optimization. 
- */ - inline val checkAtomsComparisons = false - - /** In `derivedSelect`, rewrite - * - * (S & T)#A --> S#A & T#A - * (S | T)#A --> S#A | T#A - * - * Not sure whether this is useful. Preliminary measurements show a slowdown of about - * 7% for the build when this option is enabled. - */ - inline val splitProjections = false - - /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for - * `[Xs] -> U` to `[Xs := Ts]U`. - * Turning this flag on was observed to give a ~6% speedup on the JUnit test suite. - */ - inline val simplifyApplications = true - - /** Assume -indent by default */ - inline val defaultIndent = true - - /** If set, prints a trace of all symbol completions */ - inline val showCompletions = false - - /** If set, show variable/variable reverse dependencies when printing constraints. */ - inline val showConstraintDeps = true - - /** If set, method results that are context functions are flattened by adding - * the parameters of the context function results to the methods themselves. - * This is an optimization that reduces closure allocations. - */ - inline val flattenContextFunctionResults = true - - /** If set, enables tracing */ - inline val tracingEnabled = false - - /** Initial capacity of the uniques HashMap. - * Note: This should be a power of two to work with util.HashSet - */ - inline val initialUniquesCapacity = 0x8000 - - /** How many recursive calls to NamedType#underlying are performed before logging starts. */ - inline val LogPendingUnderlyingThreshold = 50 - - /** How many recursive calls to isSubType are performed before logging starts. */ - inline val LogPendingSubTypesThreshold = 50 - - /** How many recursive calls to findMember are performed before logging names starts - * Note: this threshold has to be chosen carefully. Too large, and programs - * like tests/pos/IterableSelfRec go into polynomial (or even exponential?) - * compile time slowdown. Too small and normal programs will cause the compiler to - * do inefficient operations on findMember. The current value is determined - * so that (1) IterableSelfRec still compiles in reasonable time (< 10sec) (2) Compiling - * dotty itself only causes small pending names lists to be generated (we measured - * at max 6 elements) and these lists are never searched with contains. - */ - inline val LogPendingFindMemberThreshold = 9 - - /** When in IDE, turn StaleSymbol errors into warnings instead of crashing */ - inline val ignoreStaleInIDE = true - - /** If true, `Denotation#asSeenFrom` is allowed to return an existing - * `SymDenotation` instead of allocating a new `SingleDenotation` if - * the two would only differ in their `prefix` (SymDenotation always - * have `NoPrefix` as their prefix). - * This is done for performance reasons: when compiling Dotty itself this - * reduces the number of allocated denotations by ~50%. - */ - inline val reuseSymDenotations = true - - /** If `checkLevelsOnConstraints` is true, check levels of type variables - * and create fresh ones as needed when bounds are first entered intot he constraint. - * If `checkLevelsOnInstantiation` is true, allow level-incorrect constraints but - * fix levels on type variable instantiation. - */ - inline val checkLevelsOnConstraints = false - inline val checkLevelsOnInstantiation = true - - /** If true, print capturing types in the form `{c} T`. - * If false, print them in the form `T @retains(c)`. 
- */ - inline val printCaptureSetsAsPrefix = true - - /** If true, allow mapping capture set variables under captureChecking with maps that are neither - * bijective nor idempotent. We currently do now know how to do this correctly in all - * cases, though. - */ - inline val ccAllowUnsoundMaps = false -} diff --git a/tests/pos-with-compiler-cc/dotc/config/Feature.scala b/tests/pos-with-compiler-cc/dotc/config/Feature.scala deleted file mode 100644 index 1637c9268e30..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Feature.scala +++ /dev/null @@ -1,173 +0,0 @@ -package dotty.tools -package dotc -package config - -import core._ -import Contexts._, Symbols._, Names._ -import StdNames.nme -import Decorators.* -import util.{SrcPos, NoSourcePosition} -import SourceVersion._ -import reporting.Message -import NameKinds.QualifiedName -import language.experimental.pureFunctions - -object Feature: - - def experimental(str: PreName): TermName = - QualifiedName(nme.experimental, str.toTermName) - - private def deprecated(str: PreName): TermName = - QualifiedName(nme.deprecated, str.toTermName) - - private val namedTypeArguments = experimental("namedTypeArguments") - private val genericNumberLiterals = experimental("genericNumberLiterals") - val scala2macros = experimental("macros") - - val dependent = experimental("dependent") - val erasedDefinitions = experimental("erasedDefinitions") - val symbolLiterals = deprecated("symbolLiterals") - val fewerBraces = experimental("fewerBraces") - val saferExceptions = experimental("saferExceptions") - val pureFunctions = experimental("pureFunctions") - val captureChecking = experimental("captureChecking") - val into = experimental("into") - - val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking) - - /** Is `feature` enabled by by a command-line setting? The enabling setting is - * - * -language:feature - * - * where is the fully qualified name of `owner`, followed by a ".", - * but subtracting the prefix `scala.language.` at the front. - */ - def enabledBySetting(feature: TermName)(using Context): Boolean = - ctx.base.settings.language.value.contains(feature.toString) - - /** Is `feature` enabled by by an import? This is the case if the feature - * is imported by a named import - * - * import owner.feature - * - * and there is no visible nested import that excludes the feature, as in - * - * import owner.{ feature => _ } - */ - def enabledByImport(feature: TermName)(using Context): Boolean = - //atPhase(typerPhase) { - val info = ctx.importInfo - info != null && info.featureImported(feature) - //} - - /** Is `feature` enabled by either a command line setting or an import? - * @param feature The name of the feature - * @param owner The prefix symbol (nested in `scala.language`) where the - * feature is defined. - */ - def enabled(feature: TermName)(using Context): Boolean = - enabledBySetting(feature) || enabledByImport(feature) - - /** Is auto-tupling enabled? */ - def autoTuplingEnabled(using Context): Boolean = !enabled(nme.noAutoTupling) - - def dynamicsEnabled(using Context): Boolean = enabled(nme.dynamics) - - def dependentEnabled(using Context) = enabled(dependent) - - def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) - - def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) - - def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) - - /** Is pureFunctions enabled for this compilation unit? 
*/ - def pureFunsEnabled(using Context) = - enabledBySetting(pureFunctions) - || ctx.compilationUnit.knowsPureFuns - || ccEnabled - - /** Is captureChecking enabled for this compilation unit? */ - def ccEnabled(using Context) = - enabledBySetting(captureChecking) - || ctx.compilationUnit.needsCaptureChecking - - /** Is pureFunctions enabled for any of the currently compiled compilation units? */ - def pureFunsEnabledSomewhere(using Context) = - enabledBySetting(pureFunctions) - || ctx.run != null && ctx.run.nn.pureFunsImportEncountered - || ccEnabledSomewhere - - /** Is captureChecking enabled for any of the currently compiled compilation units? */ - def ccEnabledSomewhere(using Context) = - enabledBySetting(captureChecking) - || ctx.run != null && ctx.run.nn.ccImportEncountered - - def sourceVersionSetting(using Context): SourceVersion = - SourceVersion.valueOf(ctx.settings.source.value) - - def sourceVersion(using Context): SourceVersion = - ctx.compilationUnit.sourceVersion match - case Some(v) => v - case none => sourceVersionSetting - - def migrateTo3(using Context): Boolean = - sourceVersion == `3.0-migration` - - def fewerBracesEnabled(using Context) = - sourceVersion.isAtLeast(`3.3`) || enabled(fewerBraces) - - /** If current source migrates to `version`, issue given warning message - * and return `true`, otherwise return `false`. - */ - def warnOnMigration(msg: Message, pos: SrcPos, version: SourceVersion)(using Context): Boolean = - if sourceVersion.isMigrating && sourceVersion.stable == version - || (version == `3.0` || version == `3.1`) && migrateTo3 - then - report.migrationWarning(msg, pos) - true - else - false - - def checkExperimentalFeature(which: String, srcPos: SrcPos, note: -> String = "")(using Context) = - if !isExperimentalEnabled then - report.error(em"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) - - def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = - if !isExperimentalEnabled then - val symMsg = - if sym.hasAnnotation(defn.ExperimentalAnnot) then - i"$sym is marked @experimental" - else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then - i"${sym.owner} is marked @experimental" - else - i"$sym inherits @experimental" - report.error(em"$symMsg and therefore may only be used in an experimental scope.", srcPos) - - /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ - def checkExperimentalSettings(using Context): Unit = - for setting <- ctx.settings.language.value - if setting.startsWith("experimental.") && setting != "experimental.macros" - do checkExperimentalFeature(s"feature $setting", NoSourcePosition) - - def isExperimentalEnabled(using Context): Boolean = - Properties.experimental && !ctx.settings.YnoExperimental.value - - /** Handle language import `import language..` if it is one - * of the global imports `pureFunctions` or `captureChecking`. In this case - * make the compilation unit's and current run's fields accordingly. 
- * @return true iff import that was handled - */ - def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = - val fullFeatureName = QualifiedName(prefix, imported.asTermName) - if fullFeatureName == pureFunctions then - ctx.compilationUnit.knowsPureFuns = true - if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true - true - else if fullFeatureName == captureChecking then - ctx.compilationUnit.needsCaptureChecking = true - if ctx.run != null then ctx.run.nn.ccImportEncountered = true - true - else - false -end Feature diff --git a/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala deleted file mode 100644 index 2b2f35e49451..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala +++ /dev/null @@ -1,69 +0,0 @@ -package dotty.tools -package dotc -package config - -import io._ -import classpath.AggregateClassPath -import core._ -import Symbols._, Types._, Contexts._, StdNames._ -import Flags._ -import transform.ExplicitOuter, transform.SymUtils._ - -class JavaPlatform extends Platform { - - private var currentClassPath: Option[ClassPath] = None - - def classPath(using Context): ClassPath = { - if (currentClassPath.isEmpty) - currentClassPath = Some(new PathResolver().result) - val cp = currentClassPath.get - cp - } - - // The given symbol is a method with the right name and signature to be a runnable java program. - def isMainMethod(sym: Symbol)(using Context): Boolean = - (sym.name == nme.main) && (sym.info match { - case MethodTpe(_, defn.ArrayOf(el) :: Nil, restpe) => el =:= defn.StringType && (restpe isRef defn.UnitClass) - case _ => false - }) - - /** Update classpath with a substituted subentry */ - def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = currentClassPath.get match { - case AggregateClassPath(entries) => - currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e)))) - case cp: ClassPath => - currentClassPath = Some(subst.getOrElse(cp, cp)) - } - - def rootLoader(root: TermSymbol)(using Context): SymbolLoader = new SymbolLoaders.PackageLoader(root, classPath) - - /** Is the SAMType `cls` also a SAM under the rules of the JVM? */ - def isSam(cls: ClassSymbol)(using Context): Boolean = - cls.isAllOf(NoInitsTrait) && - cls.superClass == defn.ObjectClass && - cls.directlyInheritedTraits.forall(_.is(NoInits)) && - !ExplicitOuter.needsOuterIfReferenced(cls) && - cls.typeRef.fields.isEmpty // Superaccessors already show up as abstract methods here, so no test necessary - - /** We could get away with excluding BoxedBooleanClass for the - * purpose of equality testing since it need not compare equal - * to anything but other booleans, but it should be present in - * case this is put to other uses. 
- */ - def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = { - val d = defn - import d._ - (sym == ObjectClass) || - (sym == JavaSerializableClass) || - (sym == ComparableClass) || - (sym derivesFrom BoxedNumberClass) || - (sym derivesFrom BoxedCharClass) || - (sym derivesFrom BoxedBooleanClass) - } - - def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = - true - - def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader = - new ClassfileLoader(bin) -} diff --git a/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala b/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala deleted file mode 100644 index 0411c5604768..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala +++ /dev/null @@ -1,117 +0,0 @@ -package dotty.tools -package dotc -package config - -import scala.language.unsafeNulls - -import io._ - -/** A class for holding mappings from source directories to - * their output location. This functionality can be accessed - * only programmatically. The command line compiler uses a - * single output location, but tools may use this functionality - * to set output location per source directory. - */ -class OutputDirs { - /** Pairs of source directory - destination directory. */ - private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil - - /** If this is not None, the output location where all - * classes should go. - */ - private var singleOutDir: Option[AbstractFile] = None - - /** Add a destination directory for sources found under srcdir. - * Both directories should exits. - */ - def add(srcDir: String, outDir: String): Unit = - add(checkDir(AbstractFile.getDirectory(srcDir), srcDir), - checkDir(AbstractFile.getDirectory(outDir), outDir)) - - /** Check that dir is exists and is a directory. */ - private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = ( - if (dir != null && dir.isDirectory) - dir - // was: else if (allowJar && dir == null && Path.isJarOrZip(name, false)) - else if (allowJar && dir == null && Jar.isJarOrZip(File(name), false)) - new PlainFile(Path(name)) - else - throw new FatalError(name + " does not exist or is not a directory")) - - /** Set the single output directory. From now on, all files will - * be dumped in there, regardless of previous calls to 'add'. - */ - def setSingleOutput(outDir: String): Unit = { - val dst = AbstractFile.getDirectory(outDir) - setSingleOutput(checkDir(dst, outDir, true)) - } - - def getSingleOutput: Option[AbstractFile] = singleOutDir - - /** Set the single output directory. From now on, all files will - * be dumped in there, regardless of previous calls to 'add'. - */ - def setSingleOutput(dir: AbstractFile): Unit = - singleOutDir = Some(dir) - - def add(src: AbstractFile, dst: AbstractFile): Unit = { - singleOutDir = None - outputDirs ::= ((src, dst)) - } - - /** Return the list of source-destination directory pairs. */ - def outputs: List[(AbstractFile, AbstractFile)] = outputDirs - - /** Return the output directory for the given file. 
- */ - def outputDirFor(src: AbstractFile): AbstractFile = { - def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = - src.path.startsWith(srcDir.path) - - singleOutDir match { - case Some(d) => d - case None => - (outputs find (isBelow _).tupled) match { - case Some((_, d)) => d - case _ => - throw new FatalError("Could not find an output directory for " - + src.path + " in " + outputs) - } - } - } - - /** Return the source file path(s) which correspond to the given - * classfile path and SourceFile attribute value, subject to the - * condition that source files are arranged in the filesystem - * according to Java package layout conventions. - * - * The given classfile path must be contained in at least one of - * the specified output directories. If it does not then this - * method returns Nil. - * - * Note that the source file is not required to exist, so assuming - * a valid classfile path this method will always return a list - * containing at least one element. - * - * Also that if two or more source path elements target the same - * output directory there will be two or more candidate source file - * paths. - */ - def srcFilesFor(classFile: AbstractFile, srcPath: String): List[AbstractFile] = { - def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = - classFile.path.startsWith(outDir.path) - - singleOutDir match { - case Some(d) => - d match { - case _: VirtualDirectory | _: io.ZipArchive => Nil - case _ => List(d.lookupPathUnchecked(srcPath, false)) - } - case None => - (outputs filter (isBelow _).tupled) match { - case Nil => Nil - case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false)) - } - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala b/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala deleted file mode 100644 index afa30e38dc2a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala +++ /dev/null @@ -1,268 +0,0 @@ -package dotty.tools -package dotc -package config - -import scala.language.unsafeNulls - -import WrappedProperties.AccessControl -import io.{ClassPath, Directory, Path} -import classpath.{AggregateClassPath, ClassPathFactory, JrtClassPath} -import ClassPath.split -import PartialFunction.condOpt -import core.Contexts._ -import Settings._ -import dotty.tools.io.File - -object PathResolver { - - // Imports property/environment functions which suppress - // security exceptions. - import AccessControl._ - - def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse "" - - /** Map all classpath elements to absolute paths and reconstruct the classpath. - */ - def makeAbsolute(cp: String): String = ClassPath.map(cp, x => Path(x).toAbsolute.path) - - /** pretty print class path - */ - def ppcp(s: String): String = split(s) match { - case Nil => "" - case Seq(x) => x - case xs => xs.map("\n" + _).mkString - } - - /** Values found solely by inspecting environment or property variables. - */ - object Environment { - private def searchForBootClasspath = ( - systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse "" - ) - - /** Environment variables which java pays attention to so it - * seems we do as well. 
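The deleted OutputDirs.outputDirFor above picks a destination either from the single configured output directory or from the first registered source directory whose path is a prefix of the source file's path. Here is a reduced sketch of that rule over plain path strings; the function name, mapping, and demo paths are invented for illustration.

    // Reduced sketch of the OutputDirs.outputDirFor rule, over plain path strings (illustrative only).
    def outputDirFor(outputs: List[(String, String)], singleOutDir: Option[String], srcPath: String): String =
      singleOutDir match
        case Some(d) => d                                     // single output: everything goes to one place
        case None =>
          outputs.find { case (srcDir, _) => srcPath.startsWith(srcDir) } match
            case Some((_, outDir)) => outDir                  // first source dir that prefixes the file
            case None => sys.error(s"no output directory for $srcPath")

    @main def outputDirsDemo(): Unit =
      val mapping = List("/proj/src/main" -> "/proj/out/main", "/proj/src/test" -> "/proj/out/test")
      println(outputDirFor(mapping, None, "/proj/src/test/FooSpec.scala"))      // /proj/out/test
      println(outputDirFor(mapping, Some("/tmp/classes"), "/anywhere/Bar.scala")) // /tmp/classes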
- */ - def classPathEnv: String = envOrElse("CLASSPATH", "") - def sourcePathEnv: String = envOrElse("SOURCEPATH", "") - - def javaBootClassPath: String = propOrElse("sun.boot.class.path", searchForBootClasspath) - - def javaExtDirs: String = propOrEmpty("java.ext.dirs") - def scalaHome: String = propOrEmpty("scala.home") - def scalaExtDirs: String = propOrEmpty("scala.ext.dirs") - - /** The java classpath and whether to use it. - */ - def javaUserClassPath: String = propOrElse("java.class.path", "") - def useJavaClassPath: Boolean = propOrFalse("scala.usejavacp") - - override def toString: String = s""" - |object Environment { - | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath) - | javaBootClassPath = <${javaBootClassPath.length} chars> - | javaExtDirs = ${ppcp(javaExtDirs)} - | javaUserClassPath = ${ppcp(javaUserClassPath)} - | scalaExtDirs = ${ppcp(scalaExtDirs)} - |}""".trim.stripMargin - } - - /** Default values based on those in Environment as interpreted according - * to the path resolution specification. - */ - object Defaults { - def scalaSourcePath: String = Environment.sourcePathEnv - def javaBootClassPath: String = Environment.javaBootClassPath - def javaUserClassPath: String = Environment.javaUserClassPath - def javaExtDirs: String = Environment.javaExtDirs - def useJavaClassPath: Boolean = Environment.useJavaClassPath - - def scalaHome: String = Environment.scalaHome - def scalaHomeDir: Directory = Directory(scalaHome) - def scalaHomeExists: Boolean = scalaHomeDir.isDirectory - def scalaLibDir: Directory = (scalaHomeDir / "lib").toDirectory - def scalaClassesDir: Directory = (scalaHomeDir / "classes").toDirectory - - def scalaLibAsJar: File = (scalaLibDir / "scala-library.jar").toFile - def scalaLibAsDir: Directory = (scalaClassesDir / "library").toDirectory - - def scalaLibDirFound: Option[Directory] = - if (scalaLibAsJar.isFile) Some(scalaLibDir) - else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir) - else None - - def scalaLibFound: String = - if (scalaLibAsJar.isFile) scalaLibAsJar.path - else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path - else "" - - // XXX It must be time for someone to figure out what all these things - // are intended to do. This is disabled here because it was causing all - // the scala jars to end up on the classpath twice: one on the boot - // classpath as set up by the runner (or regular classpath under -nobootcp) - // and then again here. - def scalaBootClassPath: String = "" - // scalaLibDirFound match { - // case Some(dir) if scalaHomeExists => - // val paths = ClassPath expandDir dir.path - // join(paths: _*) - // case _ => "" - // } - - def scalaExtDirs: String = Environment.scalaExtDirs - - def scalaPluginPath: String = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path - - override def toString: String = """ - |object Defaults { - | scalaHome = %s - | javaBootClassPath = %s - | scalaLibDirFound = %s - | scalaLibFound = %s - | scalaBootClassPath = %s - | scalaPluginPath = %s - |}""".trim.stripMargin.format( - scalaHome, - ppcp(javaBootClassPath), - scalaLibDirFound, scalaLibFound, - ppcp(scalaBootClassPath), ppcp(scalaPluginPath) - ) - } - - def fromPathString(path: String)(using Context): ClassPath = { - val settings = ctx.settings.classpath.update(path) - inContext(ctx.fresh.setSettings(settings)) { - new PathResolver().result - } - } - - /** Show values in Environment and Defaults when no argument is provided. - * Otherwise, show values in Calculated as if those options had been given - * to a scala runner. 
- */ - def main(args: Array[String]): Unit = - if (args.isEmpty) { - println(Environment) - println(Defaults) - } - else inContext(ContextBase().initialCtx) { - val ArgsSummary(sstate, rest, errors, warnings) = - ctx.settings.processArguments(args.toList, true, ctx.settingsState) - errors.foreach(println) - val pr = inContext(ctx.fresh.setSettings(sstate)) { - new PathResolver() - } - println(" COMMAND: 'scala %s'".format(args.mkString(" "))) - println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) - - pr.result match { - case cp: AggregateClassPath => - println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") - } - } -} - -import PathResolver.{Defaults, ppcp} - -class PathResolver(using c: Context) { - import c.base.settings - - private val classPathFactory = new ClassPathFactory - - private def cmdLineOrElse(name: String, alt: String) = - commandLineFor(name) match { - case Some("") | None => alt - case Some(x) => x - } - - private def commandLineFor(s: String): Option[String] = condOpt(s) { - case "javabootclasspath" => settings.javabootclasspath.value - case "javaextdirs" => settings.javaextdirs.value - case "bootclasspath" => settings.bootclasspath.value - case "extdirs" => settings.extdirs.value - case "classpath" | "cp" => settings.classpath.value - case "sourcepath" => settings.sourcepath.value - } - - /** Calculated values based on any given command line options, falling back on - * those in Defaults. - */ - object Calculated { - def scalaHome: String = Defaults.scalaHome - def useJavaClassPath: Boolean = settings.usejavacp.value || Defaults.useJavaClassPath - def javaBootClassPath: String = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath) - def javaExtDirs: String = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs) - def javaUserClassPath: String = if (useJavaClassPath) Defaults.javaUserClassPath else "" - def scalaBootClassPath: String = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath) - def scalaExtDirs: String = cmdLineOrElse("extdirs", Defaults.scalaExtDirs) - /** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as: - * [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect - * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg) - * [scaladoc] ^ - * Because bootstrapping looks at the sourcepath and creates the package "reflect" in "" it will cause the - * typedIdentifier to pick .reflect instead of the .scala.reflect package. Thus, no bootstrapping for scaladoc! - */ - def sourcePath: String = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) - - def userClassPath: String = - if (!settings.classpath.isDefault) settings.classpath.value - else sys.env.getOrElse("CLASSPATH", ".") - - import classPathFactory._ - - // Assemble the elements! - def basis: List[Traversable[ClassPath]] = - val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - - List( - JrtClassPath(release), // 1. The Java 9+ classpath (backed by the jrt:/ virtual system, if available) - classesInPath(javaBootClassPath), // 2. The Java bootstrap class path. - contentsOfDirsInPath(javaExtDirs), // 3. The Java extension class path. - classesInExpandedPath(javaUserClassPath), // 4. The Java application class path. - classesInPath(scalaBootClassPath), // 5. The Scala boot class path. - contentsOfDirsInPath(scalaExtDirs), // 6. 
The Scala extension class path. - classesInExpandedPath(userClassPath), // 7. The Scala application class path. - sourcesInPath(sourcePath) // 8. The Scala source path. - ) - - lazy val containers: List[ClassPath] = basis.flatten.distinct - - override def toString: String = """ - |object Calculated { - | scalaHome = %s - | javaBootClassPath = %s - | javaExtDirs = %s - | javaUserClassPath = %s - | useJavaClassPath = %s - | scalaBootClassPath = %s - | scalaExtDirs = %s - | userClassPath = %s - | sourcePath = %s - |}""".trim.stripMargin.format( - scalaHome, - ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath), - useJavaClassPath, - ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath), - ppcp(sourcePath) - ) - } - - def containers: List[ClassPath] = Calculated.containers - - lazy val result: ClassPath = { - val cp = AggregateClassPath(containers.toIndexedSeq) - - if (settings.YlogClasspath.value) { - Console.println("Classpath built from " + settings.toConciseString(ctx.settingsState)) - Console.println("Defaults: " + PathResolver.Defaults) - Console.println("Calculated: " + Calculated) - - val xs = (Calculated.basis drop 2).flatten.distinct - println("After java boot/extdirs classpath has %d entries:" format xs.size) - xs foreach (x => println(" " + x)) - } - cp - } - - def asURLs: Seq[java.net.URL] = result.asURLs -} diff --git a/tests/pos-with-compiler-cc/dotc/config/Platform.scala b/tests/pos-with-compiler-cc/dotc/config/Platform.scala deleted file mode 100644 index 0faacf1bcebb..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Platform.scala +++ /dev/null @@ -1,46 +0,0 @@ -package dotty.tools -package dotc -package config - -import io.{ClassPath, AbstractFile} -import core.Contexts._, core.Symbols._ -import core.SymbolLoader -import core.StdNames.nme -import core.Flags.Module - -/** The platform dependent pieces of Global. - */ -abstract class Platform { - - /** The root symbol loader. */ - def rootLoader(root: TermSymbol)(using Context): SymbolLoader - - /** The compiler classpath. */ - def classPath(using Context): ClassPath - - /** Update classpath with a substitution that maps entries to entries */ - def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit - - /** Any platform-specific phases. */ - //def platformPhases: List[SubComponent] - - /** Is the SAMType `cls` also a SAM under the rules of the platform? */ - def isSam(cls: ClassSymbol)(using Context): Boolean - - /** The various ways a boxed primitive might materialize at runtime. */ - def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean - - /** Is the given class symbol eligible for Java serialization-specific methods? */ - def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean - - /** Create a new class loader to load class file `bin` */ - def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader - - /** The given symbol is a method with the right name and signature to be a runnable program. */ - def isMainMethod(sym: Symbol)(using Context): Boolean - - /** The given class has a main method. 
*/ - final def hasMainMethod(sym: Symbol)(using Context): Boolean = - sym.info.member(nme.main).hasAltWith(d => - isMainMethod(d.symbol) && (sym.is(Module) || d.symbol.isStatic)) -} diff --git a/tests/pos-with-compiler-cc/dotc/config/Printers.scala b/tests/pos-with-compiler-cc/dotc/config/Printers.scala deleted file mode 100644 index ecb189de9bb3..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Printers.scala +++ /dev/null @@ -1,52 +0,0 @@ -package dotty.tools.dotc.config - -object Printers { - - class Printer { - def println(msg: => String): Unit = System.out.nn.println(msg) - } - - object noPrinter extends Printer { - inline override def println(msg: => String): Unit = () - } - - val default = new Printer - - val capt = noPrinter - val constr = noPrinter - val core = noPrinter - val checks = noPrinter - val config = noPrinter - val cyclicErrors = noPrinter - val debug = noPrinter - val derive = noPrinter - val desugar = noPrinter - val scaladoc = noPrinter - val exhaustivity = noPrinter - val gadts = noPrinter - val gadtsConstr = noPrinter - val hk = noPrinter - val implicits = noPrinter - val implicitsDetailed = noPrinter - val lexical = noPrinter - val init = noPrinter - val inlining = noPrinter - val interactiv = noPrinter - val matchTypes = noPrinter - val nullables = noPrinter - val overload = noPrinter - val patmatch = noPrinter - val pickling = noPrinter - val quotePickling = noPrinter - val plugins = noPrinter - val recheckr = noPrinter - val refcheck = noPrinter - val simplify = noPrinter - val staging = noPrinter - val subtyping = noPrinter - val tailrec = noPrinter - val transforms = noPrinter - val typr = noPrinter - val unapp = noPrinter - val variances = noPrinter -} diff --git a/tests/pos-with-compiler-cc/dotc/config/Properties.scala b/tests/pos-with-compiler-cc/dotc/config/Properties.scala deleted file mode 100644 index 1e9cc82112af..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Properties.scala +++ /dev/null @@ -1,142 +0,0 @@ -package dotty.tools -package dotc -package config - -import scala.language.unsafeNulls - -import scala.annotation.internal.sharable - -import java.io.IOException -import java.util.jar.Attributes.{ Name => AttributeName } -import java.nio.charset.StandardCharsets - -/** Loads `library.properties` from the jar. */ -object Properties extends PropertiesTrait { - protected def propCategory: String = "compiler" - protected def pickJarBasedOn: Class[PropertiesTrait] = classOf[PropertiesTrait] - - /** Scala manifest attributes. - */ - @sharable val ScalaCompilerVersion: AttributeName = new AttributeName("Scala-Compiler-Version") -} - -trait PropertiesTrait { - protected def propCategory: String // specializes the remainder of the values - protected def pickJarBasedOn: Class[?] 
// props file comes from jar containing this - - /** The name of the properties file */ - protected val propFilename: String = "/" + propCategory + ".properties" - - /** The loaded properties */ - @sharable protected lazy val scalaProps: java.util.Properties = { - val props = new java.util.Properties - val stream = pickJarBasedOn getResourceAsStream propFilename - if (stream ne null) - quietlyDispose(props load stream, stream.close) - - props - } - - private def quietlyDispose(action: => Unit, disposal: => Unit) = - try { action } - finally - try { disposal } - catch { case _: IOException => } - - def propIsSet(name: String): Boolean = System.getProperty(name) != null - def propIsSetTo(name: String, value: String): Boolean = propOrNull(name) == value - def propOrElse(name: String, alt: String): String = System.getProperty(name, alt) - def propOrEmpty(name: String): String = propOrElse(name, "") - def propOrNull(name: String): String = propOrElse(name, null) - def propOrNone(name: String): Option[String] = Option(propOrNull(name)) - def propOrFalse(name: String): Boolean = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase) - def setProp(name: String, value: String): String = System.setProperty(name, value) - def clearProp(name: String): String = System.clearProperty(name) - - def envOrElse(name: String, alt: String): String = Option(System getenv name) getOrElse alt - def envOrNone(name: String): Option[String] = Option(System getenv name) - - // for values based on propFilename - def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt) - def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "") - def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)) - - /** Either the development or release version if known, otherwise - * the empty string. - */ - def versionNumberString: String = scalaPropOrEmpty("version.number") - - /** The version number of the jar this was loaded from, - * or `"(unknown)"` if it cannot be determined. - */ - val simpleVersionString: String = { - val v = scalaPropOrElse("version.number", "(unknown)") - v + ( - if (v.contains("SNAPSHOT") || v.contains("NIGHTLY")) - "-git-" + scalaPropOrElse("git.hash", "(unknown)") - else - "" - ) - } - - /** The version number of the jar this was loaded from plus `"version "` prefix, - * or `"version (unknown)"` if it cannot be determined. - */ - val versionString: String = "version " + simpleVersionString - - /** Whether the current version of compiler is experimental - * - * 1. Snapshot, nightly releases and non-bootstrapped compiler are experimental. - * 2. Features supported by experimental versions of the compiler: - * - research plugins - */ - val experimental: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") - - val copyrightString: String = scalaPropOrElse("copyright.string", "(c) 2002-2017 LAMP/EPFL") - - /** This is the encoding to use reading in source files, overridden with -encoding - * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. 
- */ - def sourceEncoding: String = scalaPropOrElse("file.encoding", StandardCharsets.UTF_8.name) - def sourceReader: String = scalaPropOrElse("source.reader", "scala.tools.nsc.io.SourceReader") - - /** This is the default text encoding, overridden (unreliably) with - * `JAVA_OPTS="-Dfile.encoding=Foo"` - */ - def encodingString: String = propOrElse("file.encoding", StandardCharsets.UTF_8.name) - - /** The default end of line character. - */ - def lineSeparator: String = propOrElse("line.separator", "\n") - - /** Various well-known properties. - */ - def javaClassPath: String = propOrEmpty("java.class.path") - def javaHome: String = propOrEmpty("java.home") - def javaVendor: String = propOrEmpty("java.vendor") - def javaVersion: String = propOrEmpty("java.version") - def javaVmInfo: String = propOrEmpty("java.vm.info") - def javaVmName: String = propOrEmpty("java.vm.name") - def javaVmVendor: String = propOrEmpty("java.vm.vendor") - def javaVmVersion: String = propOrEmpty("java.vm.version") - def osName: String = propOrEmpty("os.name") - def scalaHome: String = propOrEmpty("scala.home") - def tmpDir: String = propOrEmpty("java.io.tmpdir") - def userDir: String = propOrEmpty("user.dir") - def userHome: String = propOrEmpty("user.home") - def userName: String = propOrEmpty("user.name") - - /** Some derived values. - */ - def isWin: Boolean = osName startsWith "Windows" - def isMac: Boolean = javaVendor startsWith "Apple" - - // This is looking for javac, tools.jar, etc. - // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME, - // and finally the system property based javaHome. - def jdkHome: String = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome)) - - def versionMsg: String = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString) - def scalaCmd: String = if (isWin) "scala.bat" else "scala" - def scalacCmd: String = if (isWin) "scalac.bat" else "scalac" -} diff --git a/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala deleted file mode 100644 index ae417b717ca3..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala +++ /dev/null @@ -1,35 +0,0 @@ -package dotty.tools.dotc.config - -import dotty.tools.dotc.core._ -import Contexts._ -import Symbols._ - -import dotty.tools.backend.sjs.JSDefinitions - -object SJSPlatform { - /** The `SJSPlatform` for the current context. */ - def sjsPlatform(using Context): SJSPlatform = - ctx.platform.asInstanceOf[SJSPlatform] -} - -class SJSPlatform()(using DetachedContext) extends JavaPlatform { - - /** Scala.js-specific definitions. */ - val jsDefinitions: JSDefinitions = new JSDefinitions() - - /** Is the SAMType `cls` also a SAM under the rules of the Scala.js back-end? */ - override def isSam(cls: ClassSymbol)(using Context): Boolean = - defn.isFunctionClass(cls) - || cls.superClass == jsDefinitions.JSFunctionClass - - /** Is the given class symbol eligible for Java serialization-specific methods? - * - * This is not simply false because we still want to add them to Scala classes - * and objects. They might be transitively used by macros and other compile-time - * code. It feels safer to have them be somewhat equivalent to the ones we would - * get in a JVM project. The JVM back-end will slap an extends `java.io.Serializable` - * to them, so we should be consistent and also emit the proper serialization methods. 
- */ - override def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = - !sym.isSubClass(jsDefinitions.JSAnyClass) -} diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala deleted file mode 100644 index 407171f1a0dd..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala +++ /dev/null @@ -1,21 +0,0 @@ -package dotty.tools.dotc.config - -enum ScalaRelease(val majorVersion: Int, val minorVersion: Int) extends Ordered[ScalaRelease]: - case Release3_0 extends ScalaRelease(3, 0) - case Release3_1 extends ScalaRelease(3, 1) - case Release3_2 extends ScalaRelease(3, 2) - - def show = s"$majorVersion.$minorVersion" - - def compare(that: ScalaRelease) = - val ord = summon[Ordering[(Int, Int)]] - ord.compare((majorVersion, minorVersion), (that.majorVersion, that.minorVersion)) - -object ScalaRelease: - def latest = Release3_1 - - def parse(name: String) = name match - case "3.0" => Some(Release3_0) - case "3.1" => Some(Release3_1) - case "3.2" => Some(Release3_2) - case _ => None diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala deleted file mode 100644 index 914146c3c175..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala +++ /dev/null @@ -1,347 +0,0 @@ -package dotty.tools.dotc -package config - -import scala.language.unsafeNulls - -import dotty.tools.dotc.config.PathResolver.Defaults -import dotty.tools.dotc.config.Settings.{Setting, SettingGroup} -import dotty.tools.dotc.config.SourceVersion -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.rewrites.Rewrites -import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory} - -import scala.util.chaining._ - -class ScalaSettings extends SettingGroup with AllScalaSettings - -object ScalaSettings: - // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` - private val minTargetVersion = 8 - private val maxTargetVersion = 23 - - def supportedTargetVersions: List[String] = - (minTargetVersion to maxTargetVersion).toList.map(_.toString) - - def supportedReleaseVersions: List[String] = - if scala.util.Properties.isJavaAtLeast("9") then - val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() - val maxVersion = Math.min(jdkVersion, maxTargetVersion) - (minTargetVersion to maxVersion).toList.map(_.toString) - else List(minTargetVersion).map(_.toString) - - def supportedScalaReleaseVersions: List[String] = - ScalaRelease.values.toList.map(_.show) - - def supportedSourceVersions: List[String] = - SourceVersion.values.toList.map(_.toString) - - def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") - - def defaultPageWidth: Int = { - val defaultWidth = 80 - val columnsVar = System.getenv("COLUMNS") - if columnsVar != null then columnsVar.toInt - else if Properties.isWin then - val ansiconVar = System.getenv("ANSICON") // eg. 
"142x32766 (142x26)" - if ansiconVar != null && ansiconVar.matches("[0-9]+x.*") then - ansiconVar.substring(0, ansiconVar.indexOf("x")).toInt - else defaultWidth - else defaultWidth - } - -trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: - self: SettingGroup => - - /* Path related settings */ - val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") - - val source: Setting[String] = ChoiceSetting("-source", "source version", "source version", ScalaSettings.supportedSourceVersions, SourceVersion.defaultSourceVersion.toString, aliases = List("--source")) - val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) - val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite")) - val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty files. The arguments are .tasty or .jar files.", aliases = List("--from-tasty")) - - val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions.") - val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") - val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") - val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) - - /* Decompiler settings */ - val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) - val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines")) - - /* Scala.js-related settings */ - val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only)") - val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only)") - - val projectUrl: Setting[String] = StringSetting ( - "-project-url", - "project repository homepage", - "The source repository of your project.", - "" - ) - - val wikiSyntax: Setting[Boolean] = BooleanSetting("-Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.") - - val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") - val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") -end AllScalaSettings - -/** Settings shared by compiler and scaladoc */ -trait CommonScalaSettings: - self: SettingGroup => - - /* Path related settings */ - val bootclasspath: Setting[String] = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath, aliases = List("--boot-class-path")) - val extdirs: Setting[String] = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs, aliases = List("--extension-directories")) - val 
javabootclasspath: Setting[String] = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath, aliases = List("--java-boot-class-path")) - val javaextdirs: Setting[String] = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs, aliases = List("--java-extension-directories")) - val sourcepath: Setting[String] = PathSetting("-sourcepath", "Specify location(s) of source files.", Defaults.scalaSourcePath, aliases = List("--source-path")) - val sourceroot: Setting[String] = PathSetting("-sourceroot", "Specify workspace root directory.", ".") - - val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", ScalaSettings.defaultClasspath, aliases = List("-cp", "--class-path")) - val outputDir: Setting[AbstractFile] = OutputSetting("-d", "directory|jar", "Destination for generated classfiles.", - new PlainDirectory(Directory("."))) - val color: Setting[String] = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/, aliases = List("--color")) - val verbose: Setting[Boolean] = BooleanSetting("-verbose", "Output messages about what the compiler is doing.", aliases = List("--verbose")) - val version: Setting[Boolean] = BooleanSetting("-version", "Print product version and exit.", aliases = List("--version")) - val help: Setting[Boolean] = BooleanSetting("-help", "Print a synopsis of standard options.", aliases = List("--help", "-h")) - val pageWidth: Setting[Int] = IntSetting("-pagewidth", "Set page width", ScalaSettings.defaultPageWidth, aliases = List("--page-width")) - val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.", aliases = List("--no-warnings")) - - val javaOutputVersion: Setting[String] = ChoiceSetting("-java-output-version", "version", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version. Corresponds to -release flag in javac.", ScalaSettings.supportedReleaseVersions, "", aliases = List("-release", "--release")) - - val deprecation: Setting[Boolean] = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation")) - val feature: Setting[Boolean] = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature")) - val explain: Setting[Boolean] = BooleanSetting("-explain", "Explain errors in more detail.", aliases = List("--explain")) - // -explain-types setting is necessary for cross compilation, since it is mentioned in sbt-tpolecat, for instance - // it is otherwise subsumed by -explain, and should be dropped as soon as we can. 
- val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) - val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) - val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) - - /* Coverage settings */ - val coverageOutputDir = PathSetting("-coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out")) - - /* Other settings */ - val encoding: Setting[String] = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding, aliases = List("--encoding")) - val usejavacp: Setting[Boolean] = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.", aliases = List("--use-java-class-path")) - val scalajs: Setting[Boolean] = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs")) -end CommonScalaSettings - -/** -P "plugin" settings. Various tools might support plugins. */ -private sealed trait PluginSettings: - self: SettingGroup => - val plugin: Setting[List[String]] = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.") - val disable: Setting[List[String]] = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.") - val require: Setting[List[String]] = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") - val showPlugins: Setting[Boolean] = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.") - val pluginsDir: Setting[String] = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) - val pluginOptions: Setting[List[String]] = MultiStringSetting ("-P", "plugin:opt", "Pass an option to a plugin, e.g. 
-P::") - -/** -V "Verbose" settings */ -private sealed trait VerboseSettings: - self: SettingGroup => - val Vhelp: Setting[Boolean] = BooleanSetting("-V", "Print a synopsis of verbose options.") - val Xprint: Setting[List[String]] = PhasesSetting("-Vprint", "Print out program after", aliases = List("-Xprint")) - val XshowPhases: Setting[Boolean] = BooleanSetting("-Vphases", "List compiler phases.", aliases = List("-Xshow-phases")) - - val Vprofile: Setting[Boolean] = BooleanSetting("-Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.") - val VprofileSortedBy = ChoiceSetting("-Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "") - val VprofileDetails = IntSetting("-Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0) - val VreplMaxPrintElements: Setting[Int] = IntSetting("-Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000) - val VreplMaxPrintCharacters: Setting[Int] = IntSetting("-Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000) - -/** -W "Warnings" settings - */ -private sealed trait WarningSettings: - self: SettingGroup => - val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") - val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) - - val Wunused: Setting[List[String]] = MultiChoiceSetting( - name = "-Wunused", - helpArg = "warning", - descr = "Enable or disable specific `unused` warnings", - choices = List("nowarn", "all"), - default = Nil - ) - object WunusedHas: - def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) - def nowarn(using Context) = allOr("nowarn") - - val Wconf: Setting[List[String]] = MultiStringSetting( - "-Wconf", - "patterns", - default = List(), - descr = - s"""Configure compiler warnings. - |Syntax: -Wconf::,:,... - |multiple are combined with &, i.e., &...& - | - | - | - Any message: any - | - | - Message categories: cat=deprecation, cat=feature, cat=unchecked - | - | - Message content: msg=regex - | The regex need only match some part of the message, not all of it. - | - | - Message id: id=E129 - | The message id is printed with the warning. - | - | - Message name: name=PureExpressionInStatementPosition - | The message name is printed with the warning in verbose warning mode. - | - |In verbose warning mode the compiler prints matching filters for warnings. - |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally - |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`). - | - | - | - error / e - | - warning / w - | - verbose / v (emit warning, show additional help for writing `-Wconf` filters) - | - info / i (infos are not counted as warnings and not affected by `-Werror`) - | - silent / s - | - |The default configuration is empty. - | - |User-defined configurations are added to the left. The leftmost rule matching - |a warning message defines the action. 
- | - |Examples: - | - change every warning into an error: -Wconf:any:error - | - silence deprecations: -Wconf:cat=deprecation:s - | - |Note: on the command-line you might need to quote configurations containing `*` or `&` - |to prevent the shell from expanding patterns.""".stripMargin, - ) - -/** -X "Extended" or "Advanced" settings */ -private sealed trait XSettings: - self: SettingGroup => - - val Xhelp: Setting[Boolean] = BooleanSetting("-X", "Print a synopsis of advanced options.") - val XnoForwarders: Setting[Boolean] = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.") - val XmaxInlines: Setting[Int] = IntSetting("-Xmax-inlines", "Maximal number of successive inlines.", 32) - val XmaxInlinedTrees: Setting[Int] = IntSetting("-Xmax-inlined-trees", "Maximal number of inlined trees.", 2_000_000) - val Xmigration: Setting[ScalaVersion] = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.") - val XprintTypes: Setting[Boolean] = BooleanSetting("-Xprint-types", "Print tree types (debugging option).") - val XprintDiff: Setting[Boolean] = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.") - val XprintDiffDel: Setting[Boolean] = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.") - val XprintInline: Setting[Boolean] = BooleanSetting("-Xprint-inline", "Show where inlined code comes from.") - val XprintSuspension: Setting[Boolean] = BooleanSetting("-Xprint-suspension", "Show when code is suspended until macros are compiled.") - val Xprompt: Setting[Boolean] = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).") - val XreplDisableDisplay: Setting[Boolean] = BooleanSetting("-Xrepl-disable-display", "Do not display definitions in REPL.") - val XverifySignatures: Setting[Boolean] = BooleanSetting("-Xverify-signatures", "Verify generic signatures in generated bytecode.") - val XignoreScala2Macros: Setting[Boolean] = BooleanSetting("-Xignore-scala2-macros", "Ignore errors when compiling code that calls Scala2 macros, these will fail at runtime.") - val XimportSuggestionTimeout: Setting[Int] = IntSetting("-Ximport-suggestion-timeout", "Timeout (in ms) for searching for import suggestions when errors are reported.", 8000) - val Xsemanticdb: Setting[Boolean] = BooleanSetting("-Xsemanticdb", "Store information in SemanticDB.", aliases = List("-Ysemanticdb")) - val XuncheckedJavaOutputVersion: Setting[String] = ChoiceSetting("-Xunchecked-java-output-version", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. Corresponds to -target flag in javac. 
When on JDK 9+, consider -java-output-version as a safer alternative.", ScalaSettings.supportedTargetVersions, "", aliases = List("-Xtarget", "--Xtarget")) - val XcheckMacros: Setting[Boolean] = BooleanSetting("-Xcheck-macros", "Check some invariants of macro generated code while expanding macros", aliases = List("--Xcheck-macros")) - val XmainClass: Setting[String] = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") - val XimplicitSearchLimit: Setting[Int] = IntSetting("-Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000) - - val XmixinForceForwarders = ChoiceSetting( - name = "-Xmixin-force-forwarders", - helpArg = "mode", - descr = "Generate forwarder methods in classes inhering concrete methods from traits.", - choices = List("true", "junit", "false"), - default = "true") - - object mixinForwarderChoices { - def isTruthy(using Context) = XmixinForceForwarders.value == "true" - def isAtLeastJunit(using Context) = isTruthy || XmixinForceForwarders.value == "junit" - } - - val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") -end XSettings - -/** -Y "Forking" as in forked tongue or "Private" settings */ -private sealed trait YSettings: - self: SettingGroup => - - val Yhelp: Setting[Boolean] = BooleanSetting("-Y", "Print a synopsis of private options.") - val Ycheck: Setting[List[String]] = PhasesSetting("-Ycheck", "Check the tree at the end of") - val YcheckMods: Setting[Boolean] = BooleanSetting("-Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync.") - val Ydebug: Setting[Boolean] = BooleanSetting("-Ydebug", "Increase the quantity of debugging output.") - val YdebugTrace: Setting[Boolean] = BooleanSetting("-Ydebug-trace", "Trace core operations.") - val YdebugFlags: Setting[Boolean] = BooleanSetting("-Ydebug-flags", "Print all flags of definitions.") - val YdebugMissingRefs: Setting[Boolean] = BooleanSetting("-Ydebug-missing-refs", "Print a stacktrace when a required symbol is missing.") - val YdebugNames: Setting[Boolean] = BooleanSetting("-Ydebug-names", "Show internal representation of names.") - val YdebugPos: Setting[Boolean] = BooleanSetting("-Ydebug-pos", "Show full source positions including spans.") - val YdebugTreeWithId: Setting[Int] = IntSetting("-Ydebug-tree-with-id", "Print the stack trace when the tree with the given id is created.", Int.MinValue) - val YdebugTypeError: Setting[Boolean] = BooleanSetting("-Ydebug-type-error", "Print the stack trace when a TypeError is caught", false) - val YdebugError: Setting[Boolean] = BooleanSetting("-Ydebug-error", "Print the stack trace when any error is caught.", false) - val YdebugUnpickling: Setting[Boolean] = BooleanSetting("-Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) - val YtermConflict: Setting[String] = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") - val Ylog: Setting[List[String]] = PhasesSetting("-Ylog", "Log operations during") - val YlogClasspath: Setting[Boolean] = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.") - val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") - - 
val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. This is either \"always\", \"never\", or a classpath.", "always") - - val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") - val YnoGenericSig: Setting[Boolean] = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") - val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.") - val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip") - val Ydumpclasses: Setting[String] = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") - val YstopAfter: Setting[List[String]] = PhasesSetting("-Ystop-after", "Stop after", aliases = List("-stop")) // backward compat - val YstopBefore: Setting[List[String]] = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully - val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") - val YdetailedStats: Setting[Boolean] = BooleanSetting("-Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") - val YkindProjector: Setting[String] = ChoiceSetting("-Ykind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "enable", "underscores"), "disable") - val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "Show tree positions.") - val YprintPosSyms: Setting[Boolean] = BooleanSetting("-Yprint-pos-syms", "Show symbol definitions positions.") - val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting("-Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") - val YnoPatmatOpt: Setting[Boolean] = BooleanSetting("-Yno-patmat-opt", "Disable all pattern matching optimizations.") - val YplainPrinter: Setting[Boolean] = BooleanSetting("-Yplain-printer", "Pretty-print using a plain printer.") - val YprintSyms: Setting[Boolean] = BooleanSetting("-Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") - val YprintDebug: Setting[Boolean] = BooleanSetting("-Yprint-debug", "When printing trees, print some extra information useful for debugging.") - val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") - val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") - val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") - val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") - val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") - val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-docs", "Drop documentation when scanning source files.", aliases 
= List("-Ydrop-comments")) - val YcookComments: Setting[Boolean] = BooleanSetting("-Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) - val YreadComments: Setting[Boolean] = BooleanSetting("-Yread-docs", "Read documentation from tasty.") - val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") - val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") - val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") - val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") - val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") - val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") - val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") - val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features") - - val YprofileEnabled: Setting[Boolean] = BooleanSetting("-Yprofile-enabled", "Enable profiling.") - val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileExternalTool: Setting[List[String]] = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - - // Experimental language features - val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting("-Yno-kind-polymorphism", "Disable kind polymorphism.") - val YexplicitNulls: Setting[Boolean] = BooleanSetting("-Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") - val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") - val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") - val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") - val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") - val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") - val YlightweightLazyVals: Setting[Boolean] = BooleanSetting("-Ylightweight-lazy-vals", "Use experimental lightweight implementation of lazy vals") - - /** Area-specific debug output */ - val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") - val YnoDoubleBindings: Setting[Boolean] = BooleanSetting("-Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).") - val YshowVarBounds: Setting[Boolean] = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds.") - - val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") - - val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") - val YinstrumentDefs: Setting[Boolean] = BooleanSetting("-Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") - - val YforceInlineWhileTyping: Setting[Boolean] = BooleanSetting("-Yforce-inline-while-typing", "Make non-transparent inline methods inline when typing. Emulates the old inlining behavior of 3.0.0-M3.") -end YSettings - diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala deleted file mode 100644 index 7fdf57478f1a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala +++ /dev/null @@ -1,188 +0,0 @@ -/* @author James Iry - */ -package dotty.tools -package dotc.config - -import scala.language.unsafeNulls - -import scala.annotation.internal.sharable -import scala.util.{Try, Success, Failure} - -/** - * Represents a single Scala version in a manner that - * supports easy comparison and sorting. - */ -sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { - def unparse: String -} - -/** - * A scala version that sorts higher than all actual versions - */ -@sharable case object NoScalaVersion extends ScalaVersion { - def unparse: String = "none" - - def compare(that: ScalaVersion): Int = that match { - case NoScalaVersion => 0 - case _ => 1 - } -} - -/** - * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion - * may or may not be a released version - i.e. this same class is used to represent - * final, release candidate, milestone, and development builds. 
The build argument is used - * to segregate builds - */ -case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { - def unparse: String = s"${major}.${minor}.${rev}.${build.unparse}" - - def compare(that: ScalaVersion): Int = that match { - case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => - // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these - // comparisons a lot so I'm using brute force direct style code - if (major < thatMajor) -1 - else if (major > thatMajor) 1 - else if (minor < thatMinor) -1 - else if (minor > thatMinor) 1 - else if (rev < thatRev) -1 - else if (rev > thatRev) 1 - else build compare thatBuild - case AnyScalaVersion => 1 - case NoScalaVersion => -1 - } -} - -/** - * A Scala version that sorts lower than all actual versions - */ -@sharable case object AnyScalaVersion extends ScalaVersion { - def unparse: String = "any" - - def compare(that: ScalaVersion): Int = that match { - case AnyScalaVersion => 0 - case _ => -1 - } -} - -/** - * Methods for parsing ScalaVersions - */ -@sharable object ScalaVersion { - private val dot = "\\." - private val dash = "\\-" - private def not(s:String) = s"[^${s}]" - private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r - - def parse(versionString : String): Try[ScalaVersion] = { - def failure = Failure(new NumberFormatException( - s"There was a problem parsing ${versionString}. " + - "Versions should be in the form major[.minor[.revision]] " + - "where each part is a positive number, as in 2.10.1. " + - "The minor and revision parts are optional." - )) - - def toInt(s: String) = s match { - case null | "" => 0 - case _ => s.toInt - } - - def isInt(s: String) = Try(toInt(s)).isSuccess - - import ScalaBuild._ - - def toBuild(s: String) = s match { - case null | "FINAL" => Final - case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2))) - case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1))) - case _ => Development(s) - } - - try versionString match { - case "" | "any" => Success(AnyScalaVersion) - case "none" => Success(NoScalaVersion) - case R(_, majorS, _, minorS, _, revS, _, buildS) => - Success(SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))) - case _ => failure - } - catch { - case e: NumberFormatException => failure - } - } - - /** - * The version of the compiler running now - */ - val current: ScalaVersion = parse(util.Properties.versionNumberString).get -} - -/** - * Represents the data after the dash in major.minor.rev-build - */ -abstract class ScalaBuild extends Ordered[ScalaBuild] { - /** - * Return a version of this build information that can be parsed back into the - * same ScalaBuild - */ - def unparse: String -} - -object ScalaBuild { - - /** A development, test, nightly, snapshot or other "unofficial" build - */ - case class Development(id: String) extends ScalaBuild { - def unparse: String = s"-${id}" - - def compare(that: ScalaBuild): Int = that match { - // sorting two development builds based on id is reasonably valid for two versions created with the same schema - // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions - // this is a pragmatic compromise - case Development(thatId) => id compare thatId - // assume a development build is newer than anything else, that's not 
really true, but good luck - // mapping development build versions to other build types - case _ => 1 - } - } - - /** A final build - */ - case object Final extends ScalaBuild { - def unparse: String = "" - - def compare(that: ScalaBuild): Int = that match { - case Final => 0 - // a final is newer than anything other than a development build or another final - case Development(_) => -1 - case _ => 1 - } - } - - /** A candidate for final release - */ - case class RC(n: Int) extends ScalaBuild { - def unparse: String = s"-RC${n}" - - def compare(that: ScalaBuild): Int = that match { - // compare two rcs based on their RC numbers - case RC(thatN) => n - thatN - // an rc is older than anything other than a milestone or another rc - case Milestone(_) => 1 - case _ => -1 - } - } - - /** An intermediate release - */ - case class Milestone(n: Int) extends ScalaBuild { - def unparse: String = s"-M${n}" - - def compare(that: ScalaBuild): Int = that match { - // compare two milestones based on their milestone numbers - case Milestone(thatN) => n - thatN - // a milestone is older than anything other than another milestone - case _ => -1 - } - } -} - diff --git a/tests/pos-with-compiler-cc/dotc/config/Settings.scala b/tests/pos-with-compiler-cc/dotc/config/Settings.scala deleted file mode 100644 index 277833afbd5d..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Settings.scala +++ /dev/null @@ -1,295 +0,0 @@ -package dotty.tools.dotc -package config - -import scala.language.unsafeNulls - -import core.Contexts._ - -import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory} - -import annotation.tailrec -import collection.mutable.ArrayBuffer -import reflect.ClassTag -import scala.util.{Success, Failure} - -object Settings: - - val BooleanTag: ClassTag[Boolean] = ClassTag.Boolean - val IntTag: ClassTag[Int] = ClassTag.Int - val StringTag: ClassTag[String] = ClassTag(classOf[String]) - val ListTag: ClassTag[List[?]] = ClassTag(classOf[List[?]]) - val VersionTag: ClassTag[ScalaVersion] = ClassTag(classOf[ScalaVersion]) - val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) - val OutputTag: ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile]) - - class SettingsState(initialValues: Seq[Any]): - private val values = ArrayBuffer(initialValues: _*) - private var _wasRead: Boolean = false - - override def toString: String = s"SettingsState(values: ${values.toList})" - - def value(idx: Int): Any = - _wasRead = true - values(idx) - - def update(idx: Int, x: Any): SettingsState = - if (_wasRead) then SettingsState(values.toSeq).update(idx, x) - else - values(idx) = x - this - end SettingsState - - case class ArgsSummary( - sstate: SettingsState, - arguments: List[String], - errors: List[String], - warnings: List[String]) { - - def fail(msg: String): Settings.ArgsSummary = - ArgsSummary(sstate, arguments.tail, errors :+ msg, warnings) - - def warn(msg: String): Settings.ArgsSummary = - ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) - } - - case class Setting[T: ClassTag] private[Settings] ( - name: String, - description: String, - default: T, - helpArg: String = "", - choices: Option[Seq[?]] = None, - prefix: String = "", - aliases: List[String] = Nil, - depends: List[(Setting[?], Any)] = Nil, - propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { - - private var changed: Boolean = false - - def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] - - def updateIn(state: SettingsState, x: Any): SettingsState = x match - case 
_: T => state.update(idx, x) - case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${implicitly[ClassTag[T]]}") - - def isDefaultIn(state: SettingsState): Boolean = valueIn(state) == default - - def isMultivalue: Boolean = implicitly[ClassTag[T]] == ListTag - - def legalChoices: String = - choices match { - case Some(xs) if xs.isEmpty => "" - case Some(r: Range) => s"${r.head}..${r.last}" - case Some(xs) => xs.mkString(", ") - case None => "" - } - - def tryToSet(state: ArgsSummary): ArgsSummary = { - val ArgsSummary(sstate, arg :: args, errors, warnings) = state: @unchecked - def update(value: Any, args: List[String]): ArgsSummary = - var dangers = warnings - val value1 = - if changed && isMultivalue then - val value0 = value.asInstanceOf[List[String]] - val current = valueIn(sstate).asInstanceOf[List[String]] - value0.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") - current ++ value0 - else - if changed then dangers :+= s"Flag $name set repeatedly" - value - changed = true - ArgsSummary(updateIn(sstate, value1), args, errors, dangers) - end update - - def fail(msg: String, args: List[String]) = - ArgsSummary(sstate, args, errors :+ msg, warnings) - - def missingArg = - fail(s"missing argument for option $name", args) - - def setString(argValue: String, args: List[String]) = - choices match - case Some(xs) if !xs.contains(argValue) => - fail(s"$argValue is not a valid choice for $name", args) - case _ => - update(argValue, args) - - def setInt(argValue: String, args: List[String]) = - try - val x = argValue.toInt - choices match - case Some(r: Range) if x < r.head || r.last < x => - fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name", args) - case Some(xs) if !xs.contains(x) => - fail(s"$argValue is not a valid choice for $name", args) - case _ => - update(x, args) - catch case _: NumberFormatException => - fail(s"$argValue is not an integer argument for $name", args) - - def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match { - case (BooleanTag, _) => - update(true, args) - case (OptionTag, _) => - update(Some(propertyClass.get.getConstructor().newInstance()), args) - case (ListTag, _) => - if (argRest.isEmpty) missingArg - else - val strings = argRest.split(",").toList - choices match - case Some(valid) => strings.filterNot(valid.contains) match - case Nil => update(strings, args) - case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) - case _ => update(strings, args) - case (StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => - setString(argRest, args) - case (StringTag, arg2 :: args2) => - if (arg2 startsWith "-") missingArg - else setString(arg2, args2) - case (OutputTag, arg :: args) => - val path = Directory(arg) - val isJar = path.extension == "jar" - if (!isJar && !path.isDirectory) - fail(s"'$arg' does not exist or is not a directory or .jar file", args) - else { - val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) - update(output, args) - } - case (IntTag, args) if argRest.nonEmpty => - setInt(argRest, args) - case (IntTag, arg2 :: args2) => - setInt(arg2, args2) - case (VersionTag, _) => - ScalaVersion.parse(argRest) match { - case Success(v) => update(v, args) - case Failure(ex) => fail(ex.getMessage, args) - } - case (_, Nil) => - missingArg - } - - def matches(argName: String) = (name :: aliases).exists(_ == argName) - - if (prefix != "" && arg.startsWith(prefix)) - 
doSet(arg drop prefix.length) - else if (prefix == "" && matches(arg.takeWhile(_ != ':'))) - doSet(arg.dropWhile(_ != ':').drop(1)) - else - state - } - } - - object Setting: - extension [T](setting: Setting[T]) - def value(using Context): T = setting.valueIn(ctx.settingsState) - def update(x: T)(using Context): SettingsState = setting.updateIn(ctx.settingsState, x) - def isDefault(using Context): Boolean = setting.isDefaultIn(ctx.settingsState) - - class SettingGroup { - - private val _allSettings = new ArrayBuffer[Setting[?]] - def allSettings: Seq[Setting[?]] = _allSettings.toSeq - - def defaultState: SettingsState = new SettingsState(allSettings map (_.default)) - - def userSetSettings(state: SettingsState): Seq[Setting[?]] = - allSettings filterNot (_.isDefaultIn(state)) - - def toConciseString(state: SettingsState): String = - userSetSettings(state).mkString("(", " ", ")") - - private def checkDependencies(state: ArgsSummary): ArgsSummary = - userSetSettings(state.sstate).foldLeft(state)(checkDependenciesOfSetting) - - private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[?]) = - setting.depends.foldLeft(state) { (s, dep) => - val (depSetting, reqValue) = dep - if (depSetting.valueIn(state.sstate) == reqValue) s - else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})") - } - - /** Iterates over the arguments applying them to settings where applicable. - * Then verifies setting dependencies are met. - * - * This takes a boolean indicating whether to keep - * processing if an argument is seen which is not a command line option. - * This is an expedience for the moment so that you can say - * - * scalac -d /tmp foo.scala -optimise - * - * while also allowing - * - * scala Program opt opt - * - * to get their arguments. 
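- *
- *  A minimal sketch of the intended use (the `MySettings` group and its `-verbose`
- *  and `-target` options are made-up names, not settings defined here):
- *  {{{
- *  object MySettings extends SettingGroup:
- *    val verbose = BooleanSetting("-verbose", "Emit verbose output")
- *    val target  = StringSetting("-target", "dir", "Output directory", ".")
- *
- *  val res = MySettings.processArguments(
- *    List("-verbose", "-target:out", "Main.scala"), processAll = true)
- *  // res.arguments == List("Main.scala"); an unrecognised "-" option would be
- *  // reported in res.warnings rather than aborting processing
- *  }}}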
- */ - @tailrec - final def processArguments(state: ArgsSummary, processAll: Boolean, skipped: List[String]): ArgsSummary = - def stateWithArgs(args: List[String]) = ArgsSummary(state.sstate, args, state.errors, state.warnings) - state.arguments match - case Nil => - checkDependencies(stateWithArgs(skipped)) - case "--" :: args => - checkDependencies(stateWithArgs(skipped ++ args)) - case x :: _ if x startsWith "-" => - @tailrec def loop(settings: List[Setting[?]]): ArgsSummary = settings match - case setting :: settings1 => - val state1 = setting.tryToSet(state) - if state1 ne state then state1 - else loop(settings1) - case Nil => - state.warn(s"bad option '$x' was ignored") - processArguments(loop(allSettings.toList), processAll, skipped) - case arg :: args => - if processAll then processArguments(stateWithArgs(args), processAll, skipped :+ arg) - else state - end processArguments - - def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = - processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) - - def publish[T](settingf: Int => Setting[T]): Setting[T] = { - val setting = settingf(_allSettings.length) - _allSettings += setting - setting - } - - def BooleanSetting(name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = - publish(Setting(name, descr, initialValue, aliases = aliases)) - - def StringSetting(name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, helpArg, aliases = aliases)) - - def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) - - def MultiChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) - - def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = - publish(Setting(name, descr, default, aliases = aliases)) - - def IntChoiceSetting(name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = - publish(Setting(name, descr, default, choices = Some(choices))) - - def MultiStringSetting(name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, default, helpArg, aliases = aliases)) - - def OutputSetting(name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = - publish(Setting(name, descr, default, helpArg)) - - def PathSetting(name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, aliases = aliases)) - - def PhasesSetting(name: String, descr: String, default: String = "", aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) - - def PrefixSetting(name: String, pre: String, descr: String): Setting[List[String]] = - publish(Setting(name, descr, Nil, prefix = pre)) - - def VersionSetting(name: String, descr: String, default: ScalaVersion = NoScalaVersion): Setting[ScalaVersion] = - 
publish(Setting(name, descr, default)) - - def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = - publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass), aliases = aliases)) - } -end Settings diff --git a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala deleted file mode 100644 index 4b9b1b247856..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala +++ /dev/null @@ -1,32 +0,0 @@ -package dotty.tools -package dotc -package config - -import core.Decorators.* -import util.Property - -enum SourceVersion: - case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. - case `3.2-migration`, `3.2` - case `3.3-migration`, `3.3` - case `future-migration`, `future` - - val isMigrating: Boolean = toString.endsWith("-migration") - - def stable: SourceVersion = - if isMigrating then SourceVersion.values(ordinal + 1) else this - - def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal - -object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.3` - - /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ - val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) - - /** language versions that the compiler recognises. */ - val validSourceVersionNames = values.toList.map(_.toString.toTermName) - - /** All source versions that can be recognised from a language import. e.g. `import language.3.1` */ - val allSourceVersionNames = validSourceVersionNames ::: illegalSourceVersionNames -end SourceVersion diff --git a/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala b/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala deleted file mode 100644 index 5b79432a97e7..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala +++ /dev/null @@ -1,42 +0,0 @@ -package dotty.tools -package dotc -package config - -import scala.language.unsafeNulls - -/** For placing a wrapper function around property functions. - * Motivated by places like google app engine throwing exceptions - * on property lookups. - */ -trait WrappedProperties extends PropertiesTrait { - def wrap[T](body: => T): Option[T] - - protected def propCategory: String = "wrapped" - protected def pickJarBasedOn: Class[?] 
= this.getClass - - override def propIsSet(name: String): Boolean = wrap(super.propIsSet(name)) exists (x => x) - override def propOrElse(name: String, alt: String): String = wrap(super.propOrElse(name, alt)) getOrElse alt - override def setProp(name: String, value: String): String = wrap(super.setProp(name, value)).orNull - override def clearProp(name: String): String = wrap(super.clearProp(name)).orNull - override def envOrElse(name: String, alt: String): String = wrap(super.envOrElse(name, alt)) getOrElse alt - override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten - - def systemProperties: Iterator[(String, String)] = { - import scala.jdk.CollectionConverters._ - wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty - } -} - -object WrappedProperties { - object AccessControl extends WrappedProperties { - def wrap[T](body: => T): Option[T] = - try Some(body) - catch { - // the actual exception we are concerned with is AccessControlException, - // but that's deprecated on JDK 17, so catching its superclass is a convenient - // way to avoid a deprecation warning - case _: SecurityException => - None - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala deleted file mode 100644 index 2061bddb9e8a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala +++ /dev/null @@ -1,274 +0,0 @@ -package dotty.tools -package dotc -package core - -import Symbols._, Types._, Contexts._, Constants._ -import dotty.tools.dotc.ast.tpd, tpd.* -import util.Spans.Span -import printing.{Showable, Printer} -import printing.Texts.Text -import annotation.internal.sharable -import language.experimental.pureFunctions -import annotation.retains - -object Annotations { - - def annotClass(tree: Tree)(using Context) = - if (tree.symbol.isConstructor) tree.symbol.owner - else tree.tpe.typeSymbol - - abstract class Annotation extends Showable, Pure { - - def tree(using Context): Tree - - def symbol(using Context): Symbol = annotClass(tree) - - def hasSymbol(sym: Symbol)(using Context) = symbol == sym - - def matches(cls: Symbol)(using Context): Boolean = symbol.derivesFrom(cls) - - def appliesToModule: Boolean = true // for now; see remark in SymDenotations - - def derivedAnnotation(tree: Tree)(using Context): Annotation = - if (tree eq this.tree) this else Annotation(tree) - - /** All arguments to this annotation in a single flat list */ - def arguments(using Context): List[Tree] = tpd.allArguments(tree) - - def argument(i: Int)(using Context): Option[Tree] = { - val args = arguments - if (i < args.length) Some(args(i)) else None - } - def argumentConstant(i: Int)(using Context): Option[Constant] = - for (case ConstantType(c) <- argument(i) map (_.tpe.widenTermRefExpr.normalized)) yield c - - def argumentConstantString(i: Int)(using Context): Option[String] = - for (case Constant(s: String) <- argumentConstant(i)) yield s - - /** The tree evaluaton is in progress. */ - def isEvaluating: Boolean = false - - /** The tree evaluation has finished. */ - def isEvaluated: Boolean = true - - /** Normally, applies a type map to all tree nodes of this annotation, but can - * be overridden. Returns EmptyAnnotation if type type map produces a range - * type, since ranges cannot be types of trees. 
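- *
- *  Minimal call-site sketch (assumes a `Context` is in scope and `annot` is some
- *  `Annotation`; the identity-like map shown is purely illustrative):
- *  {{{
- *  val mapped = annot.mapWith(new TypeMap { def apply(tp: Type): Type = mapOver(tp) })
- *  }}}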
- */ - def mapWith(tm: TypeMap @retains(caps.cap))(using Context) = - val args = arguments - if args.isEmpty then this - else - val findDiff = new TreeAccumulator[Type]: - def apply(x: Type, tree: Tree)(using Context): Type = - if tm.isRange(x) then x - else - val tp1 = tm(tree.tpe) - foldOver(if tp1 frozen_=:= tree.tpe then x else tp1, tree) - val diff = findDiff(NoType, args) - if tm.isRange(diff) then EmptyAnnotation - else if diff.exists then derivedAnnotation(tm.mapOver(tree)) - else this - - /** Does this annotation refer to a parameter of `tl`? */ - def refersToParamOf(tl: TermLambda)(using Context): Boolean = - val args = arguments - if args.isEmpty then false - else tree.existsSubTree { - case id: Ident => id.tpe.stripped match - case TermParamRef(tl1, _) => tl eq tl1 - case _ => false - case _ => false - } - - /** A string representation of the annotation. Overridden in BodyAnnotation. - */ - def toText(printer: Printer): Text = printer.annotText(this) - - def ensureCompleted(using Context): Unit = tree - - def sameAnnotation(that: Annotation)(using Context): Boolean = - symbol == that.symbol && tree.sameTree(that.tree) - - /** Operations for hash-consing, can be overridden */ - def hash: Int = System.identityHashCode(this) - def eql(that: Annotation) = this eq that - } - - case class ConcreteAnnotation(t: Tree) extends Annotation: - def tree(using Context): Tree = t - - abstract class LazyAnnotation extends Annotation { - protected var mySym: Symbol | (Context ?-> Symbol) | Null - override def symbol(using parentCtx: Context): Symbol = - assert(mySym != null) - mySym match { - case symFn: (Context ?-> Symbol) @unchecked => - mySym = null - mySym = atPhaseBeforeTransforms(symFn) - // We should always produce the same annotation tree, no matter when the - // annotation is evaluated. Setting the phase to a pre-transformation phase - // seems to be enough to ensure this (note that after erasure, `ctx.typer` - // will be the Erasure typer, but that doesn't seem to affect the annotation - // trees we create, so we leave it as is) - case sym: Symbol if sym.defRunId != parentCtx.runId => - mySym = sym.denot.current.symbol - case _ => - } - mySym.asInstanceOf[Symbol] - - protected var myTree: Tree | (Context ?-> Tree) | Null - def tree(using Context): Tree = - assert(myTree != null) - myTree match { - case treeFn: (Context ?-> Tree) @unchecked => - myTree = null - myTree = atPhaseBeforeTransforms(treeFn) - case _ => - } - myTree.asInstanceOf[Tree] - - override def isEvaluating: Boolean = myTree == null - override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] - } - - class DeferredSymAndTree(symFn: Context ?-> Symbol, treeFn: Context ?-> Tree) - extends LazyAnnotation: - protected var mySym: Symbol | (Context ?-> Symbol) | Null = ctx ?=> symFn(using ctx) - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) - - /** An annotation indicating the body of a right-hand side, - * typically of an inline method. 
Treated specially in - * pickling/unpickling and TypeTreeMaps - */ - abstract class BodyAnnotation extends Annotation { - override def symbol(using Context): ClassSymbol = defn.BodyAnnot - override def derivedAnnotation(tree: Tree)(using Context): Annotation = - if (tree eq this.tree) this else ConcreteBodyAnnotation(tree) - override def arguments(using Context): List[Tree] = Nil - override def ensureCompleted(using Context): Unit = () - override def toText(printer: Printer): Text = "@Body" - } - - class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation { - def tree(using Context): Tree = body - } - - abstract class LazyBodyAnnotation extends BodyAnnotation { - // Copy-pasted from LazyAnnotation to avoid having to turn it into a trait - protected var myTree: Tree | (Context ?-> Tree) | Null - def tree(using Context): Tree = - assert(myTree != null) - myTree match { - case treeFn: (Context ?-> Tree) @unchecked => - myTree = null - myTree = atPhaseBeforeTransforms(treeFn) - case _ => - } - myTree.asInstanceOf[Tree] - - override def isEvaluating: Boolean = myTree == null - override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] - } - - object LazyBodyAnnotation { - def apply(bodyFn: Context ?-> Tree): LazyBodyAnnotation = - new LazyBodyAnnotation: - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> bodyFn(using ctx) - } - - object Annotation { - - def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) - - def apply(cls: ClassSymbol)(using Context): Annotation = - apply(cls, Nil) - - def apply(cls: ClassSymbol, arg: Tree)(using Context): Annotation = - apply(cls, arg :: Nil) - - def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(using Context): Annotation = - apply(cls, arg1 :: arg2 :: Nil) - - def apply(cls: ClassSymbol, args: List[Tree])(using Context): Annotation = - apply(cls.typeRef, args) - - def apply(atp: Type, arg: Tree)(using Context): Annotation = - apply(atp, arg :: Nil) - - def apply(atp: Type, arg1: Tree, arg2: Tree)(using Context): Annotation = - apply(atp, arg1 :: arg2 :: Nil) - - def apply(atp: Type, args: List[Tree])(using Context): Annotation = - apply(New(atp, args)) - - /** Create an annotation where the tree is computed lazily. */ - def deferred(sym: Symbol)(treeFn: Context ?-> Tree): Annotation = - new LazyAnnotation { - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) - protected var mySym: Symbol | (Context ?-> Symbol) | Null = sym - } - - /** Create an annotation where the symbol and the tree are computed lazily. 
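- *
- *  Illustrative sketch ("Foo.scala" is a made-up payload; neither the symbol nor the
- *  tree computation runs before the annotation is first inspected):
- *  {{{
- *  val annot = Annotation.deferredSymAndTree(defn.SourceFileAnnot)(
- *    New(defn.SourceFileAnnot.typeRef, Literal(Constant("Foo.scala")) :: Nil))
- *  }}}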
*/ - def deferredSymAndTree(symFn: Context ?-> Symbol)(treeFn: Context ?-> Tree): Annotation = - DeferredSymAndTree(symFn, treeFn) - - /** Extractor for child annotations */ - object Child { - - /** A deferred annotation to the result of a given child computation */ - def later(delayedSym: Context ?-> Symbol, span: Span)(using Context): Annotation = { - def makeChildLater(using Context) = { - val sym = delayedSym - New(defn.ChildAnnot.typeRef.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil) - .withSpan(span) - } - deferred(defn.ChildAnnot)(makeChildLater) - } - - /** A regular, non-deferred Child annotation */ - def apply(sym: Symbol, span: Span)(using Context): Annotation = later(sym, span) - - def unapply(ann: Annotation)(using Context): Option[Symbol] = - if (ann.symbol == defn.ChildAnnot) { - val AppliedType(_, (arg: NamedType) :: Nil) = ann.tree.tpe: @unchecked - Some(arg.symbol) - } - else None - } - - def makeSourceFile(path: String)(using Context): Annotation = - apply(defn.SourceFileAnnot, Literal(Constant(path))) - } - - @sharable val EmptyAnnotation = Annotation(EmptyTree) - - def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = { - val tref = cls.typeRef - Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref)) - } - - /** Extracts the type of the thrown exception from an annotation. - * - * Supports both "old-style" `@throws(classOf[Exception])` - * as well as "new-style" `@throws[Exception]("cause")` annotations. - */ - object ThrownException { - def unapply(a: Annotation)(using Context): Option[Type] = - if (a.symbol ne defn.ThrowsAnnot) - None - else a.argumentConstant(0) match { - // old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception])) - case Some(Constant(tpe: Type)) => - Some(tpe) - // new-style: @throws[Exception], @throws[Exception]("cause") - case _ => - stripApply(a.tree) match { - case TypeApply(_, List(tpt)) => - Some(tpt.tpe) - case _ => - None - } - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Atoms.scala b/tests/pos-with-compiler-cc/dotc/core/Atoms.scala deleted file mode 100644 index bcaaf6794107..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Atoms.scala +++ /dev/null @@ -1,36 +0,0 @@ -package dotty.tools -package dotc -package core - -import Types._ - -/** Indicates the singleton types that a type must or may consist of. - * @param lo The lower bound: singleton types in this set are guaranteed - * to be in the carrier type. - * @param hi The upper bound: all singleton types in the carrier type are - * guaranteed to be in this set - * If the underlying type of a singleton type is another singleton type, - * only the latter type ends up in the sets. 
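- *
- *  The `&` and `|` operations defined below combine this information conservatively,
- *  for example:
- *  {{{
- *  Range(lo1, hi1) & Unknown   // == Range(Set.empty, hi1): only the upper bound survives
- *  Range(lo1, hi1) | Unknown   // == Unknown
- *  }}}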
- */ -enum Atoms: - case Range(lo: Set[Type], hi: Set[Type]) - case Unknown - - def & (that: Atoms): Atoms = this match - case Range(lo1, hi1) => - that match - case Range(lo2, hi2) => Range(lo1 & lo2, hi1 & hi2) - case Unknown => Range(Set.empty, hi1) - case Unknown => - that match - case Range(lo2, hi2) => Range(Set.empty, hi2) - case Unknown => Unknown - - def | (that: Atoms): Atoms = this match - case Range(lo1, hi1) => - that match - case Range(lo2, hi2) => Range(lo1 | lo2, hi1 | hi2) - case Unknown => Unknown - case Unknown => Unknown - -end Atoms diff --git a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala deleted file mode 100644 index d166cec11573..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala +++ /dev/null @@ -1,216 +0,0 @@ -package dotty.tools -package dotc -package core - -import Contexts._, Types._, Symbols._, Names._, Flags._ -import Denotations.SingleDenotation -import Decorators._ -import collection.mutable -import config.SourceVersion.future -import config.Feature.sourceVersion -import annotation.constructorOnly - -/** Realizability status */ -object CheckRealizable { - - sealed abstract class Realizability(val msg: String) extends Pure { - def andAlso(other: => Realizability): Realizability = - if (this == Realizable) other else this - def mapError(f: Realizability -> Context ?-> Realizability)(using Context): Realizability = - if (this == Realizable) this else f(this) - } - - object Realizable extends Realizability("") - - object NotConcrete extends Realizability(" is not a concrete type") - - class NotFinal(sym: Symbol)(using @constructorOnly ctx: Context) - extends Realizability(i" refers to nonfinal $sym") - - class HasProblemBounds(name: Name, info: Type)(using @constructorOnly ctx: Context) - extends Realizability(i" has a member $name with possibly conflicting bounds ${info.bounds.lo} <: ... <: ${info.bounds.hi}") - - class HasProblemBaseArg(typ: Type, argBounds: TypeBounds)(using @constructorOnly ctx: Context) - extends Realizability(i" has a base type $typ with possibly conflicting parameter bounds ${argBounds.lo} <: ... <: ${argBounds.hi}") - - class HasProblemBase(base1: Type, base2: Type)(using @constructorOnly ctx: Context) - extends Realizability(i" has conflicting base types $base1 and $base2") - - class HasProblemField(fld: SingleDenotation, problem: Realizability)(using @constructorOnly ctx: Context) - extends Realizability(i" has a member $fld which is not a legal path\nsince ${fld.symbol.name}: ${fld.info}${problem.msg}") - - class ProblemInUnderlying(tp: Type, problem: Realizability)(using @constructorOnly ctx: Context) - extends Realizability(i"s underlying type ${tp}${problem.msg}") { - assert(problem != Realizable) - } - - def realizability(tp: Type)(using Context): Realizability = - new CheckRealizable().realizability(tp) - - def boundsRealizability(tp: Type)(using Context): Realizability = - new CheckRealizable().boundsRealizability(tp) - - private val LateInitializedFlags = Lazy | Erased -} - -/** Compute realizability status. - * - * A type T is realizable iff it is inhabited by non-null values. This ensures that its type members have good bounds - * (in the sense from DOT papers). A type projection T#L is legal if T is realizable, and can be understood as - * Scala 2's `v.L forSome { val v: T }`. - * - * In general, a realizable type can have multiple inhabitants, hence it need not be stable (in the sense of - * Type.isStable). 
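- *
- *  For instance, a hypothetical trait whose abstract type member has contradictory
- *  bounds has no non-null inhabitant and is therefore reported as unrealizable:
- *  {{{
- *  trait A { type T >: Int <: String }   // Int <: T <: String has no solution
- *  }}}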
- */ -class CheckRealizable(using Context) { - import CheckRealizable._ - - /** A set of all fields that have already been checked. Used - * to avoid infinite recursions when analyzing recursive types. - */ - private val checkedFields: mutable.Set[Symbol] = mutable.LinkedHashSet[Symbol]() - - /** Is symbol's definitition a lazy or erased val? - * (note we exclude modules here, because their realizability is ensured separately) - */ - private def isLateInitialized(sym: Symbol) = sym.isOneOf(LateInitializedFlags, butNot = Module) - - /** The realizability status of given type `tp`*/ - def realizability(tp: Type): Realizability = tp.dealias match { - /* - * A `TermRef` for a path `p` is realizable if - * - `p`'s type is stable and realizable, or - * - its underlying path is idempotent (that is, *stable*), total, and not null. - * We don't check yet the "not null" clause: that will require null-safety checking. - * - * We assume that stability of tp.prefix is checked elsewhere, since that's necessary for the path to be legal in - * the first place. - */ - case tp: TermRef => - val sym = tp.symbol - lazy val tpInfoRealizable = realizability(tp.info) - if (sym.is(StableRealizable)) realizability(tp.prefix) - else { - val r = - if (sym.isStableMember && !isLateInitialized(sym)) - // it's realizable because we know that a value of type `tp` has been created at run-time - Realizable - else if (!sym.isEffectivelyFinal) - // it's potentially not realizable since it might be overridden with a member of nonrealizable type - new NotFinal(sym) - else - // otherwise we need to look at the info to determine realizability - // roughly: it's realizable if the info does not have bad bounds - tpInfoRealizable.mapError(r => new ProblemInUnderlying(tp, r)) - r andAlso { - if (sym.isStableMember) sym.setFlag(StableRealizable) // it's known to be stable and realizable - realizability(tp.prefix) - } mapError { r => - // A mutable path is in fact stable and realizable if it has a realizable singleton type. - if (tp.info.isStable && tpInfoRealizable == Realizable) { - sym.setFlag(StableRealizable) - Realizable - } - else r - } - } - case _: SingletonType | NoPrefix => - Realizable - case tp => - def isConcrete(tp: Type): Boolean = tp.dealias match { - case tp: TypeRef => tp.symbol.isClass - case tp: TypeParamRef => false - case tp: TypeProxy => isConcrete(tp.underlying) - case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2) - case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2) - case _ => false - } - if (!isConcrete(tp)) NotConcrete - else boundsRealizability(tp).andAlso(memberRealizability(tp)) - } - - private def refinedNames(tp: Type): Set[Name] = tp.dealias match { - case tp: RefinedType => refinedNames(tp.parent) + tp.refinedName - case tp: AndType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) - case tp: OrType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) - case tp: TypeProxy => refinedNames(tp.superType) - case _ => Set.empty - } - - /** `Realizable` if `tp` has good bounds, a `HasProblem...` instance - * pointing to a bad bounds member otherwise. "Has good bounds" means: - * - * - all type members have good bounds (except for opaque helpers) - * - all refinements of the underlying type have good bounds (except for opaque companions) - * - all base types are class types, and if their arguments are wildcards - * they have good bounds. - * - base types do not appear in multiple instances with different arguments. 
- * (depending on the simplification scheme for AndTypes employed, this could - * also lead to base types with bad bounds). - */ - private def boundsRealizability(tp: Type) = { - - val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { - for { - mbr <- tp.nonClassTypeMembers - if !(mbr.info.loBound <:< mbr.info.hiBound) - } - yield new HasProblemBounds(mbr.name, mbr.info) - } - - val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { - for { - name <- refinedNames(tp) - if (name.isTypeName) - mbr <- tp.member(name).alternatives - if !(mbr.info.loBound <:< mbr.info.hiBound) - } - yield - new HasProblemBounds(name, mbr.info) - } - - def baseTypeProblems(base: Type) = base match { - case AndType(base1, base2) => - new HasProblemBase(base1, base2) :: Nil - case base => - base.argInfos.collect { - case bounds @ TypeBounds(lo, hi) if !(lo <:< hi) => - new HasProblemBaseArg(base, bounds) - } - } - val baseProblems = - tp.baseClasses.map(_.baseTypeOf(tp)).flatMap(baseTypeProblems) - - baseProblems.foldLeft( - refinementProblems.foldLeft( - memberProblems.foldLeft( - Realizable: Realizability)(_ andAlso _))(_ andAlso _))(_ andAlso _) - } - - /** `Realizable` if all of `tp`'s non-strict fields have realizable types, - * a `HasProblemField` instance pointing to a bad field otherwise. - */ - private def memberRealizability(tp: Type) = { - def checkField(sofar: Realizability, fld: SingleDenotation): Realizability = - sofar andAlso { - if (checkedFields.contains(fld.symbol) || fld.symbol.isOneOf(Private | Mutable | LateInitializedFlags)) - // if field is private it cannot be part of a visible path - // if field is mutable it cannot be part of a path - // if field is lazy or erased it does not need to be initialized when the owning object is - // so in all cases the field does not influence realizability of the enclosing object. - Realizable - else { - checkedFields += fld.symbol - realizability(fld.info).mapError(r => new HasProblemField(fld, r)) - } - } - if sourceVersion.isAtLeast(future) then - // check fields only from version 3.x. - // Reason: An embedded field could well be nullable, which means it - // should not be part of a path and need not be checked; but we cannot recognize - // this situation until we have a typesystem that tracks nullability. 
- tp.fields.foldLeft(Realizable: Realizability)(checkField) - else - Realizable - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Comments.scala b/tests/pos-with-compiler-cc/dotc/core/Comments.scala deleted file mode 100644 index 1b20b75ad8ac..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Comments.scala +++ /dev/null @@ -1,462 +0,0 @@ -package dotty.tools -package dotc -package core - -import scala.language.unsafeNulls - -import ast.{ untpd, tpd } -import Symbols._, Contexts._ -import util.{SourceFile, ReadOnlyMap} -import util.Spans._ -import util.CommentParsing._ -import util.Property.Key -import parsing.Parsers.Parser -import reporting.ProperDefinitionNotFound - -object Comments { - val ContextDoc: Key[ContextDocstrings] = new Key[ContextDocstrings] - - /** Decorator for getting docbase out of context */ - given CommentsContext: AnyRef with - extension (c: Context) def docCtx: Option[ContextDocstrings] = c.property(ContextDoc) - - /** Context for Docstrings, contains basic functionality for getting - * docstrings via `Symbol` and expanding templates - */ - class ContextDocstrings { - - private val _docstrings: MutableSymbolMap[Comment] = MutableSymbolMap[Comment](512) // FIXME: 2nd [Comment] needed or "not a class type" - - val templateExpander: CommentExpander = new CommentExpander - - def docstrings: ReadOnlyMap[Symbol, Comment] = _docstrings - - def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym) - - def addDocstring(sym: Symbol, doc: Option[Comment]): Unit = - doc.foreach(d => _docstrings.update(sym, d)) - } - - /** - * A `Comment` contains the unformatted docstring, it's position and potentially more - * information that is populated when the comment is "cooked". - * - * @param span The position span of this `Comment`. - * @param raw The raw comment, as seen in the source code, without any expansion. - * @param expanded If this comment has been expanded, it's expansion, otherwise `None`. - * @param usecases The usecases for this comment. - */ - final case class Comment( - span: Span, - raw: String, - expanded: Option[String], - usecases: List[UseCase], - variables: Map[String, String], - ) { - - /** Has this comment been cooked or expanded? */ - def isExpanded: Boolean = expanded.isDefined - - /** The body of this comment, without the `@usecase` and `@define` sections, after expansion. */ - lazy val expandedBody: Option[String] = - expanded.map(removeSections(_, "@usecase", "@define")) - - val isDocComment: Boolean = Comment.isDocComment(raw) - - /** - * Expands this comment by giving its content to `f`, and then parsing the `@usecase` sections. - * Typically, `f` will take care of expanding the variables. - * - * @param f The expansion function. - * @return The expanded comment, with the `usecases` populated. 
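- *
- *  Minimal usage sketch (assumes a `Context` is in scope and `comment` holds a raw
- *  docstring; the identity expansion is just for illustration):
- *  {{{
- *  val cooked = comment.expand(identity)
- *  assert(cooked.isExpanded)
- *  }}}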
- */ - def expand(f: String => String)(using Context): Comment = { - val expandedComment = f(raw) - val useCases = Comment.parseUsecases(expandedComment, span) - Comment(span, raw, Some(expandedComment), useCases, Map.empty) - } - } - - object Comment { - - def isDocComment(comment: String): Boolean = comment.startsWith("/**") - - def apply(span: Span, raw: String): Comment = - Comment(span, raw, None, Nil, Map.empty) - - private def parseUsecases(expandedComment: String, span: Span)(using Context): List[UseCase] = - if (!isDocComment(expandedComment)) - Nil - else - tagIndex(expandedComment) - .filter { startsWithTag(expandedComment, _, "@usecase") } - .map { case (start, end) => decomposeUseCase(expandedComment, span, start, end) } - - /** Turns a usecase section into a UseCase, with code changed to: - * {{{ - * // From: - * def foo: A - * // To: - * def foo: A = ??? - * }}} - */ - private def decomposeUseCase(body: String, span: Span, start: Int, end: Int)(using Context): UseCase = { - def subPos(start: Int, end: Int) = - if (span == NoSpan) NoSpan - else { - val start1 = span.start + start - val end1 = span.end + end - span withStart start1 withPoint start1 withEnd end1 - } - - val codeStart = skipWhitespace(body, start + "@usecase".length) - val codeEnd = skipToEol(body, codeStart) - val code = body.substring(codeStart, codeEnd) + " = ???" - val codePos = subPos(codeStart, codeEnd) - - UseCase(code, codePos) - } - } - - final case class UseCase(code: String, codePos: Span, untpdCode: untpd.Tree, tpdCode: Option[tpd.DefDef]) { - def typed(tpdCode: tpd.DefDef): UseCase = copy(tpdCode = Some(tpdCode)) - } - - object UseCase { - def apply(code: String, codePos: Span)(using Context): UseCase = { - val tree = { - val tree = new Parser(SourceFile.virtual("", code)).localDef(codePos.start) - tree match { - case tree: untpd.DefDef => - val newName = ctx.compilationUnit.freshNames.newName(tree.name, NameKinds.DocArtifactName) - untpd.cpy.DefDef(tree)(name = newName) - case _ => - report.error(ProperDefinitionNotFound(), ctx.source.atSpan(codePos)) - tree - } - } - UseCase(code, codePos, tree, None) - } - } - - /** - * Port of DocComment.scala from nsc - * @author Martin Odersky - * @author Felix Mulder - */ - class CommentExpander { - import dotc.config.Printers.scaladoc - import scala.collection.mutable - - def expand(sym: Symbol, site: Symbol)(using Context): String = { - val parent = if (site != NoSymbol) site else sym - defineVariables(parent) - expandedDocComment(sym, parent) - } - - /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing. - * - * @param sym The symbol for which doc comment is returned - * @param site The class for which doc comments are generated - * @throws ExpansionLimitExceeded when more than 10 successive expansions - * of the same string are done, which is - * interpreted as a recursive variable definition. 
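- *
- *  Sketch of the intended behaviour (names are illustrative): given a
- *  `@define Coll immutable.List` section visible from `site`, occurrences of `$Coll`
- *  in the raw comment of `sym` are substituted during expansion.
- *  {{{
- *  val cooked = expandedDocComment(sym, site)
- *  // "This $Coll is ordered." becomes "This immutable.List is ordered."
- *  }}}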
- */ - def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(using Context): String = { - // when parsing a top level class or module, use the (module-)class itself to look up variable definitions - val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym - else site - expandVariables(cookedDocComment(sym, docStr), sym, parent) - } - - private def template(raw: String): String = - removeSections(raw, "@define") - - private def defines(raw: String): List[String] = { - val sections = tagIndex(raw) - val defines = sections filter { startsWithTag(raw, _, "@define") } - val usecases = sections filter { startsWithTag(raw, _, "@usecase") } - val end = startTag(raw, (defines ::: usecases).sortBy(_._1)) - - defines map { case (start, end) => raw.substring(start, end) } - } - - private def replaceInheritDocToInheritdoc(docStr: String): String = - docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc") - - /** The cooked doc comment of an overridden symbol */ - protected def superComment(sym: Symbol)(using Context): Option[String] = - allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "") - - private val cookedDocComments = MutableSymbolMap[String]() - - /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by - * missing sections of an inherited doc comment. - * If a symbol does not have a doc comment but some overridden version of it does, - * the doc comment of the overridden version is copied instead. - */ - def cookedDocComment(sym: Symbol, docStr: String = "")(using Context): String = cookedDocComments.getOrElseUpdate(sym, { - var ownComment = - if (docStr.length == 0) ctx.docCtx.flatMap(_.docstring(sym).map(c => template(c.raw))).getOrElse("") - else template(docStr) - ownComment = replaceInheritDocToInheritdoc(ownComment) - - superComment(sym) match { - case None => - // SI-8210 - The warning would be false negative when this symbol is a setter - if (ownComment.indexOf("@inheritdoc") != -1 && ! 
sym.isSetter) - scaladoc.println(s"${sym.span}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.") - ownComment.replace("@inheritdoc", "") - case Some(sc) => - if (ownComment == "") sc - else expandInheritdoc(sc, merge(sc, ownComment, sym), sym) - } - }) - - private def isMovable(str: String, sec: (Int, Int)): Boolean = - startsWithTag(str, sec, "@param") || - startsWithTag(str, sec, "@tparam") || - startsWithTag(str, sec, "@return") - - def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = { - val srcSections = tagIndex(src) - val dstSections = tagIndex(dst) - val srcParams = paramDocs(src, "@param", srcSections) - val dstParams = paramDocs(dst, "@param", dstSections) - val srcTParams = paramDocs(src, "@tparam", srcSections) - val dstTParams = paramDocs(dst, "@tparam", dstSections) - val out = new StringBuilder - var copied = 0 - var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _))) - - if (copyFirstPara) { - val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment - (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections) - out append src.substring(0, eop).trim - copied = 3 - tocopy = 3 - } - - def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match { - case Some((start, end)) => - if (end > tocopy) tocopy = end - case None => - srcSec match { - case Some((start1, end1)) => - out append dst.substring(copied, tocopy).trim - out append "\n" - copied = tocopy - out append src.substring(start1, end1).trim - case None => - } - } - - //TODO: enable this once you know how to get `sym.paramss` - /* - for (params <- sym.paramss; param <- params) - mergeSection(srcParams get param.name.toString, dstParams get param.name.toString) - for (tparam <- sym.typeParams) - mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString) - - mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections)) - mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections)) - */ - - if (out.length == 0) dst - else { - out append dst.substring(copied) - out.toString - } - } - - /** - * Expand inheritdoc tags - * - for the main comment we transform the inheritdoc into the super variable, - * and the variable expansion can expand it further - * - for the param, tparam and throws sections we must replace comments on the spot - * - * This is done separately, for two reasons: - * 1. It takes longer to run compared to merge - * 2. 
The inheritdoc annotation should not be used very often, as building the comment from pieces severely - * impacts performance - * - * @param parent The source (or parent) comment - * @param child The child (overriding member or usecase) comment - * @param sym The child symbol - * @return The child comment with the inheritdoc sections expanded - */ - def expandInheritdoc(parent: String, child: String, sym: Symbol): String = - if (child.indexOf("@inheritdoc") == -1) - child - else { - val parentSections = tagIndex(parent) - val childSections = tagIndex(child) - val parentTagMap = sectionTagMap(parent, parentSections) - val parentNamedParams = Map() + - ("@param" -> paramDocs(parent, "@param", parentSections)) + - ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) + - ("@throws" -> paramDocs(parent, "@throws", parentSections)) - - val out = new StringBuilder - - def replaceInheritdoc(childSection: String, parentSection: => String) = - if (childSection.indexOf("@inheritdoc") == -1) - childSection - else - childSection.replace("@inheritdoc", parentSection) - - def getParentSection(section: (Int, Int)): String = { - - def getSectionHeader = extractSectionTag(child, section) match { - case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section) - case other => other - } - - def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String = - paramMap.get(param) match { - case Some(section) => - // Cleanup the section tag and parameter - val sectionTextBounds = extractSectionText(parent, section) - cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2)) - case None => - scaladoc.println(s"""${sym.span}: the """" + getSectionHeader + "\" annotation of the " + sym + - " comment contains @inheritdoc, but the corresponding section in the parent is not defined.") - "" - } - - child.substring(section._1, section._1 + 7) match { - case param@("@param "|"@tparam"|"@throws") => - sectionString(extractSectionParam(child, section), parentNamedParams(param.trim)) - case _ => - sectionString(extractSectionTag(child, section), parentTagMap) - } - } - - def mainComment(str: String, sections: List[(Int, Int)]): String = - if (str.trim.length > 3) - str.trim.substring(3, startTag(str, sections)) - else - "" - - // Append main comment - out.append("/**") - out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections))) - - // Append sections - for (section <- childSections) - out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section))) - - out.append("*/") - out.toString - } - - protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(using Context): String = { - val expandLimit = 10 - - def expandInternal(str: String, depth: Int): String = { - if (depth >= expandLimit) - throw new ExpansionLimitExceeded(str) - - val out = new StringBuilder - var copied, idx = 0 - // excluding variables written as \$foo so we can use them when - // necessary to document things like Symbol#decode - def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\' - while (idx < str.length) - if ((str charAt idx) != '$' || isEscaped) - idx += 1 - else { - val vstart = idx - idx = skipVariable(str, idx + 1) - def replaceWith(repl: String) = { - out append str.substring(copied, vstart) - out append repl - copied = idx - } - variableName(str.substring(vstart + 1, idx)) match { - case "super" => - superComment(sym) foreach { sc => - val superSections = tagIndex(sc) - 
replaceWith(sc.substring(3, startTag(sc, superSections))) - for (sec @ (start, end) <- superSections) - if (!isMovable(sc, sec)) out append sc.substring(start, end) - } - case "" => idx += 1 - case vname => - lookupVariable(vname, site) match { - case Some(replacement) => replaceWith(replacement) - case None => - scaladoc.println(s"Variable $vname undefined in comment for $sym in $site") - } - } - } - if (out.length == 0) str - else { - out append str.substring(copied) - expandInternal(out.toString, depth + 1) - } - } - - // We suppressed expanding \$ throughout the recursion, and now we - // need to replace \$ with $ so it looks as intended. - expandInternal(initialStr, 0).replace("""\$""", "$") - } - - def defineVariables(sym: Symbol)(using Context): Unit = { - val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r - - val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("") - defs(sym) ++= defines(raw).map { - str => { - val start = skipWhitespace(str, "@define".length) - val (key, value) = str.splitAt(skipVariable(str, start)) - key.drop(start) -> value - } - } map { - case (key, Trim(value)) => - variableName(key) -> value.replaceAll("\\s+\\*+$", "") - } - } - - /** Maps symbols to the variable -> replacement maps that are defined - * in their doc comments - */ - private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map() - - /** Lookup definition of variable. - * - * @param vble The variable for which a definition is searched - * @param site The class for which doc comments are generated - */ - def lookupVariable(vble: String, site: Symbol)(using Context): Option[String] = site match { - case NoSymbol => None - case _ => - val searchList = - if (site.flags.is(Flags.Module)) site :: site.info.baseClasses - else site.info.baseClasses - - searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match { - case Some(str) if str startsWith "$" => lookupVariable(str.tail, site) - case res => res orElse lookupVariable(vble, site.owner) - } - } - - /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing - * If a symbol does not have a doc comment but some overridden version of it does, - * the position of the doc comment of the overridden version is returned instead. - */ - def docCommentPos(sym: Symbol)(using Context): Span = - ctx.docCtx.flatMap(_.docstring(sym).map(_.span)).getOrElse(NoSpan) - - /** A version which doesn't consider self types, as a temporary measure: - * an infinite loop has broken out between superComment and cookedDocComment - * since r23926. 
- */ - private def allInheritedOverriddenSymbols(sym: Symbol)(using Context): List[Symbol] = - if (!sym.owner.isClass) Nil - else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..` - //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol) - - class ExpansionLimitExceeded(str: String) extends Exception - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constants.scala b/tests/pos-with-compiler-cc/dotc/core/Constants.scala deleted file mode 100644 index f45e9e5217de..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Constants.scala +++ /dev/null @@ -1,261 +0,0 @@ -package dotty.tools -package dotc -package core - -import Types._, Symbols._, Contexts._ -import printing.Printer -import printing.Texts.Text - -object Constants { - - inline val NoTag = 0 - inline val UnitTag = 1 - inline val BooleanTag = 2 - inline val ByteTag = 3 - inline val ShortTag = 4 - inline val CharTag = 5 - inline val IntTag = 6 - inline val LongTag = 7 - inline val FloatTag = 8 - inline val DoubleTag = 9 - inline val StringTag = 10 - inline val NullTag = 11 - inline val ClazzTag = 12 - - class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any] { - import java.lang.Double.doubleToRawLongBits - import java.lang.Float.floatToRawIntBits - - def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue - def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue - def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue - def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag - def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag - def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag - def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag - def isNonUnitAnyVal: Boolean = BooleanTag <= tag && tag <= DoubleTag - def isAnyVal: Boolean = UnitTag <= tag && tag <= DoubleTag - - def tpe(using Context): Type = tag match { - case UnitTag => defn.UnitType - case BooleanTag => defn.BooleanType - case ByteTag => defn.ByteType - case ShortTag => defn.ShortType - case CharTag => defn.CharType - case IntTag => defn.IntType - case LongTag => defn.LongType - case FloatTag => defn.FloatType - case DoubleTag => defn.DoubleType - case StringTag => defn.StringType - case NullTag => defn.NullType - case ClazzTag => defn.ClassType(typeValue) - } - - /** We need the equals method to take account of tags as well as values. 
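- *
- *  For example (values below are illustrative):
- *  {{{
- *  Constant(0) == Constant(0L)                    // false: IntTag differs from LongTag
- *  Constant(Double.NaN) == Constant(Double.NaN)   // true: NaNs are identified via raw bits
- *  Constant(0.0) == Constant(-0.0)                // false: distinguished via raw bits
- *  }}}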
- */ - override def equals(other: Any): Boolean = other match { - case that: Constant => - this.tag == that.tag && equalHashValue == that.equalHashValue - case _ => false - } - - def isNaN: Boolean = value match { - case f: Float => f.isNaN - case d: Double => d.isNaN - case _ => false - } - - def booleanValue: Boolean = - if (tag == BooleanTag) value.asInstanceOf[Boolean] - else throw new Error("value " + value + " is not a boolean") - - def byteValue: Byte = tag match { - case ByteTag => value.asInstanceOf[Byte] - case ShortTag => value.asInstanceOf[Short].toByte - case CharTag => value.asInstanceOf[Char].toByte - case IntTag => value.asInstanceOf[Int].toByte - case LongTag => value.asInstanceOf[Long].toByte - case FloatTag => value.asInstanceOf[Float].toByte - case DoubleTag => value.asInstanceOf[Double].toByte - case _ => throw new Error("value " + value + " is not a Byte") - } - - def shortValue: Short = tag match { - case ByteTag => value.asInstanceOf[Byte].toShort - case ShortTag => value.asInstanceOf[Short] - case CharTag => value.asInstanceOf[Char].toShort - case IntTag => value.asInstanceOf[Int].toShort - case LongTag => value.asInstanceOf[Long].toShort - case FloatTag => value.asInstanceOf[Float].toShort - case DoubleTag => value.asInstanceOf[Double].toShort - case _ => throw new Error("value " + value + " is not a Short") - } - - def charValue: Char = tag match { - case ByteTag => value.asInstanceOf[Byte].toChar - case ShortTag => value.asInstanceOf[Short].toChar - case CharTag => value.asInstanceOf[Char] - case IntTag => value.asInstanceOf[Int].toChar - case LongTag => value.asInstanceOf[Long].toChar - case FloatTag => value.asInstanceOf[Float].toChar - case DoubleTag => value.asInstanceOf[Double].toChar - case _ => throw new Error("value " + value + " is not a Char") - } - - def intValue: Int = tag match { - case ByteTag => value.asInstanceOf[Byte].toInt - case ShortTag => value.asInstanceOf[Short].toInt - case CharTag => value.asInstanceOf[Char].toInt - case IntTag => value.asInstanceOf[Int] - case LongTag => value.asInstanceOf[Long].toInt - case FloatTag => value.asInstanceOf[Float].toInt - case DoubleTag => value.asInstanceOf[Double].toInt - case _ => throw new Error("value " + value + " is not an Int") - } - - def longValue: Long = tag match { - case ByteTag => value.asInstanceOf[Byte].toLong - case ShortTag => value.asInstanceOf[Short].toLong - case CharTag => value.asInstanceOf[Char].toLong - case IntTag => value.asInstanceOf[Int].toLong - case LongTag => value.asInstanceOf[Long] - case FloatTag => value.asInstanceOf[Float].toLong - case DoubleTag => value.asInstanceOf[Double].toLong - case _ => throw new Error("value " + value + " is not a Long") - } - - def floatValue: Float = tag match { - case ByteTag => value.asInstanceOf[Byte].toFloat - case ShortTag => value.asInstanceOf[Short].toFloat - case CharTag => value.asInstanceOf[Char].toFloat - case IntTag => value.asInstanceOf[Int].toFloat - case LongTag => value.asInstanceOf[Long].toFloat - case FloatTag => value.asInstanceOf[Float] - case DoubleTag => value.asInstanceOf[Double].toFloat - case _ => throw new Error("value " + value + " is not a Float") - } - - def doubleValue: Double = tag match { - case ByteTag => value.asInstanceOf[Byte].toDouble - case ShortTag => value.asInstanceOf[Short].toDouble - case CharTag => value.asInstanceOf[Char].toDouble - case IntTag => value.asInstanceOf[Int].toDouble - case LongTag => value.asInstanceOf[Long].toDouble - case FloatTag => value.asInstanceOf[Float].toDouble - case 
DoubleTag => value.asInstanceOf[Double] - case _ => throw new Error("value " + value + " is not a Double") - } - - /** Convert constant value to conform to given type. - */ - def convertTo(pt: Type)(using Context): Constant | Null = { - def classBound(pt: Type): Type = pt.dealias.stripTypeVar match { - case tref: TypeRef if !tref.symbol.isClass && tref.info.exists => - classBound(tref.info.bounds.lo) - case param: TypeParamRef => - ctx.typerState.constraint.entry(param) match { - case TypeBounds(lo, hi) => - if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound - else classBound(lo) - case NoType => classBound(param.binder.paramInfos(param.paramNum).lo) - case inst => classBound(inst) - } - case pt => pt - } - pt match - case ConstantType(value) if value == this => this - case _: SingletonType => null - case _ => - val target = classBound(pt).typeSymbol - if (target == tpe.typeSymbol) - this - else if ((target == defn.ByteClass) && isByteRange) - Constant(byteValue) - else if (target == defn.ShortClass && isShortRange) - Constant(shortValue) - else if (target == defn.CharClass && isCharRange) - Constant(charValue) - else if (target == defn.IntClass && isIntRange) - Constant(intValue) - else if (target == defn.LongClass && isLongRange) - Constant(longValue) - else if (target == defn.FloatClass && isFloatRange) - Constant(floatValue) - else if (target == defn.DoubleClass && isNumeric) - Constant(doubleValue) - else - null - } - - def stringValue: String = value.toString - - def toText(printer: Printer): Text = printer.toText(this) - - def typeValue: Type = value.asInstanceOf[Type] - - /** - * Consider two `NaN`s to be identical, despite non-equality - * Consider -0d to be distinct from 0d, despite equality - * - * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`) - * to avoid treating different encodings of `NaN` as the same constant. - * You probably can't express different `NaN` varieties as compile time - * constants in regular Scala code, but it is conceivable that you could - * conjure them with a macro. - */ - private def equalHashValue: Any = value match { - case f: Float => floatToRawIntBits(f) - case d: Double => doubleToRawLongBits(d) - case v => v - } - - override def hashCode: Int = { - import scala.util.hashing.MurmurHash3._ - val seed = 17 - var h = seed - h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. 
- h = mix(h, equalHashValue.##) - finalizeHash(h, length = 2) - } - - override def toString: String = s"Constant($value)" - def canEqual(x: Any): Boolean = true - def get: Any = value - def isEmpty: Boolean = false - def _1: Any = value - } - - object Constant { - def apply(x: Null): Constant = new Constant(x, NullTag) - def apply(x: Unit): Constant = new Constant(x, UnitTag) - def apply(x: Boolean): Constant = new Constant(x, BooleanTag) - def apply(x: Byte): Constant = new Constant(x, ByteTag) - def apply(x: Short): Constant = new Constant(x, ShortTag) - def apply(x: Int): Constant = new Constant(x, IntTag) - def apply(x: Long): Constant = new Constant(x, LongTag) - def apply(x: Float): Constant = new Constant(x, FloatTag) - def apply(x: Double): Constant = new Constant(x, DoubleTag) - def apply(x: String): Constant = new Constant(x, StringTag) - def apply(x: Char): Constant = new Constant(x, CharTag) - def apply(x: Type): Constant = new Constant(x, ClazzTag) - def apply(value: Any): Constant = - new Constant(value, - value match { - case null => NullTag - case x: Unit => UnitTag - case x: Boolean => BooleanTag - case x: Byte => ByteTag - case x: Short => ShortTag - case x: Int => IntTag - case x: Long => LongTag - case x: Float => FloatTag - case x: Double => DoubleTag - case x: String => StringTag - case x: Char => CharTag - case x: Type => ClazzTag - } - ) - - def unapply(c: Constant): Constant = c - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala deleted file mode 100644 index fb87aed77c41..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala +++ /dev/null @@ -1,214 +0,0 @@ -package dotty.tools -package dotc -package core - -import Types._, Contexts._ -import printing.Showable -import util.{SimpleIdentitySet, SimpleIdentityMap} - -/** Constraint over undetermined type parameters. Constraints are built - * over values of the following types: - * - * - TypeLambda A constraint constrains the type parameters of a set of TypeLambdas - * - TypeParamRef The parameters of the constrained type lambdas - * - TypeVar Every constrained parameter might be associated with a TypeVar - * that has the TypeParamRef as origin. - */ -abstract class Constraint extends Showable { - - type This <: Constraint - - /** Does the constraint's domain contain the type parameters of `tl`? */ - def contains(tl: TypeLambda): Boolean - - /** Does the constraint's domain contain the type parameter `param`? */ - def contains(param: TypeParamRef): Boolean - - /** Does this constraint contain the type variable `tvar` and is it uninstantiated? */ - def contains(tvar: TypeVar): Boolean - - /** The constraint entry for given type parameter `param`, or NoType if `param` is not part of - * the constraint domain. Note: Low level, implementation dependent. - */ - def entry(param: TypeParamRef): Type - - /** The type variable corresponding to parameter `param`, or - * NoType, if `param` is not in constrained or is not paired with a type variable. - */ - def typeVarOfParam(param: TypeParamRef): Type - - /** Is it known that `param1 <:< param2`? */ - def isLess(param1: TypeParamRef, param2: TypeParamRef): Boolean - - /** The parameters that are known to be smaller wrt <: than `param` */ - def lower(param: TypeParamRef): List[TypeParamRef] - - /** The parameters that are known to be greater wrt <: than `param` */ - def upper(param: TypeParamRef): List[TypeParamRef] - - /** The lower dominator set. 
- * - * This is like `lower`, except that each parameter returned is no smaller than every other returned parameter. - */ - def minLower(param: TypeParamRef): List[TypeParamRef] - - /** The upper dominator set. - * - * This is like `upper`, except that each parameter returned is no greater than every other returned parameter. - */ - def minUpper(param: TypeParamRef): List[TypeParamRef] - - /** lower(param) \ lower(butNot) */ - def exclusiveLower(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] - - /** upper(param) \ upper(butNot) */ - def exclusiveUpper(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] - - /** The constraint bounds for given type parameter `param`. - * Poly params that are known to be smaller or greater than `param` - * are not contained in the return bounds. - * @pre `param` is not part of the constraint domain. - */ - def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds - - /** A new constraint which is derived from this constraint by adding - * entries for all type parameters of `poly`. - * @param tvars A list of type variables associated with the params, - * or Nil if the constraint will just be checked for - * satisfiability but will solved to give instances of - * type variables. - */ - def add(poly: TypeLambda, tvars: List[TypeVar])(using Context): This - - /** A new constraint which is derived from this constraint by updating - * the entry for parameter `param` to `tp`. - * `tp` can be one of the following: - * - * - A TypeBounds value, indicating new constraint bounds - * - Another type, indicating a solution for the parameter - * - * @pre `this contains param`. - */ - def updateEntry(param: TypeParamRef, tp: Type)(using Context): This - - /** A constraint that includes the relationship `p1 <: p2`. - * `<:` relationships between parameters ("edges") are propagated, but - * non-parameter bounds are left alone. - * - * @param direction Must be set to `KeepParam1` or `KeepParam2` when - * `p2 <: p1` is already true depending on which parameter - * the caller intends to keep. This will avoid propagating - * bounds that will be redundant after `p1` and `p2` are - * unified. - */ - def addLess(p1: TypeParamRef, p2: TypeParamRef, - direction: UnificationDirection = UnificationDirection.NoUnification)(using Context): This - - /** A new constraint which is derived from this constraint by removing - * the type parameter `param` from the domain and replacing all top-level occurrences - * of the parameter elsewhere in the constraint by type `tp`, or a conservative - * approximation of it if that is needed to avoid cycles. - * Occurrences nested inside a refinement or prefix are not affected. - */ - def replace(param: TypeParamRef, tp: Type)(using Context): This - - /** Is entry associated with `tl` removable? This is the case if - * all type parameters of the entry are associated with type variables - * which have their `inst` fields set. - */ - def isRemovable(tl: TypeLambda): Boolean - - /** A new constraint with all entries coming from `tl` removed. */ - def remove(tl: TypeLambda)(using Context): This - - /** A new constraint with entry `from` replaced with `to` - * Rerences to `from` from within other constraint bounds are updated to `to`. - * Type variables are left alone. - */ - def subst(from: TypeLambda, to: TypeLambda)(using Context): This - - /** Is `tv` marked as hard in the constraint? */ - def isHard(tv: TypeVar): Boolean - - /** The same as this constraint, but with `tv` marked as hard. 
*/ - def withHard(tv: TypeVar)(using Context): This - - /** Gives for each instantiated type var that does not yet have its `inst` field - * set, the instance value stored in the constraint. Storing instances in constraints - * is done only in a temporary way for contexts that may be retracted - * without also retracting the type var as a whole. - */ - def instType(tvar: TypeVar): Type - - /** The given `tl` in case it is not contained in this constraint, - * a fresh copy of `tl` otherwise. - */ - def ensureFresh(tl: TypeLambda)(using Context): TypeLambda - - /** The type lambdas constrained by this constraint */ - def domainLambdas: List[TypeLambda] - - /** The type lambda parameters constrained by this constraint */ - def domainParams: List[TypeParamRef] - - /** Check whether predicate holds for all parameters in constraint */ - def forallParams(p: TypeParamRef => Boolean): Boolean - - /** Perform operation `op` on all typevars that do not have their `inst` field set. */ - def foreachTypeVar(op: TypeVar => Unit): Unit - - /** The uninstantiated typevars of this constraint, which still have a bounds constraint - */ - def uninstVars: collection.Seq[TypeVar] - - /** Whether `tl` is present in both `this` and `that` but is associated with - * different TypeVars there, meaning that the constraints cannot be merged. - */ - def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean - - /** Does `param` occur at the toplevel in `tp` ? - * Toplevel means: the type itself or a factor in some - * combination of `&` or `|` types. - */ - def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean - - /** A string that shows the reverse dependencies maintained by this constraint - * (coDeps and contraDeps for OrderingConstraints). - */ - def depsToString(using Context): String - - /** Does the constraint restricted to variables outside `except` depend on `tv` - * in the given direction `co`? - * @param `co` If true, test whether the constraint would change if the variable is made larger - * otherwise, test whether the constraint would change if the variable is made smaller. - */ - def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean - - /** Depending on Config settings: - * - Under `checkConstraintsNonCyclic`, check that no constrained - * parameter contains itself as a bound. - * - Under `checkConstraintDeps`, check that reverse dependencies in - * constraints are correct and complete. - */ - def checkWellFormed()(using Context): this.type - - /** Check that constraint only refers to TypeParamRefs bound by itself */ - def checkClosed()(using Context): Unit - - /** Check that every typevar in this constraint has as origin a type parameter - * of the type lambda that is associated with the typevar itself. - */ - def checkConsistentVars()(using Context): Unit -} - -/** When calling `Constraint#addLess(p1, p2, ...)`, the caller might end up - * unifying one parameter with the other, this enum lets `addLess` know which - * direction the unification will take. - */ -enum UnificationDirection: - /** Neither p1 nor p2 will be instantiated. */ - case NoUnification - /** `p2 := p1`, p1 left uninstantiated. */ - case KeepParam1 - /** `p1 := p2`, p2 left uninstantiated.
*/ - case KeepParam2 diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala deleted file mode 100644 index 96e965903010..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala +++ /dev/null @@ -1,891 +0,0 @@ -package dotty.tools -package dotc -package core - -import Types._ -import Contexts._ -import Symbols._ -import Decorators._ -import Flags._ -import config.Config -import config.Printers.typr -import typer.ProtoTypes.{newTypeVar, representedParamRef} -import UnificationDirection.* -import NameKinds.AvoidNameKind -import util.SimpleIdentitySet -import NullOpsDecorator.stripNull - -/** Methods for adding constraints and solving them. - * - * What goes into a Constraint as opposed to a ConstraintHandler? - * - * Constraint code is purely functional: Operations get constraints and produce new ones. - * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done - * elsewhere. - * - * By comparison: Constraint handlers are parts of type comparers and can use their functionality. - * Constraint handlers update the current constraint as a side effect. - */ -trait ConstraintHandling { - - def constr: config.Printers.Printer = config.Printers.constr - - protected def isSub(tp1: Type, tp2: Type)(using Context): Boolean - protected def isSame(tp1: Type, tp2: Type)(using Context): Boolean - - protected def constraint: Constraint - protected def constraint_=(c: Constraint): Unit - - private var addConstraintInvocations = 0 - - /** If the constraint is frozen we cannot add new bounds to the constraint. */ - protected var frozenConstraint: Boolean = false - - /** Potentially a type lambda that is still instantiatable, even though the constraint - * is generally frozen. - */ - protected var caseLambda: Type = NoType - - /** If set, align arguments `S1`, `S2` when taking the glb - * `T1 { X = S1 } & T2 { X = S2 }` of a constraint upper bound for some type parameter. - * Aligning means computing `S1 =:= S2` which may change the current constraint. - * See note in TypeComparer#distributeAnd. - */ - protected var homogenizeArgs: Boolean = false - - /** We are currently comparing type lambdas. Used as a flag for - * optimization: when `false`, no need to do an expensive `pruneLambdaParams` - */ - protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty - - /** Used for match type reduction: If false, we don't recognize an abstract type - * to be a subtype of any of its base classes. This is in place only at the - * toplevel; it is turned on again when we add parts of the scrutinee to the constraint. - */ - protected var canWidenAbstract: Boolean = true - - protected var myNecessaryConstraintsOnly = false - /** When collecting the constraints needed for a particular subtyping - * judgment to be true, we sometimes need to approximate the constraint - * set (see `TypeComparer#either` for example). - * - * Normally, this means adding extra constraints which may not be necessary - * for the subtyping judgment to be true, but if this variable is set to true - * we will instead under-approximate and keep only the constraints that must - * always be present for the subtyping judgment to hold.
- * - * This is needed for GADT bounds inference to be sound, but it is also used - * when constraining a method call based on its expected type to avoid adding - * constraints that would later prevent us from typechecking method - * arguments, see or-inf.scala and and-inf.scala for examples. - */ - protected def necessaryConstraintsOnly(using Context): Boolean = - ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly - - /** If `trustBounds = false` we perform comparisons in a pessimistic way as follows: - * Given an abstract type `A >: L <: H`, a subtype comparison of any type - * with `A` will compare against both `L` and `H`. E.g. - * - * T <:< A if T <:< L and T <:< H - * A <:< T if L <:< T and H <:< T - * - * This restricted form makes sure we don't "forget" types when forming - * unions and intersections with abstract types that have bad bounds. E.g. - * the following example from neg/i8900.scala that @smarter came up with: - * We have a type variable X with constraints - * - * X >: 1, X >: x.M - * - * where `x` is a locally nested variable and `x.M` has bad bounds - * - * x.M >: Int | String <: Int & String - * - * If we trust bounds, then the lower bound of `X` is `x.M` since `x.M >: 1`. - * Then even if we correct levels on instantiation to eliminate the local `x`, - * it is alreay too late, we'd get `Int & String` as instance, which does not - * satisfy the original constraint `X >: 1`. - * - * But if `trustBounds` is false, we do not conclude the `x.M >: 1` since - * we compare both bounds and the upper bound `Int & String` is not a supertype - * of `1`. So the lower bound is `1 | x.M` and when we level-avoid that we - * get `1 | Int & String`, which simplifies to `Int`. - */ - private var myTrustBounds = true - - inline def withUntrustedBounds(op: => Type): Type = - val saved = myTrustBounds - myTrustBounds = false - try op finally myTrustBounds = saved - - def trustBounds: Boolean = - !Config.checkLevelsOnInstantiation || myTrustBounds - - def checkReset() = - assert(addConstraintInvocations == 0) - assert(frozenConstraint == false) - assert(caseLambda == NoType) - assert(homogenizeArgs == false) - assert(comparedTypeLambdas == Set.empty) - - def nestingLevel(param: TypeParamRef)(using Context) = constraint.typeVarOfParam(param) match - case tv: TypeVar => tv.nestingLevel - case _ => - // This should only happen when reducing match types (in - // TrackingTypeComparer#matchCases) or in uncommitable TyperStates (as - // asserted in ProtoTypes.constrained) and is special-cased in `levelOK` - // below. - Int.MaxValue - - /** Is `level` <= `maxLevel` or legal in the current context? */ - def levelOK(level: Int, maxLevel: Int)(using Context): Boolean = - level <= maxLevel - || ctx.isAfterTyper || !ctx.typerState.isCommittable // Leaks in these cases shouldn't break soundness - || level == Int.MaxValue // See `nestingLevel` above. - || !Config.checkLevelsOnConstraints - - /** If `param` is nested deeper than `maxLevel`, try to instantiate it to a - * fresh type variable of level `maxLevel` and return the new variable. - * If this isn't possible, throw a TypeError. 
- */ - def atLevel(maxLevel: Int, param: TypeParamRef)(using Context): TypeParamRef = - if levelOK(nestingLevel(param), maxLevel) then - return param - LevelAvoidMap(0, maxLevel)(param) match - case freshVar: TypeVar => freshVar.origin - case _ => throw TypeError( - em"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") - - def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) - - /** The full lower bound of `param` includes both the `nonParamBounds` and the - * params in the constraint known to be `<: param`, except that - * params with a `nestingLevel` higher than `param` will be instantiated - * to a fresh param at a legal level. See the documentation of `TypeVar` - * for details. - */ - def fullLowerBound(param: TypeParamRef)(using Context): Type = - val maxLevel = nestingLevel(param) - var loParams = constraint.minLower(param) - if maxLevel != Int.MaxValue then - loParams = loParams.mapConserve(atLevel(maxLevel, _)) - loParams.foldLeft(nonParamBounds(param).lo)(_ | _) - - /** The full upper bound of `param`, see the documentation of `fullLowerBounds` above. */ - def fullUpperBound(param: TypeParamRef)(using Context): Type = - val maxLevel = nestingLevel(param) - var hiParams = constraint.minUpper(param) - if maxLevel != Int.MaxValue then - hiParams = hiParams.mapConserve(atLevel(maxLevel, _)) - hiParams.foldLeft(nonParamBounds(param).hi)(_ & _) - - /** Full bounds of `param`, including other lower/upper params. - * - * Note that underlying operations perform subtype checks - for this reason, recursing on `fullBounds` - * of some param when comparing types might lead to infinite recursion. Consider `bounds` instead. - */ - def fullBounds(param: TypeParamRef)(using Context): TypeBounds = - nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) - - /** An approximating map that prevents types nested deeper than maxLevel as - * well as WildcardTypes from leaking into the constraint. - */ - class LevelAvoidMap(topLevelVariance: Int, maxLevel: Int)(using Context) extends TypeOps.AvoidMap: - variance = topLevelVariance - - def toAvoid(tp: NamedType): Boolean = - tp.prefix == NoPrefix && !tp.symbol.isStatic && !levelOK(tp.symbol.nestingLevel, maxLevel) - - /** Return a (possibly fresh) type variable of a level no greater than `maxLevel` which is: - * - lower-bounded by `tp` if variance >= 0 - * - upper-bounded by `tp` if variance <= 0 - * If this isn't possible, return the empty range. - */ - def legalVar(tp: TypeVar): Type = - val oldParam = tp.origin - val nameKind = - if variance > 0 then AvoidNameKind.UpperBound - else if variance < 0 then AvoidNameKind.LowerBound - else AvoidNameKind.BothBounds - - /** If it exists, return the first param in the list created in a previous call to `legalVar(tp)` - * with the appropriate level and variance. 
- */ - def findParam(params: List[TypeParamRef]): Option[TypeParamRef] = - params.find(p => - nestingLevel(p) <= maxLevel && representedParamRef(p) == oldParam && - (p.paramName.is(AvoidNameKind.BothBounds) || - variance != 0 && p.paramName.is(nameKind))) - - // First, check if we can reuse an existing parameter, this is more than an optimization - // since it avoids an infinite loop in tests/pos/i8900-cycle.scala - findParam(constraint.lower(oldParam)).orElse(findParam(constraint.upper(oldParam))) match - case Some(param) => - constraint.typeVarOfParam(param) - case _ => - // Otherwise, try to return a fresh type variable at `maxLevel` with - // the appropriate constraints. - val name = nameKind(oldParam.paramName.toTermName).toTypeName - val freshVar = newTypeVar(TypeBounds.upper(tp.topType), name, - nestingLevel = maxLevel, represents = oldParam) - val ok = - if variance < 0 then - addLess(freshVar.origin, oldParam) - else if variance > 0 then - addLess(oldParam, freshVar.origin) - else - unify(freshVar.origin, oldParam) - if ok then freshVar else emptyRange - end legalVar - - override def apply(tp: Type): Type = tp match - case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel, maxLevel) => - legalVar(tp) - // TypeParamRef can occur in tl bounds - case tp: TypeParamRef => - constraint.typeVarOfParam(tp) match - case tvar: TypeVar => - apply(tvar) - case _ => super.apply(tp) - case _ => - super.apply(tp) - - override def mapWild(t: WildcardType) = - if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) - else - val tvar = newTypeVar(apply(t.effectiveBounds).toBounds, nestingLevel = maxLevel) - tvar - end LevelAvoidMap - - /** Approximate `rawBound` if needed to make it a legal bound of `param` by - * avoiding wildcards and types with a level strictly greater than its - * `nestingLevel`. - * - * Note that level-checking must be performed here and cannot be delayed - * until instantiation because if we allow level-incorrect bounds, then we - * might end up reasoning with bad bounds outside of the scope where they are - * defined. This can lead to level-correct but unsound instantiations as - * demonstrated by tests/neg/i8900.scala. - */ - protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = - // Over-approximate for soundness. - var variance = if isUpper then -1 else 1 - // ...unless we can only infer necessary constraints, in which case we - // flip the variance to under-approximate. - if necessaryConstraintsOnly then variance = -variance - - val approx = new LevelAvoidMap(variance, nestingLevel(param)): - override def legalVar(tp: TypeVar): Type = - // `legalVar` will create a type variable whose bounds depend on - // `variance`, but whether the variance is positive or negative, - // we can still infer necessary constraints since just creating a - // type variable doesn't reduce the set of possible solutions. - // Therefore, we can safely "unflip" the variance flipped above. - // This is necessary for i8900-unflip.scala to typecheck. - val v = if necessaryConstraintsOnly then -this.variance else this.variance - atVariance(v)(super.legalVar(tp)) - approx(rawBound) - end legalBound - - protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = - if !constraint.contains(param) then true - else if !isUpper && param.occursIn(rawBound) then - // We don't allow recursive lower bounds when defining a type, - // so we shouldn't allow them as constraints either. 
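(Another aside, outside the quoted diff: `fullLowerBound` and `fullUpperBound` just above both follow the same folding pattern, starting from the non-parameter bound and folding the related parameters in with `|` respectively `&`. The standalone toy below only illustrates the shape of that fold, modelling a type as a plain set of values with union standing in for `|` and intersection for `&`; `FullBoundsToy` and its sample values are invented for the example.)

object FullBoundsToy:
  type Tp = Set[Int] // a "type" modelled as the set of values it admits
  // Full lower bound: the non-parameter lower bound joined (`|`) with every lower parameter.
  def fullLower(nonParamLo: Tp, loParams: List[Tp]): Tp =
    loParams.foldLeft(nonParamLo)(_ union _)
  // Full upper bound: the non-parameter upper bound met (`&`) with every upper parameter.
  def fullUpper(nonParamHi: Tp, hiParams: List[Tp]): Tp =
    hiParams.foldLeft(nonParamHi)(_ intersect _)
  def main(args: Array[String]): Unit =
    println(fullLower(Set(1), List(Set(2), Set(3))))             // Set(1, 2, 3)
    println(fullUpper(Set(1, 2, 3), List(Set(2, 3), Set(3, 4)))) // Set(3)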
- false - else - val bound = legalBound(param, rawBound, isUpper) - val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) - val equalBounds = (if isUpper then lo else hi) eq bound - if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then - // The narrowed bounds are equal and not recursive, - // so we can remove `param` from the constraint. - constraint = constraint.replace(param, bound) - true - else - // Narrow one of the bounds of type parameter `param` - // If `isUpper` is true, ensure that `param <: `bound`, otherwise ensure - // that `param >: bound`. - val narrowedBounds = - val saved = homogenizeArgs - homogenizeArgs = Config.alignArgsInAnd - try - withUntrustedBounds( - if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) - else oldBounds.derivedTypeBounds(lo | bound, hi)) - finally - homogenizeArgs = saved - //println(i"narrow bounds for $param from $oldBounds to $narrowedBounds") - val c1 = constraint.updateEntry(param, narrowedBounds) - (c1 eq constraint) - || { - constraint = c1 - val TypeBounds(lo, hi) = constraint.entry(param): @unchecked - isSub(lo, hi) - } - end addOneBound - - protected def addBoundTransitively(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = - - /** Adjust the bound `tp` in the following ways: - * - * 1. Toplevel occurrences of TypeRefs that are instantiated in the current - * constraint are also dereferenced. - * 2. Toplevel occurrences of ExprTypes lead to a `NoType` return, which - * causes the addOneBound operation to fail. - * - * An occurrence is toplevel if it is the bound itself, or a term in some - * combination of `&` or `|` types. - */ - def adjust(tp: Type): Type = tp match - case tp: AndOrType => - val p1 = adjust(tp.tp1) - val p2 = adjust(tp.tp2) - if p1.exists && p2.exists then tp.derivedAndOrType(p1, p2) else NoType - case tp: TypeVar if constraint.contains(tp.origin) => - adjust(tp.underlying) - case tp: ExprType => - // ExprTypes are not value types, so type parameters should not - // be instantiated to ExprTypes. A scenario where such an attempted - // instantiation can happen is if we unify (=> T) => () with A => () - // where A is a TypeParamRef. See the comment on EtaExpansion.etaExpand - // why types such as (=> T) => () can be constructed and i7969.scala - // as a test where this happens. - // Note that scalac by contrast allows such instantiations. But letting - // type variables be ExprTypes has its own problems (e.g. you can't write - // the resulting types down) and is largely unknown terrain. - NoType - case _ => - tp - - def description = i"constraint $param ${if isUpper then "<:" else ":>"} $rawBound to\n$constraint" - constr.println(i"adding $description$location") - if isUpper && rawBound.isRef(defn.NothingClass) && ctx.typerState.isGlobalCommittable then - def msg = i"!!! 
instantiated to Nothing: $param, constraint = $constraint" - if Config.failOnInstantiationToNothing - then assert(false, msg) - else report.log(msg) - def others = if isUpper then constraint.lower(param) else constraint.upper(param) - val bound = adjust(rawBound) - bound.exists - && addOneBound(param, bound, isUpper) && others.forall(addOneBound(_, bound, isUpper)) - .showing(i"added $description = $result$location", constr) - end addBoundTransitively - - protected def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { - def description = i"ordering $p1 <: $p2 to\n$constraint" - val res = - if (constraint.isLess(p2, p1)) unify(p2, p1) - else { - val down1 = p1 :: constraint.exclusiveLower(p1, p2) - val up2 = p2 :: constraint.exclusiveUpper(p2, p1) - val lo1 = constraint.nonParamBounds(p1).lo - val hi2 = constraint.nonParamBounds(p2).hi - constr.println(i"adding $description down1 = $down1, up2 = $up2$location") - constraint = constraint.addLess(p1, p2) - down1.forall(addOneBound(_, hi2, isUpper = true)) && - up2.forall(addOneBound(_, lo1, isUpper = false)) - } - constr.println(i"added $description = $res$location") - res - } - - def location(using Context) = "" // i"in ${ctx.typerState.stateChainStr}" // use for debugging - - /** Unify p1 with p2: one parameter will be kept in the constraint, the - * other will be removed and its bounds transferred to the remaining one. - * - * If p1 and p2 have different `nestingLevel`, the parameter with the lowest - * level will be kept and the transferred bounds from the other parameter - * will be adjusted for level-correctness. - */ - private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { - constr.println(s"unifying $p1 $p2") - if !constraint.isLess(p1, p2) then - constraint = constraint.addLess(p1, p2) - - val level1 = nestingLevel(p1) - val level2 = nestingLevel(p2) - val pKept = if level1 <= level2 then p1 else p2 - val pRemoved = if level1 <= level2 then p2 else p1 - - val down = constraint.exclusiveLower(p2, p1) - val up = constraint.exclusiveUpper(p1, p2) - - constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) - - val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) - var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) - - if level1 != level2 then - boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) - val TypeBounds(lo, hi) = boundRemoved: @unchecked - // After avoidance, the interval might be empty, e.g. 
in - // tests/pos/i8900-promote.scala: - // >: x.type <: Singleton - // becomes: - // >: Int <: Singleton - // In that case, we can still get a legal constraint - // by replacing the lower-bound to get: - // >: Int & Singleton <: Singleton - if !isSub(lo, hi) then - boundRemoved = TypeBounds(lo & hi, hi) - - val newBounds = (boundKept & boundRemoved).bounds - constraint = constraint.updateEntry(pKept, newBounds).replace(pRemoved, pKept) - - val lo = newBounds.lo - val hi = newBounds.hi - isSub(lo, hi) && - down.forall(addOneBound(_, hi, isUpper = true)) && - up.forall(addOneBound(_, lo, isUpper = false)) - } - - protected def isSubType(tp1: Type, tp2: Type, whenFrozen: Boolean)(using Context): Boolean = - if (whenFrozen) - isSubTypeWhenFrozen(tp1, tp2) - else - isSub(tp1, tp2) - - inline final def inFrozenConstraint[T](op: => T): T = { - val savedFrozen = frozenConstraint - val savedLambda = caseLambda - frozenConstraint = true - caseLambda = NoType - try op - finally { - frozenConstraint = savedFrozen - caseLambda = savedLambda - } - } - - final def isSubTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSub(tp1, tp2)) - final def isSameTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSame(tp1, tp2)) - - /** Test whether the lower bounds of all parameters in this - * constraint are a solution to the constraint. - */ - protected final def isSatisfiable(using Context): Boolean = - constraint.forallParams { param => - val TypeBounds(lo, hi) = constraint.entry(param): @unchecked - isSub(lo, hi) || { - report.log(i"sub fail $lo <:< $hi") - false - } - } - - /** Fix instance type `tp` by avoidance so that it does not contain references - * to types at level > `maxLevel`. - * @param tp the type to be fixed - * @param fromBelow whether type was obtained from lower bound - * @param maxLevel the maximum level of references allowed - * @param param the parameter that was instantiated - */ - private def fixLevels(tp: Type, fromBelow: Boolean, maxLevel: Int, param: TypeParamRef)(using Context) = - - def needsFix(tp: NamedType)(using Context) = - (tp.prefix eq NoPrefix) && tp.symbol.nestingLevel > maxLevel - - /** An accumulator that determines whether levels need to be fixed - * and computes on the side sets of nested type variables that need - * to be instantiated. - */ - def needsLeveling = new TypeAccumulator[Boolean]: - if !fromBelow then variance = -1 - - def apply(need: Boolean, tp: Type) = - need || tp.match - case tp: NamedType => - needsFix(tp) - || !stopBecauseStaticOrLocal(tp) && apply(need, tp.prefix) - case tp: TypeVar => - val inst = tp.instanceOpt - if inst.exists then apply(need, inst) - else if tp.nestingLevel > maxLevel then - // Change the nesting level of inner type variable to `maxLevel`. - // This means that the type variable will be instantiated later to a - // less nested type. If there are other references to the same type variable - // that do not come from the type undergoing `fixLevels`, this could lead - // to coarser types than intended. An alternative is to instantiate the - // type variable right away, but this also loses information. See - // i15934.scala for a test where the current strategey works but an early instantiation - // of `tp` would fail. 
- constr.println(i"widening nesting level of type variable $tp from ${tp.nestingLevel} to $maxLevel") - ctx.typerState.setNestingLevel(tp, maxLevel) - true - else false - case _ => - foldOver(need, tp) - end needsLeveling - - def levelAvoid = new TypeOps.AvoidMap: - if !fromBelow then variance = -1 - def toAvoid(tp: NamedType) = needsFix(tp) - - if Config.checkLevelsOnInstantiation && !ctx.isAfterTyper && needsLeveling(false, tp) then - typr.println(i"instance $tp for $param needs leveling to $maxLevel") - levelAvoid(tp) - else tp - end fixLevels - - /** Solve constraint set for given type parameter `param`. - * If `fromBelow` is true the parameter is approximated by its lower bound, - * otherwise it is approximated by its upper bound, unless the upper bound - * contains a reference to the parameter itself (such occurrences can arise - * for F-bounded types, `addOneBound` ensures that they never occur in the - * lower bound). - * The solved type is not allowed to contain references to types nested deeper - * than `maxLevel`. - * Wildcard types in bounds are approximated by their upper or lower bounds. - * The constraint is left unchanged. - * @return the instantiating type - * @pre `param` is in the constraint's domain. - */ - final def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int)(using Context): Type = - constraint.entry(param) match - case entry: TypeBounds => - val useLowerBound = fromBelow || param.occursIn(entry.hi) - val rawInst = withUntrustedBounds( - if useLowerBound then fullLowerBound(param) else fullUpperBound(param)) - val levelInst = fixLevels(rawInst, fromBelow, maxLevel, param) - if levelInst ne rawInst then - typr.println(i"level avoid for $maxLevel: $rawInst --> $levelInst") - typr.println(i"approx $param, from below = $fromBelow, inst = $levelInst") - levelInst - case inst => - assert(inst.exists, i"param = $param\nconstraint = $constraint") - inst - end approximation - - private def isTransparent(tp: Type, traitOnly: Boolean)(using Context): Boolean = tp match - case AndType(tp1, tp2) => - isTransparent(tp1, traitOnly) && isTransparent(tp2, traitOnly) - case _ => - val cls = tp.underlyingClassRef(refinementOK = false).typeSymbol - cls.isTransparentClass && (!traitOnly || cls.is(Trait)) - - /** If `tp` is an intersection such that some operands are transparent trait instances - * and others are not, replace as many transparent trait instances as possible with Any - * as long as the result is still a subtype of `bound`. But fall back to the - * original type if the resulting widened type is a supertype of all dropped - * types (since in this case the type was not a true intersection of transparent traits - * and other types to start with). 
- */ - def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = - var kept: Set[Type] = Set() // types to keep since otherwise bound would not fit - var dropped: List[Type] = List() // the types dropped so far, last one on top - - def dropOneTransparentTrait(tp: Type): Type = - if isTransparent(tp, traitOnly = true) && !kept.contains(tp) then - dropped = tp :: dropped - defn.AnyType - else tp match - case AndType(tp1, tp2) => - val tp1w = dropOneTransparentTrait(tp1) - if tp1w ne tp1 then tp1w & tp2 - else - val tp2w = dropOneTransparentTrait(tp2) - if tp2w ne tp2 then tp1 & tp2w - else tp - case _ => - tp - - def recur(tp: Type): Type = - val tpw = dropOneTransparentTrait(tp) - if tpw eq tp then tp - else if tpw <:< bound then recur(tpw) - else - kept += dropped.head - dropped = dropped.tail - recur(tp) - - val saved = ctx.typerState.snapshot() - val tpw = recur(tp) - if (tpw eq tp) || dropped.forall(_ frozen_<:< tpw) then - // Rollback any constraint change that would lead to `tp` no longer - // being a valid solution. - ctx.typerState.resetTo(saved) - tp - else - tpw - end dropTransparentTraits - - /** If `tp` is an applied match type alias which is also an unreducible application - * of a higher-kinded type to a wildcard argument, widen to the match type's bound, - * in order to avoid an unreducible application of higher-kinded type ... in inferred type" - * error in PostTyper. Fixes #11246. - */ - def widenIrreducible(tp: Type)(using Context): Type = tp match - case tp @ AppliedType(tycon, _) if tycon.isLambdaSub && tp.hasWildcardArg => - tp.superType match - case MatchType(bound, _, _) => bound - case _ => tp - case _ => - tp - - /** Widen inferred type `inst` with upper `bound`, according to the following rules: - * 1. If `inst` is a singleton type, or a union containing some singleton types, - * widen (all) the singleton type(s), provided the result is a subtype of `bound` - * (i.e. `inst.widenSingletons <:< bound` succeeds with satisfiable constraint) and - * is not transparent according to `isTransparent`. - * 2a. If `inst` is a union type and `widenUnions` is true, approximate the union type - * from above by an intersection of all common base types, provided the result - * is a subtype of `bound`. - * 2b. If `inst` is a union type and `widenUnions` is false, turn it into a hard - * union type (except for unions | Null, which are kept in the state they were). - * 3. Widen some irreducible applications of higher-kinded types to wildcard arguments - * (see @widenIrreducible). - * 4. Drop transparent traits from intersections (see @dropTransparentTraits). - * - * Don't do these widenings if `bound` is a subtype of `scala.Singleton`. - * Also, if the result of these widenings is a TypeRef to a module class, - * and this type ref is different from `inst`, replace by a TermRef to - * its source module instead. - * - * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, - * as those could leak the annotation to users (see run/inferred-repeated-result). 
- */ - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = - def widenOr(tp: Type) = - if widenUnions then - val tpw = tp.widenUnion - if (tpw ne tp) && !isTransparent(tpw, traitOnly = false) && (tpw <:< bound) then tpw else tp - else tp.hardenUnions - - def widenSingle(tp: Type) = - val tpw = tp.widenSingletons - if (tpw ne tp) && (tpw <:< bound) then tpw else tp - - def isSingleton(tp: Type): Boolean = tp match - case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) - case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) - - val wideInst = - if isSingleton(bound) then inst - else - val widenedFromSingle = widenSingle(inst) - val widenedFromUnion = widenOr(widenedFromSingle) - val widened = dropTransparentTraits(widenedFromUnion, bound) - widenIrreducible(widened) - - wideInst match - case wideInst: TypeRef if wideInst.symbol.is(Module) => - TermRef(wideInst.prefix, wideInst.symbol.sourceModule) - case _ => - wideInst.dropRepeatedAnnot - end widenInferred - - /** Convert all toplevel union types in `tp` to hard unions */ - extension (tp: Type) private def hardenUnions(using Context): Type = tp.widen match - case tp: AndType => - tp.derivedAndType(tp.tp1.hardenUnions, tp.tp2.hardenUnions) - case tp: RefinedType => - tp.derivedRefinedType(tp.parent.hardenUnions, tp.refinedName, tp.refinedInfo) - case tp: RecType => - tp.rebind(tp.parent.hardenUnions) - case tp: HKTypeLambda => - tp.derivedLambdaType(resType = tp.resType.hardenUnions) - case tp: OrType => - val tp1 = tp.stripNull - if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType) - else tp.derivedOrType(tp.tp1.hardenUnions, tp.tp2.hardenUnions, soft = false) - case _ => - tp - - /** The instance type of `param` in the current constraint (which contains `param`). - * If `fromBelow` is true, the instance type is the lub of the parameter's - * lower bounds; otherwise it is the glb of its upper bounds. However, - * a lower bound instantiation can be a singleton type only if the upper bound - * is also a singleton type. - * The instance type is not allowed to contain references to types nested deeper - * than `maxLevel`. - */ - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { - val approx = approximation(param, fromBelow, maxLevel).simplified - if fromBelow then - val widened = widenInferred(approx, param, widenUnions) - // Widening can add extra constraints, in particular the widened type might - // be a type variable which is now instantiated to `param`, and therefore - // cannot be used as an instantiation of `param` without creating a loop. - // If that happens, we run `instanceType` again to find a new instantation. - // (we do not check for non-toplevel occurrences: those should never occur - // since `addOneBound` disallows recursive lower bounds). - if constraint.occursAtToplevel(param, widened) then - instanceType(param, fromBelow, widenUnions, maxLevel) - else - widened - else - approx - } - - /** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have - * for all poly params `p` defined in `c2` as `p >: L2 <: U2`: - * - * c1 defines p with bounds p >: L1 <: U1, and - * L2 <: L1, and - * U1 <: U2 - * - * Both `c1` and `c2` are required to derive from constraint `pre`, without adding - * any new type variables but possibly narrowing already registered ones with further bounds. 
- */ - protected final def subsumes(c1: Constraint, c2: Constraint, pre: Constraint)(using Context): Boolean = - if (c2 eq pre) true - else if (c1 eq pre) false - else { - val saved = constraint - try - // We iterate over params of `pre`, instead of `c2` as the documentation may suggest. - // As neither `c1` nor `c2` can have more params than `pre`, this only matters in one edge case. - // Constraint#forallParams only iterates over params that can be directly constrained. - // If `c2` has, compared to `pre`, instantiated a param and we iterated over params of `c2`, - // we could miss that param being instantiated to an incompatible type in `c1`. - pre.forallParams(p => - c1.entry(p).exists - && c2.upper(p).forall(c1.isLess(p, _)) - && isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)) - ) - finally constraint = saved - } - - /** The current bounds of type parameter `param` */ - def bounds(param: TypeParamRef)(using Context): TypeBounds = { - val e = constraint.entry(param) - if (e.exists) e.bounds - else { - // TODO: should we change the type of paramInfos to nullable? - val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos - if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala - else TypeBounds.empty - } - } - - /** Add type lambda `tl`, possibly with type variables `tvars`, to current constraint - * and propagate all bounds. - * @param tvars See Constraint#add - */ - def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = - checkPropagated(i"initialized $tl") { - constraint = constraint.add(tl, tvars) - tl.paramRefs.forall { param => - val lower = constraint.lower(param) - val upper = constraint.upper(param) - constraint.entry(param) match { - case bounds: TypeBounds => - if lower.nonEmpty && !bounds.lo.isRef(defn.NothingClass) - || upper.nonEmpty && !bounds.hi.isAny - then constr.println(i"INIT*** $tl") - lower.forall(addOneBound(_, bounds.hi, isUpper = true)) && - upper.forall(addOneBound(_, bounds.lo, isUpper = false)) - case x => - // Happens if param was already solved while processing earlier params of the same TypeLambda. - // See #4720. - true - } - } - } - - /** Can `param` be constrained with new bounds? */ - final def canConstrain(param: TypeParamRef): Boolean = - (!frozenConstraint || (caseLambda `eq` param.binder)) && constraint.contains(param) - - /** Is `param` assumed to be a sub- and super-type of any other type? - * This holds if `TypeVarsMissContext` is set unless `param` is a part - * of a MatchType that is currently normalized. - */ - final def assumedTrue(param: TypeParamRef)(using Context): Boolean = - ctx.mode.is(Mode.TypevarsMissContext) && (caseLambda `ne` param.binder) - - /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. - * `bound` is assumed to be in normalized form, as specified in `firstTry` and - * `secondTry` of `TypeComparer`. In particular, it should not be an alias type, - * lazy ref, typevar, wildcard type, error type. In addition, upper bounds may - * not be AndTypes and lower bounds may not be OrTypes. This is assured by the - * way isSubType is organized. - */ - protected def addConstraint(param: TypeParamRef, bound: Type, fromBelow: Boolean)(using Context): Boolean = - if !bound.isValueTypeOrLambda then return false - - /** When comparing lambdas we might get constraints such as - * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter - * and `X0` is a lambda parameter. 
The constraint for `A` is not allowed - * to refer to such a lambda parameter because the lambda parameter is - * not visible where `A` is defined. Consequently, we need to - * approximate the bound so that the lambda parameter does not appear in it. - * If `tp` is an upper bound, we need to approximate with something smaller, - * otherwise something larger. - * Test case in pos/i94-nada.scala. This test crashes with an illegal instance - * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is - * missing. - */ - def avoidLambdaParams(tp: Type) = - if comparedTypeLambdas.nonEmpty then - val approx = new ApproximatingTypeMap { - if (!fromBelow) variance = -1 - def apply(t: Type): Type = t match { - case t @ TypeParamRef(tl: TypeLambda, n) if comparedTypeLambdas contains tl => - val bounds = tl.paramInfos(n) - range(bounds.lo, bounds.hi) - case tl: TypeLambda => - val saved = comparedTypeLambdas - comparedTypeLambdas -= tl - try mapOver(tl) - finally comparedTypeLambdas = saved - case _ => - mapOver(t) - } - } - approx(tp) - else tp - - def addParamBound(bound: TypeParamRef) = - constraint.entry(param) match { - case _: TypeBounds => - if (fromBelow) addLess(bound, param) else addLess(param, bound) - case tp => - if (fromBelow) isSub(bound, tp) else isSub(tp, bound) - } - - def kindCompatible(tp1: Type, tp2: Type): Boolean = - val tparams1 = tp1.typeParams - val tparams2 = tp2.typeParams - tparams1.corresponds(tparams2)((p1, p2) => kindCompatible(p1.paramInfo, p2.paramInfo)) - && (tparams1.isEmpty || kindCompatible(tp1.hkResult, tp2.hkResult)) - || tp1.hasAnyKind - || tp2.hasAnyKind - - def description = i"constr $param ${if (fromBelow) ">:" else "<:"} $bound:\n$constraint" - - //checkPropagated(s"adding $description")(true) // DEBUG in case following fails - checkPropagated(s"added $description") { - addConstraintInvocations += 1 - val saved = canWidenAbstract - canWidenAbstract = true - try bound match - case bound: TypeParamRef if constraint contains bound => - addParamBound(bound) - case _ => - val pbound = avoidLambdaParams(bound) - kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) - finally - canWidenAbstract = saved - addConstraintInvocations -= 1 - } - end addConstraint - - /** Check that constraint is fully propagated. 
See comment in Config.checkConstraintsPropagated */ - def checkPropagated(msg: => String)(result: Boolean)(using Context): Boolean = { - if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) - inFrozenConstraint { - for (p <- constraint.domainParams) { - def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = - assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") - for (u <- constraint.upper(p)) - check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") - for (l <- constraint.lower(p)) { - check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") - check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") - } - } - } - result - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala deleted file mode 100644 index d2b1246a8149..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala +++ /dev/null @@ -1,23 +0,0 @@ -package dotty.tools.dotc -package core - -import Contexts._ -import config.Printers.{default, typr} - -trait ConstraintRunInfo { self: Run => - private var maxSize = 0 - private var maxConstraint: Constraint | Null = _ - def recordConstraintSize(c: Constraint, size: Int): Unit = - if (size > maxSize) { - maxSize = size - maxConstraint = c - } - def printMaxConstraint()(using Context): Unit = - if maxSize > 0 then - val printer = if ctx.settings.YdetailedStats.value then default else typr - printer.println(s"max constraint size: $maxSize") - try printer.println(s"max constraint = ${maxConstraint.nn.show}") - catch case ex: StackOverflowError => printer.println("max constraint cannot be printed due to stack overflow") - - protected def reset(): Unit = maxConstraint = null -} diff --git a/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala b/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala deleted file mode 100644 index 20687dc1663a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala +++ /dev/null @@ -1,115 +0,0 @@ -package dotty.tools.dotc -package core - -import Contexts._, Symbols._, Types._, Flags._ -import Denotations._, SymDenotations._ -import Names.Name, StdNames.nme -import ast.untpd -import caps.unsafe.unsafeBoxFunArg - -/** Extension methods for contexts where we want to keep the ctx. syntax */ -object ContextOps: - - extension (ctx: Context) - - /** Enter symbol into current class, if current class is owner of current context, - * or into current scope, if not. Should always be called instead of scope.enter - * in order to make sure that updates to class members are reflected in - * finger prints. 
- */ - def enter(sym: Symbol): Symbol = inContext(ctx) { - ctx.owner match - case cls: ClassSymbol => cls.classDenot.enter(sym) - case _ => ctx.scope.openForMutations.enter(sym) - sym - } - - /** The denotation with the given `name` and all `required` flags in current context - */ - def denotNamed(name: Name, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = - inContext(ctx) { - if (ctx.owner.isClass) - if (ctx.outer.owner == ctx.owner) { // inner class scope; check whether we are referring to self - if (ctx.scope.size == 1) { - val elem = ctx.scope.lastEntry.nn - if (elem.name == name) return elem.sym.denot // return self - } - val pre = ctx.owner.thisType - if ctx.isJava then javaFindMember(name, pre, required, excluded) - else pre.findMember(name, pre, required, excluded) - } - else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext. - ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) - else - ctx.scope.denotsNamed(name).filterWithFlags(required, excluded).toDenot(NoPrefix) - } - - final def javaFindMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = - assert(ctx.isJava) - inContext(ctx) { - - val preSym = pre.typeSymbol - - // 1. Try to search in current type and parents. - val directSearch = pre.findMember(name, pre, required, excluded) - - // 2. Try to search in companion class if current is an object. - def searchCompanionClass = if preSym.is(Flags.Module) then - preSym.companionClass.thisType.findMember(name, pre, required, excluded) - else NoDenotation - - // 3. Try to search in companion objects of super classes. - // In Java code, static inner classes, which we model as members of the companion object, - // can be referenced from an ident in a subclass or by a selection prefixed by the subclass. - def searchSuperCompanionObjects = - val toSearch = if preSym.is(Flags.Module) then - if preSym.companionClass.exists then - preSym.companionClass.asClass.baseClasses - else Nil - else - preSym.asClass.baseClasses - - toSearch.iterator.map { bc => - val pre1 = bc.companionModule.namedType - pre1.findMember(name, pre1, required, excluded) - }.find(_.exists).getOrElse(NoDenotation) - - if preSym.isClass then - directSearch orElse searchCompanionClass orElse searchSuperCompanionObjects - else - directSearch - } - - /** A fresh local context with given tree and owner. - * Owner might not exist (can happen for self valdefs), in which case - * no owner is set in result context - */ - def localContext(tree: untpd.Tree, owner: Symbol): FreshContext = inContext(ctx) { - val freshCtx = ctx.fresh.setTree(tree) - if owner.exists then freshCtx.setOwner(owner) else freshCtx - } - - /** Context where `sym` is defined, assuming we are in a nested context. 
*/ - def defContext(sym: Symbol): Context = inContext(ctx) { - ctx.outersIterator - .dropWhile(((ctx: Context) => ctx.owner != sym).unsafeBoxFunArg) - .dropWhile(((ctx: Context) => ctx.owner == sym).unsafeBoxFunArg) - .next() - } - - /** A new context for the interior of a class */ - def inClassContext(selfInfo: TypeOrSymbol): Context = - inline def op(using Context): Context = - val localCtx: Context = ctx.fresh.setNewScope - selfInfo match { - case sym: Symbol if sym.exists && sym.name != nme.WILDCARD => localCtx.scope.openForMutations.enter(sym) - case _ => - } - localCtx - op(using ctx) - - def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx) { - if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree) - else ctx - } -end ContextOps diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala deleted file mode 100644 index a2389a28e941..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala +++ /dev/null @@ -1,1041 +0,0 @@ -package dotty.tools -package dotc -package core - -import interfaces.CompilerCallback -import Decorators._ -import Periods._ -import Names._ -import Phases._ -import Types._ -import Symbols._ -import Scopes._ -import Uniques._ -import ast.Trees._ -import ast.untpd -import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} -import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} -import inlines.Inliner -import Nullables._ -import Implicits.ContextualImplicits -import config.Settings._ -import config.Config -import reporting._ -import io.{AbstractFile, NoAbstractFile, PlainFile, Path} -import scala.io.Codec -import collection.mutable -import printing._ -import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} -import classfile.ReusableDataReader -import StdNames.nme -import compiletime.uninitialized - -import annotation.internal.sharable -import annotation.retains - -import DenotTransformers.DenotTransformer -import dotty.tools.dotc.profile.Profiler -import util.Property.Key -import util.Store -import xsbti.AnalysisCallback -import plugins._ -import java.util.concurrent.atomic.AtomicInteger -import java.nio.file.InvalidPathException -import language.experimental.pureFunctions - -object Contexts { - - //@sharable var nextId = 0 - - private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() - private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() - private val (printerFnLoc, store3) = store2.newLocation[DetachedContext -> Printer](new RefinedPrinter(_)) - private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() - private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() - private val (runLoc, store6) = store5.newLocation[Run | Null]() - private val (profilerLoc, store7) = store6.newLocation[Profiler]() - private val (notNullInfosLoc, store8) = store7.newLocation[List[NotNullInfo]]() - private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() - private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) - - private val initialStore = store10 - - /** The current context */ - inline def ctx(using ctx: Context): Context = ctx - - /** Run `op` with given context */ - inline def inContext[T](c: Context)(inline op: Context ?-> T): T = - op(using c) - - /** Execute `op` at given period */ - inline def atPeriod[T](pd: Period)(inline op: Context ?-> 
T)(using Context): T = - op(using ctx.fresh.setPeriod(pd)) - - /** Execute `op` at given phase id */ - inline def atPhase[T](pid: PhaseId)(inline op: Context ?-> T)(using Context): T = - op(using ctx.withPhase(pid)) - - /** Execute `op` at given phase */ - inline def atPhase[T](phase: Phase)(inline op: Context ?-> T)(using Context): T = - op(using ctx.withPhase(phase)) - - inline def atNextPhase[T](inline op: Context ?-> T)(using Context): T = - atPhase(ctx.phase.next)(op) - - /** Execute `op` at the current phase if it's before the first transform phase, - * otherwise at the last phase before the first transform phase. - * - * Note: this should be used instead of `atPhaseNoLater(ctx.picklerPhase)` - * because the later won't work if the `Pickler` phase is not present (for example, - * when using `QuoteCompiler`). - */ - inline def atPhaseBeforeTransforms[T](inline op: Context ?-> T)(using Context): T = - atPhaseNoLater(firstTransformPhase.prev)(op) - - inline def atPhaseNoLater[T](limit: Phase)(inline op: Context ?-> T)(using Context): T = - op(using if !limit.exists || ctx.phase <= limit then ctx else ctx.withPhase(limit)) - - inline def atPhaseNoEarlier[T](limit: Phase)(inline op: Context ?-> T)(using Context): T = - op(using if !limit.exists || limit <= ctx.phase then ctx else ctx.withPhase(limit)) - - inline def inMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = - op(using if mode != ctx.mode then ctx.fresh.setMode(mode) else ctx) - - inline def withMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = - inMode(ctx.mode | mode)(op) - - inline def withoutMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = - inMode(ctx.mode &~ mode)(op) - - inline def inDetachedContext[T](inline op: DetachedContext ?-> T)(using ctx: Context): T = - op(using ctx.detach) - - type Context = ContextCls @retains(caps.cap) - - /** A context is passed basically everywhere in dotc. - * This is convenient but carries the risk of captured contexts in - * objects that turn into space leaks. To combat this risk, here are some - * conventions to follow: - * - * - Never let an implicit context be an argument of a class whose instances - * live longer than the context. - * - Classes that need contexts for their initialization take an explicit parameter - * named `initctx`. They pass initctx to all positions where it is needed - * (and these positions should all be part of the intialization sequence of the class). - * - Classes that need contexts that survive initialization are instead passed - * a "condensed context", typically named `cctx` (or they create one). Condensed contexts - * just add some basic information to the context base without the - * risk of capturing complete trees. - * - To make sure these rules are kept, it would be good to do a sanity - * check using bytecode inspection with javap or scalap: Keep track - * of all class fields of type context; allow them only in whitelisted - * classes (which should be short-lived). - */ - abstract class ContextCls(val base: ContextBase) { - - //val id = nextId - //nextId += 1 - //assert(id != 35599) - - protected given Context = this - - def outer: ContextCls @retains(this) - def period: Period - def mode: Mode - def owner: Symbol - def tree: Tree[?] 
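(A final aside, outside the quoted diff: the inline helpers defined in the `Contexts` object above, such as `withMode` and `atNextPhase`, are meant to wrap a context-function argument and run it in an adjusted context. The fragment below is only a hypothetical usage sketch against the regular dotc API, not part of this patch; `describe` and its parameter are invented, and it assumes `Mode.Printing` and the `show` method of `Showable` from the main compiler sources.)

import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.core.Mode
import dotty.tools.dotc.core.Types.Type

// Render a type at the current phase and at the next one, with the Printing
// mode bit set while the two strings are produced.
def describe(tp: Type)(using Context): Unit =
  withMode(Mode.Printing) {
    println(s"current phase: ${tp.show}")
    atNextPhase(println(s"next phase: ${tp.show}"))
  }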
- def scope: Scope - def typerState: TyperState - def gadt: GadtConstraint - def searchHistory: SearchHistory - def source: SourceFile - - /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */ - def outersIterator: Iterator[ContextCls @retains(this)] - - /** A map in which more contextual properties can be stored - * Typically used for attributes that are read and written only in special situations. - */ - def moreProperties: Map[Key[Any], Any] - - def property[T](key: Key[T]): Option[T] = - moreProperties.get(key).asInstanceOf[Option[T]] - - /** A store that can be used by sub-components. - * Typically used for attributes that are defined only once per compilation unit. - * Access to store entries is much faster than access to properties, and only - * slightly slower than a normal field access would be. - */ - def store: Store - - /** The compiler callback implementation, or null if no callback will be called. */ - def compilerCallback: CompilerCallback = store(compilerCallbackLoc) - - /** The sbt callback implementation if we are run from sbt, null otherwise */ - def sbtCallback: AnalysisCallback = store(sbtCallbackLoc) - - /** The current plain printer */ - def printerFn: DetachedContext -> Printer = store(printerFnLoc) - - /** A function creating a printer */ - def printer: Printer = - val pr = printerFn(detach) - if this.settings.YplainPrinter.value then pr.plain else pr - - /** The current settings values */ - def settingsState: SettingsState = store(settingsStateLoc) - - /** The current compilation unit */ - def compilationUnit: CompilationUnit = store(compilationUnitLoc) - - /** The current compiler-run */ - def run: Run | Null = store(runLoc) - - /** The current compiler-run profiler */ - def profiler: Profiler = store(profilerLoc) - - /** The paths currently known to be not null */ - def notNullInfos: List[NotNullInfo] = store(notNullInfosLoc) - - /** The currently active import info */ - def importInfo: ImportInfo | Null = store(importInfoLoc) - - /** The current type assigner or typer */ - def typeAssigner: TypeAssigner = store(typeAssignerLoc) - - /** The new implicit references that are introduced by this scope */ - private var implicitsCache: ContextualImplicits | Null = null - def implicits: ContextualImplicits = { - if (implicitsCache == null) - implicitsCache = { - val implicitRefs: List[ImplicitRef] = - if (isClassDefContext) - try owner.thisType.implicitMembers - catch { - case ex: CyclicReference => Nil - } - else if (isImportContext) importInfo.nn.importedImplicits - else if (isNonEmptyScopeContext) scope.implicitDecls - else Nil - val outerImplicits = - if (isImportContext && importInfo.nn.unimported.exists) - outer.implicits exclude importInfo.nn.unimported - else - outer.implicits - if (implicitRefs.isEmpty) outerImplicits - else new ContextualImplicits(implicitRefs, outerImplicits, isImportContext)(detach) - } - implicitsCache.nn - } - - /** Either the current scope, or, if the current context owner is a class, - * the declarations of the current class. 
- */ - def effectiveScope(using Context): Scope = - val myOwner: Symbol | Null = owner - if myOwner != null && myOwner.isClass then myOwner.asClass.unforcedDecls - else scope - - def nestingLevel: Int = effectiveScope.nestingLevel - - /** Sourcefile corresponding to given abstract file, memoized */ - def getSource(file: AbstractFile, codec: -> Codec = Codec(settings.encoding.value)) = { - util.Stats.record("Context.getSource") - base.sources.getOrElseUpdate(file, SourceFile(file, codec)) - } - - /** SourceFile with given path name, memoized */ - def getSource(path: TermName): SourceFile = getFile(path) match - case NoAbstractFile => NoSource - case file => getSource(file) - - /** SourceFile with given path, memoized */ - def getSource(path: String): SourceFile = getSource(path.toTermName) - - /** AbstractFile with given path name, memoized */ - def getFile(name: TermName): AbstractFile = base.files.get(name) match - case Some(file) => - file - case None => - try - val file = new PlainFile(Path(name.toString)) - base.files(name) = file - file - catch - case ex: InvalidPathException => - report.error(em"invalid file path: ${ex.getMessage}") - NoAbstractFile - - /** AbstractFile with given path, memoized */ - def getFile(name: String): AbstractFile = getFile(name.toTermName) - - final def withPhase(phase: Phase): Context = ctx.fresh.setPhase(phase.id) - final def withPhase(pid: PhaseId): Context = ctx.fresh.setPhase(pid) - - private var related: SimpleIdentityMap[SourceFile, DetachedContext] | Null = null - - private def lookup(key: SourceFile): DetachedContext | Null = - util.Stats.record("Context.related.lookup") - if related == null then - related = SimpleIdentityMap.empty - null - else - related.nn(key) - - final def withSource(source: SourceFile): Context = - util.Stats.record("Context.withSource") - if this.source eq source then - this - else - var ctx1 = lookup(source) - if ctx1 == null then - util.Stats.record("Context.withSource.new") - val ctx2 = fresh.setSource(source) - if ctx2.compilationUnit eq NoCompilationUnit then - // `source` might correspond to a file not necessarily - // in the current project (e.g. when inlining library code), - // so set `mustExist` to false. - ctx2.setCompilationUnit(CompilationUnit(source, mustExist = false)) - val dctx = ctx2.detach - ctx1 = dctx - related = related.nn.updated(source, dctx) - ctx1 - - // `creationTrace`-related code. To enable, uncomment the code below and the - // call to `setCreationTrace()` in this file. - /* - /** If -Ydebug is on, the top of the stack trace where this context - * was created, otherwise `null`. - */ - private var creationTrace: Array[StackTraceElement] = uninitialized - - private def setCreationTrace() = - creationTrace = (new Throwable).getStackTrace().take(20) - - /** Print all enclosing context's creation stacktraces */ - def printCreationTraces() = { - println("=== context creation trace =======") - for (ctx <- outersIterator) { - println(s">>>>>>>>> $ctx") - if (ctx.creationTrace != null) println(ctx.creationTrace.mkString("\n")) - } - println("=== end context creation trace ===") - } - */ - - /** The current reporter */ - def reporter: Reporter = typerState.reporter - - final def phase: Phase = base.phases(period.firstPhaseId) - final def runId = period.runId - final def phaseId = period.phaseId - - final def lastPhaseId = base.phases.length - 1 - - /** Does current phase use an erased types interpretation? */ - final def erasedTypes = phase.erasedTypes - - /** Are we in a Java compilation unit? 
*/ - final def isJava: Boolean = compilationUnit.isJava - - /** Is current phase after TyperPhase? */ - final def isAfterTyper = base.isAfterTyper(phase) - final def isTyper = base.isTyper(phase) - - /** Is this a context for the members of a class definition? */ - def isClassDefContext: Boolean = - owner.isClass && (owner ne outer.owner) - - /** Is this a context that introduces an import clause? */ - def isImportContext: Boolean = - (this ne NoContext) - && (outer ne NoContext) - && (this.importInfo nen outer.importInfo) - - /** Is this a context that introduces a non-empty scope? */ - def isNonEmptyScopeContext: Boolean = - (this.scope ne outer.scope) && !this.scope.isEmpty - - /** Is this a context for typechecking an inlined body? */ - def isInlineContext: Boolean = - typer.isInstanceOf[Inliner#InlineTyper] - - /** The next outer context whose tree is a template or package definition - * Note: Currently unused - def enclTemplate: Context = { - var c = this - while (c != NoContext && !c.tree.isInstanceOf[Template[?]] && !c.tree.isInstanceOf[PackageDef[?]]) - c = c.outer - c - }*/ - - /** The context for a supercall. This context is used for elaborating - * the parents of a class and their arguments. - * The context is computed from the current class context. It has - * - * - as owner: The primary constructor of the class - * - as outer context: The context enclosing the class context - * - as scope: The parameter accessors in the class context - * - * The reasons for this peculiar choice of attributes are as follows: - * - * - The constructor must be the owner, because that's where any local methods or closures - * should go. - * - The context may not see any class members (inherited or defined), and should - * instead see definitions defined in the outer context which might be shadowed by - * such class members. That's why the outer context must be the outer context of the class. - * - At the same time the context should see the parameter accessors of the current class, - * that's why they get added to the local scope. An alternative would have been to have the - * context see the constructor parameters instead, but then we'd need a final substitution step - * from constructor parameters to class parameter accessors. - */ - def superCallContext: Context = { - val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors: _*) - superOrThisCallContext(owner.primaryConstructor, locals) - } - - /** The context for the arguments of a this(...) constructor call. - * The context is computed from the local auxiliary constructor context. - * It has - * - * - as owner: The auxiliary constructor - * - as outer context: The context enclosing the enclosing class context - * - as scope: The parameters of the auxiliary constructor. - */ - def thisCallArgContext: Context = { - val constrCtx = detach.outersIterator.dropWhile(_.outer.owner == owner).next() - superOrThisCallContext(owner, constrCtx.scope) - .setTyperState(typerState) - .setGadt(gadt) - .fresh - .setScope(this.scope) - } - - /** The super- or this-call context with given owner and locals. 
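A hypothetical user-level program illustrating the scoping rules motivated above: in the parent constructor call, the constructor parameter `x` (a parameter accessor) and outer definitions are visible, while members of the class being defined or inherited are not yet in scope:

class Base(val n: Int)
val outerVal = 1
class C(x: Int) extends Base(x + outerVal):   // sees `x` and `outerVal`, not members of C or Base
  def twice: Int = n * 2                      // inside the body, inherited member `n` is visible as usual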
*/ - private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = { - var classCtx = detach.outersIterator.dropWhile(!_.isClassDefContext).next() - classCtx.outer.fresh.setOwner(owner) - .setScope(locals) - .setMode(classCtx.mode) - } - - /** The context of expression `expr` seen as a member of a statement sequence */ - def exprContext(stat: Tree[?], exprOwner: Symbol): Context = - if (exprOwner == this.owner) this - else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext - else fresh.setOwner(exprOwner) - - /** A new context that summarizes an import statement */ - def importContext(imp: Import[?], sym: Symbol): FreshContext = - fresh.setImportInfo(ImportInfo(sym, imp.selectors, imp.expr)) - - /** Is the debug option set? */ - def debug: Boolean = base.settings.Ydebug.value - - /** Is the verbose option set? */ - def verbose: Boolean = base.settings.verbose.value - - /** Should use colors when printing? */ - def useColors: Boolean = - base.settings.color.value == "always" - - /** Is the explicit nulls option set? */ - def explicitNulls: Boolean = base.settings.YexplicitNulls.value - - /** A fresh clone of this context embedded in this context. */ - def fresh: FreshContext = freshOver(this) - - /** A fresh clone of this context embedded in the specified `outer` context. */ - def freshOver(outer: Context): FreshContext = - util.Stats.record("Context.fresh") - FreshContext(base).init(outer, this).setTyperState(this.typerState) - - final def withOwner(owner: Symbol): Context = - if (owner ne this.owner) fresh.setOwner(owner) else this - - final def withTyperState(typerState: TyperState): Context = - if typerState ne this.typerState then fresh.setTyperState(typerState) else this - - final def withUncommittedTyperState: Context = - withTyperState(typerState.uncommittedAncestor) - - final def withProperty[T](key: Key[T], value: Option[T]): Context = - if (property(key) == value) this - else value match { - case Some(v) => fresh.setProperty(key, v) - case None => fresh.dropProperty(key) - } - - def typer: Typer = this.typeAssigner match { - case typer: Typer => typer - case _ => new Typer - } - - override def toString: String = - //if true then - // outersIterator.map { ctx => - // i"${ctx.id} / ${ctx.owner} / ${ctx.moreProperties.valuesIterator.map(_.getClass).toList.mkString(", ")}" - // }.mkString("\n") - //else - def iinfo(using Context) = - val info = ctx.importInfo - if (info == null) "" else i"${info.selectors}%, %" - def cinfo(using Context) = - val core = s" owner = ${ctx.owner}, scope = ${ctx.scope}, import = $iinfo" - if (ctx ne NoContext) && (ctx.implicits ne ctx.outer.implicits) then - s"$core, implicits = ${ctx.implicits}" - else - core - s"""Context( - |${outersIterator.map(ctx => cinfo(using ctx)).mkString("\n\n")})""".stripMargin - - def settings: ScalaSettings = base.settings - def definitions: Definitions = base.definitions - def platform: Platform = base.platform - def pendingUnderlying: util.HashSet[Type] = base.pendingUnderlying - def uniqueNamedTypes: Uniques.NamedTypeUniques = base.uniqueNamedTypes - def uniques: util.WeakHashSet[Type] = base.uniques - - def initialize()(using Context): Unit = base.initialize() - - protected def resetCaches(): Unit = - implicitsCache = null - related = null - - /** Reuse this context as a fresh context nested inside `outer` */ - def reuseIn(outer: Context): this.type - - def detach: DetachedContext - } - - object detached: - opaque type DetachedContext <: ContextCls = ContextCls - inline def 
apply(c: ContextCls): DetachedContext = c - - type DetachedContext = detached.DetachedContext - - /** A condensed context provides only a small memory footprint over - * a Context base, and therefore can be stored without problems in - * long-lived objects. - abstract class CondensedContext extends Context { - override def condensed = this - } - */ - - /** A fresh context allows selective modification - * of its attributes using the with... methods. - */ - class FreshContext(base: ContextBase) extends ContextCls(base) { thiscontext => - - private var _outer: DetachedContext = uninitialized - def outer: DetachedContext = _outer - - def outersIterator: Iterator[ContextCls] = new Iterator[ContextCls] { - var current: ContextCls = thiscontext - def hasNext = current != NoContext - def next = { val c = current; current = current.outer; c } - } - - private var _period: Period = uninitialized - final def period: Period = _period - - private var _mode: Mode = uninitialized - final def mode: Mode = _mode - - private var _owner: Symbol = uninitialized - final def owner: Symbol = _owner - - private var _tree: Tree[?]= _ - final def tree: Tree[?] = _tree - - private var _scope: Scope = uninitialized - final def scope: Scope = _scope - - private var _typerState: TyperState = uninitialized - final def typerState: TyperState = _typerState - - private var _gadt: GadtConstraint = uninitialized - final def gadt: GadtConstraint = _gadt - - private var _searchHistory: SearchHistory = uninitialized - final def searchHistory: SearchHistory = _searchHistory - - private var _source: SourceFile = uninitialized - final def source: SourceFile = _source - - private var _moreProperties: Map[Key[Any], Any] = uninitialized - final def moreProperties: Map[Key[Any], Any] = _moreProperties - - private var _store: Store = uninitialized - final def store: Store = _store - - /** Initialize all context fields, except typerState, which has to be set separately - * @param outer The outer context - * @param origin The context from which fields are copied - */ - private[Contexts] def init(outer: Context, origin: Context): this.type = { - _outer = outer.asInstanceOf[DetachedContext] - _period = origin.period - _mode = origin.mode - _owner = origin.owner - _tree = origin.tree - _scope = origin.scope - _gadt = origin.gadt - _searchHistory = origin.searchHistory - _source = origin.source - _moreProperties = origin.moreProperties - _store = origin.store - this - } - - def reuseIn(outer: Context): this.type = - resetCaches() - init(outer, outer) - - def detach: DetachedContext = detached(this) - - def setPeriod(period: Period): this.type = - util.Stats.record("Context.setPeriod") - assert(period.firstPhaseId == period.lastPhaseId, period) - this._period = period - this - - def setMode(mode: Mode): this.type = - util.Stats.record("Context.setMode") - this._mode = mode - this - - def setOwner(owner: Symbol): this.type = - util.Stats.record("Context.setOwner") - assert(owner != NoSymbol) - this._owner = owner - this - - def setTree(tree: Tree[?]): this.type = - util.Stats.record("Context.setTree") - this._tree = tree - this - - def setScope(scope: Scope): this.type = - this._scope = scope - this - - def setNewScope: this.type = - util.Stats.record("Context.setScope") - this._scope = newScope - this - - def setTyperState(typerState: TyperState): this.type = - this._typerState = typerState - this - def setNewTyperState(): this.type = - setTyperState(typerState.fresh(committable = true)) - def setExploreTyperState(): this.type = - 
setTyperState(typerState.fresh(committable = false)) - def setReporter(reporter: Reporter): this.type = - setTyperState(typerState.fresh().setReporter(reporter)) - - def setTyper(typer: Typer): this.type = - this._scope = typer.scope - setTypeAssigner(typer) - - def setGadt(gadt: GadtConstraint): this.type = - util.Stats.record("Context.setGadt") - this._gadt = gadt - this - def setFreshGADTBounds: this.type = - setGadt(gadt.fresh) - - def setSearchHistory(searchHistory: SearchHistory): this.type = - util.Stats.record("Context.setSearchHistory") - this._searchHistory = searchHistory - this - - def setSource(source: SourceFile): this.type = - util.Stats.record("Context.setSource") - this._source = source - this - - private def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = - util.Stats.record("Context.setMoreProperties") - this._moreProperties = moreProperties - this - - private def setStore(store: Store): this.type = - util.Stats.record("Context.setStore") - this._store = store - this - - def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { - setSource(compilationUnit.source) - updateStore(compilationUnitLoc, compilationUnit) - } - - def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) - def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) - def setPrinterFn(printer: DetachedContext -> Printer): this.type = updateStore(printerFnLoc, printer) - def setSettings(settingsState: SettingsState): this.type = updateStore(settingsStateLoc, settingsState) - def setRun(run: Run | Null): this.type = updateStore(runLoc, run) - def setProfiler(profiler: Profiler): this.type = updateStore(profilerLoc, profiler) - def setNotNullInfos(notNullInfos: List[NotNullInfo]): this.type = updateStore(notNullInfosLoc, notNullInfos) - def setImportInfo(importInfo: ImportInfo): this.type = - importInfo.mentionsFeature(nme.unsafeNulls) match - case Some(true) => - setMode(this.mode &~ Mode.SafeNulls) - case Some(false) if ctx.settings.YexplicitNulls.value => - setMode(this.mode | Mode.SafeNulls) - case _ => - updateStore(importInfoLoc, importInfo) - def setTypeAssigner(typeAssigner: TypeAssigner): this.type = updateStore(typeAssignerLoc, typeAssigner) - - def setProperty[T](key: Key[T], value: T): this.type = - setMoreProperties(moreProperties.updated(key, value)) - - def dropProperty(key: Key[?]): this.type = - setMoreProperties(moreProperties - key) - - def addLocation[T](initial: T): Store.Location[T] = { - val (loc, store1) = store.newLocation(initial) - setStore(store1) - loc - } - - def addLocation[T](): Store.Location[T] = { - val (loc, store1) = store.newLocation[T]() - setStore(store1) - loc - } - - def updateStore[T](loc: Store.Location[T], value: T): this.type = - setStore(store.updated(loc, value)) - - def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid)) - def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end)) - - def setSetting[T](setting: Setting[T], value: T): this.type = - setSettings(setting.updateIn(settingsState, value)) - - def setDebug: this.type = setSetting(base.settings.Ydebug, true) - } - - object FreshContext: - /** Defines an initial context with given context base and possible settings. 
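Because every setter returns `this.type`, fresh contexts are configured builder-style. A small sketch, assuming a Context is in scope and `sym` is some Symbol; `Mode.Type` comes from the wider codebase:

val nested: FreshContext = ctx.fresh
  .setOwner(sym)                   // new owner for definitions created under this context
  .setNewScope                     // and a fresh scope
  .setMode(ctx.mode | Mode.Type)   // plus an extra mode bit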
*/ - def initial(base: ContextBase, settingsGroup: SettingGroup): Context = - val c = new FreshContext(base) - c._outer = NoContext - c._period = InitialPeriod - c._mode = Mode.None - c._typerState = TyperState.initialState() - c._owner = NoSymbol - c._tree = untpd.EmptyTree - c._moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) - c._scope = EmptyScope - c._source = NoSource - c._store = initialStore - .updated(settingsStateLoc, settingsGroup.defaultState) - .updated(notNullInfosLoc, Nil) - .updated(compilationUnitLoc, NoCompilationUnit) - c._searchHistory = new SearchRoot - c._gadt = GadtConstraint.empty - c - end FreshContext - - given detachedCtx(using c: Context): DetachedContext = c.detach - - given ops: AnyRef with - extension (c: Context) - def addNotNullInfo(info: NotNullInfo): Context = - c.withNotNullInfos(c.notNullInfos.extendWith(info)) - - def addNotNullRefs(refs: Set[TermRef]): Context = - c.addNotNullInfo(NotNullInfo(refs, Set())) - - def withNotNullInfos(infos: List[NotNullInfo]): Context = - if c.notNullInfos eq infos then c else c.fresh.setNotNullInfos(infos) - - def relaxedOverrideContext: Context = - c.withModeBits(c.mode &~ Mode.SafeNulls | Mode.RelaxedOverriding) - end ops - - // TODO: Fix issue when converting ModeChanges and FreshModeChanges to extension givens - extension (c: Context) { - final def withModeBits(mode: Mode): Context = - if (mode != c.mode) c.fresh.setMode(mode) else c - - final def addMode(mode: Mode): Context = withModeBits(c.mode | mode) - final def retractMode(mode: Mode): Context = withModeBits(c.mode &~ mode) - } - - extension (c: FreshContext) { - final def addMode(mode: Mode): c.type = c.setMode(c.mode | mode) - final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode) - } - - private def exploreCtx(using Context): FreshContext = - util.Stats.record("explore") - val base = ctx.base - import base._ - val nestedCtx = - if exploresInUse < exploreContexts.size then - exploreContexts(exploresInUse).reuseIn(ctx) - else - val ts = TyperState() - .setReporter(ExploringReporter()) - .setCommittable(false) - val c = FreshContext(ctx.base).init(ctx, ctx).setTyperState(ts) - exploreContexts += c - c - exploresInUse += 1 - val nestedTS = nestedCtx.typerState - nestedTS.init(ctx.typerState, ctx.typerState.constraint) - nestedCtx - - private def wrapUpExplore(ectx: Context) = - ectx.reporter.asInstanceOf[ExploringReporter].reset() - ectx.base.exploresInUse -= 1 - - inline def explore[T](inline op: Context ?=> T)(using Context): T = - val ectx = exploreCtx - try op(using ectx) finally wrapUpExplore(ectx) - - inline def exploreInFreshCtx[T](inline op: FreshContext ?=> T)(using Context): T = - val ectx = exploreCtx - try op(using ectx) finally wrapUpExplore(ectx) - - private def changeOwnerCtx(owner: Symbol)(using Context): Context = - val base = ctx.base - import base._ - val nestedCtx = - if changeOwnersInUse < changeOwnerContexts.size then - changeOwnerContexts(changeOwnersInUse).reuseIn(ctx) - else - val c = FreshContext(ctx.base).init(ctx, ctx) - changeOwnerContexts += c - c - changeOwnersInUse += 1 - nestedCtx.setOwner(owner).setTyperState(ctx.typerState) - - /** Run `op` in current context, with a mode is temporarily set as specified. 
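A rough usage sketch of `explore`: run a speculative computation in a pooled typer state whose constraints and diagnostics are discarded afterwards (`Type` and `<:<` come from the wider codebase):

def conformsSpeculatively(tp1: Type, tp2: Type)(using Context): Boolean =
  explore(tp1 <:< tp2)   // any constraints added by the subtype check are thrown away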
- */ - inline def runWithOwner[T](owner: Symbol)(inline op: Context ?=> T)(using Context): T = - if Config.reuseOwnerContexts then - try op(using changeOwnerCtx(owner)) - finally ctx.base.changeOwnersInUse -= 1 - else - op(using ctx.fresh.setOwner(owner)) - - /** The type comparer of the kind created by `maker` to be used. - * This is the currently active type comparer CMP if - * - CMP is associated with the current context, and - * - CMP is of the kind created by maker or maker creates a plain type comparer. - * Note: plain TypeComparers always take on the kind of the outer comparer if they are in the same context. - * In other words: tracking or explaining is a sticky property in the same context. - */ - private def comparer(using Context): TypeComparer = - util.Stats.record("comparing") - val base = ctx.base - if base.comparersInUse > 0 - && (base.comparers(base.comparersInUse - 1).comparerContext eq ctx) - then - base.comparers(base.comparersInUse - 1).currentInstance - else - val result = - if base.comparersInUse < base.comparers.size then - base.comparers(base.comparersInUse) - else - val result = TypeComparer(ctx) - base.comparers += result - result - base.comparersInUse += 1 - result.init(ctx) - result - - inline def comparing[T](inline op: TypeComparer => T)(using Context): T = - util.Stats.record("comparing") - val saved = ctx.base.comparersInUse - try op(comparer) - finally ctx.base.comparersInUse = saved - end comparing - - @sharable val NoContext: DetachedContext = detached( - new FreshContext((null: ContextBase | Null).uncheckedNN) { - override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null, false)(detached(this: @unchecked)) - setSource(NoSource) - } - ) - - /** A context base defines state and associated methods that exist once per - * compiler run. - */ - class ContextBase extends ContextState - with Phases.PhasesBase - with Plugins { - - /** The applicable settings */ - val settings: ScalaSettings = new ScalaSettings - - /** The initial context */ - val initialCtx: Context = FreshContext.initial(this: @unchecked, settings) - - /** The platform, initialized by `initPlatform()`. */ - private var _platform: Platform | Null = uninitialized - - /** The platform */ - def platform: Platform = { - val p = _platform - if p == null then - throw new IllegalStateException( - "initialize() must be called before accessing platform") - p - } - - protected def newPlatform(using Context): Platform = - if (settings.scalajs.value) new SJSPlatform - else new JavaPlatform - - /** The loader that loads the members of _root_ */ - def rootLoader(root: TermSymbol)(using Context): SymbolLoader = platform.rootLoader(root) - - /** The standard definitions */ - val definitions: Definitions = new Definitions - - // Set up some phases to get started */ - usePhases(List(SomePhase)) - - /** Initializes the `ContextBase` with a starting context. - * This initializes the `platform` and the `definitions`. 
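A minimal bootstrap sketch, under the assumption that settings processing and `initialize()` (which sets up the platform and definitions, as documented above) happen elsewhere:

val base = new ContextBase          // one per compiler run
val rootCtx: Context = base.initialCtx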
- */ - def initialize()(using Context): Unit = { - _platform = newPlatform - definitions.init() - } - - def fusedContaining(p: Phase): Phase = - allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) - } - - /** The essential mutable state of a context base, collected into a common class */ - class ContextState { - // Symbols state - - /** Counter for unique symbol ids */ - private var _nextSymId: Int = 0 - def nextSymId: Int = { _nextSymId += 1; _nextSymId } - - /** Sources and Files that were loaded */ - val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() - val files: util.HashMap[TermName, AbstractFile] = util.HashMap() - - // Types state - /** A table for hash consing unique types */ - private[core] val uniques: Uniques = Uniques() - - /** A table for hash consing unique applied types */ - private[dotc] val uniqueAppliedTypes: AppliedUniques = AppliedUniques() - - /** A table for hash consing unique named types */ - private[core] val uniqueNamedTypes: NamedTypeUniques = NamedTypeUniques() - - var emptyTypeBounds: TypeBounds | Null = null - var emptyWildcardBounds: WildcardType | Null = null - - /** Number of findMember calls on stack */ - private[core] var findMemberCount: Int = 0 - - /** List of names which have a findMemberCall on stack, - * after Config.LogPendingFindMemberThreshold is reached. - */ - private[core] var pendingMemberSearches: List[Name] = Nil - - /** The number of recursive invocation of underlying on a NamedType - * during a controlled operation. - */ - private[core] var underlyingRecursions: Int = 0 - - /** The set of named types on which a currently active invocation - * of underlying during a controlled operation exists. */ - private[core] val pendingUnderlying: util.HashSet[Type] = util.HashSet[Type]() - - /** A map from ErrorType to associated message. We use this map - * instead of storing messages directly in ErrorTypes in order - * to avoid space leaks - the message usually captures a context. - */ - private[core] val errorTypeMsg: mutable.Map[Types.ErrorType, Message] = mutable.Map() - - // Phases state - - private[core] var phasesPlan: List[List[Phase]] = uninitialized - - /** Phases by id */ - private[dotc] var phases: Array[Phase] = uninitialized - - /** Phases with consecutive Transforms grouped into a single phase, Empty array if fusion is disabled */ - private[core] var fusedPhases: Array[Phase] = Array.empty[Phase] - - /** Next denotation transformer id */ - private[core] var nextDenotTransformerId: Array[Int] = uninitialized - - private[core] var denotTransformers: Array[DenotTransformer] = uninitialized - - /** Flag to suppress inlining, set after overflow */ - private[dotc] var stopInlining: Boolean = false - - /** A variable that records that some error was reported in a globally committable context. - * The error will not necessarlily be emitted, since it could still be that - * the enclosing context will be aborted. The variable is used as a smoke test - * to turn off assertions that might be wrong if the program is erroneous. To - * just test for `ctx.reporter.errorsReported` is not always enough, since it - * could be that the context in which the assertion is tested is a completer context - * that's different from the context where the error was reported. See i13218.scala - * for a test. 
- */ - private[dotc] var errorsToBeReported = false - - // Reporters state - private[dotc] var indent: Int = 0 - - protected[dotc] val indentTab: String = " " - - private[Contexts] val exploreContexts = new mutable.ArrayBuffer[FreshContext] - private[Contexts] var exploresInUse: Int = 0 - - private[Contexts] val changeOwnerContexts = new mutable.ArrayBuffer[FreshContext] - private[Contexts] var changeOwnersInUse: Int = 0 - - private[Contexts] val comparers = new mutable.ArrayBuffer[TypeComparer] - private[Contexts] var comparersInUse: Int = 0 - - private var charArray = new Array[Char](256) - - private[core] val reusableDataReader = ReusableInstance(new ReusableDataReader()) - - private[dotc] var wConfCache: (List[String], WConf) = uninitialized - - def sharedCharArray(len: Int): Array[Char] = - while len > charArray.length do - charArray = new Array[Char](charArray.length * 2) - charArray - - def reset(): Unit = - uniques.clear() - uniqueAppliedTypes.clear() - uniqueNamedTypes.clear() - emptyTypeBounds = null - emptyWildcardBounds = null - errorsToBeReported = false - errorTypeMsg.clear() - sources.clear() - files.clear() - comparers.clear() // forces re-evaluation of top and bottom classes in TypeComparer - - // Test that access is single threaded - - /** The thread on which `checkSingleThreaded was invoked last */ - @sharable private var thread: Thread | Null = null - - /** Check that we are on the same thread as before */ - def checkSingleThreaded(): Unit = - if (thread == null) thread = Thread.currentThread() - else assert(thread == Thread.currentThread(), "illegal multithreaded access to ContextBase") - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala deleted file mode 100644 index f9844c6eaab6..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala +++ /dev/null @@ -1,322 +0,0 @@ -package dotty.tools -package dotc -package core - -import scala.annotation.tailrec -import scala.collection.mutable.ListBuffer -import scala.util.control.NonFatal - -import Contexts._, Names._, Phases._, Symbols._ -import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ -import transform.MegaPhase -import reporting.{Message, NoExplanation} -import language.experimental.pureFunctions -import annotation.retains - -/** This object provides useful extension methods for types defined elsewhere */ -object Decorators { - - /** Extension methods for toType/TermName methods on PreNames. - */ - extension (pn: PreName) - def toTermName: TermName = pn match - case s: String => termName(s) - case n: Name => n.toTermName - def toTypeName: TypeName = pn match - case s: String => typeName(s) - case n: Name => n.toTypeName - - extension (s: String) - def splitWhere(f: Char => Boolean, doDropIndex: Boolean): Option[(String, String)] = - def splitAt(idx: Int, doDropIndex: Boolean): Option[(String, String)] = - if (idx == -1) None - else Some((s.take(idx), s.drop(if (doDropIndex) idx + 1 else idx))) - splitAt(s.indexWhere(f), doDropIndex) - - /** Create a term name from a string slice, using a common buffer. 
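Assumed behaviour of the `splitWhere` extension defined above, shown as top-level vals; the expected results in the comments follow directly from the definition:

val a = "key=value".splitWhere(_ == '=', doDropIndex = true)    // Some(("key", "value"))
val b = "key=value".splitWhere(_ == '=', doDropIndex = false)   // Some(("key", "=value"))
val c = "keyvalue".splitWhere(_ == '=', doDropIndex = true)     // None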
- * This avoids some allocation relative to `termName(s)` - */ - def sliceToTermName(start: Int, end: Int)(using Context): SimpleName = - val len = end - start - val chars = ctx.base.sharedCharArray(len) - s.getChars(start, end, chars, 0) - termName(chars, 0, len) - - def sliceToTypeName(start: Int, end: Int)(using Context): TypeName = - sliceToTermName(start, end).toTypeName - - def concat(name: Name)(using Context): SimpleName = name match - case name: SimpleName => - val len = s.length + name.length - var chars = ctx.base.sharedCharArray(len) - s.getChars(0, s.length, chars, 0) - if name.length != 0 then name.getChars(0, name.length, chars, s.length) - termName(chars, 0, len) - case name: TypeName => s.concat(name.toTermName) - case _ => termName(s.concat(name.toString).nn) - - def indented(width: Int): String = - val padding = " " * width - padding + s.replace("\n", "\n" + padding) - end extension - - /** Convert lazy string to message. To be with caution, since no message-defined - * formatting will be done on the string. - */ - extension (str: -> String) - def toMessage: Message = NoExplanation(str)(using NoContext) - - /** Implements a findSymbol method on iterators of Symbols that - * works like find but avoids Option, replacing None with NoSymbol. - */ - extension (it: Iterator[Symbol]) - final def findSymbol(p: Symbol => Boolean): Symbol = { - while (it.hasNext) { - val sym = it.next() - if (p(sym)) return sym - } - NoSymbol - } - - inline val MaxFilterRecursions = 10 - - /** Implements filterConserve, zipWithConserve methods - * on lists that avoid duplication of list nodes where feasible. - */ - extension [T](xs: List[T]) - final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.cap)): List[U] = - xs.collect(pf.asInstanceOf) - - final def mapconserve[U](f: T => U): List[U] = { - @tailrec - def loop(mapped: ListBuffer[U] | Null, unchanged: List[U], pending: List[T]): List[U] = - if (pending.isEmpty) - if (mapped == null) unchanged - else mapped.prependToList(unchanged) - else { - val head0 = pending.head - val head1 = f(head0) - - if (head1.asInstanceOf[AnyRef] eq head0.asInstanceOf[AnyRef]) - loop(mapped, unchanged, pending.tail) - else { - val b = if (mapped == null) new ListBuffer[U] else mapped - var xc = unchanged - while (xc ne pending) { - b += xc.head - xc = xc.tail - } - b += head1 - val tail0 = pending.tail - loop(b, tail0.asInstanceOf[List[U]], tail0) - } - } - loop(null, xs.asInstanceOf[List[U]], xs) - } - - /** Like `xs filter p` but returns list `xs` itself - instead of a copy - - * if `p` is true for all elements. 
- */ - def filterConserve(p: T => Boolean): List[T] = - - def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = - if from eq until then buf else addAll(buf += from.head, from.tail, until) - - def loopWithBuffer(buf: ListBuffer[T], xs: List[T]): List[T] = xs match - case x :: xs1 => - if p(x) then buf += x - loopWithBuffer(buf, xs1) - case nil => buf.toList - - def loop(keep: List[T], explore: List[T], keepCount: Int, recCount: Int): List[T] = - explore match - case x :: rest => - if p(x) then - loop(keep, rest, keepCount + 1, recCount) - else if keepCount <= 3 && recCount <= MaxFilterRecursions then - val rest1 = loop(rest, rest, 0, recCount + 1) - keepCount match - case 0 => rest1 - case 1 => keep.head :: rest1 - case 2 => keep.head :: keep.tail.head :: rest1 - case 3 => val tl = keep.tail; keep.head :: tl.head :: tl.tail.head :: rest1 - else - loopWithBuffer(addAll(new ListBuffer[T], keep, explore), rest) - case nil => - keep - - loop(xs, xs, 0, 0) - end filterConserve - - /** Like `xs.lazyZip(ys).map(f)`, but returns list `xs` itself - * - instead of a copy - if function `f` maps all elements of - * `xs` to themselves. Also, it is required that `ys` is at least - * as long as `xs`. - */ - def zipWithConserve[U, V <: T](ys: List[U])(f: (T, U) => V): List[V] = - if (xs.isEmpty || ys.isEmpty) Nil - else { - val x1 = f(xs.head, ys.head) - val xs1 = xs.tail.zipWithConserve(ys.tail)(f) - if (x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) && (xs1 eq xs.tail) - then xs.asInstanceOf[List[V]] - else x1 :: xs1 - } - - /** Like `xs.lazyZip(xs.indices).map(f)`, but returns list `xs` itself - * - instead of a copy - if function `f` maps all elements of - * `xs` to themselves. - */ - def mapWithIndexConserve[U <: T](f: (T, Int) => U): List[U] = - - @tailrec - def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = - if from eq until then buf else addAll(buf += from.head, from.tail, until) - - @tailrec - def loopWithBuffer(buf: ListBuffer[U], explore: List[T], idx: Int): List[U] = explore match - case Nil => buf.toList - case t :: rest => loopWithBuffer(buf += f(t, idx), rest, idx + 1) - - @tailrec - def loop(keep: List[T], explore: List[T], idx: Int): List[U] = explore match - case Nil => keep.asInstanceOf[List[U]] - case t :: rest => - val u = f(t, idx) - if u.asInstanceOf[AnyRef] eq t.asInstanceOf[AnyRef] then - loop(keep, rest, idx + 1) - else - val buf = addAll(new ListBuffer[T], keep, explore).asInstanceOf[ListBuffer[U]] - loopWithBuffer(buf += u, rest, idx + 1) - - loop(xs, xs, 0) - end mapWithIndexConserve - - /** True if two lists have the same length. Since calling length on linear sequences - * is Θ(n), it is an inadvisable way to test length equality. This method is Θ(n min m). 
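An illustration of the node-sharing guarantee documented for the *conserve methods, assuming these extensions are in scope:

val xs = List(1, 2, 3)
assert(xs.mapconserve(identity) eq xs)             // nothing changed: the original list is returned
assert(xs.filterConserve(_ => true) eq xs)         // everything kept: the original list is returned
assert(xs.zipWithConserve(xs)((x, _) => x) eq xs)  // all elements mapped to themselves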
- */ - final def hasSameLengthAs[U](ys: List[U]): Boolean = { - @tailrec def loop(xs: List[T], ys: List[U]): Boolean = - if (xs.isEmpty) ys.isEmpty - else ys.nonEmpty && loop(xs.tail, ys.tail) - loop(xs, ys) - } - - @tailrec final def eqElements(ys: List[AnyRef]): Boolean = xs match { - case x :: _ => - ys match { - case y :: _ => - x.asInstanceOf[AnyRef].eq(y) && - xs.tail.eqElements(ys.tail) - case _ => false - } - case nil => ys.isEmpty - } - - /** Union on lists seen as sets */ - def setUnion (ys: List[T]): List[T] = xs ::: ys.filterNot(xs contains _) - - extension [T, U](xss: List[List[T]]) - def nestedMap(f: T => U): List[List[U]] = xss match - case xs :: xss1 => xs.map(f) :: xss1.nestedMap(f) - case nil => Nil - def nestedMapConserve(f: T => U): List[List[U]] = - xss.mapconserve(_.mapconserve(f)) - def nestedZipWithConserve(yss: List[List[U]])(f: (T, U) => T): List[List[T]] = - xss.zipWithConserve(yss)((xs, ys) => xs.zipWithConserve(ys)(f)) - def nestedExists(p: T => Boolean): Boolean = xss match - case xs :: xss1 => xs.exists(p) || xss1.nestedExists(p) - case nil => false - end extension - - extension [T](xs: Seq[T]) - final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.cap)): Seq[U] = - xs.collect(pf.asInstanceOf) - - extension [A, B](f: PartialFunction[A, B] @retains(caps.cap)) - def orElseCC(g: PartialFunction[A, B] @retains(caps.cap)): PartialFunction[A, B] @retains(f, g) = - f.orElse(g.asInstanceOf).asInstanceOf - - extension (text: Text) - def show(using Context): String = text.mkString(ctx.settings.pageWidth.value, ctx.settings.printLines.value) - - /** Test whether a list of strings representing phases contains - * a given phase. See [[config.CompilerCommand#explainAdvanced]] for the - * exact meaning of "contains" here. - */ - extension (names: List[String]) - def containsPhase(phase: Phase): Boolean = - names.nonEmpty && { - phase match { - case phase: MegaPhase => phase.miniPhases.exists(x => names.containsPhase(x)) - case _ => - names exists { name => - name == "all" || { - val strippedName = name.stripSuffix("+") - val logNextPhase = name != strippedName - phase.phaseName.startsWith(strippedName) || - (logNextPhase && phase.prev.phaseName.startsWith(strippedName)) - } - } - } - } - - extension [T](x: T) - def showing[U]( - op: WrappedResult[U] ?=> String, - printer: config.Printers.Printer = config.Printers.default)(using c: Conversion[T, U] | Null = null): T = { - // either the use of `$result` was driven by the expected type of `Shown` - // which led to the summoning of `Conversion[T, Shown]` (which we'll invoke) - // or no such conversion was found so we'll consume the result as it is instead - val obj = if c == null then x.asInstanceOf[U] else c(x) - printer.println(op(using WrappedResult(obj))) - x - } - - /** Instead of `toString` call `show` on `Showable` values, falling back to `toString` if an exception is raised. */ - def tryToShow(using Context): String = x match - case x: Showable => - try x.show - catch - case ex: CyclicReference => "... (caught cyclic reference) ..." - case NonFatal(ex) - if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => - val msg = ex match - case te: TypeError => te.toMessage.message - case _ => ex.getMessage - s"[cannot display due to $msg, raw string = $x]" - case _ => String.valueOf(x).nn - - /** Returns the simple class name of `x`. 
*/ - def className: String = getClass.getSimpleName.nn - - extension [T](x: T) - def assertingErrorsReported(using Context): T = { - assert(ctx.reporter.errorsReported) - x - } - def assertingErrorsReported(msg: Message)(using Context): T = { - assert(ctx.reporter.errorsReported, msg) - x - } - - extension [T <: AnyRef](xs: ::[T]) - def derivedCons(x1: T, xs1: List[T]) = - if (xs.head eq x1) && (xs.tail eq xs1) then xs else x1 :: xs1 - - extension (sc: StringContext) - - /** General purpose string formatting */ - def i(args: Shown*)(using Context): String = - new StringFormatter(sc).assemble(args) - - /** Interpolator yielding an error message, which undergoes - * the formatting defined in Message. - */ - def em(args: Shown*)(using Context): NoExplanation = - NoExplanation(i(args*)) - - extension [T <: AnyRef](arr: Array[T]) - def binarySearch(x: T | Null): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object | Null]], x) - -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala deleted file mode 100644 index 8faf208e36d0..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala +++ /dev/null @@ -1,2433 +0,0 @@ -package dotty.tools -package dotc -package core - -import scala.annotation.{threadUnsafe => tu} -import Types._, Contexts._, Symbols._, SymDenotations._, StdNames._, Names._, Phases._ -import Flags._, Scopes._, Decorators._, NameOps._, Periods._, NullOpsDecorator._ -import unpickleScala2.Scala2Unpickler.ensureConstructor -import scala.collection.mutable -import collection.mutable -import Denotations.{SingleDenotation, staticRef} -import util.{SimpleIdentityMap, SourceFile, NoSource} -import typer.ImportInfo.RootRef -import Comments.CommentsContext -import Comments.Comment -import util.Spans.NoSpan -import config.Feature -import Symbols.requiredModuleRef -import cc.{CapturingType, CaptureSet, EventuallyCapturingType} - -import scala.annotation.tailrec -import language.experimental.pureFunctions - -object Definitions { - - /** The maximum number of elements in a tuple or product. - * This should be removed once we go to hlists. - */ - val MaxTupleArity: Int = 22 - - /** The maximum arity N of a function type that's implemented - * as a trait `scala.FunctionN`. Functions of higher arity are possible, - * but are mapped in erasure to functions taking a single parameter of type - * Object[]. - * The limit 22 is chosen for Scala2x interop. It could be something - * else without affecting the set of programs that can be compiled. 
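A small sketch of the `i` interpolator defined above: Showable arguments are rendered with `show` rather than `toString` (`Symbol` and `owner` come from the wider codebase):

def ownerLine(sym: Symbol)(using Context): String =
  i"$sym is owned by ${sym.owner}"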
- */ - val MaxImplementedFunctionArity: Int = MaxTupleArity -} - -/** A class defining symbols and types of standard definitions - * - */ -class Definitions { - import Definitions._ - - private var initCtx: DetachedContext = _ - private given currentContext[Dummy_so_its_a_def]: DetachedContext = initCtx - - private def newPermanentSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) = - newSymbol(owner, name, flags | Permanent, info) - - private def newPermanentClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) = - newClassSymbol(owner, name, flags | Permanent | NoInits | Open, infoFn) - - private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef]): ClassSymbol = - enterCompleteClassSymbol(owner, name, flags, parents, newScope(owner.nestingLevel + 1)) - - private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = - newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered - - private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = - scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) - - private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = - enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope) - - private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = - enterTypeParam(cls, suffix.toTypeName, paramFlags, scope) - - // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only - // implemented in Dotty and not in Scala 2. - // See . - private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: -> Seq[Type]): ClassSymbol = { - val completer = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { - val cls = denot.asClass.classSymbol - val paramDecls = newScope - val typeParam = enterSyntheticTypeParam(cls, paramFlags, paramDecls) - def instantiate(tpe: Type) = - if (tpe.typeParams.nonEmpty) tpe.appliedTo(typeParam.typeRef) - else tpe - val parents = parentConstrs.toList map instantiate - denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parents, paramDecls) - } - } - newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered - } - - /** The trait FunctionN, ContextFunctionN, ErasedFunctionN or ErasedContextFunction, for some N - * @param name The name of the trait to be created - * - * FunctionN traits follow this template: - * - * trait FunctionN[-T0,...-T{N-1}, +R] extends Object { - * def apply($x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * That is, they follow the template given for Function2..Function22 in the - * standard library, but without `tupled` and `curried` methods and without - * a `toString`. 
- * - * ContextFunctionN traits follow this template: - * - * trait ContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(using $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedFunctionN traits follow this template: - * - * trait ErasedFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedContextFunctionN traits follow this template: - * - * trait ErasedContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(using erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedFunctionN and ErasedContextFunctionN erase to Function0. - * - * ImpureXYZFunctionN follow this template: - * - * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {*} XYZFunctionN[T0,...,T{N-1}, R] - */ - private def newFunctionNType(name: TypeName): Symbol = { - val impure = name.startsWith("Impure") - val completer = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { - val arity = name.functionArity - if impure then - val argParamNames = List.tabulate(arity)(tpnme.syntheticTypeParamName) - val argVariances = List.fill(arity)(Contravariant) - val underlyingName = name.asSimpleName.drop(6) - val underlyingClass = ScalaPackageVal.requiredClass(underlyingName) - denot.info = TypeAlias( - HKTypeLambda(argParamNames :+ "R".toTypeName, argVariances :+ Covariant)( - tl => List.fill(arity + 1)(TypeBounds.empty), - tl => CapturingType(underlyingClass.typeRef.appliedTo(tl.paramRefs), - CaptureSet.universal) - )) - else - val cls = denot.asClass.classSymbol - val decls = newScope - val paramNamePrefix = tpnme.scala ++ str.NAME_JOIN ++ name ++ str.EXPAND_SEPARATOR - val argParamRefs = List.tabulate(arity) { i => - enterTypeParam(cls, paramNamePrefix ++ "T" ++ (i + 1).toString, Contravariant, decls).typeRef - } - val resParamRef = enterTypeParam(cls, paramNamePrefix ++ "R", Covariant, decls).typeRef - val methodType = MethodType.companion( - isContextual = name.isContextFunction, - isImplicit = false, - isErased = name.isErasedFunction) - decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred)) - denot.info = - ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) - } - } - if impure then - newPermanentSymbol(ScalaPackageClass, name, EmptyFlags, completer) - else - newPermanentClassSymbol(ScalaPackageClass, name, Trait | NoInits, completer) - } - - private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol = - newPermanentSymbol(cls, name, flags | Method, info).asTerm - - private def enterMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol = - newMethod(cls, name, info, flags).entered - - private def enterPermanentSymbol(name: Name, info: Type, flags: FlagSet = EmptyFlags): Symbol = - val sym = newPermanentSymbol(ScalaPackageClass, name, flags, info) - ScalaPackageClass.currentPackageDecls.enter(sym) - sym - - private def enterAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol = - enterPermanentSymbol(name, TypeAlias(tpe), flags).asType - - private def enterBinaryAlias(name: TypeName, op: (Type, Type) => Type): TypeSymbol = - enterAliasType(name, - HKTypeLambda(TypeBounds.empty :: TypeBounds.empty :: Nil)( - tl => op(tl.paramRefs(0), tl.paramRefs(1)))) - - private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int, - resultTypeFn: PolyType -> Type, - flags: FlagSet = EmptyFlags, - bounds: TypeBounds = TypeBounds.empty, - 
useCompleter: Boolean = false) = { - val tparamNames = PolyType.syntheticParamNames(typeParamCount) - val tparamInfos = tparamNames map (_ => bounds) - def ptype = PolyType(tparamNames)(_ => tparamInfos, resultTypeFn) - val info = - if (useCompleter) - new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = - denot.info = ptype - } - else ptype - enterMethod(cls, name, info, flags) - } - - private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType -> Type, flags: FlagSet) = - enterPolyMethod(cls, name, 1, resultTypeFn, flags) - - private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = { - val arr = new Array[TypeRef | Null](arity + 1) - for (i <- countFrom to arity) arr(i) = requiredClassRef(name + i) - arr - } - - private def completeClass(cls: ClassSymbol, ensureCtor: Boolean = true): ClassSymbol = { - if (ensureCtor) ensureConstructor(cls, cls.denot.asClass, EmptyScope) - if (cls.linkedClass.exists) cls.linkedClass.markAbsent() - cls - } - - @tu lazy val RootClass: ClassSymbol = newPackageSymbol( - NoSymbol, nme.ROOT, (root, rootcls) => ctx.base.rootLoader(root)).moduleClass.asClass - @tu lazy val RootPackage: TermSymbol = newSymbol( - NoSymbol, nme.ROOTPKG, PackageCreationFlags, TypeRef(NoPrefix, RootClass)) - - @tu lazy val EmptyPackageVal: TermSymbol = newPackageSymbol( - RootClass, nme.EMPTY_PACKAGE, (emptypkg, emptycls) => ctx.base.rootLoader(emptypkg)).entered - @tu lazy val EmptyPackageClass: ClassSymbol = EmptyPackageVal.moduleClass.asClass - - /** A package in which we can place all methods and types that are interpreted specially by the compiler */ - @tu lazy val OpsPackageVal: TermSymbol = newCompletePackageSymbol(RootClass, nme.OPS_PACKAGE).entered - @tu lazy val OpsPackageClass: ClassSymbol = OpsPackageVal.moduleClass.asClass - - @tu lazy val ScalaPackageVal: TermSymbol = requiredPackage(nme.scala) - @tu lazy val ScalaMathPackageVal: TermSymbol = requiredPackage("scala.math") - @tu lazy val ScalaPackageClass: ClassSymbol = { - val cls = ScalaPackageVal.moduleClass.asClass - cls.info.decls.openForMutations.useSynthesizer( - name => - if (name.isTypeName && name.isSyntheticFunction) newFunctionNType(name.asTypeName) - else NoSymbol) - cls - } - @tu lazy val ScalaPackageObject: Symbol = requiredModule("scala.package") - @tu lazy val ScalaRuntimePackageVal: TermSymbol = requiredPackage("scala.runtime") - @tu lazy val ScalaRuntimePackageClass: ClassSymbol = ScalaRuntimePackageVal.moduleClass.asClass - @tu lazy val JavaPackageVal: TermSymbol = requiredPackage(nme.java) - @tu lazy val JavaPackageClass: ClassSymbol = JavaPackageVal.moduleClass.asClass - @tu lazy val JavaLangPackageVal: TermSymbol = requiredPackage(jnme.JavaLang) - @tu lazy val JavaLangPackageClass: ClassSymbol = JavaLangPackageVal.moduleClass.asClass - - // fundamental modules - @tu lazy val SysPackage : Symbol = requiredModule("scala.sys.package") - @tu lazy val Sys_error: Symbol = SysPackage.moduleClass.requiredMethod(nme.error) - - @tu lazy val ScalaXmlPackageClass: Symbol = getPackageClassIfDefined("scala.xml") - - @tu lazy val CompiletimePackageClass: Symbol = requiredPackage("scala.compiletime").moduleClass - @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") - @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") - @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") - @tu lazy 
val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) - @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") - @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") - @tu lazy val Compiletime_constValueOpt: Symbol = CompiletimePackageClass.requiredMethod("constValueOpt") - @tu lazy val Compiletime_summonFrom : Symbol = CompiletimePackageClass.requiredMethod("summonFrom") - @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") - @tu lazy val CompiletimeTestingPackage: Symbol = requiredPackage("scala.compiletime.testing") - @tu lazy val CompiletimeTesting_typeChecks: Symbol = CompiletimeTestingPackage.requiredMethod("typeChecks") - @tu lazy val CompiletimeTesting_typeCheckErrors: Symbol = CompiletimeTestingPackage.requiredMethod("typeCheckErrors") - @tu lazy val CompiletimeTesting_ErrorClass: ClassSymbol = requiredClass("scala.compiletime.testing.Error") - @tu lazy val CompiletimeTesting_Error: Symbol = requiredModule("scala.compiletime.testing.Error") - @tu lazy val CompiletimeTesting_Error_apply = CompiletimeTesting_Error.requiredMethod(nme.apply) - @tu lazy val CompiletimeTesting_ErrorKind: Symbol = requiredModule("scala.compiletime.testing.ErrorKind") - @tu lazy val CompiletimeTesting_ErrorKind_Parser: Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Parser") - @tu lazy val CompiletimeTesting_ErrorKind_Typer: Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Typer") - @tu lazy val CompiletimeOpsPackage: Symbol = requiredPackage("scala.compiletime.ops") - @tu lazy val CompiletimeOpsAnyModuleClass: Symbol = requiredModule("scala.compiletime.ops.any").moduleClass - @tu lazy val CompiletimeOpsIntModuleClass: Symbol = requiredModule("scala.compiletime.ops.int").moduleClass - @tu lazy val CompiletimeOpsLongModuleClass: Symbol = requiredModule("scala.compiletime.ops.long").moduleClass - @tu lazy val CompiletimeOpsFloatModuleClass: Symbol = requiredModule("scala.compiletime.ops.float").moduleClass - @tu lazy val CompiletimeOpsDoubleModuleClass: Symbol = requiredModule("scala.compiletime.ops.double").moduleClass - @tu lazy val CompiletimeOpsStringModuleClass: Symbol = requiredModule("scala.compiletime.ops.string").moduleClass - @tu lazy val CompiletimeOpsBooleanModuleClass: Symbol = requiredModule("scala.compiletime.ops.boolean").moduleClass - - /** Note: We cannot have same named methods defined in Object and Any (and AnyVal, for that matter) - * because after erasure the Any and AnyVal references get remapped to the Object methods - * which would result in a double binding assertion failure. - * Instead we do the following: - * - * - Have some methods exist only in Any, and remap them with the Erasure denotation - * transformer to be owned by Object. - * - Have other methods exist only in Object. - * To achieve this, we synthesize all Any and Object methods; Object methods no longer get - * loaded from a classfile. 
- */ - @tu lazy val AnyClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil), ensureCtor = false) - def AnyType: TypeRef = AnyClass.typeRef - @tu lazy val MatchableClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Matchable, Trait, AnyType :: Nil), ensureCtor = false) - def MatchableType: TypeRef = MatchableClass.typeRef - @tu lazy val AnyValClass: ClassSymbol = - val res = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyType, MatchableType))) - // Mark companion as absent, so that class does not get re-completed - val companion = ScalaPackageVal.info.decl(nme.AnyVal).symbol - companion.moduleClass.markAbsent() - companion.markAbsent() - res - - def AnyValType: TypeRef = AnyValClass.typeRef - - @tu lazy val Any_== : TermSymbol = enterMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final) - @tu lazy val Any_!= : TermSymbol = enterMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final) - @tu lazy val Any_equals: TermSymbol = enterMethod(AnyClass, nme.equals_, methOfAny(BooleanType)) - @tu lazy val Any_hashCode: TermSymbol = enterMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType)) - @tu lazy val Any_toString: TermSymbol = enterMethod(AnyClass, nme.toString_, MethodType(Nil, StringType)) - @tu lazy val Any_## : TermSymbol = enterMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final) - @tu lazy val Any_isInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final) - @tu lazy val Any_asInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, _.paramRefs(0), Final) - @tu lazy val Any_typeTest: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOfPM, _ => BooleanType, Final | SyntheticArtifact) - @tu lazy val Any_typeCast: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOfPM, _.paramRefs(0), Final | SyntheticArtifact | StableRealizable) - // generated by pattern matcher and explicit nulls, eliminated by erasure - - /** def getClass[A >: this.type](): Class[? <: A] */ - @tu lazy val Any_getClass: TermSymbol = - enterPolyMethod( - AnyClass, nme.getClass_, 1, - pt => MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.upper(pt.paramRefs(0)))), - Final, - bounds = TypeBounds.lower(AnyClass.thisType)) - - def AnyMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, - Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf, Any_typeTest, Any_typeCast) - - @tu lazy val ObjectClass: ClassSymbol = { - val cls = requiredClass("java.lang.Object") - assert(!cls.isCompleted, "race for completing java.lang.Object") - cls.info = ClassInfo(cls.owner.thisType, cls, List(AnyType, MatchableType), newScope) - cls.setFlag(NoInits | JavaDefined) - - ensureConstructor(cls, cls.denot.asClass, EmptyScope) - val companion = JavaLangPackageVal.info.decl(nme.Object).symbol.asTerm - NamerOps.makeConstructorCompanion(companion, cls) - cls - } - def ObjectType: TypeRef = ObjectClass.typeRef - - /** A type alias of Object used to represent any reference to Object in a Java - * signature, the secret sauce is that subtype checking treats it specially: - * - * tp <:< FromJavaObject - * - * is equivalent to: - * - * tp <:< Any - * - * This is useful to avoid usability problems when interacting with Java - * code where Object is the top type. 
This is safe because this type will - * only appear in signatures of Java definitions in positions where `Object` - * might appear, let's enumerate all possible cases this gives us: - * - * 1. At the top level: - * - * // A.java - * void meth1(Object arg) {} - * void meth2(T arg) {} // T implicitly extends Object - * - * // B.scala - * meth1(1) // OK - * meth2(1) // OK - * - * This is safe even though Int is not a subtype of Object, because Erasure - * will detect the mismatch and box the value type. - * - * 2. In a class type parameter: - * - * // A.java - * void meth3(scala.List arg) {} - * void meth4(scala.List arg) {} - * - * // B.scala - * meth3(List[Int](1)) // OK - * meth4(List[Int](1)) // OK - * - * At erasure, type parameters are removed and value types are boxed. - * - * 3. As the type parameter of an array: - * - * // A.java - * void meth5(Object[] arg) {} - * void meth6(T[] arg) {} - * - * // B.scala - * meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] - * meth6(Array[Int](1)) // error: Array[Int] is not a subtype of Array[T & Object] - * - * - * This is a bit more subtle: at erasure, Arrays keep their type parameter, - * and primitive Arrays are not subtypes of reference Arrays on the JVM, - * so we can't pass an Array of Int where a reference Array is expected. - * Array is invariant in Scala, so `meth5` is safe even if we use `FromJavaObject`, - * but generic Arrays are treated specially: we always add `& Object` (and here - * we mean the normal java.lang.Object type) to these types when they come from - * Java signatures (see `translateJavaArrayElementType`), this ensure that `meth6` - * is safe to use. - * - * 4. As the repeated argument of a varargs method: - * - * // A.java - * void meth7(Object... args) {} - * void meth8(T... args) {} - * - * // B.scala - * meth7(1) // OK (creates a reference array) - * meth8(1) // OK (creates a primitive array and copies it into a reference array at Erasure) - * val ai = Array[Int](1) - * meth7(ai: _*) // OK (will copy the array at Erasure) - * meth8(ai: _*) // OK (will copy the array at Erasure) - * - * Java repeated arguments are erased to arrays, so it would be safe to treat - * them in the same way: add an `& Object` to the parameter type to disallow - * passing primitives, but that would be very inconvenient as it is common to - * want to pass a primitive to an Object repeated argument (e.g. - * `String.format("foo: %d", 1)`). So instead we type them _without_ adding the - * `& Object` and let `ElimRepeated` and `Erasure` take care of doing any necessary adaptation - * (note that adapting a primitive array to a reference array requires - * copying the whole array, so this transformation only preserves semantics - * if the callee does not try to mutate the varargs array which is a reasonable - * assumption to make). - * - * - * This mechanism is similar to `ObjectTpeJavaRef` in Scala 2, except that we - * create a new symbol with its own name, this is needed because this type - * can show up in inferred types and therefore needs to be preserved when - * pickling so that unpickled trees pass `-Ycheck`. - * - * Note that by default we pretty-print `FromJavaObject` as `Object` or simply omit it - * if it's the sole upper-bound of a type parameter, use `-Yprint-debug` to explicitly - * display it. 
- */ - @tu lazy val FromJavaObjectSymbol: TypeSymbol = - newPermanentSymbol(OpsPackageClass, tpnme.FromJavaObject, JavaDefined, TypeAlias(ObjectType)).entered - def FromJavaObjectType: TypeRef = FromJavaObjectSymbol.typeRef - - @tu lazy val AnyRefAlias: TypeSymbol = enterAliasType(tpnme.AnyRef, ObjectType) - def AnyRefType: TypeRef = AnyRefAlias.typeRef - - @tu lazy val Object_eq: TermSymbol = enterMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final) - @tu lazy val Object_ne: TermSymbol = enterMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final) - @tu lazy val Object_synchronized: TermSymbol = enterPolyMethod(ObjectClass, nme.synchronized_, 1, - pt => MethodType(List(pt.paramRefs(0)), pt.paramRefs(0)), Final) - @tu lazy val Object_clone: TermSymbol = enterMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected) - @tu lazy val Object_finalize: TermSymbol = enterMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected) - @tu lazy val Object_notify: TermSymbol = enterMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType), Final) - @tu lazy val Object_notifyAll: TermSymbol = enterMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType), Final) - @tu lazy val Object_wait: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType), Final) - @tu lazy val Object_waitL: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType), Final) - @tu lazy val Object_waitLI: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType), Final) - - def ObjectMethods: List[TermSymbol] = List(Object_eq, Object_ne, Object_synchronized, Object_clone, - Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI) - - /** Methods in Object and Any that do not have a side effect */ - @tu lazy val pureMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, - Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_typeTest, Object_eq, Object_ne) - - @tu lazy val AnyKindClass: ClassSymbol = { - val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) - if (!ctx.settings.YnoKindPolymorphism.value) - // Enable kind-polymorphism by exposing scala.AnyKind - cls.entered - cls - } - def AnyKindType: TypeRef = AnyKindClass.typeRef - - @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) - @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) - - /** Method representing a throw */ - @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, - MethodType(List(ThrowableType), NothingType)) - - @tu lazy val NothingClass: ClassSymbol = enterCompleteClassSymbol( - ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyType)) - def NothingType: TypeRef = NothingClass.typeRef - @tu lazy val NullClass: ClassSymbol = { - // When explicit-nulls is enabled, Null becomes a direct subtype of Any and Matchable - val parents = if ctx.explicitNulls then AnyType :: MatchableType :: Nil else ObjectType :: Nil - enterCompleteClassSymbol(ScalaPackageClass, tpnme.Null, AbstractFinal, parents) - } - def NullType: TypeRef = NullClass.typeRef - - @tu lazy val InvokerModule = requiredModule("scala.runtime.coverage.Invoker") - @tu lazy val InvokedMethodRef = InvokerModule.requiredMethodRef("invoked") - - @tu lazy val ImplicitScrutineeTypeSym = - newPermanentSymbol(ScalaPackageClass, tpnme.IMPLICITkw, EmptyFlags, 
TypeBounds.empty).entered - def ImplicitScrutineeTypeRef: TypeRef = ImplicitScrutineeTypeSym.typeRef - - @tu lazy val ScalaPredefModule: Symbol = requiredModule("scala.Predef") - @tu lazy val Predef_conforms : Symbol = ScalaPredefModule.requiredMethod(nme.conforms_) - @tu lazy val Predef_classOf : Symbol = ScalaPredefModule.requiredMethod(nme.classOf) - @tu lazy val Predef_identity : Symbol = ScalaPredefModule.requiredMethod(nme.identity) - @tu lazy val Predef_undefined: Symbol = ScalaPredefModule.requiredMethod(nme.???) - @tu lazy val ScalaPredefModuleClass: ClassSymbol = ScalaPredefModule.moduleClass.asClass - - @tu lazy val SubTypeClass: ClassSymbol = requiredClass("scala.<:<") - @tu lazy val SubType_refl: Symbol = SubTypeClass.companionModule.requiredMethod(nme.refl) - - @tu lazy val DummyImplicitClass: ClassSymbol = requiredClass("scala.DummyImplicit") - - @tu lazy val ScalaRuntimeModule: Symbol = requiredModule("scala.runtime.ScalaRunTime") - def runtimeMethodRef(name: PreName): TermRef = ScalaRuntimeModule.requiredMethodRef(name) - def ScalaRuntime_drop: Symbol = runtimeMethodRef(nme.drop).symbol - @tu lazy val ScalaRuntime__hashCode: Symbol = ScalaRuntimeModule.requiredMethod(nme._hashCode_) - @tu lazy val ScalaRuntime_toArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toArray) - @tu lazy val ScalaRuntime_toObjectArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toObjectArray) - - @tu lazy val BoxesRunTimeModule: Symbol = requiredModule("scala.runtime.BoxesRunTime") - @tu lazy val BoxesRunTimeModule_externalEquals: Symbol = BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol - @tu lazy val ScalaStaticsModule: Symbol = requiredModule("scala.runtime.Statics") - def staticsMethodRef(name: PreName): TermRef = ScalaStaticsModule.requiredMethodRef(name) - def staticsMethod(name: PreName): TermSymbol = ScalaStaticsModule.requiredMethod(name) - - @tu lazy val DottyArraysModule: Symbol = requiredModule("scala.runtime.Arrays") - def newGenericArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") - def newArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newArray") - - def getWrapVarargsArrayModule: Symbol = ScalaRuntimeModule - - // The set of all wrap{X, Ref}Array methods, where X is a value type - val WrapArrayMethods: PerRun[collection.Set[Symbol]] = new PerRun({ - val methodNames = ScalaValueTypes.map(ast.tpd.wrapArrayMethodName) `union` Set(nme.wrapRefArray) - methodNames.map(getWrapVarargsArrayModule.requiredMethod(_)) - }) - - @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") - @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") - @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") - @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") - @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") - - @tu lazy val SingletonClass: ClassSymbol = - // needed as a synthetic class because Scala 2.x refers to it in classfiles - // but does not define it as an explicit class. 
- enterCompleteClassSymbol( - ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, - List(AnyType), EmptyScope) - @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef - - @tu lazy val CollectionSeqType: TypeRef = requiredClassRef("scala.collection.Seq") - @tu lazy val SeqType: TypeRef = requiredClassRef("scala.collection.immutable.Seq") - def SeqClass(using Context): ClassSymbol = SeqType.symbol.asClass - @tu lazy val Seq_apply : Symbol = SeqClass.requiredMethod(nme.apply) - @tu lazy val Seq_head : Symbol = SeqClass.requiredMethod(nme.head) - @tu lazy val Seq_drop : Symbol = SeqClass.requiredMethod(nme.drop) - @tu lazy val Seq_lengthCompare: Symbol = SeqClass.requiredMethod(nme.lengthCompare, List(IntType)) - @tu lazy val Seq_length : Symbol = SeqClass.requiredMethod(nme.length) - @tu lazy val Seq_toSeq : Symbol = SeqClass.requiredMethod(nme.toSeq) - @tu lazy val SeqModule: Symbol = requiredModule("scala.collection.immutable.Seq") - - - @tu lazy val StringOps: Symbol = requiredClass("scala.collection.StringOps") - @tu lazy val StringOps_format: Symbol = StringOps.requiredMethod(nme.format) - - @tu lazy val ArrayType: TypeRef = requiredClassRef("scala.Array") - def ArrayClass(using Context): ClassSymbol = ArrayType.symbol.asClass - @tu lazy val Array_apply : Symbol = ArrayClass.requiredMethod(nme.apply) - @tu lazy val Array_update : Symbol = ArrayClass.requiredMethod(nme.update) - @tu lazy val Array_length : Symbol = ArrayClass.requiredMethod(nme.length) - @tu lazy val Array_clone : Symbol = ArrayClass.requiredMethod(nme.clone_) - @tu lazy val ArrayConstructor: Symbol = ArrayClass.requiredMethod(nme.CONSTRUCTOR) - - @tu lazy val ArrayModule: Symbol = requiredModule("scala.Array") - def ArrayModuleClass: Symbol = ArrayModule.moduleClass - - @tu lazy val IArrayModule: Symbol = requiredModule("scala.IArray") - def IArrayModuleClass: Symbol = IArrayModule.moduleClass - - @tu lazy val UnitType: TypeRef = valueTypeRef("scala.Unit", java.lang.Void.TYPE, UnitEnc, nme.specializedTypeNames.Void) - def UnitClass(using Context): ClassSymbol = UnitType.symbol.asClass - def UnitModuleClass(using Context): Symbol = UnitType.symbol.asClass.linkedClass - @tu lazy val BooleanType: TypeRef = valueTypeRef("scala.Boolean", java.lang.Boolean.TYPE, BooleanEnc, nme.specializedTypeNames.Boolean) - def BooleanClass(using Context): ClassSymbol = BooleanType.symbol.asClass - @tu lazy val Boolean_! : Symbol = BooleanClass.requiredMethod(nme.UNARY_!) - @tu lazy val Boolean_&& : Symbol = BooleanClass.requiredMethod(nme.ZAND) // ### harmonize required... 
calls - @tu lazy val Boolean_|| : Symbol = BooleanClass.requiredMethod(nme.ZOR) - @tu lazy val Boolean_== : Symbol = - BooleanClass.info.member(nme.EQ).suchThat(_.info.firstParamTypes match { - case List(pt) => pt.isRef(BooleanClass) - case _ => false - }).symbol - @tu lazy val Boolean_!= : Symbol = - BooleanClass.info.member(nme.NE).suchThat(_.info.firstParamTypes match { - case List(pt) => pt.isRef(BooleanClass) - case _ => false - }).symbol - - @tu lazy val ByteType: TypeRef = valueTypeRef("scala.Byte", java.lang.Byte.TYPE, ByteEnc, nme.specializedTypeNames.Byte) - def ByteClass(using Context): ClassSymbol = ByteType.symbol.asClass - @tu lazy val ShortType: TypeRef = valueTypeRef("scala.Short", java.lang.Short.TYPE, ShortEnc, nme.specializedTypeNames.Short) - def ShortClass(using Context): ClassSymbol = ShortType.symbol.asClass - @tu lazy val CharType: TypeRef = valueTypeRef("scala.Char", java.lang.Character.TYPE, CharEnc, nme.specializedTypeNames.Char) - def CharClass(using Context): ClassSymbol = CharType.symbol.asClass - @tu lazy val IntType: TypeRef = valueTypeRef("scala.Int", java.lang.Integer.TYPE, IntEnc, nme.specializedTypeNames.Int) - def IntClass(using Context): ClassSymbol = IntType.symbol.asClass - @tu lazy val Int_- : Symbol = IntClass.requiredMethod(nme.MINUS, List(IntType)) - @tu lazy val Int_+ : Symbol = IntClass.requiredMethod(nme.PLUS, List(IntType)) - @tu lazy val Int_/ : Symbol = IntClass.requiredMethod(nme.DIV, List(IntType)) - @tu lazy val Int_* : Symbol = IntClass.requiredMethod(nme.MUL, List(IntType)) - @tu lazy val Int_== : Symbol = IntClass.requiredMethod(nme.EQ, List(IntType)) - @tu lazy val Int_>= : Symbol = IntClass.requiredMethod(nme.GE, List(IntType)) - @tu lazy val Int_<= : Symbol = IntClass.requiredMethod(nme.LE, List(IntType)) - @tu lazy val LongType: TypeRef = valueTypeRef("scala.Long", java.lang.Long.TYPE, LongEnc, nme.specializedTypeNames.Long) - def LongClass(using Context): ClassSymbol = LongType.symbol.asClass - @tu lazy val Long_+ : Symbol = LongClass.requiredMethod(nme.PLUS, List(LongType)) - @tu lazy val Long_* : Symbol = LongClass.requiredMethod(nme.MUL, List(LongType)) - @tu lazy val Long_/ : Symbol = LongClass.requiredMethod(nme.DIV, List(LongType)) - - @tu lazy val FloatType: TypeRef = valueTypeRef("scala.Float", java.lang.Float.TYPE, FloatEnc, nme.specializedTypeNames.Float) - def FloatClass(using Context): ClassSymbol = FloatType.symbol.asClass - @tu lazy val DoubleType: TypeRef = valueTypeRef("scala.Double", java.lang.Double.TYPE, DoubleEnc, nme.specializedTypeNames.Double) - def DoubleClass(using Context): ClassSymbol = DoubleType.symbol.asClass - - @tu lazy val BoxedUnitClass: ClassSymbol = requiredClass("scala.runtime.BoxedUnit") - def BoxedUnit_UNIT(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("UNIT") - def BoxedUnit_TYPE(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("TYPE") - - @tu lazy val BoxedBooleanClass: ClassSymbol = requiredClass("java.lang.Boolean") - @tu lazy val BoxedByteClass : ClassSymbol = requiredClass("java.lang.Byte") - @tu lazy val BoxedShortClass : ClassSymbol = requiredClass("java.lang.Short") - @tu lazy val BoxedCharClass : ClassSymbol = requiredClass("java.lang.Character") - @tu lazy val BoxedIntClass : ClassSymbol = requiredClass("java.lang.Integer") - @tu lazy val BoxedLongClass : ClassSymbol = requiredClass("java.lang.Long") - @tu lazy val BoxedFloatClass : ClassSymbol = requiredClass("java.lang.Float") - @tu lazy val BoxedDoubleClass : ClassSymbol = 
requiredClass("java.lang.Double") - - @tu lazy val BoxedBooleanModule: TermSymbol = requiredModule("java.lang.Boolean") - @tu lazy val BoxedByteModule : TermSymbol = requiredModule("java.lang.Byte") - @tu lazy val BoxedShortModule : TermSymbol = requiredModule("java.lang.Short") - @tu lazy val BoxedCharModule : TermSymbol = requiredModule("java.lang.Character") - @tu lazy val BoxedIntModule : TermSymbol = requiredModule("java.lang.Integer") - @tu lazy val BoxedLongModule : TermSymbol = requiredModule("java.lang.Long") - @tu lazy val BoxedFloatModule : TermSymbol = requiredModule("java.lang.Float") - @tu lazy val BoxedDoubleModule : TermSymbol = requiredModule("java.lang.Double") - @tu lazy val BoxedUnitModule : TermSymbol = requiredModule("java.lang.Void") - - @tu lazy val ByNameParamClass2x: ClassSymbol = enterSpecialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType)) - - @tu lazy val RepeatedParamClass: ClassSymbol = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType)) - - @tu lazy val IntoType: TypeSymbol = enterAliasType(tpnme.INTO, HKTypeLambda(TypeBounds.empty :: Nil)(_.paramRefs(0))) - - // fundamental classes - @tu lazy val StringClass: ClassSymbol = requiredClass("java.lang.String") - def StringType: Type = StringClass.typeRef - @tu lazy val StringModule: Symbol = StringClass.linkedClass - @tu lazy val String_+ : TermSymbol = enterMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final) - @tu lazy val String_valueOf_Object: Symbol = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match { - case List(pt) => pt.isAny || pt.stripNull.isAnyRef - case _ => false - }).symbol - - @tu lazy val JavaCloneableClass: ClassSymbol = requiredClass("java.lang.Cloneable") - @tu lazy val NullPointerExceptionClass: ClassSymbol = requiredClass("java.lang.NullPointerException") - @tu lazy val IndexOutOfBoundsException: ClassSymbol = requiredClass("java.lang.IndexOutOfBoundsException") - @tu lazy val ClassClass: ClassSymbol = requiredClass("java.lang.Class") - @tu lazy val BoxedNumberClass: ClassSymbol = requiredClass("java.lang.Number") - @tu lazy val ClassCastExceptionClass: ClassSymbol = requiredClass("java.lang.ClassCastException") - @tu lazy val ClassCastExceptionClass_stringConstructor: TermSymbol = ClassCastExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { - case List(pt) => - pt.stripNull.isRef(StringClass) - case _ => false - }).symbol.asTerm - @tu lazy val ArithmeticExceptionClass: ClassSymbol = requiredClass("java.lang.ArithmeticException") - @tu lazy val ArithmeticExceptionClass_stringConstructor: TermSymbol = ArithmeticExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { - case List(pt) => - pt.stripNull.isRef(StringClass) - case _ => false - }).symbol.asTerm - - @tu lazy val JavaSerializableClass: ClassSymbol = requiredClass("java.io.Serializable") - - @tu lazy val ComparableClass: ClassSymbol = requiredClass("java.lang.Comparable") - - @tu lazy val SystemClass: ClassSymbol = requiredClass("java.lang.System") - @tu lazy val SystemModule: Symbol = SystemClass.linkedClass - - @tu lazy val NoSuchElementExceptionClass = requiredClass("java.util.NoSuchElementException") - def NoSuchElementExceptionType = NoSuchElementExceptionClass.typeRef - @tu lazy val IllegalArgumentExceptionClass = requiredClass("java.lang.IllegalArgumentException") - def IllegalArgumentExceptionType = IllegalArgumentExceptionClass.typeRef - - // in scalac modified to have Any as parent - 
- @tu lazy val ThrowableType: TypeRef = requiredClassRef("java.lang.Throwable") - def ThrowableClass(using Context): ClassSymbol = ThrowableType.symbol.asClass - @tu lazy val ExceptionClass: ClassSymbol = requiredClass("java.lang.Exception") - @tu lazy val RuntimeExceptionClass: ClassSymbol = requiredClass("java.lang.RuntimeException") - - @tu lazy val SerializableType: TypeRef = JavaSerializableClass.typeRef - def SerializableClass(using Context): ClassSymbol = SerializableType.symbol.asClass - - @tu lazy val JavaBigIntegerClass: ClassSymbol = requiredClass("java.math.BigInteger") - @tu lazy val JavaBigDecimalClass: ClassSymbol = requiredClass("java.math.BigDecimal") - @tu lazy val JavaCalendarClass: ClassSymbol = requiredClass("java.util.Calendar") - @tu lazy val JavaDateClass: ClassSymbol = requiredClass("java.util.Date") - @tu lazy val JavaFormattableClass: ClassSymbol = requiredClass("java.util.Formattable") - - @tu lazy val JavaEnumClass: ClassSymbol = { - val cls = requiredClass("java.lang.Enum") - // jl.Enum has a single constructor protected(name: String, ordinal: Int). - // We remove the arguments from the primary constructor, and enter - // a new constructor symbol with 2 arguments, so that both - // `X extends jl.Enum[X]` and `X extends jl.Enum[X](name, ordinal)` - // pass typer and go through jl.Enum-specific checks in RefChecks. - cls.infoOrCompleter match { - case completer: ClassfileLoader => - cls.info = new ClassfileLoader(completer.classfile) { - override def complete(root: SymDenotation)(using Context): Unit = { - super.complete(root) - val constr = cls.primaryConstructor - val noArgInfo = constr.info match { - case info: PolyType => - info.resType match { - case meth: MethodType => - info.derivedLambdaType( - resType = meth.derivedLambdaType( - paramNames = Nil, paramInfos = Nil)) - } - } - val argConstr = constr.copy().entered - constr.info = noArgInfo - constr.termRef.recomputeDenot() - } - } - cls - } - } - def JavaEnumType = JavaEnumClass.typeRef - - @tu lazy val MethodHandleClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandle") - @tu lazy val MethodHandlesLookupClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandles.Lookup") - @tu lazy val VarHandleClass: ClassSymbol = requiredClass("java.lang.invoke.VarHandle") - - @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") - @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") - @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass - - @tu lazy val StringAddClass : ClassSymbol = requiredClass("scala.runtime.StringAdd") - @tu lazy val StringAdd_+ : Symbol = StringAddClass.requiredMethod(nme.raw.PLUS) - - @tu lazy val StringContextClass: ClassSymbol = requiredClass("scala.StringContext") - @tu lazy val StringContext_s : Symbol = StringContextClass.requiredMethod(nme.s) - @tu lazy val StringContext_raw: Symbol = StringContextClass.requiredMethod(nme.raw_) - @tu lazy val StringContext_f : Symbol = StringContextClass.requiredMethod(nme.f) - @tu lazy val StringContext_parts: Symbol = StringContextClass.requiredMethod(nme.parts) - @tu lazy val StringContextModule: Symbol = StringContextClass.companionModule - @tu lazy val StringContextModule_apply: Symbol = StringContextModule.requiredMethod(nme.apply) - @tu lazy val StringContextModule_standardInterpolator: Symbol = StringContextModule.requiredMethod(nme.standardInterpolator) - @tu lazy val StringContextModule_processEscapes: 
Symbol = StringContextModule.requiredMethod(nme.processEscapes) - - @tu lazy val PartialFunctionClass: ClassSymbol = requiredClass("scala.PartialFunction") - @tu lazy val PartialFunction_isDefinedAt: Symbol = PartialFunctionClass.requiredMethod(nme.isDefinedAt) - @tu lazy val PartialFunction_applyOrElse: Symbol = PartialFunctionClass.requiredMethod(nme.applyOrElse) - - @tu lazy val AbstractPartialFunctionClass: ClassSymbol = requiredClass("scala.runtime.AbstractPartialFunction") - @tu lazy val FunctionXXLClass: ClassSymbol = requiredClass("scala.runtime.FunctionXXL") - @tu lazy val ScalaSymbolClass: ClassSymbol = requiredClass("scala.Symbol") - @tu lazy val DynamicClass: ClassSymbol = requiredClass("scala.Dynamic") - @tu lazy val OptionClass: ClassSymbol = requiredClass("scala.Option") - @tu lazy val SomeClass: ClassSymbol = requiredClass("scala.Some") - @tu lazy val NoneModule: Symbol = requiredModule("scala.None") - - @tu lazy val EnumClass: ClassSymbol = requiredClass("scala.reflect.Enum") - @tu lazy val Enum_ordinal: Symbol = EnumClass.requiredMethod(nme.ordinal) - - @tu lazy val EnumValueSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.EnumValueSerializationProxy") - @tu lazy val EnumValueSerializationProxyConstructor: TermSymbol = - EnumValueSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty), IntType)) - - @tu lazy val ProductClass: ClassSymbol = requiredClass("scala.Product") - @tu lazy val Product_canEqual : Symbol = ProductClass.requiredMethod(nme.canEqual_) - @tu lazy val Product_productArity : Symbol = ProductClass.requiredMethod(nme.productArity) - @tu lazy val Product_productElement : Symbol = ProductClass.requiredMethod(nme.productElement) - @tu lazy val Product_productElementName: Symbol = ProductClass.requiredMethod(nme.productElementName) - @tu lazy val Product_productPrefix : Symbol = ProductClass.requiredMethod(nme.productPrefix) - - @tu lazy val IteratorClass: ClassSymbol = requiredClass("scala.collection.Iterator") - def IteratorModule(using Context): Symbol = IteratorClass.companionModule - - @tu lazy val ModuleSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.ModuleSerializationProxy") - @tu lazy val ModuleSerializationProxyConstructor: TermSymbol = - ModuleSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty))) - - @tu lazy val MirrorClass: ClassSymbol = requiredClass("scala.deriving.Mirror") - @tu lazy val Mirror_ProductClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Product") - @tu lazy val Mirror_Product_fromProduct: Symbol = Mirror_ProductClass.requiredMethod(nme.fromProduct) - @tu lazy val Mirror_SumClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Sum") - @tu lazy val Mirror_SingletonClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Singleton") - @tu lazy val Mirror_SingletonProxyClass: ClassSymbol = requiredClass("scala.deriving.Mirror.SingletonProxy") - - @tu lazy val LanguageModule: Symbol = requiredModule("scala.language") - @tu lazy val LanguageModuleClass: Symbol = LanguageModule.moduleClass.asClass - @tu lazy val LanguageExperimentalModule: Symbol = requiredModule("scala.language.experimental") - @tu lazy val LanguageDeprecatedModule: Symbol = requiredModule("scala.language.deprecated") - @tu lazy val NonLocalReturnControlClass: ClassSymbol = requiredClass("scala.runtime.NonLocalReturnControl") - @tu lazy val SelectableClass: ClassSymbol = requiredClass("scala.Selectable") - @tu lazy val 
WithoutPreciseParameterTypesClass: Symbol = requiredClass("scala.Selectable.WithoutPreciseParameterTypes") - - @tu lazy val ManifestClass: ClassSymbol = requiredClass("scala.reflect.Manifest") - @tu lazy val ManifestFactoryModule: Symbol = requiredModule("scala.reflect.ManifestFactory") - @tu lazy val ClassManifestFactoryModule: Symbol = requiredModule("scala.reflect.ClassManifestFactory") - @tu lazy val OptManifestClass: ClassSymbol = requiredClass("scala.reflect.OptManifest") - @tu lazy val NoManifestModule: Symbol = requiredModule("scala.reflect.NoManifest") - - @tu lazy val ReflectPackageClass: Symbol = requiredPackage("scala.reflect.package").moduleClass - @tu lazy val ClassTagClass: ClassSymbol = requiredClass("scala.reflect.ClassTag") - @tu lazy val ClassTagModule: Symbol = ClassTagClass.companionModule - @tu lazy val ClassTagModule_apply: Symbol = ClassTagModule.requiredMethod(nme.apply) - - @tu lazy val TypeTestClass: ClassSymbol = requiredClass("scala.reflect.TypeTest") - @tu lazy val TypeTest_unapply: Symbol = TypeTestClass.requiredMethod(nme.unapply) - @tu lazy val TypeTestModule_identity: Symbol = TypeTestClass.companionModule.requiredMethod(nme.identity) - - @tu lazy val QuotedExprClass: ClassSymbol = requiredClass("scala.quoted.Expr") - - @tu lazy val QuotesClass: ClassSymbol = requiredClass("scala.quoted.Quotes") - @tu lazy val Quotes_reflect: Symbol = QuotesClass.requiredValue("reflect") - @tu lazy val Quotes_reflect_asTerm: Symbol = Quotes_reflect.requiredMethod("asTerm") - @tu lazy val Quotes_reflect_Apply: Symbol = Quotes_reflect.requiredValue("Apply") - @tu lazy val Quotes_reflect_Apply_apply: Symbol = Quotes_reflect_Apply.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_TypeApply: Symbol = Quotes_reflect.requiredValue("TypeApply") - @tu lazy val Quotes_reflect_TypeApply_apply: Symbol = Quotes_reflect_TypeApply.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_Assign: Symbol = Quotes_reflect.requiredValue("Assign") - @tu lazy val Quotes_reflect_Assign_apply: Symbol = Quotes_reflect_Assign.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_Inferred: Symbol = Quotes_reflect.requiredValue("Inferred") - @tu lazy val Quotes_reflect_Inferred_apply: Symbol = Quotes_reflect_Inferred.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_Literal: Symbol = Quotes_reflect.requiredValue("Literal") - @tu lazy val Quotes_reflect_Literal_apply: Symbol = Quotes_reflect_Literal.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_TreeMethods: Symbol = Quotes_reflect.requiredMethod("TreeMethods") - @tu lazy val Quotes_reflect_TreeMethods_asExpr: Symbol = Quotes_reflect_TreeMethods.requiredMethod("asExpr") - @tu lazy val Quotes_reflect_TypeRepr: Symbol = Quotes_reflect.requiredValue("TypeRepr") - @tu lazy val Quotes_reflect_TypeRepr_of: Symbol = Quotes_reflect_TypeRepr.requiredMethod("of") - @tu lazy val Quotes_reflect_TypeRepr_typeConstructorOf: Symbol = Quotes_reflect_TypeRepr.requiredMethod("typeConstructorOf") - @tu lazy val Quotes_reflect_TypeReprMethods: Symbol = Quotes_reflect.requiredValue("TypeReprMethods") - @tu lazy val Quotes_reflect_TypeReprMethods_asType: Symbol = Quotes_reflect_TypeReprMethods.requiredMethod("asType") - @tu lazy val Quotes_reflect_TypeTreeType: Symbol = Quotes_reflect.requiredType("TypeTree") - @tu lazy val Quotes_reflect_TermType: Symbol = Quotes_reflect.requiredType("Term") - @tu lazy val Quotes_reflect_BooleanConstant: Symbol = Quotes_reflect.requiredValue("BooleanConstant") - @tu lazy val Quotes_reflect_ByteConstant: Symbol = 
Quotes_reflect.requiredValue("ByteConstant") - @tu lazy val Quotes_reflect_ShortConstant: Symbol = Quotes_reflect.requiredValue("ShortConstant") - @tu lazy val Quotes_reflect_IntConstant: Symbol = Quotes_reflect.requiredValue("IntConstant") - @tu lazy val Quotes_reflect_LongConstant: Symbol = Quotes_reflect.requiredValue("LongConstant") - @tu lazy val Quotes_reflect_FloatConstant: Symbol = Quotes_reflect.requiredValue("FloatConstant") - @tu lazy val Quotes_reflect_DoubleConstant: Symbol = Quotes_reflect.requiredValue("DoubleConstant") - @tu lazy val Quotes_reflect_CharConstant: Symbol = Quotes_reflect.requiredValue("CharConstant") - @tu lazy val Quotes_reflect_StringConstant: Symbol = Quotes_reflect.requiredValue("StringConstant") - @tu lazy val Quotes_reflect_UnitConstant: Symbol = Quotes_reflect.requiredValue("UnitConstant") - @tu lazy val Quotes_reflect_NullConstant: Symbol = Quotes_reflect.requiredValue("NullConstant") - @tu lazy val Quotes_reflect_ClassOfConstant: Symbol = Quotes_reflect.requiredValue("ClassOfConstant") - - - @tu lazy val QuoteUnpicklerClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteUnpickler") - @tu lazy val QuoteUnpickler_unpickleExprV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleExprV2") - @tu lazy val QuoteUnpickler_unpickleTypeV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleTypeV2") - - @tu lazy val QuoteMatchingClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteMatching") - @tu lazy val QuoteMatching_ExprMatch: Symbol = QuoteMatchingClass.requiredMethod("ExprMatch") - @tu lazy val QuoteMatching_TypeMatch: Symbol = QuoteMatchingClass.requiredMethod("TypeMatch") - - @tu lazy val ToExprModule: Symbol = requiredModule("scala.quoted.ToExpr") - @tu lazy val ToExprModule_BooleanToExpr: Symbol = ToExprModule.requiredMethod("BooleanToExpr") - @tu lazy val ToExprModule_ByteToExpr: Symbol = ToExprModule.requiredMethod("ByteToExpr") - @tu lazy val ToExprModule_ShortToExpr: Symbol = ToExprModule.requiredMethod("ShortToExpr") - @tu lazy val ToExprModule_IntToExpr: Symbol = ToExprModule.requiredMethod("IntToExpr") - @tu lazy val ToExprModule_LongToExpr: Symbol = ToExprModule.requiredMethod("LongToExpr") - @tu lazy val ToExprModule_FloatToExpr: Symbol = ToExprModule.requiredMethod("FloatToExpr") - @tu lazy val ToExprModule_DoubleToExpr: Symbol = ToExprModule.requiredMethod("DoubleToExpr") - @tu lazy val ToExprModule_CharToExpr: Symbol = ToExprModule.requiredMethod("CharToExpr") - @tu lazy val ToExprModule_StringToExpr: Symbol = ToExprModule.requiredMethod("StringToExpr") - - @tu lazy val QuotedRuntimeModule: Symbol = requiredModule("scala.quoted.runtime.Expr") - @tu lazy val QuotedRuntime_exprQuote : Symbol = QuotedRuntimeModule.requiredMethod("quote") - @tu lazy val QuotedRuntime_exprSplice : Symbol = QuotedRuntimeModule.requiredMethod("splice") - @tu lazy val QuotedRuntime_exprNestedSplice : Symbol = QuotedRuntimeModule.requiredMethod("nestedSplice") - - @tu lazy val QuotedRuntime_SplicedTypeAnnot: ClassSymbol = requiredClass("scala.quoted.runtime.SplicedType") - - @tu lazy val QuotedRuntimePatterns: Symbol = requiredModule("scala.quoted.runtime.Patterns") - @tu lazy val QuotedRuntimePatterns_patternHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHole") - @tu lazy val QuotedRuntimePatterns_patternHigherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHigherOrderHole") - @tu lazy val QuotedRuntimePatterns_higherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHole") - @tu lazy val 
QuotedRuntimePatterns_patternTypeAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("patternType") - @tu lazy val QuotedRuntimePatterns_fromAboveAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("fromAbove") - - @tu lazy val QuotedTypeClass: ClassSymbol = requiredClass("scala.quoted.Type") - @tu lazy val QuotedType_splice: Symbol = QuotedTypeClass.requiredType(tpnme.Underlying) - - @tu lazy val QuotedTypeModule: Symbol = QuotedTypeClass.companionModule - @tu lazy val QuotedTypeModule_of: Symbol = QuotedTypeModule.requiredMethod("of") - - @tu lazy val CanEqualClass: ClassSymbol = getClassIfDefined("scala.Eql").orElse(requiredClass("scala.CanEqual")).asClass - def CanEqual_canEqualAny(using Context): TermSymbol = - val methodName = if CanEqualClass.name == tpnme.Eql then nme.eqlAny else nme.canEqualAny - CanEqualClass.companionModule.requiredMethod(methodName) - - @tu lazy val CanThrowClass: ClassSymbol = requiredClass("scala.CanThrow") - @tu lazy val throwsAlias: Symbol = ScalaRuntimePackageVal.requiredType(tpnme.THROWS) - - @tu lazy val TypeBoxClass: ClassSymbol = requiredClass("scala.runtime.TypeBox") - @tu lazy val TypeBox_CAP: TypeSymbol = TypeBoxClass.requiredType(tpnme.CAP) - - @tu lazy val MatchCaseClass: ClassSymbol = requiredClass("scala.runtime.MatchCase") - @tu lazy val NotGivenClass: ClassSymbol = requiredClass("scala.util.NotGiven") - @tu lazy val NotGiven_value: Symbol = NotGivenClass.companionModule.requiredMethod(nme.value) - - @tu lazy val ValueOfClass: ClassSymbol = requiredClass("scala.ValueOf") - - @tu lazy val FromDigitsClass: ClassSymbol = requiredClass("scala.util.FromDigits") - @tu lazy val FromDigits_WithRadixClass: ClassSymbol = requiredClass("scala.util.FromDigits.WithRadix") - @tu lazy val FromDigits_DecimalClass: ClassSymbol = requiredClass("scala.util.FromDigits.Decimal") - @tu lazy val FromDigits_FloatingClass: ClassSymbol = requiredClass("scala.util.FromDigits.Floating") - - @tu lazy val XMLTopScopeModule: Symbol = requiredModule("scala.xml.TopScope") - - @tu lazy val MainAnnotationClass: ClassSymbol = requiredClass("scala.annotation.MainAnnotation") - @tu lazy val MainAnnotationInfo: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Info") - @tu lazy val MainAnnotationParameter: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Parameter") - @tu lazy val MainAnnotationParameterAnnotation: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.ParameterAnnotation") - @tu lazy val MainAnnotationCommand: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Command") - - @tu lazy val CommandLineParserModule: Symbol = requiredModule("scala.util.CommandLineParser") - @tu lazy val CLP_ParseError: ClassSymbol = CommandLineParserModule.requiredClass("ParseError").typeRef.symbol.asClass - @tu lazy val CLP_parseArgument: Symbol = CommandLineParserModule.requiredMethod("parseArgument") - @tu lazy val CLP_parseRemainingArguments: Symbol = CommandLineParserModule.requiredMethod("parseRemainingArguments") - @tu lazy val CLP_showError: Symbol = CommandLineParserModule.requiredMethod("showError") - - @tu lazy val TupleTypeRef: TypeRef = requiredClassRef("scala.Tuple") - def TupleClass(using Context): ClassSymbol = TupleTypeRef.symbol.asClass - @tu lazy val Tuple_cons: Symbol = TupleClass.requiredMethod("*:") - @tu lazy val EmptyTupleModule: Symbol = requiredModule("scala.EmptyTuple") - @tu lazy val NonEmptyTupleTypeRef: TypeRef = requiredClassRef("scala.NonEmptyTuple") - def NonEmptyTupleClass(using Context): ClassSymbol = 
NonEmptyTupleTypeRef.symbol.asClass - lazy val NonEmptyTuple_tail: Symbol = NonEmptyTupleClass.requiredMethod("tail") - @tu lazy val PairClass: ClassSymbol = requiredClass("scala.*:") - - @tu lazy val TupleXXLClass: ClassSymbol = requiredClass("scala.runtime.TupleXXL") - def TupleXXLModule(using Context): Symbol = TupleXXLClass.companionModule - - def TupleXXL_fromIterator(using Context): Symbol = TupleXXLModule.requiredMethod("fromIterator") - - @tu lazy val RuntimeTupleMirrorTypeRef: TypeRef = requiredClassRef("scala.runtime.TupleMirror") - - @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") - @tu lazy val RuntimeTuplesModuleClass: Symbol = RuntimeTuplesModule.moduleClass - @tu lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") - @tu lazy val RuntimeTuples_concatIterator: Symbol = RuntimeTuplesModule.requiredMethod("concatIterator") - @tu lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") - @tu lazy val RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") - @tu lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") - @tu lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") - @tu lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") - @tu lazy val RuntimeTuples_toArray: Symbol = RuntimeTuplesModule.requiredMethod("toArray") - @tu lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") - @tu lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") - @tu lazy val RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") - @tu lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") - - @tu lazy val TupledFunctionTypeRef: TypeRef = requiredClassRef("scala.util.TupledFunction") - def TupledFunctionClass(using Context): ClassSymbol = TupledFunctionTypeRef.symbol.asClass - def RuntimeTupleFunctionsModule(using Context): Symbol = requiredModule("scala.runtime.TupledFunctions") - - @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") - @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") - @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") - @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") - @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") - @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") - - // Annotation base classes - @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") - @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") - @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") - - // Annotation classes - @tu lazy val AllowConversionsAnnot: ClassSymbol = requiredClass("scala.annotation.allowConversions") - @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") - @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") - @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") - @tu lazy val BodyAnnot: ClassSymbol = 
requiredClass("scala.annotation.internal.Body") - @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child") - @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount") - @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") - @tu lazy val DeprecatedAnnot: ClassSymbol = requiredClass("scala.deprecated") - @tu lazy val DeprecatedOverridingAnnot: ClassSymbol = requiredClass("scala.deprecatedOverriding") - @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") - @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") - @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") - @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") - @tu lazy val InvariantBetweenAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InvariantBetween") - @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") - @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") - @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") - @tu lazy val NowarnAnnot: ClassSymbol = requiredClass("scala.annotation.nowarn") - @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") - @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") - @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") - @tu lazy val SourceFileAnnot: ClassSymbol = requiredClass("scala.annotation.internal.SourceFile") - @tu lazy val ScalaSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaSignature") - @tu lazy val ScalaLongSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaLongSignature") - @tu lazy val ScalaStrictFPAnnot: ClassSymbol = requiredClass("scala.annotation.strictfp") - @tu lazy val ScalaStaticAnnot: ClassSymbol = requiredClass("scala.annotation.static") - @tu lazy val SerialVersionUIDAnnot: ClassSymbol = requiredClass("scala.SerialVersionUID") - @tu lazy val TailrecAnnot: ClassSymbol = requiredClass("scala.annotation.tailrec") - @tu lazy val ThreadUnsafeAnnot: ClassSymbol = requiredClass("scala.annotation.threadUnsafe") - @tu lazy val ConstructorOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.constructorOnly") - @tu lazy val CompileTimeOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.compileTimeOnly") - @tu lazy val SwitchAnnot: ClassSymbol = requiredClass("scala.annotation.switch") - @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") - @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") - @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") - @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") - @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") - @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") - @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") - @tu lazy val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") - @tu lazy val FieldMetaAnnot: ClassSymbol = 
requiredClass("scala.annotation.meta.field") - @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") - @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") - @tu lazy val SetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.setter") - @tu lazy val ShowAsInfixAnnot: ClassSymbol = requiredClass("scala.annotation.showAsInfix") - @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") - @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") - @tu lazy val VarargsAnnot: ClassSymbol = requiredClass("scala.annotation.varargs") - @tu lazy val SinceAnnot: ClassSymbol = requiredClass("scala.annotation.since") - @tu lazy val RequiresCapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.internal.requiresCapability") - @tu lazy val RetainsAnnot: ClassSymbol = requiredClass("scala.annotation.retains") - @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") - - @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") - - // A list of meta-annotations that are relevant for fields and accessors - @tu lazy val FieldAccessorMetaAnnots: Set[Symbol] = - Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot) - - // A list of annotations that are commonly used to indicate that a field/method argument or return - // type is not null. These annotations are used by the nullification logic in JavaNullInterop to - // improve the precision of type nullification. - // We don't require that any of these annotations be present in the class path, but we want to - // create Symbols for the ones that are present, so they can be checked during nullification. 
- @tu lazy val NotNullAnnots: List[ClassSymbol] = getClassesIfDefined( - "javax.annotation.Nonnull" :: - "javax.validation.constraints.NotNull" :: - "androidx.annotation.NonNull" :: - "android.support.annotation.NonNull" :: - "android.annotation.NonNull" :: - "com.android.annotations.NonNull" :: - "org.eclipse.jdt.annotation.NonNull" :: - "edu.umd.cs.findbugs.annotations.NonNull" :: - "org.checkerframework.checker.nullness.qual.NonNull" :: - "org.checkerframework.checker.nullness.compatqual.NonNullDecl" :: - "org.jetbrains.annotations.NotNull" :: - "org.springframework.lang.NonNull" :: - "org.springframework.lang.NonNullApi" :: - "org.springframework.lang.NonNullFields" :: - "lombok.NonNull" :: - "reactor.util.annotation.NonNull" :: - "reactor.util.annotation.NonNullApi" :: - "io.reactivex.annotations.NonNull" :: Nil) - - // convenient one-parameter method types - def methOfAny(tp: Type): MethodType = MethodType(List(AnyType), tp) - def methOfAnyVal(tp: Type): MethodType = MethodType(List(AnyValType), tp) - def methOfAnyRef(tp: Type): MethodType = MethodType(List(ObjectType), tp) - - // Derived types - - def RepeatedParamType: TypeRef = RepeatedParamClass.typeRef - - def ClassType(arg: Type)(using Context): Type = { - val ctype = ClassClass.typeRef - if (ctx.phase.erasedTypes) ctype else ctype.appliedTo(arg) - } - - /** The enumeration type, goven a value of the enumeration */ - def EnumType(sym: Symbol)(using Context): TypeRef = - // given (in java): "class A { enum E { VAL1 } }" - // - sym: the symbol of the actual enumeration value (VAL1) - // - .owner: the ModuleClassSymbol of the enumeration (object E) - // - .linkedClass: the ClassSymbol of the enumeration (class E) - sym.owner.linkedClass.typeRef - - object FunctionOf { - def apply(args: List[Type], resultType: Type, isContextual: Boolean = false, isErased: Boolean = false)(using Context): Type = - FunctionType(args.length, isContextual, isErased).appliedTo(args ::: resultType :: Nil) - def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean, Boolean)] = { - val tsym = ft.typeSymbol - if isFunctionClass(tsym) && ft.isRef(tsym) then - val targs = ft.dealias.argInfos - if (targs.isEmpty) None - else Some(targs.init, targs.last, tsym.name.isContextFunction, tsym.name.isErasedFunction) - else None - } - } - - object PartialFunctionOf { - def apply(arg: Type, result: Type)(using Context): Type = - PartialFunctionClass.typeRef.appliedTo(arg :: result :: Nil) - def unapply(pft: Type)(using Context): Option[(Type, List[Type])] = - if (pft.isRef(PartialFunctionClass)) { - val targs = pft.dealias.argInfos - if (targs.length == 2) Some((targs.head, targs.tail)) else None - } - else None - } - - object ArrayOf { - def apply(elem: Type)(using Context): Type = - if (ctx.erasedTypes) JavaArrayType(elem) - else ArrayType.appliedTo(elem :: Nil) - def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match { - case AppliedType(at, arg :: Nil) if at.isRef(ArrayType.symbol) => Some(arg) - case JavaArrayType(tp) if ctx.erasedTypes => Some(tp) - case _ => None - } - } - - object MatchCase { - def apply(pat: Type, body: Type)(using Context): Type = - MatchCaseClass.typeRef.appliedTo(pat, body) - def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match { - case AppliedType(tycon, pat :: body :: Nil) if tycon.isRef(MatchCaseClass) => - Some((pat, body)) - case _ => - None - } - def isInstance(tp: Type)(using Context): Boolean = tp match { - case AppliedType(tycon: TypeRef, _) => - tycon.name == tpnme.MatchCase && // 
necessary pre-filter to avoid forcing symbols - tycon.isRef(MatchCaseClass) - case _ => false - } - } - - /** An extractor for multi-dimensional arrays. - * Note that this will also extract the high bound if an - * element type is a wildcard upper-bounded by an array. E.g. - * - * Array[? <: Array[? <: Number]] - * - * would match - * - * MultiArrayOf(, 2) - */ - object MultiArrayOf { - def apply(elem: Type, ndims: Int)(using Context): Type = - if (ndims == 0) elem else ArrayOf(apply(elem, ndims - 1)) - def unapply(tp: Type)(using Context): Option[(Type, Int)] = tp match { - case ArrayOf(elemtp) => - def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match { - case tp @ TypeBounds(lo, hi @ MultiArrayOf(finalElemTp, n)) => - Some(finalElemTp, n) - case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1) - case _ => Some(elemtp, 1) - } - recur(elemtp) - case _ => - None - } - } - - /** Extractor for context function types representing by-name parameters, of the form - * `() ?=> T`. - * Under purefunctions, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`. - */ - object ByNameFunction: - def apply(tp: Type)(using Context): Type = tp match - case tp @ EventuallyCapturingType(tp1, refs) if tp.annot.symbol == RetainsByNameAnnot => - CapturingType(apply(tp1), refs) - case _ => - defn.ContextFunction0.typeRef.appliedTo(tp :: Nil) - def unapply(tp: Type)(using Context): Option[Type] = tp match - case tp @ AppliedType(tycon, arg :: Nil) if defn.isByNameFunctionClass(tycon.typeSymbol) => - Some(arg) - case tp @ AnnotatedType(parent, _) => - unapply(parent) - case _ => - None - - final def isByNameFunctionClass(sym: Symbol): Boolean = - sym eq ContextFunction0 - - def isByNameFunction(tp: Type)(using Context): Boolean = tp match - case ByNameFunction(_) => true - case _ => false - - final def isCompiletime_S(sym: Symbol)(using Context): Boolean = - sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass - - private val compiletimePackageAnyTypes: Set[Name] = Set( - tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString - ) - private val compiletimePackageNumericTypes: Set[Name] = Set( - tpnme.Plus, tpnme.Minus, tpnme.Times, tpnme.Div, tpnme.Mod, - tpnme.Lt, tpnme.Gt, tpnme.Ge, tpnme.Le, - tpnme.Abs, tpnme.Negate, tpnme.Min, tpnme.Max - ) - private val compiletimePackageIntTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( - tpnme.ToString, // ToString is moved to ops.any and deprecated for ops.int - tpnme.NumberOfLeadingZeros, tpnme.ToLong, tpnme.ToFloat, tpnme.ToDouble, - tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR - ) - private val compiletimePackageLongTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( - tpnme.NumberOfLeadingZeros, tpnme.ToInt, tpnme.ToFloat, tpnme.ToDouble, - tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR - ) - private val compiletimePackageFloatTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( - tpnme.ToInt, tpnme.ToLong, tpnme.ToDouble - ) - private val compiletimePackageDoubleTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( - tpnme.ToInt, tpnme.ToLong, tpnme.ToFloat - ) - private val compiletimePackageBooleanTypes: Set[Name] = Set(tpnme.Not, tpnme.Xor, tpnme.And, tpnme.Or) - private val compiletimePackageStringTypes: Set[Name] = Set( - tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches, tpnme.CharAt - ) - private val compiletimePackageOpTypes: Set[Name] = - Set(tpnme.S) - ++ compiletimePackageAnyTypes - ++ 
compiletimePackageIntTypes - ++ compiletimePackageLongTypes - ++ compiletimePackageFloatTypes - ++ compiletimePackageDoubleTypes - ++ compiletimePackageBooleanTypes - ++ compiletimePackageStringTypes - - final def isCompiletimeAppliedType(sym: Symbol)(using Context): Boolean = - compiletimePackageOpTypes.contains(sym.name) - && ( - isCompiletime_S(sym) - || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name) - || sym.owner == CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name) - || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name) - || sym.owner == CompiletimeOpsFloatModuleClass && compiletimePackageFloatTypes.contains(sym.name) - || sym.owner == CompiletimeOpsDoubleModuleClass && compiletimePackageDoubleTypes.contains(sym.name) - || sym.owner == CompiletimeOpsBooleanModuleClass && compiletimePackageBooleanTypes.contains(sym.name) - || sym.owner == CompiletimeOpsStringModuleClass && compiletimePackageStringTypes.contains(sym.name) - ) - - // ----- Scala-2 library patches -------------------------------------- - - /** The `scala.runtime.stdLibPacthes` package contains objects - * that contain defnitions that get added as members to standard library - * objects with the same name. - */ - @tu lazy val StdLibPatchesPackage: TermSymbol = requiredPackage("scala.runtime.stdLibPatches") - @tu private lazy val ScalaPredefModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.Predef").moduleClass - @tu private lazy val LanguageModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.language").moduleClass - - /** If `sym` is a patched library class, the source file of its patch class, - * otherwise `NoSource` - */ - def patchSource(sym: Symbol)(using Context): SourceFile = - if sym == ScalaPredefModuleClass then ScalaPredefModuleClassPatch.source - else if sym == LanguageModuleClass then LanguageModuleClassPatch.source - else NoSource - - /** A finalizer that patches standard library classes. - * It copies all non-private, non-synthetic definitions from `patchCls` - * to `denot` while changing their owners to `denot`. Before that it deletes - * any definitions of `denot` that have the same name as one of the copied - * definitions. - * - * If an object is present in both the original class and the patch class, - * it is not overwritten. Instead its members are copied recursively. - * - * To avpid running into cycles on bootstrap, patching happens only if `patchCls` - * is read from a classfile. 
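// Illustrative sketch of the user-visible effect (ordinary user code, names made up):
// members defined on a patch object such as scala.runtime.stdLibPatches.Predef are
// spliced into scala.Predef, so a call like summon below resolves with no import even
// though the Scala 2 Predef classfile never defined it.
object PatchedPredefSketch:
  trait Show[A]:
    def show(a: A): String
  given Show[Int] with
    def show(a: Int): String = s"Int($a)"
  def render[A](a: A)(using Show[A]): String = summon[Show[A]].show(a)
  val rendered = render(3)  // "Int(3)", resolved through the patched-in summon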
- */ - def patchStdLibClass(denot: ClassDenotation)(using Context): Unit = - def patch2(denot: ClassDenotation, patchCls: Symbol): Unit = - val scope = denot.info.decls.openForMutations - - def recurse(patch: Symbol) = patch.is(Module) && scope.lookup(patch.name).exists - - def makeClassSymbol(patch: Symbol, parents: List[Type], selfInfo: TypeOrSymbol) = - newClassSymbol( - owner = denot.symbol, - name = patch.name.asTypeName, - flags = patch.flags, - // need to rebuild a fresh ClassInfo - infoFn = cls => ClassInfo( - prefix = denot.symbol.thisType, - cls = cls, - declaredParents = parents, // assume parents in patch don't refer to symbols in the patch - decls = newScope, - selfInfo = - if patch.is(Module) - then TermRef(denot.symbol.thisType, patch.name.sourceModuleName) - else selfInfo // assume patch self type annotation does not refer to symbols in the patch - ), - privateWithin = patch.privateWithin, - coord = denot.symbol.coord, - assocFile = denot.symbol.associatedFile - ) - - def makeNonClassSymbol(patch: Symbol) = - if patch.is(Inline) then - // Inline symbols contain trees in annotations, which is coupled - // with the underlying symbol. - // Changing owner for inline symbols is a simple workaround. - patch.denot = patch.denot.copySymDenotation(owner = denot.symbol) - patch - else - // change `info` which might contain reference to the patch - patch.copy( - owner = denot.symbol, - info = - if patch.is(Module) - then TypeRef(denot.symbol.thisType, patch.name.moduleClassName) - else patch.info // assume non-object info does not refer to symbols in the patch - ) - - if patchCls.exists then - val patches = patchCls.info.decls.filter(patch => - !patch.isConstructor && !patch.isOneOf(PrivateOrSynthetic)) - for patch <- patches if !recurse(patch) do - val e = scope.lookupEntry(patch.name) - if e != null then scope.unlink(e) - for patch <- patches do - patch.ensureCompleted() - if !recurse(patch) then - val sym = - patch.info match - case ClassInfo(_, _, parents, _, selfInfo) => - makeClassSymbol(patch, parents, selfInfo) - case _ => - makeNonClassSymbol(patch) - end match - sym.annotations = patch.annotations - scope.enter(sym) - if patch.isClass then - patch2(scope.lookup(patch.name).asClass, patch) - - def patchWith(patchCls: Symbol) = - denot.sourceModule.info = denot.typeRef // we run into a cyclic reference when patching if this line is omitted - patch2(denot, patchCls) - - if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then - patchWith(ScalaPredefModuleClassPatch) - else if denot.name == tpnme.language.moduleClassName && denot.symbol == LanguageModuleClass then - patchWith(LanguageModuleClassPatch) - end patchStdLibClass - - // ----- Symbol sets --------------------------------------------------- - - @tu lazy val topClasses: Set[Symbol] = Set(AnyClass, MatchableClass, ObjectClass, AnyValClass) - - @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) - - /** Base classes that are assumed to be pure for the purposes of capture checking. - * Every class inheriting from a pure baseclass is pure. 
-    */ -   @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass) - -   /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking. -    */ -   @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) - -   @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] -   val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) -   def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) - -   @tu lazy val caseClassSynthesized: List[Symbol] = List( -     Any_hashCode, Any_equals, Any_toString, Product_canEqual, Product_productArity, -     Product_productPrefix, Product_productElement, Product_productElementName) - -   val LazyHolder: PerRun[Map[Symbol, Symbol]] = new PerRun({ -     def holderImpl(holderType: String) = requiredClass("scala.runtime." + holderType) -     Map[Symbol, Symbol]( -       IntClass     -> holderImpl("LazyInt"), -       LongClass    -> holderImpl("LazyLong"), -       BooleanClass -> holderImpl("LazyBoolean"), -       FloatClass   -> holderImpl("LazyFloat"), -       DoubleClass  -> holderImpl("LazyDouble"), -       ByteClass    -> holderImpl("LazyByte"), -       CharClass    -> holderImpl("LazyChar"), -       ShortClass   -> holderImpl("LazyShort") -     ) -     .withDefaultValue(holderImpl("LazyRef")) -   }) - -   @tu lazy val TupleType: Array[TypeRef | Null] = mkArityArray("scala.Tuple", MaxTupleArity, 1) - -   def isSpecializedTuple(cls: Symbol)(using Context): Boolean = -     cls.isClass && TupleSpecializedClasses.exists(tupleCls => cls.name.isSpecializedNameOf(tupleCls.name)) - -   def SpecializedTuple(base: Symbol, args: List[Type])(using Context): Symbol = -     base.owner.requiredClass(base.name.specializedName(args)) - -   /** Cached function types of arbitrary arities. -    *  Function types are created on demand with newFunctionNTrait, which is -    *  called from a synthesizer installed in ScalaPackageClass. 
- */ - private class FunType(prefix: String): - private var classRefs: Array[TypeRef | Null] = new Array(22) - def apply(n: Int): TypeRef = - while n >= classRefs.length do - val classRefs1 = new Array[TypeRef | Null](classRefs.length * 2) - Array.copy(classRefs, 0, classRefs1, 0, classRefs.length) - classRefs = classRefs1 - val funName = s"scala.$prefix$n" - if classRefs(n) == null then - classRefs(n) = - if prefix.startsWith("Impure") - then staticRef(funName.toTypeName).symbol.typeRef - else requiredClassRef(funName) - classRefs(n).nn - end FunType - - private def funTypeIdx(isContextual: Boolean, isErased: Boolean, isImpure: Boolean): Int = - (if isContextual then 1 else 0) - + (if isErased then 2 else 0) - + (if isImpure then 4 else 0) - - private val funTypeArray: IArray[FunType] = - val arr = Array.ofDim[FunType](8) - val choices = List(false, true) - for contxt <- choices; erasd <- choices; impure <- choices do - var str = "Function" - if contxt then str = "Context" + str - if erasd then str = "Erased" + str - if impure then str = "Impure" + str - arr(funTypeIdx(contxt, erasd, impure)) = FunType(str) - IArray.unsafeFromArray(arr) - - def FunctionSymbol(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = - funTypeArray(funTypeIdx(isContextual, isErased, isImpure))(n).symbol - - @tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply) - @tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply) - - @tu lazy val Function0: Symbol = FunctionSymbol(0) - @tu lazy val Function1: Symbol = FunctionSymbol(1) - @tu lazy val Function2: Symbol = FunctionSymbol(2) - @tu lazy val ContextFunction0: Symbol = FunctionSymbol(0, isContextual = true) - - def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = - FunctionSymbol(n, isContextual && !ctx.erasedTypes, isErased, isImpure).typeRef - - lazy val PolyFunctionClass = requiredClass("scala.PolyFunction") - def PolyFunctionType = PolyFunctionClass.typeRef - - /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */ - def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match - case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass => - clsd.name.asInstanceOf[TypeName] - case _ => - EmptyTypeName - - /** If type `ref` refers to a class in the scala package, its name, otherwise EmptyTypeName */ - def scalaClassName(ref: Type)(using Context): TypeName = scalaClassName(ref.classSymbol) - - private def isVarArityClass(cls: Symbol, prefix: String) = - cls.isClass - && cls.owner.eq(ScalaPackageClass) - && cls.name.testSimple(name => - name.startsWith(prefix) - && name.length > prefix.length - && digitsOnlyAfter(name, prefix.length)) - - private def digitsOnlyAfter(name: SimpleName, idx: Int): Boolean = - idx == name.length || name(idx).isDigit && digitsOnlyAfter(name, idx + 1) - - def isBottomClass(cls: Symbol): Boolean = - if ctx.mode.is(Mode.SafeNulls) && !ctx.phase.erasedTypes - then cls == NothingClass - else isBottomClassAfterErasure(cls) - - def isBottomClassAfterErasure(cls: Symbol): Boolean = cls == NothingClass || cls == NullClass - - /** Is any function class where - * - FunctionXXL - * - FunctionN for N >= 0 - * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isFunction - - /** Is a 
function class, or an impure function type alias */ - def isFunctionSymbol(sym: Symbol): Boolean = - sym.isType && (sym.owner eq ScalaPackageClass) && sym.name.isFunction - - /** Is a function class where - * - FunctionN for N >= 0 and N != XXL - */ - def isPlainFunctionClass(cls: Symbol) = isVarArityClass(cls, str.Function) - - /** Is an context function class. - * - ContextFunctionN for N >= 0 - * - ErasedContextFunctionN for N > 0 - */ - def isContextFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isContextFunction - - /** Is an erased function class. - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isErasedFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isErasedFunction - - /** Is either FunctionXXL or a class that will be erased to FunctionXXL - * - FunctionXXL - * - FunctionN for N >= 22 - * - ContextFunctionN for N >= 22 - */ - def isXXLFunctionClass(cls: Symbol): Boolean = { - val name = scalaClassName(cls) - (name eq tpnme.FunctionXXL) || name.functionArity > MaxImplementedFunctionArity - } - - /** Is a synthetic function class - * - FunctionN for N > 22 - * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isSyntheticFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isSyntheticFunction - - def isAbstractFunctionClass(cls: Symbol): Boolean = isVarArityClass(cls, str.AbstractFunction) - def isTupleClass(cls: Symbol): Boolean = isVarArityClass(cls, str.Tuple) - def isProductClass(cls: Symbol): Boolean = isVarArityClass(cls, str.Product) - - def isBoxedUnitClass(cls: Symbol): Boolean = - cls.isClass && (cls.owner eq ScalaRuntimePackageClass) && cls.name == tpnme.BoxedUnit - - /** Returns the erased type of the function class `cls` - * - FunctionN for N > 22 becomes FunctionXXL - * - FunctionN for 22 > N >= 0 remains as FunctionN - * - ContextFunctionN for N > 22 becomes FunctionXXL - * - ContextFunctionN for N <= 22 becomes FunctionN - * - ErasedFunctionN becomes Function0 - * - ImplicitErasedFunctionN becomes Function0 - * - anything else becomes a NoType - */ - def functionTypeErasure(cls: Symbol): Type = - val arity = scalaClassName(cls).functionArity - if cls.name.isErasedFunction then FunctionType(0) - else if arity > 22 then FunctionXXLClass.typeRef - else if arity >= 0 then FunctionType(arity) - else NoType - - private val JavaImportFns: List[RootRef] = List( - RootRef(() => JavaLangPackageVal.termRef) - ) - - private val ScalaImportFns: List[RootRef] = - JavaImportFns :+ - RootRef(() => ScalaPackageVal.termRef) - - private val PredefImportFns: RootRef = - RootRef(() => ScalaPredefModule.termRef, isPredef=true) - - @tu private lazy val JavaRootImportFns: List[RootRef] = - if ctx.settings.YnoImports.value then Nil - else JavaImportFns - - @tu private lazy val ScalaRootImportFns: List[RootRef] = - if ctx.settings.YnoImports.value then Nil - else if ctx.settings.YnoPredef.value then ScalaImportFns - else ScalaImportFns :+ PredefImportFns - - @tu private lazy val JavaRootImportTypes: List[TermRef] = JavaRootImportFns.map(_.refFn()) - @tu private lazy val ScalaRootImportTypes: List[TermRef] = ScalaRootImportFns.map(_.refFn()) - @tu private lazy val JavaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(JavaRootImportTypes) - @tu private lazy val ScalaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(ScalaRootImportTypes) - - /** Are we compiling a java source file? 
*/ - private def isJavaContext(using Context): Boolean = - ctx.compilationUnit.isJava - - private def unqualifiedTypes(refs: List[TermRef]) = - val types = refs.toSet[NamedType] - types ++ types.map(_.symbol.moduleClass.typeRef) - - /** Lazy references to the root imports */ - def rootImportFns(using Context): List[RootRef] = - if isJavaContext then JavaRootImportFns - else ScalaRootImportFns - - /** Root types imported by default */ - def rootImportTypes(using Context): List[TermRef] = - if isJavaContext then JavaRootImportTypes - else ScalaRootImportTypes - - /** Modules whose members are in the default namespace and their module classes */ - def unqualifiedOwnerTypes(using Context): Set[NamedType] = - if isJavaContext then JavaUnqualifiedOwnerTypes - else ScalaUnqualifiedOwnerTypes - - /** Names of the root import symbols that can be hidden by other imports */ - @tu lazy val ShadowableImportNames: Set[TermName] = Set("Predef".toTermName) - - /** Class symbols for which no class exist at runtime */ - @tu lazy val NotRuntimeClasses: Set[Symbol] = Set(AnyClass, MatchableClass, AnyValClass, NullClass, NothingClass) - - @tu lazy val SpecialClassTagClasses: Set[Symbol] = Set(UnitClass, AnyClass, AnyValClass) - - @tu lazy val SpecialManifestClasses: Set[Symbol] = Set(AnyClass, AnyValClass, ObjectClass, NullClass, NothingClass) - - /** Classes that are known not to have an initializer irrespective of - * whether NoInits is set. Note: FunctionXXLClass is in this set - * because if it is compiled by Scala2, it does not get a NoInit flag. - * But since it is introduced only at erasure, there's no chance - * for augmentScala2Traits to do anything on a class that inherits it. So - * it also misses an implementation class, which means that the usual scheme - * of calling a superclass init in the implementation class of a Scala2 - * trait gets screwed up. Therefore, it is mandatory that FunctionXXL - * is treated as a NoInit trait. - */ - @tu lazy val NoInitClasses: Set[Symbol] = NotRuntimeClasses + FunctionXXLClass - - def isPolymorphicAfterErasure(sym: Symbol): Boolean = - (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq Object_synchronized) - - /** Is this type a `TupleN` type? 
- * - * @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]` - */ - def isTupleNType(tp: Type)(using Context): Boolean = { - val tp1 = tp.dealias - val arity = tp1.argInfos.length - arity <= MaxTupleArity && { - val tupletp = TupleType(arity) - tupletp != null && tp1.isRef(tupletp.symbol) - } - } - - def tupleType(elems: List[Type]): Type = { - val arity = elems.length - if 0 < arity && arity <= MaxTupleArity then - val tupletp = TupleType(arity) - if tupletp != null then tupletp.appliedTo(elems) - else TypeOps.nestedPairs(elems) - else TypeOps.nestedPairs(elems) - } - - def tupleTypes(tp: Type, bound: Int = Int.MaxValue)(using Context): Option[List[Type]] = { - @tailrec def rec(tp: Type, acc: List[Type], bound: Int): Option[List[Type]] = tp.normalized.dealias match { - case _ if bound < 0 => Some(acc.reverse) - case tp: AppliedType if PairClass == tp.classSymbol => rec(tp.args(1), tp.args.head :: acc, bound - 1) - case tp: AppliedType if isTupleNType(tp) => Some(acc.reverse ::: tp.args) - case tp: TermRef if tp.symbol == defn.EmptyTupleModule => Some(acc.reverse) - case _ => None - } - rec(tp.stripTypeVar, Nil, bound) - } - - def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass) - - /** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN - * instance? - */ - def isNonRefinedFunction(tp: Type)(using Context): Boolean = - val arity = functionArity(tp) - val sym = tp.dealias.typeSymbol - - arity >= 0 - && isFunctionClass(sym) - && tp.isRef( - FunctionType(arity, sym.name.isContextFunction, sym.name.isErasedFunction).typeSymbol, - skipRefined = false) - end isNonRefinedFunction - - /** Is `tp` a representation of a (possibly dependent) function type or an alias of such? */ - def isFunctionType(tp: Type)(using Context): Boolean = - isNonRefinedFunction(tp.dropDependentRefinement) - - def isFunctionOrPolyType(tp: Type)(using Context): Boolean = - isFunctionType(tp) || (tp.typeSymbol eq defn.PolyFunctionClass) - - private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = - for base <- bases; tp <- paramTypes do - cls.enter(newSymbol(cls, base.specializedName(List(tp)), Method, ExprType(tp))) - cls - - @tu lazy val Tuple1: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple1"), List(nme._1), Tuple1SpecializedParamTypes) - @tu lazy val Tuple2: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple2"), List(nme._1, nme._2), Tuple2SpecializedParamTypes) - - @tu lazy val TupleSpecializedClasses: Set[Symbol] = Set(Tuple1, Tuple2) - @tu lazy val Tuple1SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType) - @tu lazy val Tuple2SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType, CharType, BooleanType) - @tu lazy val Tuple1SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple1SpecializedParamTypes.map(_.symbol)) - @tu lazy val Tuple2SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple2SpecializedParamTypes.map(_.symbol)) - - // Specialized type parameters defined for scala.Function{0,1,2}. 
- @tu lazy val Function1SpecializedParamTypes: collection.Set[TypeRef] = - Set(IntType, LongType, FloatType, DoubleType) - @tu lazy val Function2SpecializedParamTypes: collection.Set[TypeRef] = - Set(IntType, LongType, DoubleType) - @tu lazy val Function0SpecializedReturnTypes: collection.Set[TypeRef] = - ScalaNumericValueTypeList.toSet + UnitType + BooleanType - @tu lazy val Function1SpecializedReturnTypes: collection.Set[TypeRef] = - Set(UnitType, BooleanType, IntType, FloatType, LongType, DoubleType) - @tu lazy val Function2SpecializedReturnTypes: collection.Set[TypeRef] = - Function1SpecializedReturnTypes - - @tu lazy val Function1SpecializedParamClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function1SpecializedParamTypes.map(_.symbol)) - @tu lazy val Function2SpecializedParamClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function2SpecializedParamTypes.map(_.symbol)) - @tu lazy val Function0SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function0SpecializedReturnTypes.map(_.symbol)) - @tu lazy val Function1SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function1SpecializedReturnTypes.map(_.symbol)) - @tu lazy val Function2SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function2SpecializedReturnTypes.map(_.symbol)) - - def isSpecializableTuple(base: Symbol, args: List[Type])(using Context): Boolean = - args.length <= 2 && base.isClass && TupleSpecializedClasses.exists(base.asClass.derivesFrom) && args.match - case List(x) => Tuple1SpecializedParamClasses().contains(x.classSymbol) - case List(x, y) => Tuple2SpecializedParamClasses().contains(x.classSymbol) && Tuple2SpecializedParamClasses().contains(y.classSymbol) - case _ => false - && base.owner.denot.info.member(base.name.specializedName(args)).exists // when dotc compiles the stdlib there are no specialised classes - - def isSpecializableFunction(cls: ClassSymbol, paramTypes: List[Type], retType: Type)(using Context): Boolean = - paramTypes.length <= 2 - && (cls.derivesFrom(FunctionSymbol(paramTypes.length)) || isByNameFunctionClass(cls)) - && isSpecializableFunctionSAM(paramTypes, retType) - - /** If the Single Abstract Method of a Function class has this type, is it specializable? 
*/ -   def isSpecializableFunctionSAM(paramTypes: List[Type], retType: Type)(using Context): Boolean = -     paramTypes.length <= 2 && (paramTypes match { -       case Nil => -         Function0SpecializedReturnClasses().contains(retType.typeSymbol) -       case List(paramType0) => -         Function1SpecializedParamClasses().contains(paramType0.typeSymbol) && -         Function1SpecializedReturnClasses().contains(retType.typeSymbol) -       case List(paramType0, paramType1) => -         Function2SpecializedParamClasses().contains(paramType0.typeSymbol) && -         Function2SpecializedParamClasses().contains(paramType1.typeSymbol) && -         Function2SpecializedReturnClasses().contains(retType.typeSymbol) -       case _ => -         false -     }) - -   @tu lazy val Function0SpecializedApplyNames: collection.Set[TermName] = -     for r <- Function0SpecializedReturnTypes -     yield nme.apply.specializedFunction(r, Nil).asTermName - -   @tu lazy val Function1SpecializedApplyNames: collection.Set[TermName] = -     for -       r  <- Function1SpecializedReturnTypes -       t1 <- Function1SpecializedParamTypes -     yield -       nme.apply.specializedFunction(r, List(t1)).asTermName - -   @tu lazy val Function2SpecializedApplyNames: collection.Set[TermName] = -     for -       r  <- Function2SpecializedReturnTypes -       t1 <- Function2SpecializedParamTypes -       t2 <- Function2SpecializedParamTypes -     yield -       nme.apply.specializedFunction(r, List(t1, t2)).asTermName - -   @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = -     Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames - -   def functionArity(tp: Type)(using Context): Int = tp.dropDependentRefinement.dealias.argInfos.length - 1 - -   /** Return underlying context function type (i.e. instance of a ContextFunctionN class) -    *  or NoType if none exists. The following types are considered as underlying types: -    *   - the alias of an alias type -    *   - the instance or origin of a TypeVar (i.e. the result of a stripTypeVar) -    *   - the upper bound of a TypeParamRef in the current constraint -    */ -   def asContextFunctionType(tp: Type)(using Context): Type = -     tp.stripTypeVar.dealias match -       case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => -         asContextFunctionType(TypeComparer.bounds(tp1).hiBound) -       case tp1 => -         if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 -         else NoType - -   /** Is `tp` a context function type? */ -   def isContextFunctionType(tp: Type)(using Context): Boolean = -     asContextFunctionType(tp).exists - -   /** An extractor for context function types `As ?=> B`, possibly with -    *  dependent refinements. Optionally returns a triple consisting of the argument -    *  types `As`, the result type `B` and whether the type is an erased context function. 
- */ - object ContextFunctionType: - def unapply(tp: Type)(using Context): Option[(List[Type], Type, Boolean)] = - if ctx.erasedTypes then - atPhase(erasurePhase)(unapply(tp)) - else - val tp1 = asContextFunctionType(tp) - if tp1.exists then - val args = tp1.dropDependentRefinement.argInfos - Some((args.init, args.last, tp1.typeSymbol.name.isErasedFunction)) - else None - - def isErasedFunctionType(tp: Type)(using Context): Boolean = - tp.dealias.typeSymbol.name.isErasedFunction && isFunctionType(tp) - - /** A whitelist of Scala-2 classes that are known to be pure */ - def isAssuredNoInits(sym: Symbol): Boolean = - (sym `eq` SomeClass) || isTupleClass(sym) - - /** If `cls` is Tuple1..Tuple22, add the corresponding *: type as last parent to `parents` */ - def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = { - if !isTupleClass(cls) then parents - else if tparams.isEmpty then parents :+ TupleTypeRef - else - assert(parents.head.typeSymbol == ObjectClass) - TypeOps.nestedPairs(tparams.map(_.typeRef)) :: parents.tail - } - - /** If it is BoxedUnit, remove `java.io.Serializable` from `parents`. */ - def adjustForBoxedUnit(cls: ClassSymbol, parents: List[Type]): List[Type] = - if (isBoxedUnitClass(cls)) parents.filter(_.typeSymbol != JavaSerializableClass) - else parents - - private val HasProblematicGetClass: Set[Name] = Set( - tpnme.AnyVal, tpnme.Byte, tpnme.Short, tpnme.Char, tpnme.Int, tpnme.Long, tpnme.Float, tpnme.Double, - tpnme.Unit, tpnme.Boolean) - - /** When typing a primitive value class or AnyVal, we ignore the `getClass` - * member: it's supposed to be an override of the `getClass` defined on `Any`, - * but in dotty `Any#getClass` is polymorphic so it ends up being an overload. - * This is especially problematic because it means that when writing: - * - * 1.asInstanceOf[Int & AnyRef].getClass - * - * the `getClass` that returns `Class[Int]` defined in Int can be selected, - * but this call is specified to return `classOf[Integer]`, see - * tests/run/t5568.scala. - * - * FIXME: remove all the `getClass` methods defined in the standard library - * so we don't have to hot-patch it like this. - */ - def hasProblematicGetClass(className: Name): Boolean = - HasProblematicGetClass.contains(className) - - /** Is synthesized symbol with alphanumeric name allowed to be used as an infix operator? */ - def isInfix(sym: Symbol)(using Context): Boolean = - (sym eq Object_eq) || (sym eq Object_ne) - - @tu lazy val assumedTransparentNames: Map[Name, Set[Symbol]] = - // add these for now, until we had a chance to retrofit 2.13 stdlib - // we should do a more through sweep through it then. 
- val strs = Map( - "Any" -> Set("scala"), - "AnyVal" -> Set("scala"), - "Matchable" -> Set("scala"), - "Product" -> Set("scala"), - "Object" -> Set("java.lang"), - "Comparable" -> Set("java.lang"), - "Serializable" -> Set("java.io"), - "BitSetOps" -> Set("scala.collection"), - "IndexedSeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "IterableOnceOps" -> Set("scala.collection"), - "IterableOps" -> Set("scala.collection"), - "LinearSeqOps" -> Set("scala.collection", "scala.collection.immutable"), - "MapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "SeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "SetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "SortedMapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "SortedOps" -> Set("scala.collection"), - "SortedSetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "StrictOptimizedIterableOps" -> Set("scala.collection"), - "StrictOptimizedLinearSeqOps" -> Set("scala.collection"), - "StrictOptimizedMapOps" -> Set("scala.collection", "scala.collection.immutable"), - "StrictOptimizedSeqOps" -> Set("scala.collection", "scala.collection.immutable"), - "StrictOptimizedSetOps" -> Set("scala.collection", "scala.collection.immutable"), - "StrictOptimizedSortedMapOps" -> Set("scala.collection", "scala.collection.immutable"), - "StrictOptimizedSortedSetOps" -> Set("scala.collection", "scala.collection.immutable"), - "ArrayDequeOps" -> Set("scala.collection.mutable"), - "DefaultSerializable" -> Set("scala.collection.generic"), - "IsIterable" -> Set("scala.collection.generic"), - "IsIterableLowPriority" -> Set("scala.collection.generic"), - "IsIterableOnce" -> Set("scala.collection.generic"), - "IsIterableOnceLowPriority" -> Set("scala.collection.generic"), - "IsMap" -> Set("scala.collection.generic"), - "IsSeq" -> Set("scala.collection.generic")) - strs.map { case (simple, pkgs) => ( - simple.toTypeName, - pkgs.map(pkg => staticRef(pkg.toTermName, isPackage = true).symbol.moduleClass) - ) - } - - def isAssumedTransparent(sym: Symbol): Boolean = - assumedTransparentNames.get(sym.name) match - case Some(pkgs) => pkgs.contains(sym.owner) - case none => false - - // ----- primitive value class machinery ------------------------------------------ - - class PerRun[T](generate: Context ?=> T) { - private var current: RunId = NoRunId - private var cached: T = _ - def apply()(using Context): T = { - if (current != ctx.runId) { - cached = generate - current = ctx.runId - } - cached - } - } - - @tu lazy val ScalaNumericValueTypeList: List[TypeRef] = List( - ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType) - - @tu private lazy val ScalaNumericValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypeList.toSet - @tu private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes `union` Set(UnitType, BooleanType) - - val ScalaNumericValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaNumericValueTypes.map(_.symbol)) - val ScalaValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaValueTypes.map(_.symbol)) - - val ScalaBoxedClasses: PerRun[collection.Set[Symbol]] = new PerRun( - Set(BoxedByteClass, BoxedShortClass, BoxedCharClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass, BoxedUnitClass, BoxedBooleanClass) - ) - - 
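The `PerRun` wrapper defined above memoizes a value for the duration of one compiler run: the first application in a new run (detected via `ctx.runId`) recomputes `generate`, and subsequent applications in the same run return the cached result. Sets of symbols such as `ScalaValueClasses` and `ScalaBoxedClasses` are therefore rebuilt once per run rather than on every access. The following is a minimal, self-contained sketch of that caching idea; `Ctx` and its integer `runId` are simplified stand-ins for illustration only, not the compiler's actual `Context`/`RunId` API, and this code is not part of the patch.

```scala
// Illustrative sketch of per-run memoization (assumed, simplified types).
final case class Ctx(runId: Int)          // stand-in for the compiler Context

class PerRunSketch[T](generate: Ctx => T):
  private var current: Int = -1           // stand-in for NoRunId
  private var cached: T = null.asInstanceOf[T]
  def apply()(using ctx: Ctx): T =
    if current != ctx.runId then          // recompute once per run
      cached = generate(ctx)
      current = ctx.runId
    cached

@main def perRunDemo(): Unit =
  var evaluations = 0
  val boxedNames = PerRunSketch(_ => { evaluations += 1; Set("BoxedInt", "BoxedLong") })
  locally { given Ctx = Ctx(runId = 1); boxedNames(); boxedNames() } // evaluated once
  locally { given Ctx = Ctx(runId = 2); boxedNames() }               // new run, evaluated again
  assert(evaluations == 2)
```

The design point this illustrates is that the cached symbol sets stay cheap to read within a run but are refreshed whenever a new run (with possibly different symbols) begins.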
private val valueTypeEnc = mutable.Map[TypeName, PrimitiveClassEnc]() - private val typeTags = mutable.Map[TypeName, Name]().withDefaultValue(nme.specializedTypeNames.Object) - -// private val unboxedTypeRef = mutable.Map[TypeName, TypeRef]() -// private val javaTypeToValueTypeRef = mutable.Map[Class[?], TypeRef]() -// private val valueTypeNamesToJavaType = mutable.Map[TypeName, Class[?]]() - - private def valueTypeRef(name: String, jtype: Class[?], enc: Int, tag: Name): TypeRef = { - val vcls = requiredClassRef(name) - valueTypeEnc(vcls.name) = enc - typeTags(vcls.name) = tag -// unboxedTypeRef(boxed.name) = vcls -// javaTypeToValueTypeRef(jtype) = vcls -// valueTypeNamesToJavaType(vcls.name) = jtype - vcls - } - - /** The type of the boxed class corresponding to primitive value type `tp`. */ - def boxedType(tp: Type)(using Context): TypeRef = { - val cls = tp.classSymbol - if (cls eq ByteClass) BoxedByteClass - else if (cls eq ShortClass) BoxedShortClass - else if (cls eq CharClass) BoxedCharClass - else if (cls eq IntClass) BoxedIntClass - else if (cls eq LongClass) BoxedLongClass - else if (cls eq FloatClass) BoxedFloatClass - else if (cls eq DoubleClass) BoxedDoubleClass - else if (cls eq UnitClass) BoxedUnitClass - else if (cls eq BooleanClass) BoxedBooleanClass - else sys.error(s"Not a primitive value type: $tp") - }.typeRef - - def unboxedType(tp: Type)(using Context): TypeRef = { - val cls = tp.classSymbol - if (cls eq BoxedByteClass) ByteType - else if (cls eq BoxedShortClass) ShortType - else if (cls eq BoxedCharClass) CharType - else if (cls eq BoxedIntClass) IntType - else if (cls eq BoxedLongClass) LongType - else if (cls eq BoxedFloatClass) FloatType - else if (cls eq BoxedDoubleClass) DoubleType - else if (cls eq BoxedUnitClass) UnitType - else if (cls eq BoxedBooleanClass) BooleanType - else sys.error(s"Not a boxed primitive value type: $tp") - } - - /** The JVM tag for `tp` if it's a primitive, `java.lang.Object` otherwise. 
*/ - def typeTag(tp: Type)(using Context): Name = typeTags(scalaClassName(tp)) - -// /** The `Class[?]` of a primitive value type name */ -// def valueTypeNameToJavaType(name: TypeName)(using Context): Option[Class[?]] = -// valueTypeNamesToJavaType.get(if (name.firstPart eq nme.scala) name.lastPart.toTypeName else name) - - type PrimitiveClassEnc = Int - - val ByteEnc: Int = 2 - val ShortEnc: Int = ByteEnc * 3 - val CharEnc: Int = 5 - val IntEnc: Int = ShortEnc * CharEnc - val LongEnc: Int = IntEnc * 7 - val FloatEnc: Int = LongEnc * 11 - val DoubleEnc: Int = FloatEnc * 13 - val BooleanEnc: Int = 17 - val UnitEnc: Int = 19 - - def isValueSubType(tref1: TypeRef, tref2: TypeRef)(using Context): Boolean = - valueTypeEnc(tref2.name) % valueTypeEnc(tref1.name) == 0 - def isValueSubClass(sym1: Symbol, sym2: Symbol): Boolean = - valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 - - @tu lazy val specialErasure: SimpleIdentityMap[Symbol, ClassSymbol] = - SimpleIdentityMap.empty[Symbol] - .updated(AnyClass, ObjectClass) - .updated(MatchableClass, ObjectClass) - .updated(AnyValClass, ObjectClass) - .updated(SingletonClass, ObjectClass) - .updated(TupleClass, ProductClass) - .updated(NonEmptyTupleClass, ProductClass) - .updated(PairClass, ObjectClass) - - // ----- Initialization --------------------------------------------------- - - /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ - @tu lazy val syntheticScalaClasses: List[TypeSymbol] = - List( - AnyClass, - MatchableClass, - AnyRefAlias, - AnyKindClass, - andType, - orType, - RepeatedParamClass, - ByNameParamClass2x, - IntoType, - AnyValClass, - NullClass, - NothingClass, - SingletonClass) - - @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( - EmptyPackageVal, - OpsPackageClass) - - /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ - @tu lazy val syntheticCoreMethods: List[TermSymbol] = - AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod) - - @tu lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet - - private var isInitialized = false - - def init()(using ctx: DetachedContext): Unit = { - this.initCtx = ctx - if (!isInitialized) { - // force initialization of every symbol that is synthesized or hijacked by the compiler - val forced = - syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass - isInitialized = true - } - addSyntheticSymbolsComments - } - - /** Definitions used in Lazy Vals implementation */ - val LazyValsModuleName = "scala.runtime.LazyVals" - @tu lazy val LazyValsModule = requiredModule(LazyValsModuleName) - @tu lazy val LazyValsWaitingState = requiredClass(s"$LazyValsModuleName.Waiting") - @tu lazy val LazyValsControlState = requiredClass(s"$LazyValsModuleName.LazyValControlState") - - def addSyntheticSymbolsComments(using Context): Unit = - def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) - - add(AnyClass, - """/** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala - | * execution environment inherits directly or indirectly from this class. - | * - | * Starting with Scala 2.10 it is possible to directly extend `Any` using ''universal traits''. - | * A ''universal trait'' is a trait that extends `Any`, only has `def`s as members, and does no initialization. 
- | * - | * The main use case for universal traits is to allow basic inheritance of methods for [[scala.AnyVal value classes]]. - | * For example, - | * - | * {{{ - | * trait Printable extends Any { - | * def print(): Unit = println(this) - | * } - | * class Wrapper(val underlying: Int) extends AnyVal with Printable - | * - | * val w = new Wrapper(3) - | * w.print() - | * }}} - | * - | * See the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]] for more - | * details on the interplay of universal traits and value classes. - | */ - """.stripMargin) - - add(Any_==, - """/** Test two objects for equality. - | * The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`. - | * - | * @param that the object to compare against this object for equality. - | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. - | */ - """.stripMargin) - - add(Any_!=, - """/** Test two objects for inequality. - | * - | * @param that the object to compare against this object for equality. - | * @return `true` if !(this == that), `false` otherwise. - | */ - """.stripMargin) - - add(Any_equals, - """/** Compares the receiver object (`this`) with the argument object (`that`) for equivalence. - | * - | * Any implementation of this method should be an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]: - | * - | * - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`. - | * - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and - | * only if `y.equals(x)` returns `true`. - | * - It is transitive: for any instances `x`, `y`, and `z` of type `Any` if `x.equals(y)` returns `true` and - | * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`. - | * - | * If you override this method, you should verify that your implementation remains an equivalence relation. - | * Additionally, when overriding this method it is usually necessary to override `hashCode` to ensure that - | * objects which are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]]. - | * (`o1.hashCode.equals(o2.hashCode)`). - | * - | * @param that the object to compare against this object for equality. - | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. - | */ - """.stripMargin) - - add(Any_hashCode, - """/** Calculate a hash code value for the object. - | * - | * The default hashing algorithm is platform dependent. - | * - | * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet - | * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`. - | * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`) that they have - | * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure - | * to verify that the behavior is consistent with the `equals` method. - | * - | * @return the hash code value for this object. - | */ - """.stripMargin) - - add(Any_toString, - """/** Returns a string representation of the object. - | * - | * The default representation is platform dependent. - | * - | * @return a string representation of the object. - | */ - """.stripMargin) - - add(Any_##, - """/** Equivalent to `x.hashCode` except for boxed numeric types and `null`. 
- | * For numerics, it returns a hash value which is consistent - | * with value equality: if two value type instances compare - | * as true, then ## will produce the same hash value for each - | * of them. - | * For `null` returns a hashcode where `null.hashCode` throws a - | * `NullPointerException`. - | * - | * @return a hash value consistent with == - | */ - """.stripMargin) - - add(Any_isInstanceOf, - """/** Test whether the dynamic type of the receiver object is `T0`. - | * - | * Note that the result of the test is modulo Scala's erasure semantics. - | * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the - | * expression `List(1).isInstanceOf[List[String]]` will return `true`. - | * In the latter example, because the type argument is erased as part of compilation it is - | * not possible to check whether the contents of the list are of the specified type. - | * - | * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. - | */ - """.stripMargin) - - add(Any_asInstanceOf, - """/** Cast the receiver object to be of type `T0`. - | * - | * Note that the success of a cast at runtime is modulo Scala's erasure semantics. - | * Therefore the expression `1.asInstanceOf[String]` will throw a `ClassCastException` at - | * runtime, while the expression `List(1).asInstanceOf[List[String]]` will not. - | * In the latter example, because the type argument is erased as part of compilation it is - | * not possible to check whether the contents of the list are of the requested type. - | * - | * @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`. - | * @return the receiver object. - | */ - """.stripMargin) - - add(Any_getClass, - """/** Returns the runtime class representation of the object. - | * - | * @return a class object corresponding to the runtime type of the receiver. - | */ - """.stripMargin) - - add(MatchableClass, - """/** The base trait of types that can be safely pattern matched against. - | * - | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. - | */ - """.stripMargin) - - add(AnyRefAlias, - """/** Class `AnyRef` is the root class of all ''reference types''. - | * All types except the value types descend from this class. - | */ - """.stripMargin) - - add(Object_eq, - """/** Tests whether the argument (`that`) is a reference to the receiver object (`this`). - | * - | * The `eq` method implements an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on - | * non-null instances of `AnyRef`, and has three additional properties: - | * - | * - It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of - | * `x.eq(y)` consistently returns `true` or consistently returns `false`. - | * - For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` returns `false`. - | * - `null.eq(null)` returns `true`. - | * - | * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is - | * consistent with reference equality. Therefore, if two objects are references to each other (`o1 eq o2`), they - | * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`). - | * - | * @param that the object to compare against this object for reference equality. - | * @return `true` if the argument is a reference to the receiver object; `false` otherwise. 
- | */ - """.stripMargin) - - add(Object_ne, - """/** Equivalent to `!(this eq that)`. - | * - | * @param that the object to compare against this object for reference equality. - | * @return `true` if the argument is not a reference to the receiver object; `false` otherwise. - | */ - """.stripMargin) - - add(Object_synchronized, - """/** Executes the code in `body` with an exclusive lock on `this`. - | * - | * @param body the code to execute - | * @return the result of `body` - | */ - """.stripMargin) - - add(Object_clone, - """/** Create a copy of the receiver object. - | * - | * The default implementation of the `clone` method is platform dependent. - | * - | * @note not specified by SLS as a member of AnyRef - | * @return a copy of the receiver object. - | */ - """.stripMargin) - - add(Object_finalize, - """/** Called by the garbage collector on the receiver object when there - | * are no more references to the object. - | * - | * The details of when and if the `finalize` method is invoked, as - | * well as the interaction between `finalize` and non-local returns - | * and exceptions, are all platform dependent. - | * - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_notify, - """/** Wakes up a single thread that is waiting on the receiver object's monitor. - | * - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_notifyAll, - """/** Wakes up all threads that are waiting on the receiver object's monitor. - | * - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_wait, - """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait--]]. - | * - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_waitL, - """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-]]. - | * - | * @param timeout the maximum time to wait in milliseconds. - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_waitLI, - """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-int-]] - | * - | * @param timeout the maximum time to wait in milliseconds. - | * @param nanos additional time, in nanoseconds range 0-999999. - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(AnyKindClass, - """/** The super-type of all types. - | * - | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. - | */ - """.stripMargin) - - add(andType, - """/** The intersection of two types. - | * - | * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. - | */ - """.stripMargin) - - add(orType, - """/** The union of two types. - | * - | * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. - | */ - """.stripMargin) - - add(AnyValClass, - """/** `AnyVal` is the root class of all ''value types'', which describe values - | * not implemented as objects in the underlying host system. Value classes - | * are specified in Scala Language Specification, section 12.2. - | * - | * The standard implementation includes nine `AnyVal` subtypes: - | * - | * [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]], - | * [[scala.Short]], and [[scala.Byte]] are the ''numeric value types''. - | * - | * [[scala.Unit]] and [[scala.Boolean]] are the ''non-numeric value types''. 
- | * - | * Other groupings: - | * - | * - The ''subrange types'' are [[scala.Byte]], [[scala.Short]], and [[scala.Char]]. - | * - The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]]. - | * - The ''floating point types'' are [[scala.Float]] and [[scala.Double]]. - | * - | * Prior to Scala 2.10, `AnyVal` was a sealed trait. Beginning with Scala 2.10, - | * however, it is possible to define a subclass of `AnyVal` called a ''user-defined value class'' - | * which is treated specially by the compiler. Properly-defined user value classes provide a way - | * to improve performance on user-defined types by avoiding object allocation at runtime, and by - | * replacing virtual method invocations with static method invocations. - | * - | * User-defined value classes which avoid object allocation... - | * - | * - must have a single `val` parameter that is the underlying runtime representation. - | * - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s. - | * - typically extend no other trait apart from `AnyVal`. - | * - cannot be used in type tests or pattern matching. - | * - may not override `equals` or `hashCode` methods. - | * - | * A minimal example: - | * {{{ - | * class Wrapper(val underlying: Int) extends AnyVal { - | * def foo: Wrapper = new Wrapper(underlying * 19) - | * } - | * }}} - | * - | * It's important to note that user-defined value classes are limited, and in some circumstances, - | * still must allocate a value class instance at runtime. These limitations and circumstances are - | * explained in greater detail in the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]]. - | */ - """.stripMargin) - - add(NullClass, - """/** `Null` is - together with [[scala.Nothing]] - at the bottom of the Scala type hierarchy. - | * - | * `Null` is the type of the `null` literal. It is a subtype of every type - | * except those of value classes. Value classes are subclasses of [[AnyVal]], which includes - | * primitive types such as [[Int]], [[Boolean]], and user-defined value classes. - | * - | * Since `Null` is not a subtype of value types, `null` is not a member of any such type. - | * For instance, it is not possible to assign `null` to a variable of type [[scala.Int]]. - | */ - """.stripMargin) - - add(NothingClass, - """/** `Nothing` is - together with [[scala.Null]] - at the bottom of Scala's type hierarchy. - | * - | * `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist - | * ''no instances'' of this type. Although type `Nothing` is uninhabited, it is - | * nevertheless useful in several ways. For instance, the Scala library defines a value - | * [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists are covariant in Scala, - | * this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any element of type `T`. - | * - | * Another usage for Nothing is the return type for methods which never return normally. - | * One example is method error in [[scala.sys]], which always throws an exception. - | */ - """.stripMargin) - - add(SingletonClass, - """/** `Singleton` is used by the compiler as a supertype for singleton types. This includes literal types, - | * as they are also singleton types. 
- | * - | * {{{ - | * scala> object A { val x = 42 } - | * defined object A - | * - | * scala> implicitly[A.type <:< Singleton] - | * res12: A.type <:< Singleton = generalized constraint - | * - | * scala> implicitly[A.x.type <:< Singleton] - | * res13: A.x.type <:< Singleton = generalized constraint - | * - | * scala> implicitly[42 <:< Singleton] - | * res14: 42 <:< Singleton = generalized constraint - | * - | * scala> implicitly[Int <:< Singleton] - | * ^ - | * error: Cannot prove that Int <:< Singleton. - | * }}} - | * - | * `Singleton` has a special meaning when it appears as an upper bound on a formal type - | * parameter. Normally, type inference in Scala widens singleton types to the underlying - | * non-singleton type. When a type parameter has an explicit upper bound of `Singleton`, - | * the compiler infers a singleton type. - | * - | * {{{ - | * scala> def check42[T](x: T)(implicit ev: T =:= 42): T = x - | * check42: [T](x: T)(implicit ev: T =:= 42)T - | * - | * scala> val x1 = check42(42) - | * ^ - | * error: Cannot prove that Int =:= 42. - | * - | * scala> def singleCheck42[T <: Singleton](x: T)(implicit ev: T =:= 42): T = x - | * singleCheck42: [T <: Singleton](x: T)(implicit ev: T =:= 42)T - | * - | * scala> val x2 = singleCheck42(42) - | * x2: Int = 42 - | * }}} - | * - | * See also [[https://docs.scala-lang.org/sips/42.type.html SIP-23 about Literal-based Singleton Types]]. - | */ - """.stripMargin) -} diff --git a/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala b/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala deleted file mode 100644 index 6690cae3a142..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala +++ /dev/null @@ -1,82 +0,0 @@ -package dotty.tools.dotc -package core - -import Periods._ -import SymDenotations._ -import Contexts._ -import Types._ -import Symbols._ -import Denotations._ -import Phases._ - -object DenotTransformers { - - /** A transformer group contains a sequence of transformers, - * ordered by the phase where they apply. Transformers are added - * to a group via `install`. - */ - - /** A transformer transforms denotations at a given phase */ - trait DenotTransformer extends Phase { - - /** The last phase during which the transformed denotations are valid */ - def lastPhaseId(using Context): Int = ctx.base.nextDenotTransformerId(id + 1) - - /** The validity period of the transformed denotations in the given context */ - def validFor(using Context): Period = - Period(ctx.runId, id + 1, lastPhaseId) - - /** The transformation method */ - def transform(ref: SingleDenotation)(using Context): SingleDenotation - } - - /** A transformer that only transforms the info field of denotations */ - trait InfoTransformer extends DenotTransformer { - - def transformInfo(tp: Type, sym: Symbol)(using Context): Type - - def transform(ref: SingleDenotation)(using Context): SingleDenotation = { - val sym = ref.symbol - if (sym.exists && !infoMayChange(sym)) ref - else { - val info1 = transformInfo(ref.info, ref.symbol) - if (info1 eq ref.info) ref - else ref match { - case ref: SymDenotation => - ref.copySymDenotation(info = info1).copyCaches(ref, ctx.phase.next) - case _ => - ref.derivedSingleDenotation(ref.symbol, info1) - } - } - } - - /** Denotations with a symbol where `infoMayChange` is false are guaranteed to be - * unaffected by this transform, so `transformInfo` need not be run. This - * can save time, and more importantly, can help avoid forcing symbol completers. 
- */ - protected def infoMayChange(sym: Symbol)(using Context): Boolean = true - } - - /** A transformer that only transforms SymDenotations. - * Note: Infos of non-sym denotations are left as is. So the transformer should - * be used before erasure only if this is not a problem. After erasure, all - * denotations are SymDenotations, so SymTransformers can be used freely. - */ - trait SymTransformer extends DenotTransformer { - - def transformSym(sym: SymDenotation)(using Context): SymDenotation - - def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match { - case ref: SymDenotation => transformSym(ref) - case _ => ref - } - } - - /** A `DenotTransformer` trait that has the identity as its `transform` method. - * You might want to inherit from this trait so that new denotations can be - * installed using `installAfter` and `enteredAfter` at the end of the phase. - */ - trait IdentityDenotTransformer extends DenotTransformer { - def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala deleted file mode 100644 index 9db285975a0a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala +++ /dev/null @@ -1,1376 +0,0 @@ -package dotty.tools -package dotc -package core - -import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } -import Contexts._ -import Names._ -import NameKinds._ -import StdNames._ -import Symbols.NoSymbol -import Symbols._ -import Types._ -import Periods._ -import Flags._ -import DenotTransformers._ -import Decorators._ -import Signature.MatchDegree._ -import printing.Texts._ -import printing.Printer -import io.AbstractFile -import config.Config -import config.Printers.overload -import util.common._ -import typer.ProtoTypes.NoViewsAllowed -import collection.mutable.ListBuffer -import language.experimental.pureFunctions - -/** Denotations represent the meaning of symbols and named types. - * The following diagram shows how the principal types of denotations - * and their denoting entities relate to each other. Lines ending in - * a down-arrow `v` are member methods. The two methods shown in the diagram are - * "symbol" and "deref". Both methods are parameterized by the current context, - * and are effectively indexed by current period. - * - * Lines ending in a horizontal line mean subtyping (right is a subtype of left). - * - * NamedType - * | Symbol---------ClassSymbol - * | | | - * | denot | denot | denot - * v v v - * Denotation-+-----SingleDenotation-+------SymDenotation-+----ClassDenotation - * | | - * +-----MultiDenotation | - * | - * +--UniqueRefDenotation - * +--JointRefDenotation - * - * Here's a short summary of the classes in this diagram. 
- * - * NamedType A type consisting of a prefix type and a name, with fields - * prefix: Type - * name: Name - * It has two subtypes: TermRef and TypeRef - * Symbol A label for a definition or declaration in one compiler run - * ClassSymbol A symbol representing a class - * Denotation The meaning of a named type or symbol during a period - * MultiDenotation A denotation representing several overloaded members - * SingleDenotation A denotation representing a non-overloaded member or definition, with main fields - * symbol: Symbol - * info: Type - * UniqueRefDenotation A denotation referring to a single definition with some member type - * JointRefDenotation A denotation referring to a member that could resolve to several definitions - * SymDenotation A denotation representing a single definition with its original type, with main fields - * name: Name - * owner: Symbol - * flags: Flags - * privateWithin: Symbol - * annotations: List[Annotation] - * ClassDenotation A denotation representing a single class definition. - */ -object Denotations { - - implicit def eqDenotation: CanEqual[Denotation, Denotation] = CanEqual.derived - - /** A PreDenotation represents a group of single denotations or a single multi-denotation - * It is used as an optimization to avoid forming MultiDenotations too eagerly. - */ - abstract class PreDenotation extends Pure { - - /** A denotation in the group exists */ - def exists: Boolean - - /** First/last denotation in the group */ - def first: Denotation - def last: Denotation - - /** Convert to full denotation by &-ing all elements */ - def toDenot(pre: Type)(using Context): Denotation - - /** Group contains a denotation that refers to given symbol */ - def containsSym(sym: Symbol): Boolean - - /** Group contains a denotation with the same signature as `other` */ - def matches(other: SingleDenotation)(using Context): Boolean - - /** Keep only those denotations in this group which satisfy predicate `p`. */ - def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation - - /** Keep only those denotations in this group which have a signature - * that's not already defined by `denots`. - */ - def filterDisjoint(denots: PreDenotation)(using Context): PreDenotation - - /** Keep only those inherited members M of this predenotation for which the following is true - * - M is not marked Private - * - If M has a unique symbol, it does not appear in `prevDenots`. - * - M's signature as seen from prefix `pre` does not appear in `ownDenots` - * Return the denotation as seen from `pre`. - * Called from SymDenotations.computeMember. There, `ownDenots` are the denotations found in - * the base class, which shadow any inherited denotations with the same signature. - * `prevDenots` are the denotations that are defined in the class or inherited from - * a base type which comes earlier in the linearization. - */ - def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): PreDenotation - - /** Keep only those denotations in this group that have all of the flags in `required`, - * but none of the flags in `excluded`. - */ - def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation - - /** Map `f` over all single denotations and aggregate the results with `g`. 
*/ - def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T - - private var cachedPrefix: Type = _ - private var cachedAsSeenFrom: AsSeenFromResult = _ - private var validAsSeenFrom: Period = Nowhere - - type AsSeenFromResult <: PreDenotation - - /** The denotation with info(s) as seen from prefix type */ - def asSeenFrom(pre: Type)(using Context): AsSeenFromResult = - if (Config.cacheAsSeenFrom) { - if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) { - cachedAsSeenFrom = computeAsSeenFrom(pre) - cachedPrefix = pre - validAsSeenFrom = if (pre.isProvisional) Nowhere else ctx.period - } - cachedAsSeenFrom - } - else computeAsSeenFrom(pre) - - protected def computeAsSeenFrom(pre: Type)(using Context): AsSeenFromResult - - /** The union of two groups. */ - def union(that: PreDenotation): PreDenotation = - if (!this.exists) that - else if (!that.exists) this - else DenotUnion(this, that) - } - - /** A denotation is the result of resolving - * a name (either simple identifier or select) during a given period. - * - * Denotations can be combined with `&` and `|`. - * & is conjunction, | is disjunction. - * - * `&` will create an overloaded denotation from two - * non-overloaded denotations if their signatures differ. - * Analogously `|` of two denotations with different signatures will give - * an empty denotation `NoDenotation`. - * - * A denotation might refer to `NoSymbol`. This is the case if the denotation - * was produced from a disjunction of two denotations with different symbols - * and there was no common symbol in a superclass that could substitute for - * both symbols. Here is an example: - * - * Say, we have: - * - * class A { def f: A } - * class B { def f: B } - * val x: A | B = if (test) new A else new B - * val y = x.f - * - * Then the denotation of `y` is `SingleDenotation(NoSymbol, A | B)`. - * - * @param symbol The referencing symbol, or NoSymbol is none exists - */ - abstract class Denotation(val symbol: Symbol, protected var myInfo: Type) extends PreDenotation with printing.Showable { - type AsSeenFromResult <: Denotation - - /** The type info. - * The info is an instance of TypeType iff this is a type denotation - * Uncompleted denotations set myInfo to a LazyType. - */ - final def info(using Context): Type = { - def completeInfo = { // Written this way so that `info` is small enough to be inlined - this.asInstanceOf[SymDenotation].completeFrom(myInfo.asInstanceOf[LazyType]); info - } - if (myInfo.isInstanceOf[LazyType]) completeInfo else myInfo - } - - /** The type info, or, if this is a SymDenotation where the symbol - * is not yet completed, the completer - */ - def infoOrCompleter: Type - - /** The period during which this denotation is valid. */ - def validFor: Period - - /** Is this a reference to a type symbol? */ - def isType: Boolean - - /** Is this a reference to a term symbol? */ - def isTerm: Boolean = !isType - - /** Is this denotation overloaded? */ - final def isOverloaded: Boolean = isInstanceOf[MultiDenotation] - - /** Denotation points to unique symbol; false for overloaded denotations - * and JointRef denotations. - */ - def hasUniqueSym: Boolean - - /** The name of the denotation */ - def name(using Context): Name - - /** The signature of the denotation. */ - def signature(using Context): Signature - - /** Resolve overloaded denotation to pick the ones with the given signature - * when seen from prefix `site`. - * @param relaxed When true, consider only parameter signatures for a match. 
- */ - def atSignature(sig: Signature, targetName: Name, site: Type = NoPrefix, relaxed: Boolean = false)(using Context): Denotation - - /** The variant of this denotation that's current in the given context. - * If no such denotation exists, returns the denotation with each alternative - * at its first point of definition. - */ - def current(using Context): Denotation - - /** Is this denotation different from NoDenotation or an ErrorDenotation? */ - def exists: Boolean = true - - /** A denotation with the info of this denotation transformed using `f` */ - def mapInfo(f: Type => Type)(using Context): Denotation - - /** If this denotation does not exist, fall back to the alternative */ - inline def orElse(inline that: Denotation): Denotation = if (this.exists) this else that - - /** The set of alternative single-denotations making up this denotation */ - final def alternatives: List[SingleDenotation] = altsWith(alwaysTrue) - - /** The alternatives of this denotation that satisfy the predicate `p`. */ - def altsWith(p: Symbol => Boolean): List[SingleDenotation] - - /** The unique alternative of this denotation that satisfies the predicate `p`, - * or NoDenotation if no satisfying alternative exists. - * @throws TypeError if there is more than one alternative that satisfies `p`. - */ - def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation - - override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation - - /** If this is a SingleDenotation, return it, otherwise throw a TypeError */ - def checkUnique(using Context): SingleDenotation = suchThat(alwaysTrue) - - /** Does this denotation have an alternative that satisfies the predicate `p`? */ - def hasAltWith(p: SingleDenotation => Boolean): Boolean - - /** The denotation made up from the alternatives of this denotation that - * are accessible from prefix `pre`, or NoDenotation if no accessible alternative exists. - */ - def accessibleFrom(pre: Type, superAccess: Boolean = false)(using Context): Denotation - - /** Find member of this denotation with given `name`, all `required` - * flags and no `excluded` flag, and produce a denotation that contains the type of the member - * as seen from given prefix `pre`. - */ - def findMember(name: Name, pre: Type, required: FlagSet, excluded: FlagSet)(using Context): Denotation = - info.findMember(name, pre, required, excluded) - - /** If this denotation is overloaded, filter with given predicate. - * If the result is still overloaded, throw a TypeError. - * Note: disambiguate is slightly different from suchThat in that - * single-denotations that do not satisfy the predicate are left alone - * (whereas suchThat would map them to NoDenotation). - */ - inline def disambiguate(inline p: Symbol => Boolean)(using Context): SingleDenotation = this match { - case sdenot: SingleDenotation => sdenot - case mdenot => suchThat(p) orElse NoQualifyingRef(alternatives) - } - - /** Return the symbol in this denotation that satisfies the given predicate. - * If generateStubs is specified, return a stub symbol if the denotation is a missing ref. - * Throw a `TypeError` if the predicate fails to disambiguate the symbol or no alternative matches.
- */ - def requiredSymbol(kind: String, - name: Name, - site: Denotation = NoDenotation, - args: List[Type] = Nil, - source: AbstractFile | Null = null, - generateStubs: Boolean = true) - (p: Symbol => Boolean) - (using Context): Symbol = - disambiguate(p) match { - case m @ MissingRef(ownerd, name) if generateStubs => - if ctx.settings.YdebugMissingRefs.value then m.ex.printStackTrace() - newStubSymbol(ownerd.symbol, name, source) - case NoDenotation | _: NoQualifyingRef | _: MissingRef => - def argStr = if (args.isEmpty) "" else i" matching ($args%, %)" - val msg = - if site.exists then em"$site does not have a member $kind $name$argStr" - else em"missing: $kind $name$argStr" - throw TypeError(msg) - case denot => - denot.symbol - } - - def requiredMethod(pname: PreName)(using Context): TermSymbol = { - val name = pname.toTermName - info.member(name).requiredSymbol("method", name, this)(_.is(Method)).asTerm - } - def requiredMethodRef(name: PreName)(using Context): TermRef = - requiredMethod(name).termRef - - def requiredMethod(pname: PreName, argTypes: List[Type])(using Context): TermSymbol = { - val name = pname.toTermName - info.member(name).requiredSymbol("method", name, this, argTypes) { x => - x.is(Method) && { - x.info.paramInfoss match { - case paramInfos :: Nil => paramInfos.corresponds(argTypes)(_ =:= _) - case _ => false - } - } - }.asTerm - } - def requiredMethodRef(name: PreName, argTypes: List[Type])(using Context): TermRef = - requiredMethod(name, argTypes).termRef - - def requiredValue(pname: PreName)(using Context): TermSymbol = { - val name = pname.toTermName - info.member(name).requiredSymbol("field or getter", name, this)(_.info.isParameterless).asTerm - } - def requiredValueRef(name: PreName)(using Context): TermRef = - requiredValue(name).termRef - - def requiredClass(pname: PreName)(using Context): ClassSymbol = { - val name = pname.toTypeName - info.member(name).requiredSymbol("class", name, this)(_.isClass).asClass - } - - def requiredType(pname: PreName)(using Context): TypeSymbol = { - val name = pname.toTypeName - info.member(name).requiredSymbol("type", name, this)(_.isType).asType - } - - /** The alternative of this denotation that has a type matching `targetType` when seen - * as a member of type `site` and that has a target name matching `targetName`, or - * `NoDenotation` if none exists. - */ - def matchingDenotation(site: Type, targetType: Type, targetName: Name)(using Context): SingleDenotation = { - def qualifies(sym: Symbol) = - site.memberInfo(sym).matchesLoosely(targetType) && sym.hasTargetName(targetName) - if (isOverloaded) - atSignature(targetType.signature, targetName, site, relaxed = true) match { - case sd: SingleDenotation => sd.matchingDenotation(site, targetType, targetName) - case md => md.suchThat(qualifies(_)) - } - else if (exists && !qualifies(symbol)) NoDenotation - else asSingleDenotation - } - - /** Form a denotation by conjoining with denotation `that`. - * - * NoDenotations are dropped. MultiDenotations are handled by merging - * parts with same signatures. SingleDenotations with equal signatures - * are joined by following this sequence of steps: - * - * 1. If exactly one of the denotations has an inaccessible symbol, pick the other one. - * 2. Otherwise, if one of the infos overrides the other one, and the associated - * symbol does not score strictly lower than the other one, - * pick the associated denotation. - * 3.
Otherwise, if the two infos can be combined with `infoMeet`, pick that as - * result info, and pick the symbol that scores higher as result symbol, - * or pick `sym1` as a tie breaker. The picked info and symbol are combined - * in a JointDenotation. - * 4. Otherwise, if one of the two symbols scores strongly higher than the - * other one, pick the associated denotation. - * 5. Otherwise return a multi-denotation consisting of both denotations. - * - * Symbol scoring is determined according to the following ranking - * where earlier criteria trump later ones. Cases marked with (*) - * give a strong score advantage, the others a weak one. - * - * 1. The symbol exists, and the other one does not. (*) - * 2. The symbol is not a bridge, but the other one is. (*) - * 3. The symbol is concrete, and the other one is deferred - * 4. The symbol appears before the other in the linearization of `pre` - * 5. The symbol's visibility is strictly greater than the other one's. - * 6. The symbol is a method, but the other one is not. - */ - def meet(that: Denotation, pre: Type, safeIntersection: Boolean = false)(using Context): Denotation = { - /** Try to merge denot1 and denot2 without adding a new signature. */ - def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match { - case denot1 @ MultiDenotation(denot11, denot12) => - val d1 = mergeDenot(denot11, denot2) - if (d1.exists) denot1.derivedUnionDenotation(d1, denot12) - else { - val d2 = mergeDenot(denot12, denot2) - if (d2.exists) denot1.derivedUnionDenotation(denot11, d2) - else NoDenotation - } - case denot1: SingleDenotation => - if (denot1 eq denot2) denot1 - else if denot1.matches(denot2) then mergeSingleDenot(denot1, denot2) - else NoDenotation - } - - /** Try to merge single-denotations. */ - def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation = - val info1 = denot1.info - val info2 = denot2.info - val sym1 = denot1.symbol - val sym2 = denot2.symbol - - /** Does `owner1` come before `owner2` in the linearization of `pre`? */ - def linearScore(owner1: Symbol, owner2: Symbol): Int = - - def searchBaseClasses(bcs: List[ClassSymbol]): Int = bcs match - case bc :: bcs1 => - if bc eq owner1 then 1 - else if bc eq owner2 then -1 - else searchBaseClasses(bcs1) - case Nil => 0 - - if owner1 eq owner2 then 0 - else if owner1.derivesFrom(owner2) then 1 - else if owner2.derivesFrom(owner1) then -1 - else searchBaseClasses(pre.baseClasses) - end linearScore - - /** Similar to SymDenotation#accessBoundary, but without the special cases. */ - def accessBoundary(sym: Symbol) = - if (sym.is(Private)) sym.owner - else sym.privateWithin.orElse( - if (sym.is(Protected)) sym.owner.enclosingPackageClass - else defn.RootClass) - - def isHidden(sym: Symbol) = sym.exists && !sym.isAccessibleFrom(pre) - // In typer phase filter out denotations with symbols that are not - // accessible. After typer, this is not possible since we cannot guarantee - // that the current owner is set correctly. See pos/14660.scala. - val hidden1 = isHidden(sym1) && ctx.isTyper - val hidden2 = isHidden(sym2) && ctx.isTyper - if hidden1 && !hidden2 then denot2 - else if hidden2 && !hidden1 then denot1 - else - // The score that determines which symbol to pick for the result denotation. - // A value > 0 means pick `sym1`, < 0 means pick `sym2`. - // A value of +/- 2 means pick one of the denotations as a tie-breaker - // if a common info does not exist. 
- val symScore: Int = - if !sym1.exists then -2 - else if !sym2.exists then 2 - else if sym1.is(Bridge) && !sym2.is(Bridge) then -2 - else if sym2.is(Bridge) && !sym1.is(Bridge) then 2 - else if !sym1.isAsConcrete(sym2) then -1 - else if !sym2.isAsConcrete(sym1) then 1 - else - val linScore = linearScore(sym1.owner, sym2.owner) - if linScore != 0 then linScore - else - val boundary1 = accessBoundary(sym1) - val boundary2 = accessBoundary(sym2) - if boundary1.isProperlyContainedIn(boundary2) then -1 - else if boundary2.isProperlyContainedIn(boundary1) then 1 - else if sym2.is(Method) && !sym1.is(Method) then -1 - else if sym1.is(Method) && !sym2.is(Method) then 1 - else 0 - - val relaxedOverriding = ctx.explicitNulls && (sym1.is(JavaDefined) || sym2.is(JavaDefined)) - val matchLoosely = sym1.matchNullaryLoosely || sym2.matchNullaryLoosely - - if symScore <= 0 && info2.overrides(info1, relaxedOverriding, matchLoosely, checkClassInfo = false) then - denot2 - else if symScore >= 0 && info1.overrides(info2, relaxedOverriding, matchLoosely, checkClassInfo = false) then - denot1 - else - val jointInfo = infoMeet(info1, info2, safeIntersection) - if jointInfo.exists then - val sym = if symScore >= 0 then sym1 else sym2 - JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor, pre, denot1.isRefinedMethod || denot2.isRefinedMethod) - else if symScore == 2 then denot1 - else if symScore == -2 then denot2 - else - overload.println(i"overloaded with same signature: ${sym1.showLocated}: $info1 / ${sym2.showLocated}: $info2, info = ${info1.getClass}, ${info2.getClass}, $jointInfo") - MultiDenotation(denot1, denot2) - end mergeSingleDenot - - if (this eq that) this - else if (!this.exists) that - else if (!that.exists) this - else that match { - case that: SingleDenotation => - val r = mergeDenot(this, that) - if (r.exists) r else MultiDenotation(this, that) - case that @ MultiDenotation(denot1, denot2) => - this.meet(denot1, pre).meet(denot2, pre) - } - } - - final def asSingleDenotation: SingleDenotation = asInstanceOf[SingleDenotation] - final def asSymDenotation: SymDenotation = asInstanceOf[SymDenotation] - - def toText(printer: Printer): Text = printer.toText(this) - - // ------ PreDenotation ops ---------------------------------------------- - - final def toDenot(pre: Type)(using Context): Denotation = this - final def containsSym(sym: Symbol): Boolean = hasUniqueSym && (symbol eq sym) - } - - // ------ Info meets ---------------------------------------------------- - - /** Merge parameter names of lambda types. If names in corresponding positions match, keep them, - * otherwise generate new synthetic names. - */ - private def mergeParamNames(tp1: LambdaType, tp2: LambdaType): List[tp1.ThisName] = - (for ((name1, name2, idx) <- tp1.paramNames.lazyZip(tp2.paramNames).lazyZip(tp1.paramNames.indices)) - yield if (name1 == name2) name1 else tp1.companion.syntheticParamName(idx)).toList - - /** Normally, `tp1 & tp2`, with extra care taken to return `tp1` or `tp2` directly if that's - * a valid answer. Special cases for matching methods and classes, with - * the possibility of returning NoType. Special handling of ExprTypes, where mixed - * intersections widen the ExprType away. 
- */ - def infoMeet(tp1: Type, tp2: Type, safeIntersection: Boolean)(using Context): Type = - if tp1 eq tp2 then tp1 - else tp1 match - case tp1: TypeBounds => - tp2 match - case tp2: TypeBounds => if safeIntersection then tp1 safe_& tp2 else tp1 & tp2 - case tp2: ClassInfo => tp2 - case _ => NoType - case tp1: ClassInfo => - tp2 match - case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix) - case tp2: TypeBounds => tp1 - case _ => NoType - case tp1: MethodType => - tp2 match - case tp2: MethodType - if TypeComparer.matchingMethodParams(tp1, tp2) - && tp1.isImplicitMethod == tp2.isImplicitMethod - && tp1.isErasedMethod == tp2.isErasedMethod => - val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) - if resType.exists then - tp1.derivedLambdaType(mergeParamNames(tp1, tp2), tp1.paramInfos, resType) - else NoType - case _ => NoType - case tp1: PolyType => - tp2 match - case tp2: PolyType if tp1.paramNames.hasSameLengthAs(tp2.paramNames) => - val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) - if resType.exists then - tp1.derivedLambdaType( - mergeParamNames(tp1, tp2), - tp1.paramInfos.zipWithConserve(tp2.paramInfos)( _ & _ ), - resType) - else NoType - case _ => NoType - case ExprType(rtp1) => - tp2 match - case ExprType(rtp2) => ExprType(rtp1 & rtp2) - case _ => infoMeet(rtp1, tp2, safeIntersection) - case _ => - tp2 match - case _: MethodType | _: PolyType => NoType - case _ => tp1 & tp2.widenExpr - end infoMeet - - /** A non-overloaded denotation */ - abstract class SingleDenotation(symbol: Symbol, initInfo: Type) extends Denotation(symbol, initInfo) { - protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation - - final def name(using Context): Name = symbol.name - - /** For SymDenotation, this is NoPrefix. For other denotations this is the prefix - * under which the denotation was constructed. - * - * Note that `asSeenFrom` might return a `SymDenotation` and therefore in - * general one cannot rely on `prefix` being set, see - * `Config.reuseSymDenotations` for details. - */ - def prefix: Type = NoPrefix - - /** True if the info of this denotation comes from a refinement. */ - def isRefinedMethod: Boolean = false - - /** For SymDenotations, the language-specific signature of the info, depending on - * where the symbol is defined. For non-SymDenotations, the Scala 3 - * signature. - * - * Invariants: - * - Before erasure, the signature of a denotation is always equal to the - * signature of its corresponding initial denotation. - * - Two distinct overloads will have SymDenotations with distinct - * signatures (the SELECTin tag in Tasty relies on this to refer to an - * overload unambiguously). Note that this only applies to - * SymDenotations, in general we cannot assume that distinct - * SingleDenotations will have distinct signatures (cf #9050). - */ - final def signature(using Context): Signature = - signature(sourceLanguage = if isType || !this.isInstanceOf[SymDenotation] then SourceLanguage.Scala3 else SourceLanguage(symbol)) - - /** Overload of `signature` which lets the caller pick the language used - * to compute the signature of the info. Useful to match denotations defined in - * different classes (see `matchesLoosely`). 
- */ - def signature(sourceLanguage: SourceLanguage)(using Context): Signature = - if (isType) Signature.NotAMethod // don't force info if this is a type denotation - else info match { - case info: MethodOrPoly => - try info.signature(sourceLanguage) - catch { // !!! DEBUG - case scala.util.control.NonFatal(ex) => - report.echo(s"cannot take signature of $info") - throw ex - } - case _ => Signature.NotAMethod - } - - def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = - if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this - else newLikeThis(symbol, info, pre, isRefinedMethod) - - def mapInfo(f: Type => Type)(using Context): SingleDenotation = - derivedSingleDenotation(symbol, f(info)) - - inline def orElse(inline that: SingleDenotation): SingleDenotation = if (this.exists) this else that - - def altsWith(p: Symbol => Boolean): List[SingleDenotation] = - if (exists && p(symbol)) this :: Nil else Nil - - def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = - if (exists && p(symbol)) this else NoDenotation - - def hasAltWith(p: SingleDenotation => Boolean): Boolean = - exists && p(this) - - def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = - if (!symbol.exists || symbol.isAccessibleFrom(pre, superAccess)) this else NoDenotation - - def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): SingleDenotation = - val situated = if site == NoPrefix then this else asSeenFrom(site) - val sigMatches = sig.matchDegree(situated.signature) match - case FullMatch => - true - case MethodNotAMethodMatch => - // See comment in `matches` - relaxed && !symbol.is(JavaDefined) - case ParamMatch => - relaxed - case noMatch => - false - if sigMatches && symbol.hasTargetName(targetName) then this else NoDenotation - - def matchesImportBound(bound: Type)(using Context): Boolean = - if bound.isRef(defn.NothingClass) then false - else if bound.isAny then true - else NoViewsAllowed.normalizedCompatible(info, bound, keepConstraint = false) - - // ------ Transformations ----------------------------------------- - - private var myValidFor: Period = Nowhere - - def validFor: Period = myValidFor - def validFor_=(p: Period): Unit = { - myValidFor = p - symbol.invalidateDenotCache() - } - - /** The next SingleDenotation in this run, with wrap-around from last to first. - * - * There may be several `SingleDenotation`s with different validity - * representing the same underlying definition at different phases. - * These are called a "flock". Flock members are generated by - * @See current. Flock members are connected in a ring - * with their `nextInRun` fields. - * - * There are the following invariants concerning flock members - * - * 1) validity periods are non-overlapping - * 2) the union of all validity periods is a contiguous - * interval. - */ - protected var nextInRun: SingleDenotation = this - - /** The version of this SingleDenotation that was valid in the first phase - * of this run. 
- */ - def initial: SingleDenotation = - if (validFor.firstPhaseId <= 1) this - else { - var current = nextInRun - while (current.validFor.code > this.myValidFor.code) current = current.nextInRun - current - } - - def history: List[SingleDenotation] = { - val b = new ListBuffer[SingleDenotation] - var current = initial - while ({ - b += (current) - current = current.nextInRun - current ne initial - }) - () - b.toList - } - - /** Invalidate all caches and fields that depend on base classes and their contents */ - def invalidateInheritedInfo(): Unit = () - - private def updateValidity()(using Context): this.type = { - assert( - ctx.runId >= validFor.runId - || ctx.settings.YtestPickler.value // mixing test pickler with debug printing can travel back in time - || ctx.mode.is(Mode.Printing) // no use to be picky when printing error messages - || symbol.isOneOf(ValidForeverFlags), - s"denotation $this invalid in run ${ctx.runId}. ValidFor: $validFor") - var d: SingleDenotation = this - while ({ - d.validFor = Period(ctx.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId) - d.invalidateInheritedInfo() - d = d.nextInRun - d ne this - }) - () - this - } - - /** Move validity period of this denotation to a new run. Throw a StaleSymbol error - * if denotation is no longer valid. - * However, StaleSymbol error is not thrown in the following situations: - * - * - If acceptStale returns true (e.g. because we are in the IDE), - * update the symbol to the new version if it exists, or return - * the old version otherwise. - * - If the symbol did not have a denotation that was defined at the current phase - * return a NoDenotation instead. - */ - private def bringForward()(using Context): SingleDenotation = { - this match { - case symd: SymDenotation => - if (stillValid(symd)) return updateValidity() - if acceptStale(symd) && symd.initial.validFor.firstPhaseId <= ctx.lastPhaseId then - // New run might have fewer phases than old, so symbol might no longer be - // visible at all. TabCompleteTests have examples where this happens. - return symd.currentSymbol.denot.orElse(symd).updateValidity() - case _ => - } - if (!symbol.exists) return updateValidity() - if (!coveredInterval.containsPhaseId(ctx.phaseId)) return NoDenotation - if (ctx.debug) traceInvalid(this) - staleSymbolError - } - - /** The next defined denotation (following `nextInRun`) or an arbitrary - * undefined denotation, if all denotations in a `nextinRun` cycle are - * undefined. - */ - private def nextDefined: SingleDenotation = { - var p1 = this - var p2 = nextInRun - while (p1.validFor == Nowhere && (p1 ne p2)) { - p1 = p1.nextInRun - p2 = p2.nextInRun.nextInRun - } - p1 - } - - /** Skip any denotations that have been removed by an installAfter or that - * are otherwise undefined. - */ - def skipRemoved(using Context): SingleDenotation = - if (myValidFor.code <= 0) nextDefined else this - - /** Produce a denotation that is valid for the given context. - * Usually called when !(validFor contains ctx.period) - * (even though this is not a precondition). - * If the runId of the context is the same as runId of this denotation, - * the right flock member is located, or, if it does not exist yet, - * created by invoking a transformer (@See Transformers). - * If the runId's differ, but this denotation is a SymDenotation - * and its toplevel owner class or module - * is still a member of its enclosing package, then the whole flock - * is brought forward to be valid in the new runId. 
Otherwise - * the symbol is stale, which constitutes an internal error. - */ - def current(using Context): SingleDenotation = - util.Stats.record("current") - val currentPeriod = ctx.period - val valid = myValidFor - - def assertNotPackage(d: SingleDenotation, transformer: DenotTransformer) = d match - case d: ClassDenotation => - assert(!d.is(Package), s"illegal transformation of package denotation by transformer $transformer") - case _ => - - def escapeToNext = nextDefined.ensuring(_.validFor != Nowhere) - - def toNewRun = - util.Stats.record("current.bringForward") - if exists then initial.bringForward().current else this - - def goForward = - var cur = this - // search for containing period as long as nextInRun increases. - var next = nextInRun - while next.validFor.code > valid.code && !(next.validFor contains currentPeriod) do - cur = next - next = next.nextInRun - if next.validFor.code > valid.code then - // in this case, next.validFor contains currentPeriod - cur = next - cur - else - //println(s"might need new denot for $cur, valid for ${cur.validFor} at $currentPeriod") - // not found, cur points to highest existing variant - val nextTransformerId = ctx.base.nextDenotTransformerId(cur.validFor.lastPhaseId) - if currentPeriod.lastPhaseId <= nextTransformerId then - cur.validFor = Period(currentPeriod.runId, cur.validFor.firstPhaseId, nextTransformerId) - else - var startPid = nextTransformerId + 1 - val transformer = ctx.base.denotTransformers(nextTransformerId) - //println(s"transforming $this with $transformer") - val savedPeriod = ctx.period - val mutCtx = ctx.asInstanceOf[FreshContext] - try - mutCtx.setPhase(transformer) - next = transformer.transform(cur) - // We temporarily update the context with the new phase instead of creating a - // new one. This is done for performance. We cut down on about 30% of context - // creations that way, and also avoid phase caches in contexts to get large. - // To work correctly, we need to demand that the context with the new phase - // is not retained in the result. - catch case ex: CyclicReference => - // println(s"error while transforming $this") - throw ex - finally - mutCtx.setPeriod(savedPeriod) - if next eq cur then - startPid = cur.validFor.firstPhaseId - else - assertNotPackage(next, transformer) - next.insertAfter(cur) - cur = next - cur.validFor = Period(currentPeriod.runId, startPid, transformer.lastPhaseId) - //printPeriods(cur) - //println(s"new denot: $cur, valid for ${cur.validFor}") - cur.current // multiple transformations could be required - end goForward - - def goBack: SingleDenotation = - // currentPeriod < end of valid; in this case a version must exist - // but to be defensive we check for infinite loop anyway - var cur = this - var cnt = 0 - while !(cur.validFor contains currentPeriod) do - //println(s"searching: $cur at $currentPeriod, valid for ${cur.validFor}") - cur = cur.nextInRun - // Note: One might be tempted to add a `prev` field to get to the new denotation - // more directly here. I tried that, but it degrades rather than improves - // performance: Test setup: Compile everything in dotc and immediate subdirectories - // 10 times. Best out of 10: 18154ms with `prev` field, 17777ms without. - cnt += 1 - if cnt > MaxPossiblePhaseId then - return atPhase(coveredInterval.firstPhaseId)(current) - cur - end goBack - - if valid.code <= 0 then - // can happen if we sit on a stale denotation which has been replaced - // wholesale by an installAfter; in this case, proceed to the next - // denotation and try again. 
- escapeToNext - else if valid.runId != currentPeriod.runId then - toNewRun - else if currentPeriod.code > valid.code then - goForward - else - goBack - end current - - private def demandOutsideDefinedMsg(using Context): String = - s"demanding denotation of $this at phase ${ctx.phase}(${ctx.phaseId}) outside defined interval: defined periods are${definedPeriodsString}" - - /** Install this denotation to be the result of the given denotation transformer. - * This is the implementation of the same-named method in SymDenotations. - * It's placed here because it needs access to private fields of SingleDenotation. - * @pre Can only be called in `phase.next`. - */ - protected def installAfter(phase: DenotTransformer)(using Context): Unit = { - val targetId = phase.next.id - if (ctx.phaseId != targetId) atPhase(phase.next)(installAfter(phase)) - else { - val current = symbol.current - // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}") - // printPeriods(current) - this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId) - if (current.validFor.firstPhaseId >= targetId) - current.replaceWith(this) - else { - current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1) - insertAfter(current) - } - } - // printPeriods(this) - } - - /** Apply a transformation `f` to all denotations in this group that start at or after - * given phase. Denotations are replaced while keeping the same validity periods. - */ - protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(using Context): Unit = { - var current = symbol.current - while (current.validFor.firstPhaseId < phase.id && (current.nextInRun.validFor.code > current.validFor.code)) - current = current.nextInRun - var hasNext = true - while ((current.validFor.firstPhaseId >= phase.id) && hasNext) { - val current1: SingleDenotation = f(current.asSymDenotation) - if (current1 ne current) { - current1.validFor = current.validFor - current.replaceWith(current1) - } - hasNext = current1.nextInRun.validFor.code > current1.validFor.code - current = current1.nextInRun - } - } - - /** Insert this denotation so that it follows `prev`. */ - private def insertAfter(prev: SingleDenotation) = { - this.nextInRun = prev.nextInRun - prev.nextInRun = this - } - - /** Insert this denotation instead of `old`. - * Also ensure that `old` refers with `nextInRun` to this denotation - * and set its `validFor` field to `Nowhere`. This is necessary so that - * references to the old denotation can be brought forward via `current` - * to a valid denotation. - * - * The code to achieve this is subtle in that it works correctly - * whether the replaced denotation is the only one in its cycle or not. - */ - private[dotc] def replaceWith(newd: SingleDenotation): Unit = { - var prev = this - while (prev.nextInRun ne this) prev = prev.nextInRun - // order of next two assignments is important! - prev.nextInRun = newd - newd.nextInRun = nextInRun - validFor = Nowhere - nextInRun = newd - } - - def staleSymbolError(using Context): Nothing = - inDetachedContext: - throw new StaleSymbol(staleSymbolMsg) - - def staleSymbolMsg(using Context): String = { - def ownerMsg = this match { - case denot: SymDenotation => s"in ${denot.owner}" - case _ => "" - } - s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}" - } - - /** The period (interval of phases) for which there exists - * a valid denotation in this flock. 
- */ - def coveredInterval(using Context): Period = { - var cur = this - var cnt = 0 - var interval = validFor - while ({ - cur = cur.nextInRun - cnt += 1 - assert(cnt <= MaxPossiblePhaseId, demandOutsideDefinedMsg) - interval |= cur.validFor - cur ne this - }) - () - interval - } - - /** Show declaration string; useful for showing declarations - * as seen from subclasses. - */ - def showDcl(using Context): String = ctx.printer.dclText(this).show - - override def toString: String = - if (symbol == NoSymbol) symbol.toString - else s"<SingleDenotation of type $infoOrCompleter>" - - def definedPeriodsString: String = { - var sb = new StringBuilder() - var cur = this - var cnt = 0 - while ({ - sb.append(" " + cur.validFor) - cur = cur.nextInRun - cnt += 1 - if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this } - cur ne this - }) - () - sb.toString - } - - // ------ PreDenotation ops ---------------------------------------------- - - final def first: SingleDenotation = this - final def last: SingleDenotation = this - - def matches(other: SingleDenotation)(using Context): Boolean = - symbol.hasTargetName(other.symbol.targetName) - && matchesLoosely(other) - - /** `matches` without a target name check. - * - * For definitions coming from different languages, we pick a common - * language to compute their signatures. This allows us for example to - * override some Java definitions from Scala even if they have a different - * erasure (see i8615b, i9109b); Erasure takes care of adding any necessary - * bridge to make this work at runtime. - */ - def matchesLoosely(other: SingleDenotation, alwaysCompareTypes: Boolean = false)(using Context): Boolean = - if isType then true - else - val thisLanguage = SourceLanguage(symbol) - val otherLanguage = SourceLanguage(other.symbol) - val commonLanguage = SourceLanguage.commonLanguage(thisLanguage, otherLanguage) - val sig = signature(commonLanguage) - val otherSig = other.signature(commonLanguage) - sig.matchDegree(otherSig) match - case FullMatch => - !alwaysCompareTypes || info.matches(other.info) - case MethodNotAMethodMatch => - !ctx.erasedTypes && { - // A Scala zero-parameter method and a Scala non-method always match. - if !thisLanguage.isJava && !otherLanguage.isJava then - true - // Java allows defining both a field and a zero-parameter method with the same name, - // so they must not match. - else if thisLanguage.isJava && otherLanguage.isJava then - false - // A Java field never matches a Scala method.
- else if thisLanguage.isJava then - symbol.is(Method) - else // otherLanguage.isJava - other.symbol.is(Method) - } - case ParamMatch => - // The signatures do not tell us enough to be sure about matching - !ctx.erasedTypes && info.matches(other.info) - case noMatch => - false - - def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): SingleDenotation = - if hasUniqueSym && prevDenots.containsSym(symbol) then NoDenotation - else if isType then filterDisjoint(ownDenots).asSeenFrom(pre) - else asSeenFrom(pre).filterDisjoint(ownDenots) - - def filterWithPredicate(p: SingleDenotation => Boolean): SingleDenotation = - if (p(this)) this else NoDenotation - def filterDisjoint(denots: PreDenotation)(using Context): SingleDenotation = - if (denots.exists && denots.matches(this)) NoDenotation else this - def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): SingleDenotation = - val realExcluded = if ctx.isAfterTyper then excluded else excluded | Invisible - def symd: SymDenotation = this match - case symd: SymDenotation => symd - case _ => symbol.denot - if !required.isEmpty && !symd.isAllOf(required) - || symd.isOneOf(realExcluded) then NoDenotation - else this - def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = f(this) - - type AsSeenFromResult = SingleDenotation - - protected def computeAsSeenFrom(pre: Type)(using Context): SingleDenotation = { - val symbol = this.symbol - val owner = this match { - case thisd: SymDenotation => thisd.owner - case _ => if (symbol.exists) symbol.owner else NoSymbol - } - - /** The derived denotation with the given `info` transformed with `asSeenFrom`. - * - * As a performance hack, we might reuse an existing SymDenotation, - * instead of creating a new denotation with a given `prefix`, - * see `Config.reuseSymDenotations`. - */ - def derived(info: Type) = - /** Do we need to return a denotation with a prefix set? */ - def needsPrefix = - // For opaque types, the prefix is used in `ElimOpaques#transform`, - // without this i7159.scala would fail when compiled from tasty. - symbol.is(Opaque) - - val derivedInfo = info.asSeenFrom(pre, owner) - if Config.reuseSymDenotations && this.isInstanceOf[SymDenotation] - && (derivedInfo eq info) && !needsPrefix then - this - else - derivedSingleDenotation(symbol, derivedInfo, pre) - end derived - - // It could happen that we see the symbol with prefix `this` as a member of a different class - // through a self type and that it then has a different info. In this case we have to go - // through the asSeenFrom to switch the type back. Test case is pos/i9352.scala. - def hasOriginalInfo: Boolean = this match - case sd: SymDenotation => true - case _ => info eq symbol.info - - def ownerIsPrefix = pre match - case pre: ThisType => pre.sameThis(owner.thisType) - case _ => false - - if !owner.membersNeedAsSeenFrom(pre) && (!ownerIsPrefix || hasOriginalInfo) - || symbol.is(NonMember) - then this - else if symbol.isAllOf(ClassTypeParam) then - val arg = symbol.typeRef.argForParam(pre, widenAbstract = true) - if arg.exists - then derivedSingleDenotation(symbol, normalizedArgBounds(arg.bounds), pre) - else derived(symbol.info) - else derived(symbol.info) - } - - /** The argument bounds, possibly intersected with the parameter's info TypeBounds, - * if the latter is not F-bounded and does not refer to other type parameters - * of the same class, and the intersection is provably nonempty.
- */ - private def normalizedArgBounds(argBounds: TypeBounds)(using Context): TypeBounds = - if symbol.isCompleted && !hasBoundsDependingOnParamsOf(symbol.owner) then - val combined @ TypeBounds(lo, hi) = symbol.info.bounds & argBounds - if (lo frozen_<:< hi) then combined - else argBounds - else argBounds - - private def hasBoundsDependingOnParamsOf(cls: Symbol)(using Context): Boolean = - val acc = new TypeAccumulator[Boolean]: - def apply(x: Boolean, tp: Type): Boolean = tp match - case _: LazyRef => true - case tp: TypeRef - if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls => true - case _ => foldOver(x, tp) - acc(false, symbol.info) - } - - abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) extends SingleDenotation(symbol, initInfo) { - def infoOrCompleter: Type = initInfo - def isType: Boolean = infoOrCompleter.isInstanceOf[TypeType] - } - - class UniqueRefDenotation( - symbol: Symbol, - initInfo: Type, - initValidFor: Period, - prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix) { - validFor = initValidFor - override def hasUniqueSym: Boolean = true - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = - if isRefinedMethod then - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) - else - new UniqueRefDenotation(s, i, validFor, pre) - } - - class JointRefDenotation( - symbol: Symbol, - initInfo: Type, - initValidFor: Period, - prefix: Type, - override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix) { - validFor = initValidFor - override def hasUniqueSym: Boolean = false - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) - } - - class ErrorDenotation(using DetachedContext) extends NonSymSingleDenotation(NoSymbol, NoType, NoType) { - override def exists: Boolean = false - override def hasUniqueSym: Boolean = false - validFor = Period.allInRun(ctx.runId) - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = - this - } - - /** An error denotation that provides more info about the missing reference. - * Produced by staticRef, consumed by requiredSymbol. - */ - case class MissingRef(val owner: SingleDenotation, name: Name)(using DetachedContext) extends ErrorDenotation { - val ex: Exception = new Exception // DEBUG - } - - /** An error denotation that provides more info about alternatives - * that were found but that do not qualify. - * Produced by staticRef, consumed by requiredSymbol. 
- */ - case class NoQualifyingRef(alts: List[SingleDenotation])(using DetachedContext) extends ErrorDenotation - - /** A double definition - */ - def isDoubleDef(sym1: Symbol, sym2: Symbol)(using Context): Boolean = - (sym1.exists && sym2.exists && - (sym1 `ne` sym2) && (sym1.effectiveOwner `eq` sym2.effectiveOwner) && - !sym1.is(Bridge) && !sym2.is(Bridge)) - - // --- Overloaded denotations and predenotations ------------------------------------------------- - - trait MultiPreDenotation extends PreDenotation { - def denot1: PreDenotation - def denot2: PreDenotation - - assert(denot1.exists && denot2.exists, s"Union of non-existing denotations ($denot1) and ($denot2)") - def first: Denotation = denot1.first - def last: Denotation = denot2.last - def matches(other: SingleDenotation)(using Context): Boolean = - denot1.matches(other) || denot2.matches(other) - def mapInherited(owndenot: PreDenotation, prevdenot: PreDenotation, pre: Type)(using Context): PreDenotation = - derivedUnion(denot1.mapInherited(owndenot, prevdenot, pre), denot2.mapInherited(owndenot, prevdenot, pre)) - def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation = - derivedUnion(denot1 filterWithPredicate p, denot2 filterWithPredicate p) - def filterDisjoint(denot: PreDenotation)(using Context): PreDenotation = - derivedUnion(denot1 filterDisjoint denot, denot2 filterDisjoint denot) - def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation = - derivedUnion(denot1.filterWithFlags(required, excluded), denot2.filterWithFlags(required, excluded)) - def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = - g(denot1.aggregate(f, g), denot2.aggregate(f, g)) - protected def derivedUnion(denot1: PreDenotation, denot2: PreDenotation) = - if ((denot1 eq this.denot1) && (denot2 eq this.denot2)) this - else denot1 union denot2 - } - - final case class DenotUnion(denot1: PreDenotation, denot2: PreDenotation) extends MultiPreDenotation { - def exists: Boolean = true - def toDenot(pre: Type)(using Context): Denotation = - denot1.toDenot(pre).meet(denot2.toDenot(pre), pre) - def containsSym(sym: Symbol): Boolean = - (denot1 containsSym sym) || (denot2 containsSym sym) - type AsSeenFromResult = PreDenotation - def computeAsSeenFrom(pre: Type)(using Context): PreDenotation = - derivedUnion(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) - } - - /** An overloaded denotation consisting of the alternatives of both given denotations. 
- */ - case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType) with MultiPreDenotation { - final def infoOrCompleter: Type = multiHasNot("info") - final def validFor: Period = denot1.validFor & denot2.validFor - final def isType: Boolean = false - final def hasUniqueSym: Boolean = false - final def name(using Context): Name = denot1.name - final def signature(using Context): Signature = Signature.OverloadedSignature - def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): Denotation = - if (sig eq Signature.OverloadedSignature) this - else derivedUnionDenotation( - denot1.atSignature(sig, targetName, site, relaxed), - denot2.atSignature(sig, targetName, site, relaxed)) - def current(using Context): Denotation = - derivedUnionDenotation(denot1.current, denot2.current) - def altsWith(p: Symbol => Boolean): List[SingleDenotation] = - denot1.altsWith(p) ++ denot2.altsWith(p) - def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = { - val sd1 = denot1.suchThat(p) - val sd2 = denot2.suchThat(p) - if sd1.exists then - if sd2.exists then - throw TypeError( - em"""Failure to disambiguate overloaded reference with - | ${denot1.symbol.showLocated}: ${denot1.info} and - | ${denot2.symbol.showLocated}: ${denot2.info}""") - else sd1 - else sd2 - } - override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation = - derivedUnionDenotation(denot1.filterWithPredicate(p), denot2.filterWithPredicate(p)) - def hasAltWith(p: SingleDenotation => Boolean): Boolean = - denot1.hasAltWith(p) || denot2.hasAltWith(p) - def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = { - val d1 = denot1 accessibleFrom (pre, superAccess) - val d2 = denot2 accessibleFrom (pre, superAccess) - if (!d1.exists) d2 - else if (!d2.exists) d1 - else derivedUnionDenotation(d1, d2) - } - def mapInfo(f: Type => Type)(using Context): Denotation = - derivedUnionDenotation(denot1.mapInfo(f), denot2.mapInfo(f)) - def derivedUnionDenotation(d1: Denotation, d2: Denotation): Denotation = - if ((d1 eq denot1) && (d2 eq denot2)) this - else if (!d1.exists) d2 - else if (!d2.exists) d1 - else MultiDenotation(d1, d2) - type AsSeenFromResult = Denotation - def computeAsSeenFrom(pre: Type)(using Context): Denotation = - derivedUnionDenotation(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) - override def toString: String = alternatives.mkString(" ") - - private def multiHasNot(op: String): Nothing = - throw new UnsupportedOperationException( - s"multi-denotation with alternatives $alternatives does not implement operation $op") - } - - /** The current denotation of the static reference given by path, - * or a MissingRef or NoQualifyingRef instance, if it does not exist. 
- * if generateStubs is set, generates stubs for missing top-level symbols - */ - def staticRef(path: Name, generateStubs: Boolean = true, isPackage: Boolean = false)(using Context): Denotation = { - def select(prefix: Denotation, selector: Name): Denotation = { - val owner = prefix.disambiguate(_.info.isParameterless) - def isPackageFromCoreLibMissing: Boolean = - // if the scala package is missing, the stdlib must be missing - owner.symbol == defn.RootClass && selector == nme.scala - if (owner.exists) { - val result = if (isPackage) owner.info.decl(selector) else owner.info.member(selector) - if (result.exists) result - else if (isPackageFromCoreLibMissing) throw new MissingCoreLibraryException(selector.toString) - else { - val alt = - if (generateStubs) missingHook(owner.symbol.moduleClass, selector) - else NoSymbol - if (alt.exists) alt.denot - else MissingRef(owner, selector) - } - } - else owner - } - def recur( - path: Name, - wrap: TermName -> Name = identity[Name] // !cc! default argument needs to be instantiated, error if [Name] is dropped - ): Denotation = path match { - case path: TypeName => - recur(path.toTermName, n => n.toTypeName) - case ModuleClassName(underlying) => - recur(underlying, n => wrap(ModuleClassName(n))) - case QualifiedName(prefix, selector) => - select(recur(prefix), wrap(selector)) - case qn @ AnyQualifiedName(prefix, _) => - recur(prefix, n => wrap(qn.info.mkString(n).toTermName)) - case path: SimpleName => - def recurSimple(len: Int, wrap: TermName -> Name): Denotation = { - val point = path.lastIndexOf('.', len - 1) - val selector = wrap(path.slice(point + 1, len).asTermName) - val prefix = - if (point > 0) recurSimple(point, identity) - else if (selector.isTermName) defn.RootClass.denot - else defn.EmptyPackageClass.denot - select(prefix, selector) - } - recurSimple(path.length, wrap) - } - - val run = ctx.run - if run == null then recur(path) - else run.staticRefs.getOrElseUpdate(path, recur(path)) - } - - /** If we are looking for a non-existing term name in a package, - * assume it is a package for which we do not have a directory and - * enter it. - */ - def missingHook(owner: Symbol, name: Name)(using Context): Symbol = - if (owner.is(Package) && name.isTermName) - newCompletePackageSymbol(owner, name.asTermName).entered - else - NoSymbol - - /** An exception for accessing symbols that are no longer valid in current run */ - class StaleSymbol(msg: -> String) extends Exception { - util.Stats.record("stale symbol") - override def getMessage(): String = msg - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Flags.scala b/tests/pos-with-compiler-cc/dotc/core/Flags.scala deleted file mode 100644 index f23dce020f10..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Flags.scala +++ /dev/null @@ -1,612 +0,0 @@ -package dotty.tools.dotc -package core - -object Flags { - - object opaques { - - /** A FlagSet represents a set of flags. Flags are encoded as follows: - * The first two bits indicate whether a flag set applies to terms, - * to types, or to both. Bits 2..63 are available for properties - * and can be doubly used for terms and types. 
- */ - opaque type FlagSet = Long - def FlagSet(bits: Long): FlagSet = bits - def toBits(fs: FlagSet): Long = fs - - /** A flag set consisting of a single flag */ - opaque type Flag <: FlagSet = Long - private[Flags] def Flag(bits: Long): Flag = bits - } - export opaques.FlagSet - - type Flag = opaques.Flag - - extension (x: FlagSet) { - - inline def bits: Long = opaques.toBits(x) - - /** The union of the given flag sets. - * Combining two FlagSets with `|` will give a FlagSet - * that has the intersection of the applicability to terms/types - * of the two flag sets. It is checked that the intersection is not empty. - */ - def | (y: FlagSet): FlagSet = - if (x.bits == 0) y - else if (y.bits == 0) x - else { - val tbits = x.bits & y.bits & KINDFLAGS - if (tbits == 0) - assert(false, s"illegal flagset combination: ${x.flagsString} and ${y.flagsString}") - FlagSet(tbits | ((x.bits | y.bits) & ~KINDFLAGS)) - } - - /** The intersection of the given flag sets */ - def & (y: FlagSet): FlagSet = FlagSet(x.bits & y.bits) - - /** The intersection of a flag set with the complement of another flag set */ - def &~ (y: FlagSet): FlagSet = { - val tbits = x.bits & KINDFLAGS - if ((tbits & y.bits) == 0) x - else FlagSet(tbits | ((x.bits & ~y.bits) & ~KINDFLAGS)) - } - - def ^ (y: FlagSet) = - FlagSet((x.bits | y.bits) & KINDFLAGS | (x.bits ^ y.bits) & ~KINDFLAGS) - - /** Does the given flag set contain the given flag? - * This means that both the kind flags and the carrier bits have non-empty intersection. - */ - def is (flag: Flag): Boolean = { - val fs = x.bits & flag.bits - (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 - } - - /** Does the given flag set contain the given flag - * and at the same time contain none of the flags in the `butNot` set? - */ - def is (flag: Flag, butNot: FlagSet): Boolean = x.is(flag) && !x.isOneOf(butNot) - - /** Does the given flag set have a non-empty intersection with another flag set? - * This means that both the kind flags and the carrier bits have non-empty intersection. - */ - def isOneOf (flags: FlagSet): Boolean = { - val fs = x.bits & flags.bits - (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 - } - - /** Does the given flag set have a non-empty intersection with another flag set, - * and at the same time contain none of the flags in the `butNot` set? - */ - def isOneOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isOneOf(flags) && !x.isOneOf(butNot) - - /** Does a given flag set have all of the flags of another flag set? - * Pre: The intersection of the term/type flags of both sets must be non-empty. - */ - def isAllOf (flags: FlagSet): Boolean = { - val fs = x.bits & flags.bits - ((fs & KINDFLAGS) != 0 || flags.bits == 0) && - (fs >>> TYPESHIFT) == (flags.bits >>> TYPESHIFT) - } - - /** Does a given flag set have all of the flags in another flag set - * and at the same time contain none of the flags in the `butNot` set? - * Pre: The intersection of the term/type flags of both sets must be non-empty. - */ - def isAllOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isAllOf(flags) && !x.isOneOf(butNot) - - def isEmpty: Boolean = (x.bits & ~KINDFLAGS) == 0 - - /** Is a given flag set a subset of another flag set? */ - def <= (y: FlagSet): Boolean = (x.bits & y.bits) == x.bits - - /** Does the given flag set apply to terms? */ - def isTermFlags: Boolean = (x.bits & TERMS) != 0 - - /** Does the given flag set apply to types?
*/ - def isTypeFlags: Boolean = (x.bits & TYPES) != 0 - - /** The given flag set with all flags transposed to be type flags */ - def toTypeFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TYPES) - - /** The given flag set with all flags transposed to be term flags */ - def toTermFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TERMS) - - /** The given flag set with all flags transposed to be common flags */ - def toCommonFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits | KINDFLAGS) - - /** The number of non-kind flags in the given flag set */ - def numFlags: Int = java.lang.Long.bitCount(x.bits & ~KINDFLAGS) - - /** The lowest non-kind bit set in the given flag set */ - def firstBit: Int = java.lang.Long.numberOfTrailingZeros(x.bits & ~KINDFLAGS) - - /** The list of non-empty names of flags with given index idx that are set in the given flag set */ - private def flagString(idx: Int): List[String] = - if ((x.bits & (1L << idx)) == 0) Nil - else { - def halfString(kind: Int) = - if ((x.bits & (1L << kind)) != 0) flagName(idx)(kind) else "" - val termFS = halfString(TERMindex) - val typeFS = halfString(TYPEindex) - val strs = termFS :: (if (termFS == typeFS) Nil else typeFS :: Nil) - strs filter (_.nonEmpty) - } - - /** The list of non-empty names of flags that are set in the given flag set */ - def flagStrings(privateWithin: String = ""): Seq[String] = { - var rawStrings = (2 to MaxFlag).flatMap(x.flagString(_)) // DOTTY problem: cannot drop with (_) - if (!privateWithin.isEmpty && !x.is(Protected)) - rawStrings = rawStrings :+ "private" - val scopeStr = if (x.is(Local)) "this" else privateWithin - if (scopeStr != "") - rawStrings.filter(_ != "").map { - case "private" => s"private[$scopeStr]" - case "protected" => s"protected[$scopeStr]" - case str => str - } - else rawStrings - } - - /** The string representation of the given flag set */ - def flagsString: String = x.flagStrings("").mkString(" ") - } - - // Temporary while extension names are in flux - def or(x1: FlagSet, x2: FlagSet) = x1 | x2 - def and(x1: FlagSet, x2: FlagSet) = x1 & x2 - - def termFlagSet(x: Long) = FlagSet(TERMS | x) - - private inline val TYPESHIFT = 2 - private inline val TERMindex = 0 - private inline val TYPEindex = 1 - private inline val TERMS = 1 << TERMindex - private inline val TYPES = 1 << TYPEindex - private inline val KINDFLAGS = TERMS | TYPES - - private inline val FirstFlag = 2 - private inline val FirstNotPickledFlag = 48 - private inline val MaxFlag = 63 - - private val flagName = Array.fill(64, 2)("") - - private def isDefinedAsFlag(idx: Int) = flagName(idx).exists(_.nonEmpty) - - /** The flag set containing all defined flags of either kind whose bits - * lie in the given range - */ - private def flagRange(start: Int, end: Int) = - FlagSet((start until end).foldLeft(KINDFLAGS.toLong) ((bits, idx) => - if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits)) - - /** The union of all flags in given flag set */ - def union(flagss: FlagSet*): FlagSet = { - var flag = EmptyFlags - for (f <- flagss) - flag |= f - flag - } - - def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags): _*) - - /** The empty flag set */ - val EmptyFlags: FlagSet = FlagSet(0) - - /** The undefined flag set */ - val UndefinedFlags: FlagSet = FlagSet(~KINDFLAGS) - - /** Three flags with given index between 2 and 63. - * The first applies to both terms and types. the second is a term flag, and - * the third is a type flag. 
Installs given name(s) as the name(s) of the flags. - * @param name The name to be used for the term flag - * @param typeName The name to be used for the type flag, if it is different from `name`. - */ - private def newFlags(index: Int, name: String, typeName: String = ""): (Flag, Flag, Flag) = { - flagName(index)(TERMindex) = name - flagName(index)(TYPEindex) = if (typeName.isEmpty) name else typeName - val bits = 1L << index - (opaques.Flag(KINDFLAGS | bits), opaques.Flag(TERMS | bits), opaques.Flag(TYPES | bits)) - } - - // ----------------- Available flags ----------------------------------------------------- - - /** Labeled with `private` modifier */ - val (Private @ _, PrivateTerm @ _, PrivateType @ _) = newFlags(2, "private") - - /** Labeled with `protected` modifier */ - val (Protected @ _, _, _) = newFlags(3, "protected") - - /** Labeled with `override` modifier */ - val (Override @ _, _, _) = newFlags(4, "override") - - /** A declared, but not defined member */ - val (Deferred @ _, DeferredTerm @ _, DeferredType @ _) = newFlags(5, "") - - /** Labeled with `final` modifier */ - val (Final @ _, _, _) = newFlags(6, "final") - - /** A method symbol / a super trait */ - val (_, Method @ _, _) = newFlags(7, "") - - /** A (term or type) parameter to a class or method */ - val (Param @ _, TermParam @ _, TypeParam @ _) = newFlags(8, "") - - /** Labeled with `implicit` modifier (implicit value) */ - val (Implicit @ _, ImplicitVal @ _, _) = newFlags(9, "implicit") - - /** Labeled with `lazy` (a lazy val) / a trait */ - val (LazyOrTrait @ _, Lazy @ _, Trait @ _) = newFlags(10, "lazy", "") - - /** A value or variable accessor (getter or setter) */ - val (AccessorOrSealed @ _, Accessor @ _, Sealed @ _) = newFlags(11, "", "sealed") - - /** A mutable var, an open class */ - val (MutableOrOpen @ __, Mutable @ _, Open @ _) = newFlags(12, "mutable", "open") - - /** Symbol is local to current class (i.e. private[this] or protected[this] - * pre: Private or Protected are also set - */ - val (Local @ _, _, _) = newFlags(13, "") - - /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not), - * or an accessor of such a field. - */ - val (_, ParamAccessor @ _, _) = newFlags(14, "") - - /** A value or class implementing a module */ - val (Module @ _, ModuleVal @ _, ModuleClass @ _) = newFlags(15, "module") - - /** A value or class representing a package */ - val (Package @ _, PackageVal @ _, PackageClass @ _) = newFlags(16, "") - - /** A case class or its companion object - * Note: Case is also used to indicate that a symbol is bound by a pattern. - */ - val (Case @ _, CaseVal @ _, CaseClass @ _) = newFlags(17, "case") - - /** A compiler-generated symbol, which is visible for type-checking - * (compare with artifact) - */ - val (Synthetic @ _, _, _) = newFlags(18, "") - - /** Labelled with `inline` modifier */ - val (Inline @ _, _, _) = newFlags(19, "inline") - - /** An outer accessor / a covariant type variable */ - val (OuterOrCovariant @ _, OuterAccessor @ _, Covariant @ _) = newFlags(20, "", "") - - /** The label of a labeled block / a contravariant type variable */ - val (LabelOrContravariant @ _, Label @ _, Contravariant @ _) = newFlags(21, "